Bug fix (#2380)
* Change request.infer to use param_names instead of request._inputs_list because it fixes the bug * remove xfail mark * remove xfail_issue_35893 from tests/__init__.py * Remove xfail_issue_35893. Add OnnxBackendNodeModelTest.test_convtranspose_3d_cpu to xfail_issue_38091 * Remove OnnxBackendNodeModelTest.test_convtranspose_3d_cpu from xfail_issue_38091 in test_backend.py
This commit is contained in:
parent
30eeb1a5a0
commit
ac1a1e3e9f
@ -45,7 +45,6 @@ xfail_issue_34314 = xfail_test(reason="RuntimeError: RNNCell operation has a for
|
||||
xfail_issue_34323 = xfail_test(reason="RuntimeError: data [value] doesn't exist")
|
||||
xfail_issue_34327 = xfail_test(reason="RuntimeError: '<value>' layer has different "
|
||||
"IN and OUT channels number")
|
||||
xfail_issue_35893 = xfail_test(reason="ValueError: could not broadcast input array")
|
||||
xfail_issue_35911 = xfail_test(reason="Assertion error: Pad model mismatch error")
|
||||
xfail_issue_35912 = xfail_test(reason="RuntimeError: Error of validate layer: B with type: "
|
||||
"Pad. Cannot parse parameter pads_end from IR for layer B. "
|
||||
|
@ -91,11 +91,12 @@ class Computation(object):
|
||||
input_values = [np.array(input_value) for input_value in input_values]
|
||||
input_shapes = [get_shape(input_value) for input_value in input_values]
|
||||
|
||||
param_names = [param.friendly_name for param in self.parameters]
|
||||
|
||||
if self.network_cache.get(str(input_shapes)) is None:
|
||||
capsule = Function.to_capsule(self.function)
|
||||
cnn_network = IENetwork(capsule)
|
||||
if self.function.is_dynamic():
|
||||
param_names = [param.friendly_name for param in self.parameters]
|
||||
cnn_network.reshape(dict(zip(param_names, input_shapes)))
|
||||
self.network_cache[str(input_shapes)] = cnn_network
|
||||
else:
|
||||
@ -119,6 +120,5 @@ class Computation(object):
|
||||
)
|
||||
|
||||
request = executable_network.requests[0]
|
||||
|
||||
request.infer(dict(zip(request._inputs_list, input_values)))
|
||||
request.infer(dict(zip(param_names, input_values)))
|
||||
return [blob.buffer for blob in request.output_blobs.values()]
|
||||
|
@ -33,7 +33,6 @@ from tests import (BACKEND_NAME,
|
||||
xfail_issue_38085,
|
||||
xfail_issue_38086,
|
||||
xfail_issue_38087,
|
||||
xfail_issue_35893,
|
||||
xfail_issue_35923,
|
||||
xfail_issue_35914,
|
||||
xfail_issue_36483,
|
||||
@ -212,24 +211,6 @@ tests_expected_to_fail = [
|
||||
"OnnxBackendNodeModelTest.test_quantizelinear_cpu"),
|
||||
(xfail_issue_38087,
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_1d_cpu"),
|
||||
(xfail_issue_35893,
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_3d_cpu",
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_cpu",
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_dilations_cpu",
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_kernel_shape_cpu",
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_output_shape_cpu",
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_pad_cpu",
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_pads_cpu",
|
||||
"OnnxBackendNodeModelTest.test_convtranspose_with_kernel_cpu",
|
||||
"OnnxBackendNodeModelTest.test_instancenorm_example_cpu",
|
||||
"OnnxBackendNodeModelTest.test_basic_conv_without_padding_cpu",
|
||||
"OnnxBackendNodeModelTest.test_batchnorm_epsilon_cpu",
|
||||
"OnnxBackendNodeModelTest.test_batchnorm_example_cpu",
|
||||
"OnnxBackendNodeModelTest.test_conv_with_strides_and_asymmetric_padding_cpu",
|
||||
"OnnxBackendNodeModelTest.test_conv_with_strides_no_padding_cpu",
|
||||
"OnnxBackendNodeModelTest.test_conv_with_strides_padding_cpu",
|
||||
"OnnxBackendNodeModelTest.test_instancenorm_epsilon_cpu",
|
||||
"OnnxBackendNodeModelTest.test_basic_conv_with_padding_cpu"),
|
||||
(xfail_issue_35923,
|
||||
"OnnxBackendNodeModelTest.test_prelu_broadcast_cpu",
|
||||
"OnnxBackendNodeModelTest.test_prelu_example_cpu"),
|
||||
|
@ -18,7 +18,6 @@ import numpy as np
|
||||
import onnx
|
||||
|
||||
from tests.test_onnx.utils import run_node
|
||||
from tests import xfail_issue_35893
|
||||
|
||||
|
||||
def make_batch_norm_node(**node_attributes):
|
||||
@ -27,7 +26,6 @@ def make_batch_norm_node(**node_attributes):
|
||||
)
|
||||
|
||||
|
||||
@xfail_issue_35893
|
||||
def test_batch_norm_test_node():
|
||||
data = np.arange(48).reshape((1, 3, 4, 4)).astype(np.float32)
|
||||
scale = np.ones((3,)).astype(np.float32) # Gamma
|
||||
|
Loading…
Reference in New Issue
Block a user