Revise ONNX Mod operator (Add support for attribute fmod == 0) (#5173)
parent d2adcb354e
commit 94e9b632e8
@@ -26,7 +26,10 @@ namespace ngraph
                 {
                     autobroadcast_binop(
                         arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, [](T x, T y) -> T {
-                            return x - y * std::floor(x / y);
+                            // Cast to double is needed for integer input,
+                            // otherwise std::floor will act like std::trunc
+                            const double divisor = static_cast<double>(y);
+                            return x - y * std::floor(x / divisor);
                         });
                 }
             }
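The comment added above is the crux of the kernel fix: with integer operands, `x / y` truncates toward zero before `std::floor` ever runs, so the floor-mod formula silently degenerates into trunc-mod for negative inputs. A minimal standalone sketch (not part of the patch; plain C++ with hypothetical helper names) of the before/after behaviour:

#include <cmath>
#include <cstdint>
#include <iostream>

// Why the divisor is cast to double: for integral T, x / y truncates toward
// zero, so std::floor then has nothing left to round down.
template <typename T>
T floor_mod_no_cast(T x, T y)
{
    return x - y * std::floor(x / y); // integer division already truncated
}

template <typename T>
T floor_mod_with_cast(T x, T y)
{
    const double divisor = static_cast<double>(y);
    return x - y * std::floor(x / divisor); // floating-point division, then floor
}

int main()
{
    std::int32_t x = -7, y = 3;
    std::cout << floor_mod_no_cast(x, y) << "\n";   // prints -1 (trunc-mod result)
    std::cout << floor_mod_with_cast(x, y) << "\n"; // prints 2  (floor-mod result)
}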
@@ -25,10 +25,25 @@ namespace ngraph
                     Output<ngraph::Node> divisor{node.get_ng_inputs().at(1)};

                     std::int64_t fmod = node.get_attribute_value<std::int64_t>("fmod", 0);
-                    CHECK_VALID_NODE(
-                        node, fmod == 1, "Only 'fmod=1' mode is supported for mod operator.");
-
-                    return {std::make_shared<default_opset::Mod>(dividend, divisor)};
+                    OutputVector output;
+                    if (fmod == 1)
+                    {
+                        output = {std::make_shared<default_opset::Mod>(dividend, divisor)};
+                    }
+                    else if (fmod == 0)
+                    {
+                        NGRAPH_CHECK(dividend.get_element_type().is_integral() &&
+                                         divisor.get_element_type().is_integral(),
+                                     "If the input type is floating point, then `fmod` attribute "
+                                     "must be set to 1.");
+                        output = {std::make_shared<default_opset::FloorMod>(dividend, divisor)};
+                    }
+                    else
+                    {
+                        throw ngraph_error(
+                            "Unsupported value of 'fmod' attribute (should be: 0 or 1)");
+                    }
+                    return output;
                 }

            } // namespace set_1
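For context on why the importer now branches: in the ONNX specification, Mod with fmod=0 is defined for integer inputs only and the result takes the sign of the divisor (Python-style %), while fmod=1 behaves like C's fmod and the result takes the sign of the dividend, which is also the only mode valid for floating-point inputs. Hence fmod=0 maps to default_opset::FloorMod, fmod=1 maps to default_opset::Mod, and fmod=0 with float inputs is rejected. A standalone sketch (hypothetical helper names, not nGraph code) contrasting the two conventions on a few of the pairs used in the mod_sign_fmod test below:

#include <cmath>
#include <iostream>

//   fmod == 1 -> C-style remainder, sign follows the dividend (default_opset::Mod)
//   fmod == 0 -> floor modulo, sign follows the divisor (default_opset::FloorMod)
int c_style_mod(int x, int y)
{
    return x - y * static_cast<int>(std::trunc(static_cast<double>(x) / y));
}
int floor_mod(int x, int y)
{
    return x - y * static_cast<int>(std::floor(static_cast<double>(x) / y));
}

int main()
{
    std::cout << c_style_mod(-8, 22) << " " << floor_mod(-8, 22) << "\n"; // -8 vs 14
    std::cout << c_style_mod(3, -13) << " " << floor_mod(3, -13) << "\n"; //  3 vs -10
    std::cout << c_style_mod(9, -3)  << " " << floor_mod(9, -3)  << "\n"; //  0 vs 0
}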
@@ -81,9 +81,6 @@ xfail_issue_38706 = xfail_test(reason="RuntimeError: output_3.0 has zero dimensi
 xfail_issue_38708 = xfail_test(reason="RuntimeError: While validating ONNX node '<Node(Slice): y>': "
                                       "Axes input must be constant")
 xfail_issue_38710 = xfail_test(reason="RuntimeError: roi has zero dimension which is not allowed")
-xfail_issue_38712 = xfail_test(reason="RuntimeError: Check '(fmod == 1) "
-                                      "While validating ONNX node '<Node(Mod): z>': "
-                                      "Only 'fmod=1' mode is supported for mod operator.")
 xfail_issue_38713 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:"
                                       "ai.onnx.preview.training.Momentum")
 xfail_issue_43742 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:"
@@ -26,7 +26,6 @@ from tests import (BACKEND_NAME,
                    xfail_issue_38701,
                    xfail_issue_38706,
                    xfail_issue_38708,
-                   xfail_issue_38712,
                    xfail_issue_38713,
                    xfail_issue_38722,
                    xfail_issue_38723,
@@ -273,16 +272,6 @@ tests_expected_to_fail = [
      "OnnxBackendNodeModelTest.test_resize_downsample_sizes_nearest_tf_half_pixel_for_nn_cpu"),
     (xfail_issue_33581,
      "OnnxBackendNodeModelTest.test_gather_elements_negative_indices_cpu"),
-    (xfail_issue_38712,
-     "OnnxBackendNodeModelTest.test_mod_mixed_sign_int16_cpu",
-     "OnnxBackendNodeModelTest.test_mod_uint8_cpu",
-     "OnnxBackendNodeModelTest.test_mod_uint64_cpu",
-     "OnnxBackendNodeModelTest.test_mod_uint32_cpu",
-     "OnnxBackendNodeModelTest.test_mod_uint16_cpu",
-     "OnnxBackendNodeModelTest.test_mod_mixed_sign_int8_cpu",
-     "OnnxBackendNodeModelTest.test_mod_mixed_sign_int64_cpu",
-     "OnnxBackendNodeModelTest.test_mod_broadcast_cpu",
-     "OnnxBackendNodeModelTest.test_mod_mixed_sign_int32_cpu"),
     (xfail_issue_38713,
      "OnnxBackendNodeModelTest.test_momentum_cpu",
      "OnnxBackendNodeModelTest.test_nesterov_momentum_cpu",

ngraph/test/models/onnx/mod_incorrect_fmod.prototxt (new file, 58 lines)
@@ -0,0 +1,58 @@
+ir_version: 5
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "Y"
+    op_type: "Mod"
+    attribute {
+      name: "fmod"
+      i: 2
+      type: INT
+    }
+  }
+  name: "test_mod"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 10
+}

@@ -6,18 +6,13 @@ graph {
     input: "B"
     output: "Y"
     op_type: "Mod"
-    attribute {
-      name: "fmod"
-      i: 1
-      type: INT
-    }
   }
   name: "test_mod"
   input {
     name: "A"
     type {
       tensor_type {
-        elem_type: 7
+        elem_type: 6
         shape {
           dim {
             dim_value: 6
@@ -26,11 +21,11 @@ graph {
       }
     }
   }
   input {
     name: "B"
     type {
       tensor_type {
-        elem_type: 7
+        elem_type: 6
         shape {
           dim {
             dim_value: 6
@@ -43,7 +38,7 @@ graph {
     name: "Y"
     type {
       tensor_type {
-        elem_type: 7
+        elem_type: 6
         shape {
           dim {
             dim_value: 6

ngraph/test/models/onnx/mod_sign_broadcast.prototxt (new file, 53 lines)
@@ -0,0 +1,53 @@
+ir_version: 5
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "Y"
+    op_type: "Mod"
+  }
+  name: "test_mod"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 1
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 10
+}

ngraph/test/models/onnx/mod_sign_f32.prototxt (new file, 53 lines)
@@ -0,0 +1,53 @@
+ir_version: 5
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "Y"
+    op_type: "Mod"
+  }
+  name: "test_mod"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 10
+}

ngraph/test/models/onnx/mod_sign_fmod.prototxt (new file, 58 lines)
@@ -0,0 +1,58 @@
+ir_version: 5
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "Y"
+    op_type: "Mod"
+    attribute {
+      name: "fmod"
+      i: 1
+      type: INT
+    }
+  }
+  name: "test_mod"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 10
+}

ngraph/test/models/onnx/mod_sign_fmod_broadcast.prototxt (new file, 58 lines)
@@ -0,0 +1,58 @@
+ir_version: 5
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "Y"
+    op_type: "Mod"
+    attribute {
+      name: "fmod"
+      i: 1
+      type: INT
+    }
+  }
+  name: "test_mod"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 1
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 6
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 10
+}

ngraph/test/models/onnx/mod_sign_fmod_f32.prototxt (new file, 58 lines)
@@ -0,0 +1,58 @@
+ir_version: 5
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "Y"
+    op_type: "Mod"
+    attribute {
+      name: "fmod"
+      i: 1
+      type: INT
+    }
+  }
+  name: "test_mod"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 10
+}

ngraph/test/models/onnx/mod_sign_i64.prototxt (new file, 53 lines)
@@ -0,0 +1,53 @@
+ir_version: 5
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "Y"
+    op_type: "Mod"
+  }
+  name: "test_mod"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 7
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 7
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 7
+        shape {
+          dim {
+            dim_value: 6
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 10
+}

@@ -3012,19 +3012,122 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_matmul_float_type)
     test_case.run();
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod)
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_sign)
 {
     const auto function = onnx_import::import_onnx_model(
         file_util::path_join(SERIALIZED_ZOO, "onnx/mod_sign.prototxt"));
     auto test_case = test::TestCase<TestEngine>(function);
 
-    test_case.add_input<int64_t>({-8, 3, 4, 9, -17, 1});
-    test_case.add_input<int64_t>({22, -13, 8, -3, 7, 2});
-    test_case.add_expected_output<int64_t>(Shape{6}, {-8, 3, 4, 0, -3, 1});
+    test_case.add_input<int32_t>({-4, 7, 5, 4, -7, 8});
+    test_case.add_input<int32_t>({2, -3, 8, -2, 3, 5});
+    test_case.add_expected_output<int32_t>(Shape{6}, {0, -2, 5, 0, 2, 3});
 
     test_case.run();
 }
+
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_sign_i64)
+{
+    const auto function = onnx_import::import_onnx_model(
+        file_util::path_join(SERIALIZED_ZOO, "onnx/mod_sign_i64.prototxt"));
+    auto test_case = test::TestCase<TestEngine>(function);
+
+    test_case.add_input<int64_t>({-4, 7, 5, 4, -7, 8});
+    test_case.add_input<int64_t>({2, -3, 8, -2, 3, 5});
+    test_case.add_expected_output<int64_t>(Shape{6}, {0, -2, 5, 0, 2, 3});
+
+    test_case.run();
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_sign_broadcast)
+{
+    const auto function = onnx_import::import_onnx_model(
+        file_util::path_join(SERIALIZED_ZOO, "onnx/mod_sign_broadcast.prototxt"));
+    auto test_case = test::TestCase<TestEngine>(function);
+
+    test_case.add_input<int32_t>({-8, 3, 4, 9, -17, 1});
+    test_case.add_input<int32_t>({3});
+    test_case.add_expected_output<int32_t>(Shape{6}, {1, 0, 1, 0, 1, 1});
+
+    test_case.run();
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_sign_f32)
+{
+    try
+    {
+        const auto function = onnx_import::import_onnx_model(
+            file_util::path_join(SERIALIZED_ZOO, "onnx/mod_sign_f32.prototxt"));
+        FAIL() << "Expected exception was not thrown";
+    }
+    catch (const ngraph::ngraph_error& e)
+    {
+        EXPECT_HAS_SUBSTRING(e.what(),
+            std::string("If the input type is floating point, then `fmod` attribute must be set to 1."));
+    }
+    catch (...)
+    {
+        FAIL() << "Expected ngraph_error exception was not thrown";
+    }
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_sign_fmod)
+{
+    const auto function = onnx_import::import_onnx_model(
+        file_util::path_join(SERIALIZED_ZOO, "onnx/mod_sign_fmod.prototxt"));
+    auto test_case = test::TestCase<TestEngine>(function);
+
+    test_case.add_input<int32_t>({-8, 3, 4, 9, -17, 1});
+    test_case.add_input<int32_t>({22, -13, 8, -3, 7, 2});
+    test_case.add_expected_output<int32_t>(Shape{6}, {-8, 3, 4, 0, -3, 1});
+
+    test_case.run();
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_sign_fmod_broadcast)
+{
+    const auto function = onnx_import::import_onnx_model(
+        file_util::path_join(SERIALIZED_ZOO, "onnx/mod_sign_fmod_broadcast.prototxt"));
+    auto test_case = test::TestCase<TestEngine>(function);
+
+    test_case.add_input<int32_t>({-8, 3, 4, 9, -17, 1});
+    test_case.add_input<int32_t>({3});
+    test_case.add_expected_output<int32_t>(Shape{6}, {-2, 0, 1, 0, -2, 1});
+
+    test_case.run();
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_sign_fmod_f32)
+{
+    const auto function = onnx_import::import_onnx_model(
+        file_util::path_join(SERIALIZED_ZOO, "onnx/mod_sign_fmod_f32.prototxt"));
+    auto test_case = test::TestCase<TestEngine>(function);
+
+    test_case.add_input<float>({-4.3, 7.2, 5.0, 4.3, -7.2, 8.0});
+    test_case.add_input<float>({2.1, -3.4, 8.0, -2.1, 3.4, 5.0});
+    test_case.add_expected_output<float>(Shape{6}, {-0.10000038, 0.39999962, 5., 0.10000038, -0.39999962, 3.});
+
+    test_case.run();
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, onnx_model_mod_incorrect_fmod)
+{
+    try
+    {
+        const auto function = onnx_import::import_onnx_model(
+            file_util::path_join(SERIALIZED_ZOO, "onnx/mod_incorrect_fmod.prototxt"));
+        FAIL() << "Expected exception was not thrown";
+    }
+    catch (const ngraph::ngraph_error& e)
+    {
+        EXPECT_HAS_SUBSTRING(e.what(),
+            std::string("Unsupported value of 'fmod' attribute (should be: 0 or 1)"));
+    }
+    catch (...)
+    {
+        FAIL() << "Expected ngraph_error exception was not thrown";
+    }
+}
 
 NGRAPH_TEST(${BACKEND_NAME}, onnx_model_scatterND_param_i64_indices)
 {
     const auto function = onnx_import::import_onnx_model(
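A quick way to sanity-check the float expectations in onnx_model_mod_sign_fmod_f32 above is to compare against std::fmod directly; the sketch below (not part of the patch) reproduces the quoted values, including the trailing digits that come from 4.3 and 2.1 not being exactly representable in binary floating point:

#include <cmath>
#include <cstdio>

int main()
{
    // Same inputs as the onnx_model_mod_sign_fmod_f32 test case.
    const float a[] = {-4.3f, 7.2f, 5.0f, 4.3f, -7.2f, 8.0f};
    const float b[] = {2.1f, -3.4f, 8.0f, -2.1f, 3.4f, 5.0f};
    for (int i = 0; i < 6; ++i)
        std::printf("%.8f\n", std::fmod(a[i], b[i]));
    // Prints values close to {-0.10000038, 0.39999962, 5, 0.10000038, -0.39999962, 3}.
}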

@@ -37,3 +37,25 @@ TEST(op_eval, floor_mod)
     for (size_t i = 0; i < expected_result.size(); i++)
         EXPECT_NEAR(result_data[i], expected_result[i], 0.000001);
 }
+
+TEST(op_eval, floor_mod_i32)
+{
+    auto a = make_shared<op::Parameter>(element::i32, Shape{6});
+    auto b = make_shared<op::Parameter>(element::i32, Shape{6});
+    auto floor_mod = make_shared<op::v1::FloorMod>(a, b);
+    auto fun = make_shared<Function>(OutputVector{floor_mod}, ParameterVector{a, b});
+
+    std::vector<int32_t> a_value{-4, 7, 5, 4, -7, 8};
+    std::vector<int32_t> b_value{2, -3, 8, -2, 3, 5};
+    std::vector<int32_t> expected_result{0, -2, 5, 0, 2, 3};
+
+    auto result = make_shared<HostTensor>();
+    ASSERT_TRUE(fun->evaluate({result},
+                              {make_host_tensor<element::Type_t::i32>(Shape{6}, a_value),
+                               make_host_tensor<element::Type_t::i32>(Shape{6}, b_value)}));
+    EXPECT_EQ(result->get_element_type(), element::i32);
+    EXPECT_EQ(result->get_shape(), Shape{6});
+    auto result_data = read_vector<int32_t>(result);
+    for (size_t i = 0; i < expected_result.size(); i++)
+        EXPECT_NEAR(result_data[i], expected_result[i], 0.000001);
+}

@@ -88,7 +88,7 @@ onnx_model_conv_transpose_w_groups
 
 # [NOT_IMPLEMENTED] Input image format I64 is not supported yet...
 onnx_model_global_lp_pool_p0
-onnx_model_mod
+onnx_model_mod_sign_i64
 onnx_model_constant_of_shape_float_zeros
 onnx_model_constant_of_shape_int_ones
 ab_plus_c_inference