Backend unit tests for the divide op (binary_op) (#4540)
Co-authored-by: Patryk Elszkowski <patryk.elszkowki@intel.com>
This commit is contained in:
parent
07f1b2511b
commit
e5e7715a52
@ -32,6 +32,7 @@
|
||||
// clang-format on
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "util/type_prop.hpp"
|
||||
#include "runtime/backend.hpp"
|
||||
#include "ngraph/runtime/tensor.hpp"
|
||||
#include "ngraph/ngraph.hpp"
|
||||
@ -156,6 +157,99 @@ NGRAPH_TEST(${BACKEND_NAME}, divide_overload)
|
||||
EXPECT_TRUE(test::all_close_f((vector<float>{2, 2, 2, 2}), read_vector<float>(result)));
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
template <typename Value>
|
||||
void divide_broadcast()
|
||||
{
|
||||
const auto element_type = ngraph::element::from<Value>();
|
||||
const Shape shape_a{3, 2, 1};
|
||||
const Shape shape_b{1, 6};
|
||||
const Shape shape_o{3, 2, 6};
|
||||
std::vector<Value> in_a{12, 24, 36, 48, 60, 72};
|
||||
std::vector<Value> in_b{1, 2, 3, 4, 6, 1};
|
||||
// clang-format off
|
||||
std::vector<Value> out{12, 6, 4, 3, 2, 12,
|
||||
24, 12, 8, 6, 4, 24,
|
||||
|
||||
36, 18, 12, 9, 6, 36,
|
||||
48, 24, 16, 12, 8, 48,
|
||||
|
||||
60, 30, 20, 15, 10, 60,
|
||||
72, 36, 24, 18, 12, 72};
|
||||
// clang-format on
|
||||
|
||||
auto A = make_shared<op::Parameter>(element_type, shape_a);
|
||||
auto B = make_shared<op::Parameter>(element_type, shape_b);
|
||||
auto f = make_shared<Function>(make_shared<op::v1::Divide>(A, B), ParameterVector{A, B});
|
||||
|
||||
auto backend = runtime::Backend::create("${BACKEND_NAME}");
|
||||
|
||||
// Create some tensors for input/output
|
||||
auto a = backend->create_tensor(element_type, shape_a, in_a.data());
|
||||
auto b = backend->create_tensor(element_type, shape_b, in_b.data());
|
||||
auto result = backend->create_tensor(element_type, shape_o);
|
||||
|
||||
auto handle = backend->compile(f);
|
||||
handle->call_with_validate({result}, {a, b});
|
||||
EXPECT_EQ(out, read_vector<Value>(result));
|
||||
}
|
||||
} // namespace
|
||||
|
||||
// Broadcasted division (shapes {3,2,1} / {1,6}) over 32-bit integers;
// delegates to the shared divide_broadcast driver defined above.
NGRAPH_TEST(${BACKEND_NAME}, divide_int32_broadcast)
{
    divide_broadcast<int32_t>();
}
|
||||
|
||||
// Broadcasted division (shapes {3,2,1} / {1,6}) over single-precision
// floats; delegates to the shared divide_broadcast driver defined above.
NGRAPH_TEST(${BACKEND_NAME}, divide_f32_broadcast)
{
    divide_broadcast<float>();
}
|
||||
|
||||
// Scalar (rank-0) integer division: 18 / 8 truncates toward zero to 2.
NGRAPH_TEST(${BACKEND_NAME}, divide_int32_scalar)
{
    const Shape scalar_shape{};

    const auto lhs = make_shared<op::Parameter>(element::i32, scalar_shape);
    const auto rhs = make_shared<op::Parameter>(element::i32, scalar_shape);
    const auto func = make_shared<Function>(make_shared<op::v1::Divide>(lhs, rhs),
                                            ParameterVector{lhs, rhs});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

    // Allocate input/output tensors on the backend, then fill the inputs.
    auto t_lhs = backend->create_tensor(element::i32, scalar_shape);
    auto t_rhs = backend->create_tensor(element::i32, scalar_shape);
    auto t_result = backend->create_tensor(element::i32, scalar_shape);
    copy_data(t_lhs, vector<int32_t>{18});
    copy_data(t_rhs, vector<int32_t>{8});

    auto executable = backend->compile(func);
    executable->call_with_validate({t_result}, {t_lhs, t_rhs});
    EXPECT_EQ(vector<int32_t>{2}, read_vector<int32_t>(t_result));
}
|
||||
|
||||
// Scalar (rank-0) float division: 18 / 8 == 2.25, compared with the
// tolerant float comparison helper.
NGRAPH_TEST(${BACKEND_NAME}, divide_f32_scalar)
{
    const Shape scalar_shape{};

    const auto lhs = make_shared<op::Parameter>(element::f32, scalar_shape);
    const auto rhs = make_shared<op::Parameter>(element::f32, scalar_shape);
    const auto func = make_shared<Function>(make_shared<op::v1::Divide>(lhs, rhs),
                                            ParameterVector{lhs, rhs});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

    // Allocate input/output tensors on the backend, then fill the inputs.
    auto t_lhs = backend->create_tensor(element::f32, scalar_shape);
    auto t_rhs = backend->create_tensor(element::f32, scalar_shape);
    auto t_result = backend->create_tensor(element::f32, scalar_shape);
    copy_data(t_lhs, vector<float>{18});
    copy_data(t_rhs, vector<float>{8});

    auto executable = backend->compile(func);
    executable->call_with_validate({t_result}, {t_lhs, t_rhs});
    EXPECT_TRUE(test::all_close_f((vector<float>{2.25}), read_vector<float>(t_result)));
}
|
||||
|
||||
NGRAPH_TEST(${BACKEND_NAME}, divide_by_zero_float32)
|
||||
{
|
||||
Shape shape{2, 2};
|
||||
|
@ -274,24 +274,47 @@ TEST(type_prop, binary_arithmetic_bad_argument_element_types)
|
||||
}
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
template <typename T>
|
||||
void test_binary_eltwise_bad_argument_shape(const element::Type& et)
|
||||
{
|
||||
auto input1 = make_shared<op::Parameter>(element::f32, Shape{2, 4});
|
||||
auto input2 = make_shared<op::Parameter>(element::f32, Shape{1, 2, 4});
|
||||
try
|
||||
{
|
||||
auto bc = make_shared<T>(input1, input2, op::AutoBroadcastType::NONE);
|
||||
// Should have thrown, so fail if it didn't
|
||||
FAIL() << "Did not detect incorrect element types for arithmetic operator";
|
||||
}
|
||||
catch (const NodeValidationFailure& error)
|
||||
{
|
||||
EXPECT_HAS_SUBSTRING(error.what(), std::string("Argument shapes are inconsistent"));
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
FAIL() << "Deduced type check failed for unexpected reason";
|
||||
}
|
||||
}
|
||||
} // namespace
|
||||
|
||||
// Each v1 binary elementwise op must reject operands of shapes {2, 4} vs
// {1, 2, 4} when the auto-broadcast attribute is NONE. The shared helper
// test_binary_eltwise_bad_argument_shape performs the construct-and-expect-
// throw check for each op; the inline copy of that check for op::v1::Add
// that used to precede these calls duplicated the Add line below verbatim
// and has been removed.
TEST(type_prop, binary_arithmetic_bad_argument_shape_with_none_autobroadcast_attribute)
{
    test_binary_eltwise_bad_argument_shape<op::v1::Add>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Divide>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Equal>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Greater>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::GreaterEqual>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Less>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::LessEqual>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Maximum>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Minimum>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Multiply>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::NotEqual>(element::f32);
    // Logical ops require boolean operands, hence element::boolean here.
    test_binary_eltwise_bad_argument_shape<op::v1::LogicalOr>(element::boolean);
    test_binary_eltwise_bad_argument_shape<op::v1::Power>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::v1::Subtract>(element::f32);
    test_binary_eltwise_bad_argument_shape<op::Xor>(element::boolean);
}
|
||||
|
||||
TEST(type_prop, binary_elementwise_arithmetic_both_dynamic)
|
||||
|
Loading…
Reference in New Issue
Block a user