Migrate LogicalAnd to new API (#20423)

This commit is contained in:
Pawel Raasz
2023-10-16 06:54:38 +02:00
committed by GitHub
parent f107b7663f
commit 893517dd4a
3 changed files with 51 additions and 78 deletions

View File

@@ -35,10 +35,7 @@ public:
const AutoBroadcastSpec& auto_broadcast = AutoBroadcastSpec(AutoBroadcastType::NUMPY));
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor& visitor) override;
OPENVINO_SUPPRESS_DEPRECATED_START
bool evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const override;
OPENVINO_SUPPRESS_DEPRECATED_END
bool evaluate(TensorVector& outputs, const TensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1

View File

@@ -4,31 +4,37 @@
#pragma once
#include <cstddef>
#include <algorithm>
#include <functional>
#include "openvino/core/shape.hpp"
#include "openvino/op/util/attr_types.hpp"
#include "openvino/reference/autobroadcast_binop.hpp"
namespace ov {
namespace reference {
/**
 * @brief Reference implementation of elementwise LogicalAnd over two
 *        equally-sized, pre-broadcast input buffers.
 *
 * @param arg0  Pointer to input 0 data.
 * @param arg1  Pointer to input 1 data.
 * @param out   Pointer to output data (must hold at least `count` elements).
 * @param count Number of elements in each buffer.
 */
template <class T>
void logical_and(const T* arg0, const T* arg1, T* out, size_t count) {
    // std::logical_and applies `&&`; std::transform zips the two inputs.
    std::transform(arg0, std::next(arg0, count), arg1, out, std::logical_and<T>());
}
template <typename T>
/**
* @brief Reference implementation of binary elementwise LogicalAnd operator.
*
* @param arg0 Pointer to input 0 data.
* @param arg1 Pointer to input 1 data.
* @param out Pointer to output data.
* @param arg_shape0 Input 0 shape.
* @param arg_shape1 Input 1 shape.
* @param broadcast_spec Broadcast specification mode.
*/
template <class T>
void logical_and(const T* arg0,
const T* arg1,
T* out,
const Shape& arg0_shape,
const Shape& arg1_shape,
const op::AutoBroadcastSpec& broadcast_spec) {
autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, [](T x, T y) -> T {
return static_cast<T>(x && y);
});
autobroadcast_binop(arg0, arg1, out, arg0_shape, arg1_shape, broadcast_spec, std::logical_and<T>());
}
} // namespace reference
} // namespace ov

View File

@@ -2,83 +2,53 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/op/logical_and.hpp"
#include "itt.hpp"
#include "ngraph/op/and.hpp"
#include "ngraph/runtime/host_tensor.hpp"
#include "ngraph/validation_util.hpp"
#include "openvino/reference/and.hpp"
#include "utils.hpp"
using namespace std;
using namespace ngraph;
op::v1::LogicalAnd::LogicalAnd(const Output<Node>& arg0,
const Output<Node>& arg1,
const AutoBroadcastSpec& auto_broadcast)
namespace ov {
namespace op {
namespace v1 {
LogicalAnd::LogicalAnd(const Output<Node>& arg0, const Output<Node>& arg1, const AutoBroadcastSpec& auto_broadcast)
: BinaryElementwiseLogical(arg0, arg1, auto_broadcast) {
constructor_validate_and_infer_types();
}
// Visits the attributes inherited from BinaryElementwiseLogical (the
// autobroadcast spec); this op declares no attributes of its own.
bool op::v1::LogicalAnd::visit_attributes(AttributeVisitor& visitor) {
OV_OP_SCOPE(v1_LogicalAnd_visit_attributes);
// The base call's return value is ignored here; success is reported
// unconditionally.
BinaryElementwiseLogical::visit_attributes(visitor);
return true;
}
shared_ptr<Node> op::v1::LogicalAnd::clone_with_new_inputs(const OutputVector& new_args) const {
std::shared_ptr<Node> LogicalAnd::clone_with_new_inputs(const OutputVector& new_args) const {
OV_OP_SCOPE(v1_LogicalAnd_clone_with_new_inputs);
check_new_args_count(this, new_args);
return make_shared<v1::LogicalAnd>(new_args.at(0), new_args.at(1), this->get_autob());
return std::make_shared<LogicalAnd>(new_args.at(0), new_args.at(1), get_autob());
}
OPENVINO_SUPPRESS_DEPRECATED_START
// Legacy evaluate helpers built on the deprecated HostTensor API; wrapped in
// the deprecation-suppression macro until this path is fully migrated to
// ov::Tensor.
namespace logand {
namespace {
// Runs the reference logical_and for element type ET on two host tensors,
// writing into `out` under the given broadcast spec. Always reports success
// for the instantiated type.
template <element::Type_t ET>
bool evaluate(const HostTensorPtr& arg0,
const HostTensorPtr& arg1,
const HostTensorPtr& out,
const op::AutoBroadcastSpec& broadcast_spec) {
ov::reference::logical_and(arg0->get_data_ptr<ET>(),
arg1->get_data_ptr<ET>(),
out->get_data_ptr<ET>(),
arg0->get_shape(),
arg1->get_shape(),
broadcast_spec);
return true;
}
// Dispatches on arg0's element type. Only `boolean` is handled; any other
// type yields false so the caller can fall back to another implementation.
bool evaluate_logand(const HostTensorPtr& arg0,
const HostTensorPtr& arg1,
const HostTensorPtr& out,
const op::AutoBroadcastSpec& broadcast_spec) {
bool rc = true;
// Shape the output for the broadcast result before writing element data.
out->set_broadcast(broadcast_spec, arg0, arg1);
switch (arg0->get_element_type()) {
OPENVINO_TYPE_CASE(evaluate_logand, boolean, arg0, arg1, out, broadcast_spec);
default:
rc = false;
break;
}
return rc;
}
} // namespace
} // namespace logand
bool op::v1::LogicalAnd::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const {
bool LogicalAnd::evaluate(TensorVector& outputs, const TensorVector& inputs) const {
OV_OP_SCOPE(v1_LogicalAnd_evaluate);
OPENVINO_SUPPRESS_DEPRECATED_START
OPENVINO_ASSERT(validate_host_tensor_vector(outputs, 1) && validate_host_tensor_vector(inputs, 2));
OPENVINO_SUPPRESS_DEPRECATED_END
return logand::evaluate_logand(inputs[0], inputs[1], outputs[0], get_autob());
OPENVINO_ASSERT(outputs.size() == 1);
OPENVINO_ASSERT(inputs.size() == 2);
const auto& shape_0 = inputs[0].get_shape();
const auto& shape_1 = inputs[1].get_shape();
outputs[0].set_shape(infer_broadcast_shape(this, shape_0, shape_1));
if (inputs[0].get_element_type() == element::boolean) {
using T = fundamental_type_for<element::boolean>;
reference::logical_and(inputs[0].data<const T>(),
inputs[1].data<const T>(),
outputs[0].data<T>(),
shape_0,
shape_1,
get_autob());
return true;
} else {
return false;
}
}
bool op::v1::LogicalAnd::has_evaluate() const {
bool LogicalAnd::has_evaluate() const {
OV_OP_SCOPE(v1_LogicalAnd_has_evaluate);
switch (get_input_element_type(0)) {
case ngraph::element::boolean:
return true;
default:
break;
}
return false;
return get_input_element_type(0) == element::boolean;
}
} // namespace v1
} // namespace op
} // namespace ov