Remove obsoleted v0::Broadcast and BroadcastLike operators (#2779)
* Remove obsoleted v0::Broadcast and BroadcastLike operators
* Remove NGRAPH_DEPRECATED marks from autobroadcast functions
* Restore NGRAPH_SUPPRESS_DEPRECATED_START in autobroadcast.cpp
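For call sites being migrated, the pattern this commit applies throughout is sketched below. This is an illustration, not part of the diff: the shapes are made up, and only constructors actually exercised in the hunks are used.

    #include <ngraph/ngraph.hpp>

    using namespace ngraph;

    // v0 (removed): the output Shape and the set of axes *created* by the
    // broadcast were attributes of the op:
    //     auto b = std::make_shared<op::v0::Broadcast>(arg, Shape{2, 3, 4}, AxisSet{1});
    // v1 (replacement): the target shape is a Constant input, and the axes
    // mapping says where each *input* axis lands in the output.
    std::shared_ptr<Node> broadcast_v0_to_v1(const Output<Node>& arg) // arg: {2, 4}
    {
        auto target_shape = op::Constant::create(element::u64, Shape{3}, {2, 3, 4});
        auto axes_mapping = op::Constant::create(element::i64, Shape{2}, {0, 2});
        return std::make_shared<op::v1::Broadcast>(arg, target_shape, axes_mapping);
    }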
@@ -347,8 +347,8 @@ bool pass::NopElimination::run_on_function(std::shared_ptr<Function> function) {
         {TI(opset3::Reshape), &eliminate_reshape_v1},
         {TI(opset3::Concat), &eliminate_concat},
         {TI(opset3::Squeeze), &eliminate_squeeze},
-        {TI(opset3::Unsqueeze), &eliminate_unsqueeze},
-        {TI(op::v0::Broadcast), &eliminate_nop}};
+        {TI(op::v1::Broadcast), &eliminate_nop},
+        {TI(opset3::Unsqueeze), &eliminate_unsqueeze}};

     bool clobbered = false;
@@ -15,6 +15,7 @@
 #include <ngraph/opsets/opset3.hpp>
 #include <ngraph/pass/manager.hpp>
 #include <ngraph/pass/constant_folding.hpp>
+#include <ngraph/builder/autobroadcast.hpp>
 #include <transformations/common_optimizations/algebraic_simplification.hpp>
 #include <transformations/utils/utils.hpp>
 #include <transformations/init_node_info.hpp>
@@ -80,7 +81,7 @@ TEST(algebraic_simplification, multiply_negative_tests) {

 TEST(algebraic_simplification, multiply_prod_negative) {
     auto fconst1 = ngraph::op::Constant::create(element::f64, Shape{2}, {1.0, 1.0});
-    auto broadcast = std::make_shared<op::Broadcast>(fconst1, Shape{2, 5}, AxisSet{1});
+    auto broadcast = builder::opset1::make_broadcast(fconst1, Shape{2, 5}, AxisSet{1});
     auto prod_fconst1 = std::make_shared<op::Product>(broadcast, AxisSet{0, 1});

     pass::Manager pass_manager;

@@ -94,7 +95,7 @@ TEST(algebraic_simplification, multiply_prod_negative) {

 TEST(algebraic_simplification, multiply_sum_negative) {
     auto fconst1 = ngraph::op::Constant::create(element::f64, Shape{2}, {1.0, 1.0});
-    auto broadcast = std::make_shared<op::Broadcast>(fconst1, Shape{2, 5}, AxisSet{1});
+    auto broadcast = builder::opset1::make_broadcast(fconst1, Shape{2, 5}, AxisSet{1});
     auto sum_fconst1 = std::make_shared<op::Sum>(broadcast, AxisSet{0, 1});

     pass::Manager pass_manager;
@@ -84,16 +84,17 @@ TEST(nop_elimination, eliminate_slice) {
 }

 TEST(nop_elimination, eliminate_broadcast) {
-    Shape shape{};
+    Shape shape{1};
     auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto b = make_shared<op::v0::Broadcast>(A, shape, AxisSet{});
+    auto b = make_shared<op::v1::Broadcast>(A,
+                                            op::Constant::create(element::u64, Shape{1}, {1}));
     auto f = make_shared<Function>(make_shared<op::v0::Abs>(b), ParameterVector{A});

     pass::Manager pass_manager;
     pass_manager.register_pass<pass::NopElimination>();
     pass_manager.run_passes(f);

-    ASSERT_EQ(count_ops_of_type<op::v0::Broadcast>(f), 0);
+    ASSERT_EQ(count_ops_of_type<op::v1::Broadcast>(f), 0);
 }

 TEST(nop_elimination, eliminate_stop_gradient) {
@@ -22,6 +22,7 @@
 #include "ngraph/except.hpp"
 #include "ngraph/node.hpp"
 #include "ngraph/op/broadcast.hpp"
+#include "ngraph/op/constant.hpp"

 namespace ngraph
 {
@@ -54,7 +55,6 @@ namespace ngraph
        ///
        /// \return Vector of broadcasted values.
        ///
-       NGRAPH_DEPRECATED("This builder was deprecated.")
        OutputVector numpy_broadcast_outputs(const OutputVector& values);

        ///

@@ -66,7 +66,6 @@
        ///
        /// \return Node producing values with requested shape.
        ///
-       NGRAPH_DEPRECATED("This builder was deprecated.")
        std::shared_ptr<Node> numpy_broadcast(const Output<Node>& value, const Shape& shape);

        /// \brief Wrap two graph values, if necessary, to obtain values with identical shapes,
@@ -97,66 +96,9 @@ namespace ngraph
        /// elements point to ngraph::Node objects whose output values have the same shape.
        ///
        /// \exception ngraph::builder::numpy_autobroadcast_incompatible_shapes
-       NGRAPH_DEPRECATED("This builder was deprecated.")
        std::pair<std::shared_ptr<Node>, std::shared_ptr<Node>>
            numpy_broadcast(const std::pair<Output<Node>, Output<Node>>& args);

-       /// Create a new \p NodeType node, and any additional nodes required to simulate NumPy-style
-       /// autobroadcast semantics. Intended for binary operations such as "Add".
-       ///
-       /// \param [in] operand1_reshapeable The first operand to supply to the \p NodeType
-       ///                                  constructor. Subject to being wrapped with additional
-       ///                                  nodes required for autobroadcasting. Must not be null.
-       ///
-       /// \param [in] operand2_reshapeable The second operand to supply to the \p NodeType
-       ///                                  constructor. Subject to being wrapped with additional
-       ///                                  nodes required for autobroadcasting. Must not be null.
-       ///
-       /// \return The sink node of any/all nodes created by this function. Will never be null.
-       ///
-       /// \exception ngraph::builder::numpy_autobroadcast_incompatible_shapes
-       template <typename NodeType>
-       NGRAPH_DEPRECATED("This builder was deprecated.")
-       std::shared_ptr<NodeType> make_with_numpy_broadcast(
-           const Output<Node>& operand1_reshapeable, const Output<Node>& operand2_reshapeable)
-       {
-           NGRAPH_SUPPRESS_DEPRECATED_START
-           auto shaped_op1_op2 = numpy_broadcast({operand1_reshapeable, operand2_reshapeable});
-           return std::make_shared<NodeType>(shaped_op1_op2.first, shaped_op1_op2.second);
-           NGRAPH_SUPPRESS_DEPRECATED_END
-       }
-
-       /// Create a new \p NodeType node, and any additional nodes required to simulate NumPy-style
-       /// autobroadcast semantics. Intended for non-binary operations such as "Select", where
-       /// precisely the second and third operands are subject to autobroadcast semantics.
-       ///
-       /// \param [in] operand1 This operand is not subject to autobraodcast logic, and will be
-       ///                      passed as-is as the first argument to the \p NodeType constructor.
-       ///
-       /// \param [in] operand2_reshapeable The second operand to supply to the \p NodeType
-       ///                                  constructor. Subject to being wrapped with additional
-       ///                                  nodes required for autobroadcasting. Must not be null.
-       ///
-       /// \param [in] operand3_reshapeable The third operand to supply to the \p NodeType
-       ///                                  constructor. Subject to being wrapped with additional
-       ///                                  nodes required for autobroadcasting. Must not be null.
-       ///
-       /// \return The sink node of any/all nodes created by this function. Will never be null.
-       ///
-       /// \exception ngraph::builder::numpy_autobroadcast_incompatible_shapes
-       template <typename NodeType>
-       NGRAPH_DEPRECATED("This builder was deprecated.")
-       std::shared_ptr<Node> make_with_numpy_broadcast(const Output<Node>& operand1,
-                                                       const Output<Node>& operand2_reshapeable,
-                                                       const Output<Node>& operand3_reshapeable)
-       {
-           NGRAPH_SUPPRESS_DEPRECATED_START
-           auto shaped_op2_op3 = numpy_broadcast({operand2_reshapeable, operand3_reshapeable});
-           return std::make_shared<NodeType>(
-               operand1, shaped_op2_op3.first, shaped_op2_op3.second);
-           NGRAPH_SUPPRESS_DEPRECATED_END
-       }
-
        /// \brief Broadcast shape of two nodes to make them compatible for a matrix
        ///        multiplication.
        ///
@@ -173,7 +115,6 @@
        ///
        /// \return The vector containing both outputs broadcasted.
        ///
-       NGRAPH_DEPRECATED("This builder was deprecated.")
        OutputVector numpy_broadcast_for_matmul_operation(const Output<Node>& left,
                                                          const Output<Node>& right);

@@ -184,7 +125,6 @@
        /// \param axis Index starting to align
        ///
        /// \return pdpd-style broadcasted list of nodes.
-       NGRAPH_DEPRECATED("This builder was deprecated.")
        OutputVector pdpd_broadcast(const OutputVector& inputs, int64_t axis);

        /// \brief Generate a list of broadcast axes.
@@ -201,10 +141,9 @@
        ///          matches the desired new shape.
        ///
        /// \return The indices of added axes.
-       NGRAPH_DEPRECATED("This builder was deprecated.")
-       AxisSet calculate_broadcast_axes(const Shape& output_shape,
-                                        const Shape& input_shape,
-                                        std::size_t start_match_axis);
+       std::shared_ptr<Node> calculate_broadcast_axes(const Shape& output_shape,
+                                                      const Shape& input_shape,
+                                                      std::size_t start_match_axis);

        ///
        /// \brief Calculate the output shape of numpy-style broadcast operation for all input
@@ -222,54 +161,19 @@
        /// \return A pair that contains the target shape as its first object and a vector of
        ///         padded input shapes ready to be broadcasted as the second object
        ///
-       NGRAPH_DEPRECATED("This builder was deprecated.")
        std::pair<Shape, std::vector<Shape>>
            get_numpy_broadcast_shapes(const std::vector<Shape>& input_shapes);

-       /// \brief Generate a list of broadcast along axes.
-       ///
-       /// \details Broadcast "adds" elements along axes to the input tensor, replicating
-       ///          elements from the input tensor as needed to fill the new dimensions.
-       ///          Function calculate which of the output axes are added in this way.
-       ///
-       ///          This function will attempt to match shapes, assuming the current shape
-       ///          matches the rightmost positions of the desired new shape. This behaviour
-       ///          is similar to NumPy's broadcasting.
-       ///
-       /// \param output_shape The new shape for the output tensor.
-       /// \param input_shape  The shape of input tensor.
-       ///
-       /// \return The indices of added axes.
-       NGRAPH_DEPRECATED("This builder was deprecated.")
-       inline AxisSet calculate_broadcast_axes(const Shape& output_shape, const Shape& input_shape)
-       {
-           NGRAPH_SUPPRESS_DEPRECATED_START
-           return calculate_broadcast_axes(
-               output_shape, input_shape, output_shape.size() - input_shape.size());
-           NGRAPH_SUPPRESS_DEPRECATED_END
-       }
-
-       NGRAPH_DEPRECATED("This builder was deprecated.")
-       inline std::shared_ptr<Node> make_broadcast_node(const Output<Node>& output,
-                                                        Shape new_shape)
-       {
-           NGRAPH_SUPPRESS_DEPRECATED_START
-           return std::make_shared<op::Broadcast>(
-               output, new_shape, calculate_broadcast_axes(new_shape, output.get_shape()));
-           NGRAPH_SUPPRESS_DEPRECATED_END
-       }
-
-       NGRAPH_DEPRECATED("This builder was deprecated.")
        inline std::shared_ptr<Node> make_broadcast_node(const Output<Node>& value,
                                                         const Shape& new_shape,
                                                         std::size_t start_match_axis)
        {
-           NGRAPH_SUPPRESS_DEPRECATED_START
-           return std::make_shared<op::Broadcast>(
+           auto shape_const =
+               op::Constant::create(element::u64, Shape{new_shape.size()}, new_shape);
+           return std::make_shared<op::v1::Broadcast>(
                value,
-               new_shape,
+               shape_const,
                calculate_broadcast_axes(new_shape, value.get_shape(), start_match_axis));
-           NGRAPH_SUPPRESS_DEPRECATED_END
        }

        namespace opset1
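A usage sketch of the reworked helper (hypothetical shapes; per the hunk above, the helper now builds a v1::Broadcast with a target-shape constant and an axes-mapping constant instead of a v0::Broadcast):

    // value has shape {3, 4}; align it at axis 1 of the output {2, 3, 4, 5},
    // i.e. the two input axes land at output axes {1, 2}.
    auto value = std::make_shared<op::Parameter>(element::f32, Shape{3, 4});
    auto bcast = builder::make_broadcast_node(value, Shape{2, 3, 4, 5}, 1);
    // bcast is an op::v1::Broadcast whose three inputs are `value`, the
    // target-shape constant {2, 3, 4, 5}, and the axes mapping {1, 2}.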
@@ -24,7 +24,6 @@
 #include "ngraph/axis_vector.hpp"
 #include "ngraph/check.hpp"
-#include "ngraph/op/broadcast.hpp"
 #include "ngraph/op/constant.hpp"
 #include "ngraph/op/reshape.hpp"
 #include "ngraph/util.hpp"
@@ -178,8 +177,12 @@ namespace ngraph

            if (!broadcast_axes.empty())
            {
-               broadcasted_node =
-                   make_shared<op::Broadcast>(broadcasted_node, output_shape, broadcast_axes);
+               auto shape_const =
+                   op::Constant::create(element::u64, Shape{output_shape.size()}, output_shape);
+               broadcasted_node = make_shared<op::v1::Broadcast>(
+                   broadcasted_node,
+                   shape_const,
+                   opset1::get_axes_mapping_output(output_shape, broadcast_axes));
            }

            return broadcasted_node;
@@ -234,7 +237,10 @@
                    value, get_default_order(value_shape), trimmed_value_shape);
            }

-           auto value_bcast = make_shared<op::Broadcast>(trimmed_value, output_shape, axes);
+           auto shape_const =
+               op::Constant::create(element::u64, Shape{output_shape.size()}, output_shape);
+           auto value_bcast = make_shared<op::v1::Broadcast>(
+               trimmed_value, shape_const, opset1::get_axes_mapping_output(output_shape, axes));

            return move(value_bcast);
        }
@@ -337,19 +343,19 @@ namespace ngraph
            return broadcasted_inputs;
        }

-       AxisSet calculate_broadcast_axes(const Shape& output_shape,
-                                        const Shape& input_shape,
-                                        size_t start_match_axis)
+       std::shared_ptr<Node> calculate_broadcast_axes(const Shape& output_shape,
+                                                      const Shape& input_shape,
+                                                      size_t start_match_axis)
        {
-           vector<size_t> result(output_shape.size() - input_shape.size());
-           // Populate the result vector with monotonic increasing series from 0 until
+           vector<size_t> axes(output_shape.size() - input_shape.size());
+           // Populate the axes vector with monotonic increasing series from 0 until
            // output_shape_size, excluding values in range:
            // [start_match_axis, start_match_axis + input_shape.size()]
-           iota(begin(result), begin(result) + start_match_axis, 0);
-           iota(begin(result) + start_match_axis,
-                end(result),
-                start_match_axis + input_shape.size());
-           return result;
+           iota(begin(axes), begin(axes) + start_match_axis, 0);
+           iota(begin(axes) + start_match_axis, end(axes), start_match_axis + input_shape.size());
+
+           auto axes_mapping = opset1::get_axes_mapping(output_shape, axes);
+           return op::Constant::create(element::i64, Shape{axes_mapping.size()}, axes_mapping);
        }

        namespace opset1
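A hand-traced example of what the reworked function computes, mirrored outside nGraph (shapes illustrative; the complement step matches get_axes_mapping as exercised by the test expectations later in this diff):

    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <numeric>
    #include <vector>

    // Mirror of calculate_broadcast_axes: input_shape {3, 4} matched at
    // start_match_axis = 1 inside output_shape {2, 3, 4, 5}.
    int main()
    {
        const std::size_t out_rank = 4;
        const std::size_t in_rank = 2;
        const std::size_t start_match_axis = 1;

        // Axes *added* by the broadcast; the two iota calls fill {0} and {3},
        // skipping [start_match_axis, start_match_axis + in_rank) where the
        // input axes sit.
        std::vector<std::size_t> axes(out_rank - in_rank);
        std::iota(axes.begin(), axes.begin() + start_match_axis, std::size_t{0});
        std::iota(axes.begin() + start_match_axis, axes.end(), start_match_axis + in_rank);

        // get_axes_mapping keeps the complement: output positions {1, 2} where
        // the input axes land; that vector becomes the i64 Constant returned
        // by the function above.
        std::vector<std::size_t> mapping;
        for (std::size_t i = 0; i < out_rank; ++i)
            if (std::find(axes.begin(), axes.end(), i) == axes.end())
                mapping.push_back(i);

        for (std::size_t a : mapping)
            std::cout << a << ' '; // prints: 1 2
        std::cout << '\n';
        return 0;
    }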
@@ -145,103 +145,5 @@ namespace ngraph
            AutoBroadcastSpec m_broadcast_spec;
        };
    } // namespace v1
-
-   namespace v0
-   {
-       NGRAPH_SUPPRESS_DEPRECATED_START
-       /// \brief Operation which "adds" axes to an input tensor, replicating elements from the
-       ///        input as needed along the new axes.
-       class NGRAPH_DEPRECATED(
-           "This operation is deprecated and will be removed soon. "
-           "Use v1::Broadcast instead of it.") NGRAPH_API Broadcast : public Op
-       {
-       public:
-           static constexpr NodeTypeInfo type_info{"Broadcast", 0};
-           const NodeTypeInfo& get_type_info() const override { return type_info; }
-           /// \brief Constructs a broadcast operation.
-           Broadcast() = default;
-           /// \brief Constructs a broadcast operation.
-           ///
-           /// \param arg            The input tensor to be broadcast.
-           /// \param shape          The shape of the output tensor.
-           /// \param broadcast_axes The axis positions (0-based) in the result that are being
-           ///                       broadcast. The remaining axes in shape must be the same as
-           ///                       the shape of arg.
-           Broadcast(const Output<Node>& arg,
-                     const Shape& shape,
-                     const AxisSet& broadcast_axes);
-           bool visit_attributes(AttributeVisitor& visitor) override;
-           void validate_and_infer_types() override;
-
-           std::shared_ptr<Node>
-               clone_with_new_inputs(const OutputVector& new_args) const override;
-
-           /// \return A set containing the indices of the broadcast axes (0-based).
-           const AxisSet& get_broadcast_axes() const { return m_broadcast_axes; }
-           void set_broadcast_axes(const AxisSet& broadcast_axes)
-           {
-               m_broadcast_axes = broadcast_axes;
-           }
-           const Shape& get_broadcast_shape() const { return m_shape; }
-           void set_broadcast_shape(const Shape& shape) { m_shape = shape; }
-           bool evaluate(const HostTensorVector& outputs,
-                         const HostTensorVector& inputs) const override;
-
-       protected:
-           Broadcast(const OutputVector& args,
-                     const Shape& shape,
-                     const AxisSet& broadcast_axes);
-
-           virtual void infer_shape() {}
-           Shape m_shape;
-           AxisSet m_broadcast_axes;
-       };
-
-       /// \brief Broadcast arg to the same shape as like_arg.
-       class NGRAPH_DEPRECATED(
-           "This operation is deprecated and will be removed soon. Please don't use it.")
-           NGRAPH_API BroadcastLike : public v0::Broadcast
-       {
-       public:
-           static constexpr NodeTypeInfo type_info{"BroadcastLike", 0};
-           const NodeTypeInfo& get_type_info() const override { return type_info; }
-           /// \brief Broadcast arg to the same shape as like_arg.
-           BroadcastLike() = default;
-           /// \brief Broadcast arg to the same shape as like_arg.
-           ///
-           /// Once the shape of like_arg is known, this op will be replaced with an equivalent
-           /// Broadcast op.
-           ///
-           /// \param arg The argument to be broadcast.
-           /// \param like_arg Provides the shape for the result.
-           /// \param initial_broadcast_axes indicates which axes will be broadcast. If empty,
-           ///        arg must be scalar and all axes are broadcast.
-           BroadcastLike(const Output<Node>& arg,
-                         const Output<Node>& like_arg,
-                         const AxisSet& initial_broadcast_axes);
-           bool visit_attributes(AttributeVisitor& visitor) override;
-           std::shared_ptr<Node>
-               clone_with_new_inputs(const OutputVector& new_args) const override;
-
-           void infer_shape() override;
-           const AxisSet& get_initial_broadcast_axes() const
-           {
-               return m_initial_broadcast_axes;
-           }
-           void set_initial_broadcast_axes(const AxisSet& initial_broadcast_axes)
-           {
-               m_initial_broadcast_axes = initial_broadcast_axes;
-           }
-
-       protected:
-           AxisSet m_initial_broadcast_axes;
-       };
-       NGRAPH_SUPPRESS_DEPRECATED_END
-   } // namespace v0
-
-   NGRAPH_SUPPRESS_DEPRECATED_START
-   using v0::Broadcast;
-   using v0::BroadcastLike;
-   NGRAPH_SUPPRESS_DEPRECATED_END
    }
}
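BroadcastLike has no direct v1 counterpart; where the "like" shape is only known at runtime, the usual substitute is a v1::Broadcast fed by ShapeOf. A suggested sketch, not part of this commit:

    // Broadcast `arg` to the shape of `like` without a static shape.
    std::shared_ptr<Node> broadcast_like(const Output<Node>& arg, const Output<Node>& like)
    {
        auto target_shape = std::make_shared<op::v3::ShapeOf>(like);
        // The two-input form uses NUMPY (right-aligned) broadcast rules, which
        // matches the common BroadcastLike usage where arg is a scalar or a
        // suffix of like's shape.
        return std::make_shared<op::v1::Broadcast>(arg, target_shape);
    }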
@@ -41,10 +41,8 @@ NGRAPH_OP(AvgPool, ngraph::op::v1, 1)
 NGRAPH_OP(BatchNormInference, ngraph::op::v0, 0)
 NGRAPH_OP(BatchToSpace, ngraph::op::v1, 1)
 NGRAPH_OP(BinaryConvolution, ngraph::op::v1, 1)
-NGRAPH_OP(Broadcast, ngraph::op::v0, 0)
 NGRAPH_OP(Broadcast, ngraph::op::v1, 1)
 NGRAPH_OP(Broadcast, ngraph::op::v3, 3)
-NGRAPH_OP(BroadcastLike, ngraph::op::v0, 0)
 NGRAPH_OP(Bucketize, ngraph::op::v3, 3)
 NGRAPH_OP(CTCGreedyDecoder, ngraph::op::v0, 0)
 NGRAPH_OP(Ceiling, ngraph::op::v0, 0)
@@ -45,6 +45,20 @@ namespace ngraph
                    set_output_type(0, arg.get_element_type(), arg.get_partial_shape());
                }

+               Skip(const OutputVector& args, ValuePredicate pred)
+                   : Pattern(args, pred)
+               {
+                   set_output_type(
+                       0, args.at(0).get_element_type(), args.at(0).get_partial_shape());
+               }
+
+               Skip(const OutputVector& args, NodePredicate pred = nullptr)
+                   : Pattern(args, as_value_predicate(pred))
+               {
+                   set_output_type(
+                       0, args.at(0).get_element_type(), args.at(0).get_partial_shape());
+               }
+
                virtual bool match_value(pattern::Matcher* matcher,
                                         const Output<Node>& pattern_value,
                                         const Output<Node>& graph_value) override;
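These overloads let a Skip node carry extra pattern inputs, such as the shape and axes constants a v1::Broadcast now has. The label_on_skip test near the end of this diff exercises them roughly like this (abridged; `shape` and `const_label` come from the test's setup):

    auto bcst_pred = [](std::shared_ptr<Node> n) {
        return as_type_ptr<op::v1::Broadcast>(n) != nullptr;
    };
    auto shape_const = op::Constant::create(element::u64, Shape{shape.size()}, shape);
    auto axes_const = op::Constant::create(element::u8, Shape{}, {0});
    // Skip matches either const_label itself or a v1::Broadcast wrapping it.
    auto bcst = std::make_shared<pattern::op::Skip>(
        OutputVector{const_label, shape_const, axes_const}, bcst_pred);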
@@ -571,15 +571,11 @@ void ngraph::insert_new_node_between(const shared_ptr<Node>& src_node,

 std::shared_ptr<Node> ngraph::make_zero(const element::Type& element_type, const Shape& shape)
 {
-    std::shared_ptr<Node> zero = op::Constant::create(element_type, Shape{}, {0.0});
+    auto zero = op::Constant::create(element_type, Shape{}, {0.0});
     if (shape.size() > 0)
     {
-        AxisSet axes;
-        for (size_t i = 0; i < shape.size(); i++)
-        {
-            axes.insert(i);
-        }
-        zero = std::make_shared<op::Broadcast>(zero, shape, axes);
+        return std::make_shared<op::v1::Broadcast>(
+            zero, op::Constant::create(element::u64, Shape{shape.size()}, shape));
    }
    return zero;
 }
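For reference, what the reworked helper now builds (shapes illustrative):

    // make_zero(element::f32, Shape{2, 3}) yields
    //     Constant(0.0f, scalar) -> v1::Broadcast(target shape = Constant{2, 3})
    // rather than a v0::Broadcast with an explicit AxisSet{0, 1}; a scalar
    // request returns the Constant directly, with no Broadcast node at all.
    auto zero = ngraph::make_zero(ngraph::element::f32, ngraph::Shape{2, 3});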
@@ -296,199 +296,3 @@ bool op::v1::Broadcast::evaluate(const HostTensorVector& outputs,
    OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::v1::Broadcast::evaluate");
    return op::util::BroadcastBase::evaluate(outputs, inputs);
 }
-
-constexpr NodeTypeInfo op::v0::Broadcast::type_info;
-
-op::v0::Broadcast::Broadcast(const OutputVector& args,
-                             const Shape& shape,
-                             const AxisSet& broadcast_axes)
-    : Op(args)
-    , m_shape(shape)
-    , m_broadcast_axes(broadcast_axes)
-{
-    constructor_validate_and_infer_types();
-}
-
-op::v0::Broadcast::Broadcast(const Output<Node>& arg,
-                             const Shape& shape,
-                             const AxisSet& broadcast_axes)
-    : Broadcast(OutputVector{arg}, shape, broadcast_axes)
-{
-}
-
-bool op::v0::Broadcast::visit_attributes(AttributeVisitor& visitor)
-{
-    visitor.on_attribute("shape", m_shape);
-    visitor.on_attribute("broadcast_axes", m_broadcast_axes);
-    return true;
-}
-
-void op::v0::Broadcast::validate_and_infer_types()
-{
-    infer_shape();
-
-    for (auto axis : m_broadcast_axes)
-    {
-        NODE_VALIDATION_CHECK(this,
-                              axis < m_shape.size(),
-                              "Broadcast axis index (",
-                              axis,
-                              ") exceeds specified output shape rank ",
-                              "(broadcast axes: ",
-                              m_broadcast_axes,
-                              ", output shape: ",
-                              m_shape,
-                              ").");
-    }
-
-    Shape required_input_shape = m_shape;
-    for (auto i = m_broadcast_axes.rbegin(); i != m_broadcast_axes.rend(); ++i)
-    {
-        required_input_shape.erase(required_input_shape.begin() + *i);
-    }
-
-    // TODO(amprocte): We can probably have a more helpful error message here.
-    // There are two things that can go wrong, which are being picked up in
-    // one fell swoop by this check: either the number of broadcast axes is not
-    // enough, or there is a mismatch with one of the pre-broadcast axis lengths.
-    NODE_VALIDATION_CHECK(
-        this,
-        get_input_partial_shape(0).compatible(required_input_shape),
-        "Broadcast argument shape, specified output shape, and axes are incompatible ",
-        "(argument shape: ",
-        get_input_partial_shape(0),
-        ", output shape: ",
-        m_shape,
-        ", broadcast axes: ",
-        m_broadcast_axes,
-        ").");
-
-    set_output_type(0, get_input_element_type(0), m_shape);
-}
-
-shared_ptr<Node> op::v0::Broadcast::clone_with_new_inputs(const OutputVector& new_args) const
-{
-    check_new_args_count(this, new_args);
-    return make_shared<v0::Broadcast>(new_args.at(0), m_shape, m_broadcast_axes);
-}
-
-namespace
-{
-#define TYPE_CASE_v0(a)                                                                            \
-    case element::Type_t::a: rc = evaluate_v0<element::Type_t::a>
-
-    template <element::Type_t ET>
-    inline bool evaluate_v0(const HostTensorPtr& arg0,
-                            const HostTensorPtr& out,
-                            const AxisSet& broadcast_axes)
-    {
-        using T = typename element_type_traits<ET>::value_type;
-        runtime::reference::broadcast<T>((arg0->get_data_ptr<ET>()),
-                                         (out->get_data_ptr<ET>()),
-                                         arg0->get_shape(),
-                                         out->get_shape(),
-                                         broadcast_axes);
-        return true;
-    }
-
-    bool evaluate_broadcast_v0(const HostTensorPtr& arg0,
-                               const HostTensorPtr& out,
-                               const AxisSet broadcast_axes,
-                               const Shape output_shape)
-    {
-        bool rc = true;
-        Shape in_shape = arg0->get_shape();
-        out->set_shape(output_shape);
-        out->set_element_type(arg0->get_element_type());
-        switch (arg0->get_element_type())
-        {
-            TYPE_CASE_v0(boolean)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(i8)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(i16)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(i32)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(i64)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(u8)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(u16)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(u32)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(u64)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(bf16)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(f16)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(f32)(arg0, out, broadcast_axes);
-            break;
-            TYPE_CASE_v0(f64)(arg0, out, broadcast_axes);
-            break;
-            default: rc = false; break;
-        }
-        return rc;
-    }
-}
-
-bool op::v0::Broadcast::evaluate(const HostTensorVector& outputs,
-                                 const HostTensorVector& inputs) const
-{
-    OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::v0::Broadcast::evaluate");
-    return evaluate_broadcast_v0(inputs[0], outputs[0], get_broadcast_axes(), get_output_shape(0));
-}
-
-constexpr NodeTypeInfo op::v0::BroadcastLike::type_info;
-
-op::v0::BroadcastLike::BroadcastLike(const Output<Node>& arg,
-                                     const Output<Node>& like_arg,
-                                     const AxisSet& initial_broadcast_axes)
-    : op::v0::Broadcast({arg, like_arg}, {}, {})
-    , m_initial_broadcast_axes(initial_broadcast_axes)
-{
-    constructor_validate_and_infer_types();
-}
-
-bool op::v0::BroadcastLike::visit_attributes(AttributeVisitor& visitor)
-{
-    visitor.on_attribute("shape", m_shape);
-    visitor.on_attribute("broadcast_axes", m_broadcast_axes);
-    visitor.on_attribute("initial_broadcast_axes", m_initial_broadcast_axes);
-    return true;
-}
-
-shared_ptr<Node> op::v0::BroadcastLike::clone_with_new_inputs(const OutputVector& new_args) const
-{
-    if (new_args.size() != 2)
-    {
-        throw ngraph_error("Incorrect number of new arguments");
-    }
-    return make_shared<v0::BroadcastLike>(new_args.at(0), new_args.at(1), m_initial_broadcast_axes);
-}
-
-void op::v0::BroadcastLike::infer_shape()
-{
-    const Shape& in_shape = get_input_shape(0);
-    m_shape = get_input_shape(1);
-    m_broadcast_axes = m_initial_broadcast_axes;
-    if (m_broadcast_axes.size() == 0)
-    {
-        for (size_t i = 0; i < m_shape.size(); ++i)
-        {
-            if (i < in_shape.size())
-            {
-                if (in_shape.at(i) == 1 && m_shape.at(i) > 1)
-                {
-                    m_broadcast_axes.insert(i);
-                }
-            }
-            else
-            {
-                m_broadcast_axes.insert(i);
-            }
-        }
-    }
-}
@@ -19,6 +19,7 @@
 #include "grn.hpp"
 #include "ngraph/attribute_visitor.hpp"
 #include "ngraph/axis_set.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/norm.hpp"
 #include "ngraph/builder/reshape.hpp"
 #include "ngraph/op/broadcast.hpp"

@@ -81,8 +82,8 @@ OutputVector op::GRN::decompose_op() const
    // Calculate l2 norm across channels.
    shared_ptr<Node> norm = builder::opset1::l2_norm(data, axis_set_const, m_bias);
    // Get back reduced axis.
-   norm = std::make_shared<Broadcast>(norm, data.get_shape(), AxisSet{1});
-   data = data / norm;
+   data = std::make_shared<op::v1::Divide>(
+       data, builder::opset1::make_broadcast(norm, data.get_shape(), AxisSet{1}));

    // get back original input tensor rank
    if (input_shape.size() != 4)
@@ -16,6 +16,7 @@
 #include <algorithm>

 #include "mvn.hpp"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/builder/reduce_ops.hpp"
 #include "ngraph/op/add.hpp"
 #include "ngraph/op/broadcast.hpp"

@@ -78,8 +79,8 @@ OutputVector op::MVN::decompose_op() const

    // calculate mean normalization
    auto mean = builder::opset1::mean(data, m_reduction_axes);
-   mean = std::make_shared<op::Broadcast>(mean, data_shape, m_reduction_axes);
-   auto mean_normalization = data - mean;
+   auto mean_normalization =
+       data - builder::opset1::make_broadcast(mean, data_shape, m_reduction_axes);

    if (!m_normalize_variance)
    {

@@ -93,9 +94,9 @@ OutputVector op::MVN::decompose_op() const
        auto eps_node = op::Constant::create(
            data.get_element_type(), Output<Node>(variance).get_shape(), vector<double>{m_eps});
        variance = std::make_shared<op::Sqrt>(variance + eps_node);
-       variance = std::make_shared<op::Broadcast>(variance, data_shape, m_reduction_axes);
-
-       return OutputVector{mean_normalization / variance};
+       return OutputVector{mean_normalization / builder::opset1::make_broadcast(
+                                                    variance, data_shape, m_reduction_axes)};
    }
 }
@@ -72,9 +72,7 @@ OutputVector op::PRelu::decompose_op() const
    // x <  0 => f(x) = x * slope
    // x >= 0 => f(x) = x

-   std::shared_ptr<ngraph::Node> zero_node = std::make_shared<ngraph::op::Constant>(
-       data.get_element_type(), ngraph::Shape{}, std::vector<double>{0});
-   zero_node = builder::make_broadcast_node(zero_node, data.get_shape());
+   std::shared_ptr<ngraph::Node> zero_node = make_zero(data.get_element_type(), data.get_shape());

    std::shared_ptr<ngraph::Node> negative_map = std::make_shared<ngraph::op::Convert>(
        std::make_shared<ngraph::op::Less>(data, zero_node), data.get_element_type());
@@ -23,6 +23,7 @@
 #include <string>

 #include "gtest/gtest.h"
+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/ngraph.hpp"
 #include "ngraph/runtime/tensor.hpp"
 #include "runtime/backend.hpp"
@@ -44,8 +45,10 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_scalar_vector)
    Shape shape_a{};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{4};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A, op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r)),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -65,8 +68,10 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_scalar_matrix)
    Shape shape_a{};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{2, 2};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0, 1}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v3::Broadcast>(
+           A, op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r)),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -86,8 +91,10 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_scalar_tensor)
    Shape shape_a{};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{2, 2, 2};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0, 1, 2}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A, op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r)),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -107,8 +114,10 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_trivial)
 {
    Shape shape{2, 2, 2};
    auto A = make_shared<op::Parameter>(element::f32, shape);
-   auto f =
-       make_shared<Function>(make_shared<op::Broadcast>(A, shape, AxisSet{}), ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A, op::Constant::create(element::u64, Shape{shape.size()}, shape)),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -129,8 +138,12 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_vector_colwise)
    Shape shape_a{3};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{3, 4};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{1}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A,
+           op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+           op::Constant::create(element::i64, Shape{1}, {0})),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -151,8 +164,12 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_vector_rowwise)
    Shape shape_a{4};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{3, 4};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A,
+           op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+           op::Constant::create(element::i64, Shape{1}, {1})),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -174,7 +191,10 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_vector_rowwise_reversed)
    Shape shape_a{4};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{3, 4};
-   auto broadcast = make_shared<op::Broadcast>(A, shape_r, AxisSet{0});
+   auto broadcast = make_shared<op::v1::Broadcast>(
+       A,
+       op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+       op::Constant::create(element::i64, Shape{1}, {1}));
    auto reverse = make_shared<op::Reverse>(broadcast, AxisSet{1});
    auto f = make_shared<Function>(reverse, ParameterVector{A});

@@ -197,8 +217,12 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_vector_rowwise_int64)
    Shape shape_a{4};
    auto A = make_shared<op::Parameter>(element::i64, shape_a);
    Shape shape_r{3, 4};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A,
+           op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+           op::Constant::create(element::i64, Shape{1}, {1})),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -217,8 +241,12 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_scalar_to_matrix_int64)
    Shape shape_a{1};
    auto A = make_shared<op::Parameter>(element::i64, shape_a);
    Shape shape_r{3, 1};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A,
+           op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+           op::Constant::create(element::i64, Shape{1}, {1})),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -237,8 +265,12 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_scalar_to_matrix_int32)
    Shape shape_a{1};
    auto A = make_shared<op::Parameter>(element::i32, shape_a);
    Shape shape_r{3, 1};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A,
+           op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+           op::Constant::create(element::i64, Shape{1}, {1})),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -252,15 +284,24 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_scalar_to_matrix_int32)
    EXPECT_EQ((vector<int32_t>{4, 4, 4}), read_vector<int32_t>(result));
 }

-static void broadcast_test_helper(const Shape& shape_a, const Shape& shape_r, const AxisSet& axis)
+static void broadcast_test_helper(const Shape& shape_a, const Shape& shape_r, const AxisSet& axes)
 {
    auto A = make_shared<op::Parameter>(element::f32, shape_a);

    vector<float> inp_data(shape_size<const Shape>(shape_a));
    iota(inp_data.begin(), inp_data.end(), 1.f);

-   auto f =
-       make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, axis), ParameterVector{A});
+   auto shape_const = op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r);
+   std::shared_ptr<Node> broadcast;
+   if (axes.size() > 0)
+   {
+       auto axes_const = op::Constant::create(element::i64, Shape{axes.size()}, axes.to_vector());
+       broadcast = make_shared<op::v1::Broadcast>(A, shape_const, axes_const);
+   }
+   else
+   {
+       broadcast = make_shared<op::v1::Broadcast>(A, shape_const);
+   }
+   auto f = make_shared<Function>(broadcast, ParameterVector{A});

    auto ref_backend = runtime::Backend::create("INTERPRETER");
    auto wrk_backend = runtime::Backend::create("${BACKEND_NAME}");
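Note the semantic flip in the axis sets updated below: the helper's AxisSet used to name the output axes *added* by v0::Broadcast, whereas it now names the output positions where the *input* axes land (the v1 axes mapping), which is the complement of the old set. For the first updated test:

    // v0 view: input {2} -> output {3, 2, 4}; axes {0, 2} are created.
    // v1 view: the lone input axis lands at output axis 1.
    broadcast_test_helper(Shape{2}, Shape{3, 2, 4}, AxisSet{1});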
@@ -286,7 +327,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_vector_middle)
 {
    Shape shape_a{2};
    Shape shape_r{3, 2, 4};
-   AxisSet axis{0, 2};
+   AxisSet axis{1};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -294,7 +335,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_vector_forward_2)
 {
    Shape shape_a{2};
    Shape shape_r{3, 2};
-   AxisSet axis{0};
+   AxisSet axis{1};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -302,14 +343,14 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_vector_forward_3)
 {
    Shape shape_a{2};
    Shape shape_r{4, 3, 2};
-   AxisSet axis{0, 1};
+   AxisSet axis{2};
    broadcast_test_helper(shape_a, shape_r, axis);
 }
 NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_vector_forward_4)
 {
    Shape shape_a{2};
    Shape shape_r{5, 4, 3, 2};
-   AxisSet axis{0, 1, 2};
+   AxisSet axis{3};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -317,7 +358,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_scalar)
 {
    Shape shape_a{};
    Shape shape_r{5, 4, 3, 2};
-   AxisSet axis{0, 1, 2, 3};
+   AxisSet axis{};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -325,7 +366,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_vector_backward_2)
 {
    Shape shape_a{2};
    Shape shape_r{2, 3};
-   AxisSet axis{1};
+   AxisSet axis{0};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -333,7 +374,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_vector_backward_3)
 {
    Shape shape_a{2};
    Shape shape_r{2, 3, 4};
-   AxisSet axis{1, 2};
+   AxisSet axis{0};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -341,7 +382,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_vector_backward_4)
 {
    Shape shape_a{2};
    Shape shape_r{2, 3, 4, 5};
-   AxisSet axis{1, 2, 3};
+   AxisSet axis{0};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -349,7 +390,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_matrix_backward_4)
 {
    Shape shape_a{4, 5};
    Shape shape_r{2, 3, 4, 5};
-   AxisSet axis{0, 1};
+   AxisSet axis{2, 3};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -357,7 +398,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_matrix_stride_1)
 {
    Shape shape_a{3, 5};
    Shape shape_r{2, 3, 4, 5};
-   AxisSet axis{0, 2};
+   AxisSet axis{1, 3};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -365,7 +406,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_matrix_stride_2)
 {
    Shape shape_a{3, 4};
    Shape shape_r{2, 3, 4, 5};
-   AxisSet axis{0, 3};
+   AxisSet axis{1, 2};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -373,7 +414,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_matrix_stride_3)
 {
    Shape shape_a{2, 4};
    Shape shape_r{2, 3, 4, 5};
-   AxisSet axis{1, 3};
+   AxisSet axis{0, 2};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -381,7 +422,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_3d_backward)
 {
    Shape shape_a{2, 3, 4};
    Shape shape_r{5, 2, 3, 4};
-   AxisSet axis{0};
+   AxisSet axis{1, 2, 3};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -389,7 +430,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_3d_stride_1)
 {
    Shape shape_a{2, 3, 4};
    Shape shape_r{2, 5, 3, 4};
-   AxisSet axis{1};
+   AxisSet axis{0, 2, 3};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -397,7 +438,7 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_algo_3d_stride_2)
 {
    Shape shape_a{2, 3, 4};
    Shape shape_r{2, 3, 5, 4};
-   AxisSet axis{2};
+   AxisSet axis{0, 1, 3};
    broadcast_test_helper(shape_a, shape_r, axis);
 }

@@ -406,8 +447,10 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_matrix_0)
    Shape shape_a{2, 2};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{2, 2, 2};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{0}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A, op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r)),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -428,8 +471,12 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_matrix_1)
    Shape shape_a{2, 2};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{2, 2, 2};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{1}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A,
+           op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+           op::Constant::create(element::i64, Shape{2}, {0, 2})),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");

@@ -450,8 +497,12 @@ NGRAPH_TEST(${BACKEND_NAME}, broadcast_matrix_2)
    Shape shape_a{2, 2};
    auto A = make_shared<op::Parameter>(element::f32, shape_a);
    Shape shape_r{2, 2, 2};
-   auto f = make_shared<Function>(make_shared<op::Broadcast>(A, shape_r, AxisSet{2}),
-                                  ParameterVector{A});
+   auto f = make_shared<Function>(
+       make_shared<op::v1::Broadcast>(
+           A,
+           op::Constant::create(element::u64, Shape{shape_r.size()}, shape_r),
+           op::Constant::create(element::i64, Shape{2}, {0, 1})),
+       ParameterVector{A});

    auto backend = runtime::Backend::create("${BACKEND_NAME}");
@@ -16,6 +16,7 @@

 #include "gtest/gtest.h"

+#include "ngraph/builder/autobroadcast.hpp"
 #include "ngraph/file_util.hpp"
 #include "ngraph/ngraph.hpp"
 #include "util/test_tools.hpp"

@@ -34,8 +35,8 @@ TEST(build_graph, build_simple)
    auto arg1 = make_shared<op::Parameter>(element::f32, Shape{3});
    auto arg2 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
    auto arg3 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
-   auto broadcast_1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
-   auto b1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
+   auto broadcast_1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
+   auto b1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
    auto dot = make_shared<op::Dot>(arg2, arg0);
    ASSERT_EQ(dot->input_value(0).get_node_shared_ptr(), arg2);
    ASSERT_EQ(dot->input_value(1).get_node_shared_ptr(), arg0);

@@ -107,8 +108,8 @@ TEST(build_graph, function_undeclared_parameters)
    auto arg1 = make_shared<op::Parameter>(element::f32, Shape{3});
    auto arg2 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
    auto arg3 = make_shared<op::Parameter>(element::f32, Shape{32, 7});
-   auto broadcast_1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
-   auto b1 = make_shared<op::Broadcast>(arg3, Shape{10, 32, 7}, AxisSet{0});
+   auto broadcast_1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
+   auto b1 = builder::opset1::make_broadcast(arg3, Shape{10, 32, 7}, AxisSet{0});
    auto dot = make_shared<op::Dot>(arg2, arg0);
    ASSERT_EQ(dot->input_values()[0].get_node_shared_ptr(), arg2);
    ASSERT_EQ(dot->input_values()[1].get_node_shared_ptr(), arg0);
@@ -213,30 +213,6 @@ TEST(autobroadcast, broadcast_with_leading_dim1)
    EXPECT_EQ(getShapeFromParam(ab_rhs), s1345);
 }

-TEST(autobroadcast, make_node_2_args)
-{
-    Shape s21{2, 1};
-    Shape s23{2, 3};
-    auto lhs = getParamFromShape(s21);
-    auto rhs = getParamFromShape(s23);
-
-    shared_ptr<Node> op = builder::make_with_numpy_broadcast<op::Add>(lhs, rhs);
-    EXPECT_NE(op, nullptr);
-}
-
-TEST(autobroadcast, make_node_3_args)
-{
-    Shape s21{2, 1};
-    Shape s23{2, 3};
-
-    auto predicates = make_shared<op::Parameter>(element::boolean, s23);
-    auto lhs = getParamFromShape(s21);
-    auto rhs = getParamFromShape(s23);
-
-    shared_ptr<Node> op = builder::make_with_numpy_broadcast<op::Select>(predicates, lhs, rhs);
-    EXPECT_NE(op, nullptr);
-}
-
 TEST(autobroadcast, numpy_broadcast_for_matmul_op_2d)
 {
    const Shape lhs{3, 1, 4, 6};
@@ -265,35 +265,6 @@ TEST(constant_folding, DISABLED_constant_reshape_permute)
    ASSERT_TRUE(test::all_close_f(values_permute, values_out, MIN_FLOAT_TOLERANCE_BITS));
 }

-TEST(constant_folding, constant_broadcast)
-{
-    Shape shape_in{2};
-    Shape shape_out{2, 4};
-
-    vector<int> values_in{0, 1};
-    auto constant = make_shared<op::Constant>(element::i32, shape_in, values_in);
-    auto broadcast = make_shared<op::Broadcast>(constant, shape_out, AxisSet{1});
-    broadcast->set_friendly_name("test");
-    auto f = make_shared<Function>(broadcast, ParameterVector{});
-
-    pass::Manager pass_manager;
-    pass_manager.register_pass<pass::ConstantFolding>();
-    pass_manager.run_passes(f);
-
-    ASSERT_EQ(count_ops_of_type<op::Broadcast>(f), 0);
-    ASSERT_EQ(count_ops_of_type<op::Constant>(f), 1);
-
-    auto new_const =
-        as_type_ptr<op::Constant>(f->get_results().at(0)->input_value(0).get_node_shared_ptr());
-    ASSERT_TRUE(new_const);
-    ASSERT_EQ(new_const->get_friendly_name(), "test");
-
-    auto values_out = new_const->get_vector<int>();
-
-    vector<int> values_expected{0, 0, 0, 0, 1, 1, 1, 1};
-    ASSERT_EQ(values_expected, values_out);
-}
-
 TEST(constant_folding, constant_broadcast_v1)
 {
    vector<int32_t> values_in{0, 1};
@@ -84,22 +84,29 @@ TEST(copy, atan)

 TEST(copy, broadcast)
 {
-    Shape shape1{1};
-    auto arg0 = make_shared<op::Parameter>(element::f32, shape1);
-    OutputVector new_args{make_shared<op::Parameter>(element::f32, shape1)};
+    Shape shape{1, 3};
+    Shape new_shape{4, 1, 3};
+    AxisSet axes{1, 2};
+    auto arg0 = make_shared<op::Parameter>(element::f32, shape);
+    OutputVector new_args{make_shared<op::Parameter>(element::f32, shape),
+                          op::Constant::create(element::u64, Shape{new_shape.size()}, new_shape),
+                          op::Constant::create(element::i64, Shape{axes.size()}, axes.to_vector())};

-    Shape shape{4, 1, 3};
-    AxisSet axes{0, 2};
-
-    auto node = make_shared<op::Broadcast>(arg0, shape, axes);
+    auto node = make_shared<op::v1::Broadcast>(
+        arg0,
+        op::Constant::create(element::u64, Shape{new_shape.size()}, new_shape),
+        op::Constant::create(element::i64, Shape{axes.size()}, axes.to_vector()));
    auto new_node = node->copy_with_new_inputs(new_args);
-    auto node_cast = as_type_ptr<op::Broadcast>(new_node);
+    auto node_cast = as_type_ptr<op::v1::Broadcast>(new_node);
    ASSERT_NE(node_cast, nullptr);

-    ASSERT_TRUE(nullptr != new_node);
-    ASSERT_TRUE(new_args == new_node->input_values());
-    ASSERT_TRUE(shape == node_cast->get_broadcast_shape());
-    ASSERT_TRUE(axes == node_cast->get_broadcast_axes());
+    ASSERT_NE(nullptr, new_node);
+    ASSERT_EQ(new_args, new_node->input_values());
+    bool axes_determined;
+    AxisSet broadcast_axes;
+    std::tie(axes_determined, broadcast_axes) = node_cast->get_broadcast_axes();
+    ASSERT_EQ(true, axes_determined);
+    ASSERT_EQ(AxisSet{0}, broadcast_axes);
 }

 TEST(copy, ceiling)

@@ -435,4 +442,4 @@ TEST(copy, loop)
    EXPECT_EQ(loop_copy->get_output_shape(0), out0_shape);
    EXPECT_EQ(loop_copy->get_output_shape(1), out1_shape);
    EXPECT_EQ(loop_copy->get_output_shape(2), out2_shape);
-}
+}
@@ -593,24 +593,6 @@ TEST(eval, evaluate_broadcast_v3_explicit_dyn)
    ASSERT_EQ(result_val, expec);
 }

-TEST(eval, evaluate_broadcast_v0)
-{
-    Shape shape_a{2, 4};
-    auto A = make_shared<op::Parameter>(element::f32, shape_a);
-    Shape target_shape = Shape{2, 3, 4};
-    auto bcast_v0 = make_shared<op::v0::Broadcast>(A, target_shape, AxisSet{1});
-    auto fun = make_shared<Function>(OutputVector{bcast_v0}, ParameterVector{A});
-
-    auto result = make_shared<HostTensor>();
-    ASSERT_TRUE(fun->evaluate(
-        {result}, {make_host_tensor<element::Type_t::f32>(Shape{2, 4}, {1, 2, 3, 4, 1, 2, 3, 4})}));
-    EXPECT_EQ(result->get_element_type(), element::f32);
-    EXPECT_EQ(result->get_partial_shape(), (PartialShape{2, 3, 4}));
-    auto result_val = read_vector<float>(result);
-    vector<float> expec{1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4};
-    ASSERT_EQ(result_val, expec);
-}
-
 TEST(eval, test_op_multi_out)
 {
    auto p = make_shared<op::Parameter>(element::f32, PartialShape{2, 3});
@@ -94,16 +94,7 @@ namespace

    void op_is_Broadcast()
    {
-        op::Broadcast node;
-        EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_comparison(&node));
-        EXPECT_FALSE(op::is_binary_elementwise_logical(&node));
-    }
-
-    void op_is_BroadcastLike()
-    {
-        op::BroadcastLike node;
+        op::v1::Broadcast node;
        EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node));
        EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node));
        EXPECT_FALSE(op::is_binary_elementwise_comparison(&node));
@@ -731,15 +731,18 @@ TEST(pattern, label_on_skip)
        std::make_shared<pattern::op::Label>(iconst, ngraph::is_zero, NodeVector{iconst});

    auto bcst_pred = [](std::shared_ptr<Node> n) {
-        return as_type_ptr<op::Broadcast>(n) != nullptr;
+        return as_type_ptr<op::v1::Broadcast>(n) != nullptr;
    };

-    auto bcst = std::make_shared<pattern::op::Skip>(const_label, bcst_pred);
+    auto shape_const = op::Constant::create(element::u64, Shape{shape.size()}, shape);
+    auto axes_const = op::Constant::create(element::u8, Shape{}, {0});
+    auto bcst = std::make_shared<pattern::op::Skip>(
+        OutputVector{const_label, shape_const, axes_const}, bcst_pred);
    auto bcst_label = std::make_shared<pattern::op::Label>(bcst, nullptr, NodeVector{bcst});
    auto matcher = std::make_shared<pattern::Matcher>(
        std::make_shared<op::Multiply>(label, bcst_label), "label_on_skip");

-    auto const_broadcast = make_shared<op::Broadcast>(iconst, shape, AxisSet{0, 1});
+    auto const_broadcast = make_shared<op::v1::Broadcast>(iconst, shape_const);
    auto mul = a * const_broadcast;
    auto mul_scalar = b * iconst;
    ASSERT_TRUE(matcher->match(mul));
@@ -37,8 +37,6 @@ set (SRC
    pass/fused_op_decomposition.hpp
    pass/implicit_broadcast_elimination.cpp
    pass/implicit_broadcast_elimination.hpp
-   pass/like_replacement.cpp
-   pass/like_replacement.hpp
    pass/liveness.cpp
    pass/liveness.hpp
    pass/opset0_downgrade.cpp

@@ -23,7 +23,6 @@
 #include "ngraph/pass/manager.hpp"
 #include "ngraph/util.hpp"
 #include "pass/fused_op_decomposition.hpp"
-#include "pass/like_replacement.hpp"
 #include "pass/liveness.hpp"
 #include "pass/opset0_downgrade.hpp"
 #include "pass/opset1_downgrade.hpp"

@@ -76,7 +75,6 @@ runtime::interpreter::INTExecutable::INTExecutable(const shared_ptr<Function>& f
        return retval;
    };
    pass::Manager pass_manager;
-   pass_manager.register_pass<pass::LikeReplacement>();
    pass_manager.register_pass<pass::FusedOpDecomposition>(is_supported);
    pass_manager.register_pass<pass::Opset1Downgrade>();
    pass_manager.register_pass<pass::Opset0Downgrade>();
@@ -275,7 +275,6 @@ protected:
                           node.get_input_shape(0));
            break;
        }
-       case OP_TYPEID::BroadcastLike: break;
        case OP_TYPEID::Ceiling:
        {
            size_t element_count = shape_size(node.get_output_shape(0));
@@ -57,8 +57,7 @@ NGRAPH_OP(Asin, ngraph::op)
 NGRAPH_OP(Atan, ngraph::op)
 NGRAPH_OP(AvgPool, ngraph::op::v0)
 NGRAPH_OP(BatchNormInference, ngraph::op::v0)
-NGRAPH_OP(Broadcast, ngraph::op)
-NGRAPH_OP(BroadcastLike, ngraph::op)
+NGRAPH_OP(Broadcast, ngraph::op::v1)
 NGRAPH_OP(Ceiling, ngraph::op)
 NGRAPH_OP(Clamp, ngraph::op)
 NGRAPH_OP(Concat, ngraph::op)
@@ -1,70 +0,0 @@
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************

#include <functional>
#include <memory>
#include <typeindex>
#include <typeinfo>
#include <unordered_map>

#include "like_replacement.hpp"
#include "ngraph/op/broadcast.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/op/convert.hpp"
#include "ngraph/op/pad.hpp"
#include "ngraph/op/slice.hpp"
#include "ngraph/op/stop_gradient.hpp"
#include "ngraph/op/sum.hpp"
#include "ngraph/util.hpp"

NGRAPH_SUPPRESS_DEPRECATED_START

using namespace std;
using namespace ngraph;

static bool replace_broadcast_like(const std::shared_ptr<ngraph::Node>& node)
{
    // Replace a BroadcastLike with an equivalent Broadcast to eliminate the
    // pseudo-dependency on the "like" argument.
    auto broadcast_like = as_type_ptr<op::BroadcastLike>(node);
    replace_node(node,
                 make_shared<op::Broadcast>(broadcast_like->input_value(0),
                                            broadcast_like->get_broadcast_shape(),
                                            broadcast_like->get_broadcast_axes()));
    return true;
}

static const map<NodeTypeInfo, function<bool(const shared_ptr<Node>&)>> dispatcher{
    {op::BroadcastLike::type_info, replace_broadcast_like}};

bool pass::LikeReplacement::run_on_function(shared_ptr<Function> function_ptr)
{
    static const map<NodeTypeInfo, function<bool(const shared_ptr<Node>&)>> dispatcher{
        {op::BroadcastLike::type_info, replace_broadcast_like}};

    bool clobbered = false;
    for (const auto& n : function_ptr->get_ops())
    {
        // Work around a warning [-Wpotentially-evaluated-expression]
        auto handler = dispatcher.find(n->get_type_info());
        if (handler != dispatcher.end())
        {
            clobbered = handler->second(n) || clobbered;
        }
    }

    return clobbered;
}

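A hedged usage sketch for the pass deleted above, following the pass::Manager pattern used elsewhere in this commit (the function `f` and its BroadcastLike node are assumed):

    // Rewrites every BroadcastLike in 'f' into a plain Broadcast with the
    // same target shape and axes; returns via run_passes when done.
    pass::Manager pass_manager;
    pass_manager.register_pass<pass::LikeReplacement>();
    pass_manager.run_passes(f); // 'f' is an assumed shared_ptr<Function>
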
@@ -1,32 +0,0 @@
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************

#pragma once

#include "backend_visibility.hpp"
#include "ngraph/pass/pass.hpp"

namespace ngraph
{
    namespace pass
    {
        class BACKEND_API LikeReplacement : public FunctionPass
        {
        public:
            bool run_on_function(std::shared_ptr<ngraph::Function> function) override;
        };
    }
}

@@ -125,89 +125,6 @@ namespace opset0_downgrade
        return replacement_node;
    }

    shared_ptr<Node> op_cast(shared_ptr<op::v1::Broadcast> node)
    {
        auto arg = node->input_value(0);
        auto arg_pshape = arg.get_partial_shape();
        auto arg_rank = arg_pshape.rank();
        auto target_shape_input = node->input_value(1);

        shared_ptr<Node> replacement_node;

        NGRAPH_CHECK(arg_pshape.is_static(),
                     "Unable to convert Broadcast:v1 to Broadcast:v0 "
                     "if argument shape is not static. Node: ",
                     *node);
        const auto& arg_shape = arg_pshape.to_shape();

        NGRAPH_CHECK(op::is_constant(target_shape_input.get_node()));
        auto target_shape = node->get_output_shape(0);
        NGRAPH_CHECK(node->get_broadcast_axes().first);

        // (Re)construct axes_mapping.
        AxisSet broadcast_axes = node->get_broadcast_axes().second;
        std::vector<size_t> axes_mapping{
            ngraph::builder::opset1::get_axes_mapping(target_shape, broadcast_axes)};

        Output<Node> squeezed_arg = arg;
        // Collect axes to squeeze. Broadcast v0 "adds" new axes, thus we have to squeeze
        // the empty ones (dim:=1), which would be broadcasted by Broadcast v1.
        std::vector<size_t> empty_axes;
        for (size_t a{0}; a < axes_mapping.size(); ++a)
        {
            if (arg_shape.at(a) == 1 && target_shape.at(axes_mapping.at(a)) != 1)
            {
                empty_axes.push_back(a);
            }
        }
        // Check if arg_shape contains some more empty dimensions marked to broadcast.
        // If axes_mapping size is less than arg_shape size, then some of arg dimensions may
        // be equal to one and marked to broadcast.
        if (axes_mapping.size() < arg_shape.size())
        {
            for (size_t a{axes_mapping.size()}; a < arg_shape.size(); ++a)
            {
                if (arg_shape.at(a) == 1)
                {
                    empty_axes.push_back(a);
                }
            }
        }
        if (!empty_axes.empty())
        {
            auto v0squeeze = [](const Output<Node>& value, vector<size_t> axes) {
                if (axes.empty())
                {
                    return value.get_node_shared_ptr();
                }

                Shape in_shape{value.get_shape()};
                for (size_t idx = 0; idx < axes.size(); ++idx)
                {
                    in_shape.at(axes.at(idx)) = 0;
                }
                Shape output_shape;
                for (auto axis : in_shape)
                {
                    if (axis != 0)
                    {
                        output_shape.push_back(axis);
                    }
                }
                return make_shared<op::Reshape>(
                           value, get_default_order(value.get_shape().size()), output_shape)
                    ->add_provenance_group_members_above({value});
            };
            squeezed_arg = v0squeeze(arg, empty_axes);
        }

        replacement_node =
            make_shared<op::v0::Broadcast>(squeezed_arg, target_shape, broadcast_axes);
        replace_node(node, replacement_node);
        return replacement_node;
    }

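To make the squeeze logic above concrete, a short worked example (shapes chosen for illustration, not taken from the diff):

    // Illustrative shapes: arg = {1, 4}, target = {2, 3, 4}.
    // v1 broadcasting happens along output axes {0, 1}, so
    // get_axes_mapping(target_shape, {0, 1}) yields axes_mapping = {2}.
    // The first loop squeezes arg axis 0 (size 1 but genuinely broadcast),
    // leaving a {4} tensor; the second loop finds nothing more to squeeze.
    // v0::Broadcast({4}, {2, 3, 4}, AxisSet{0, 1}) then re-adds the two
    // broadcast axes, reproducing the original {2, 3, 4} output.
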
    shared_ptr<Node> op_cast(shared_ptr<op::v1::Convolution> node)
    {
        const auto data_arg = node->input_value(0);

@@ -54,15 +54,6 @@ namespace opset1_upgrade
        return op_cast_binary_elementwise_node<op::v0::Add, op::v1::Add>(node);
    }

    shared_ptr<Node> op_cast(shared_ptr<op::Broadcast> node)
    {
        auto replacement_node = ngraph::builder::opset1::make_broadcast(
            node->input_value(0), node->get_broadcast_shape(), node->get_broadcast_axes());
        replace_node(node, replacement_node.get_node_shared_ptr());
        return replacement_node.get_node_shared_ptr();
    }

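A hedged sketch of the builder call used above, with illustrative values (the f32 constant and its contents are assumptions, not from the diff):

    // make_broadcast(value, target_shape, broadcast_axes) emits a v1::Broadcast
    // with v0 semantics: broadcast_axes lists the output axes the v0 op would
    // newly create. Here axis 1 is new, so the {2} input maps to output axis 0
    // and each element is repeated across the 5 columns.
    auto value = op::Constant::create(element::f32, Shape{2}, {1.0f, 2.0f});
    Output<Node> bcast =
        builder::opset1::make_broadcast(value, Shape{2, 5}, AxisSet{1});
    // bcast.get_node_shared_ptr() is an op::v1::Broadcast producing shape {2, 5}.
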
    shared_ptr<Node> op_cast(shared_ptr<op::BroadcastLike> node) { return nullptr; }
    shared_ptr<Node> op_cast(shared_ptr<op::v0::Convolution> node)
    {
        auto strides = node->get_window_movement_strides();

@@ -23,184 +23,6 @@ NGRAPH_SUPPRESS_DEPRECATED_START
using namespace std;
using namespace ngraph;

TEST(type_prop, broadcast_deduce)
{
    auto param = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    Shape bc_shape{2, 3, 4};
    auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1});
    ASSERT_EQ(bc->get_element_type(), element::f32);
    ASSERT_EQ(bc->get_shape(), bc_shape);
}

TEST(type_prop, broadcast_axes_oob)
{
    auto param = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = Shape{2, 3, 4};

    try
    {
        auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1, 3});
        FAIL() << "Broadcast axis out of bounds not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             "Broadcast axis index (3) exceeds specified output shape rank");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_shape_mismatch_wrong_rank)
{
    auto param = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = Shape{2, 3, 4, 5};

    try
    {
        auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong rank) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_shape_mismatch_wrong_size)
{
    auto param = make_shared<op::Parameter>(element::f32, Shape{2, 4});
    auto bc_shape = Shape{2, 3, 5};

    try
    {
        auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong size) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_dynamic_ok)
{
    auto param = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    Shape bc_shape{2, 3, 4};
    auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1});
    ASSERT_EQ(bc->get_element_type(), element::f32);
    ASSERT_EQ(bc->get_shape(), bc_shape);
}

TEST(type_prop, broadcast_partial_rank_dynamic_axes_oob)
{
    auto param = make_shared<op::Parameter>(element::f32, PartialShape::dynamic());
    auto bc_shape = Shape{2, 3, 4};

    try
    {
        auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1, 3});
        FAIL() << "Broadcast axis out of bounds not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             "Broadcast axis index (3) exceeds specified output shape rank");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_ok)
{
    auto param = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    Shape bc_shape{2, 3, 4};
    auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1});
    ASSERT_EQ(bc->get_element_type(), element::f32);
    ASSERT_EQ(bc->get_shape(), bc_shape);
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_axes_oob)
{
    auto param = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    auto bc_shape = Shape{2, 3, 4};

    try
    {
        auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1, 3});
        FAIL() << "Broadcast axis out of bounds not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(error.what(),
                             "Broadcast axis index (3) exceeds specified output shape rank");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_shape_mismatch_wrong_rank)
{
    auto param = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    auto bc_shape = Shape{2, 3, 4, 5};

    try
    {
        auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong rank) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

TEST(type_prop, broadcast_partial_rank_static_dynamic_shape_mismatch_wrong_size)
{
    auto param = make_shared<op::Parameter>(element::f32, PartialShape{Dimension::dynamic(), 4});
    auto bc_shape = Shape{2, 3, 5};

    try
    {
        auto bc = make_shared<op::Broadcast>(param, bc_shape, AxisSet{1});
        FAIL() << "Output shape mismatch (wrong size) not detected";
    }
    catch (const NodeValidationFailure& error)
    {
        EXPECT_HAS_SUBSTRING(
            error.what(),
            "Broadcast argument shape, specified output shape, and axes are incompatible");
    }
    catch (...)
    {
        FAIL() << "Deduced type check failed for unexpected reason";
    }
}

// Because v3::Broadcast is backward compatible with v1::Broadcast, all v1::Broadcast tests
// should pass.
template <typename T>
class BroadcastTests : public ::testing::Test