Fixed compilation for Win32 (#3803)

Ilya Lavrenov 2021-01-12 18:40:30 +03:00 committed by GitHub
parent 51f3b33a83
commit 4d4efdc889
30 changed files with 193 additions and 65 deletions
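
Note on the pattern: nearly every hunk below replaces an in-class `Type() = default;` constructor of a FusedOp-derived operation with a plain declaration, defines it out of line in the matching .cpp file, and tightens the NGRAPH_SUPPRESS_DEPRECATED_START/END brackets around just the FusedOp-based classes. The sketch below shows the failure mode this avoids on MSVC-style warnings-as-errors builds; it is a minimal illustration with invented names (`FusedBase`, `MyOp`), not code from this commit.

```cpp
// Illustrative single-file sketch (invented names, assumed diagnostics).
// A defaulted constructor written in the class body is implicitly
// defined in every translation unit that odr-uses it, so each consumer
// of the header touches the deprecated base and trips the deprecation
// warning (fatal under /WX). A declared constructor defers that to one
// .cpp, where the use can sit inside a suppression bracket.
struct [[deprecated("FusedOp is on the way out")]] FusedBase
{
};

struct MyOp : FusedBase // the header still brackets the class itself
{
    MyOp(); // declaration only: consumers never instantiate the body
};

// lives in my_op.cpp, inside NGRAPH_SUPPRESS_DEPRECATED_START/END
MyOp::MyOp()
    : FusedBase()
{
}

int main()
{
    MyOp op; // no defaulted body is generated at the call site anymore
    (void)op;
    return 0;
}
```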

View File

@@ -115,6 +115,9 @@ public:
     }
 };
 
+// TODO: remove once FusedOp is removed
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 /// Relaxes tensor element type requirements for BaseOp inputs and outputs
 /// This class template should be used with Node descendant class. Defines a new operation by extending the
 /// original BaseOp operation with ability to accept inputs and provide outputs with element type that is
@@ -232,5 +235,7 @@ const ::ngraph::Node::type_info_t& TypeRelaxed<BaseOp>::get_type_info_static() {
 template <typename BaseOp>
 const ::ngraph::Node::type_info_t TypeRelaxed<BaseOp>::type_info = TypeRelaxed<BaseOp>::get_type_info_static();
 
+NGRAPH_SUPPRESS_DEPRECATED_END
+
 } // namespace op
 } // namespace ngraph

View File

@@ -38,7 +38,7 @@ namespace ngraph
 public:
     NGRAPH_RTTI_DECLARATION;
 
-    Clamp() = default;
+    Clamp();
     /// \brief Constructs a Clamp node.
     ///
     /// \param data - Node producing the input tensor

View File

@@ -46,7 +46,7 @@ namespace ngraph
 public:
     NGRAPH_RTTI_DECLARATION;
 
-    FakeQuantize() = default;
+    FakeQuantize();
     ///
     /// \brief Constructs a FakeQuantize operation node.
     ///

View File

@@ -35,7 +35,7 @@ namespace ngraph
 public:
     static constexpr NodeTypeInfo type_info{"Gelu", 0};
     const NodeTypeInfo& get_type_info() const override { return type_info; }
-    Gelu() = default;
+    Gelu();
     /// \brief Constructs an Gelu operation.
     ///
     /// \param data Input tensor

View File

@@ -36,7 +36,7 @@ namespace ngraph
 public:
     static constexpr NodeTypeInfo type_info{"GRN", 0};
     const NodeTypeInfo& get_type_info() const override { return type_info; }
-    GRN() = default;
+    GRN();
     /// \brief Constructs a GRN operation.
     ///
     /// \param data - Node producing the input tensor

View File

@@ -21,8 +21,6 @@
 #include "ngraph/op/util/attr_types.hpp"
 #include "ngraph/op/util/fused_op.hpp"
 
-NGRAPH_SUPPRESS_DEPRECATED_START
-
 namespace ngraph
 {
     namespace op
@@ -95,6 +93,8 @@ namespace ngraph
     PadType m_auto_pad;
 };
 
+NGRAPH_SUPPRESS_DEPRECATED_START
+
 /// \brief Data batch backprop for batched convolution operation.
 class NGRAPH_API GroupConvolutionBackpropData : public op::util::FusedOp
 {
@@ -102,7 +102,7 @@ namespace ngraph
     static constexpr NodeTypeInfo type_info{"GroupConvolutionBackpropData", 1};
     const NodeTypeInfo& get_type_info() const override { return type_info; }
     /// \brief Constructs a batched-convolution data batch-backprop operation.
-    GroupConvolutionBackpropData() = default;
+    GroupConvolutionBackpropData();
     // clang-format off
     //
     // \brief Constructs a batched-convolution data batch-backprop operation.
@@ -249,8 +249,8 @@ namespace ngraph
         PadType m_auto_pad;
         CoordinateDiff m_output_padding;
     };
 
+    NGRAPH_SUPPRESS_DEPRECATED_END
     } // namespace v1
 } // namespace op
 } // namespace ngraph
-NGRAPH_SUPPRESS_DEPRECATED_END
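
The group_conv.hpp change above is representative of the second half of the pattern: the suppression bracket shrinks from whole-file scope to just the FusedOp descendant, so deprecated uses elsewhere in the header are no longer silently swallowed. Below is a self-contained sketch of the mechanism; the macro bodies are an assumption about what NGRAPH_SUPPRESS_DEPRECATED_START/END expand to (push/disable and pop of the deprecation diagnostic), and the class names are invented.

```cpp
// scope_sketch.cpp -- illustrative macro bodies, invented class names.
#if defined(_MSC_VER)
#define SUPPRESS_DEPRECATED_START __pragma(warning(push)) __pragma(warning(disable : 4996))
#define SUPPRESS_DEPRECATED_END __pragma(warning(pop))
#else
#define SUPPRESS_DEPRECATED_START \
    _Pragma("GCC diagnostic push") _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#define SUPPRESS_DEPRECATED_END _Pragma("GCC diagnostic pop")
#endif

struct [[deprecated]] OldFusedBase
{
};

// Outside any bracket: an accidental deprecated use here now warns
// again, instead of being masked by a file-wide START on line 1.
struct GroupConvLike
{
};

SUPPRESS_DEPRECATED_START // bracket only the class that must derive from it
struct BackpropLike : OldFusedBase
{
    BackpropLike();
};
SUPPRESS_DEPRECATED_END

SUPPRESS_DEPRECATED_START // and the out-of-line definition in the .cpp
BackpropLike::BackpropLike()
    : OldFusedBase()
{
}
SUPPRESS_DEPRECATED_END

int main()
{
    return 0;
}
```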

View File

@@ -35,7 +35,7 @@ namespace ngraph
 public:
     static constexpr NodeTypeInfo type_info{"HardSigmoid", 0};
     const NodeTypeInfo& get_type_info() const override { return type_info; }
-    HardSigmoid() = default;
+    HardSigmoid();
 
     /// \brief Constructs a HardSigmoid operation.
     ///

View File

@@ -35,6 +35,8 @@ namespace ngraph
 {
     namespace v0
     {
+        NGRAPH_SUPPRESS_DEPRECATED_START
+
         ///
         /// \brief Class for lstm sequence node.
         ///
@@ -48,7 +50,7 @@ namespace ngraph
 {
 public:
     NGRAPH_RTTI_DECLARATION;
 
-    LSTMSequence() = default;
+    LSTMSequence();
 
     using direction = RecurrentSequenceDirection;
@@ -70,26 +72,7 @@ namespace ngraph
                          "tanh",
                          "tanh"},
                 const float clip_threshold = 0,
-                const bool input_forget = false)
-                : FusedOp({X,
-                           initial_hidden_state,
-                           initial_cell_state,
-                           sequence_lengths,
-                           W,
-                           R,
-                           B,
-                           P})
-                , m_activations_alpha(activations_alpha)
-                , m_activations_beta(activations_beta)
-                , m_activations(activations)
-                , m_clip_threshold(clip_threshold)
-                , m_direction(lstm_direction)
-                , m_hidden_size(hidden_size)
-                , m_input_forget(input_forget)
-                , m_weights_format(weights_format)
-            {
-                constructor_validate_and_infer_types();
-            }
+                const bool input_forget = false);
 
             explicit LSTMSequence(const Output<Node>& X,
                                   const Output<Node>& initial_hidden_state,
@@ -107,30 +90,7 @@ namespace ngraph
                          "tanh",
                          "tanh"},
                 const float clip_threshold = 0,
-                const bool input_forget = false)
-                : LSTMSequence(
-                      X,
-                      initial_hidden_state,
-                      initial_cell_state,
-                      sequence_lengths,
-                      W,
-                      R,
-                      B,
-                      Constant::create(
-                          element::f32,
-                          Shape{(lstm_direction == direction::BIDIRECTIONAL ? 2UL : 1UL),
-                                3UL * static_cast<size_t>(hidden_size)},
-                          std::vector<float>{0.f}),
-                      hidden_size,
-                      lstm_direction,
-                      weights_format,
-                      activations_alpha,
-                      activations_beta,
-                      activations,
-                      clip_threshold,
-                      input_forget)
-            {
-            }
+                const bool input_forget = false);
 
             virtual void validate_and_infer_types() override;
             bool visit_attributes(AttributeVisitor& visitor) override;
@@ -183,6 +143,8 @@ namespace ngraph
             bool m_input_forget;
             LSTMWeightsFormat m_weights_format;
         };
+        NGRAPH_SUPPRESS_DEPRECATED_END
+
     }
 
     namespace v5
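
These are the largest hunks in the commit: LSTMSequence's two fat inline constructors, including the delegating one that synthesizes a zero peephole tensor `P` via Constant::create, become declarations, and the bodies reappear verbatim in lstm_sequence.cpp further down. A side benefit of the move, sketched here with invented names, is that includers of the header no longer compile the delegating body at all:

```cpp
// move_sketch.cpp -- the invented Widget stands in for LSTMSequence;
// the delegating default argument mirrors the Constant::create(...)
// zero tensor, not ngraph's actual types.
#include <cassert>
#include <utility>
#include <vector>

struct Widget
{
    Widget();                              // was inline and delegating in the header
    explicit Widget(std::vector<float> p); // the "full" constructor
    std::vector<float> peephole;
};

// widget.cpp: the only translation unit that pays for the delegation
Widget::Widget()
    : Widget(std::vector<float>(3, 0.0f)) // like the zero-filled P default
{
}

Widget::Widget(std::vector<float> p)
    : peephole(std::move(p))
{
}

int main()
{
    Widget w; // the default path still produces the zero-initialized tensor
    assert(w.peephole.size() == 3 && w.peephole[0] == 0.0f);
    return 0;
}
```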

View File

@@ -35,7 +35,7 @@ namespace ngraph
 public:
     static constexpr NodeTypeInfo type_info{"Mod", 0};
     const NodeTypeInfo& get_type_info() const override { return type_info; }
-    Mod() = default;
+    Mod();
     /// \brief Constructs a Mod node.
     ///
     /// \param A - Dividend tensor

View File

@@ -35,7 +35,7 @@ namespace ngraph
 public:
     NGRAPH_RTTI_DECLARATION;
 
-    MVN() = default;
+    MVN();
     /// \brief Constructs an MVN operation.
     ///
     /// \param data Input tensor with data

View File

@@ -37,7 +37,7 @@ namespace ngraph
 public:
     NGRAPH_RTTI_DECLARATION;
 
-    NormalizeL2() = default;
+    NormalizeL2();
     ///
     /// \brief Constructs a Normalize operation.
     ///

View File

@@ -36,7 +36,7 @@ namespace ngraph
 {
 public:
     NGRAPH_RTTI_DECLARATION;
-    PRelu() = default;
+    PRelu();
     /// \brief Constructs a PRelu operation.
     ///
     /// \param data Input tensor

View File

@@ -34,7 +34,7 @@ namespace ngraph
 public:
     static constexpr NodeTypeInfo type_info{"Selu", 0};
     const NodeTypeInfo& get_type_info() const override { return type_info; }
-    Selu() = default;
+    Selu();
     /// \brief Constructs a Selu node.
     ///
     /// \param data - Node producing the input tensor

View File

@@ -36,7 +36,7 @@ namespace ngraph
 public:
     static constexpr NodeTypeInfo type_info{"SquaredDifference", 0};
     const NodeTypeInfo& get_type_info() const override { return type_info; }
-    SquaredDifference() = default;
+    SquaredDifference();
     /// \brief Constructs the squared difference operation.
     ///
     /// \param x1 First input tensor

View File

@@ -36,7 +36,7 @@ namespace ngraph
 public:
     NGRAPH_RTTI_DECLARATION;
 
-    Squeeze() = default;
+    Squeeze();
     Squeeze(const Output<Node>& data, const Output<Node>& axes);
 
     bool visit_attributes(AttributeVisitor& visitor) override;

View File

@@ -93,6 +93,13 @@ bool op::v0::Clamp::evaluate(const HostTensorVector& outputs, const HostTensorVe
 
 NGRAPH_RTTI_DEFINITION(op::v0::Clamp, "Clamp", 0);
 
+op::Clamp::Clamp()
+    : FusedOp()
+    , m_min()
+    , m_max()
+{
+}
+
 op::Clamp::Clamp(const Output<Node>& data, const double min, const double max)
     : FusedOp({data})
     , m_min{min}
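
One detail worth noting in the new .cpp definitions, here and in the files that follow: the default constructors spell out `m_min()`, `m_max()`, and so on in the mem-initializer list. Unlike the old `= default`, which leaves built-in members indeterminate, an empty mem-initializer value-initializes them. A self-contained sketch with invented struct names:

```cpp
// value_init_sketch.cpp -- contrast `= default` with explicit
// value-initialization of built-in members, as the new ctors do.
#include <cassert>

struct Defaulted
{
    Defaulted() = default; // m is left indeterminate for a stack instance
    double m;
};

struct ValueInit
{
    ValueInit()
        : m() // empty initializer => value-initialized to 0.0
    {
    }
    double m;
};

int main()
{
    ValueInit v;
    assert(v.m == 0.0); // guaranteed
    // Defaulted d; reading d.m here would be undefined behaviour
    return 0;
}
```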

View File

@@ -41,6 +41,12 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 NGRAPH_RTTI_DEFINITION(op::FakeQuantize, "FakeQuantize", 0);
 
+op::FakeQuantize::FakeQuantize()
+    : FusedOp()
+    , m_levels()
+{
+}
+
 op::FakeQuantize::FakeQuantize(const Output<Node>& data,
                                const Output<Node>& input_low,
                                const Output<Node>& input_high,

View File

@@ -34,6 +34,11 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 constexpr NodeTypeInfo op::Gelu::type_info;
 
+op::Gelu::Gelu()
+    : FusedOp()
+{
+}
+
 op::Gelu::Gelu(const Output<Node>& data)
     : FusedOp({data})
 {

View File

@@ -35,6 +35,11 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 constexpr NodeTypeInfo op::GRN::type_info;
 
+op::GRN::GRN()
+    : FusedOp()
+{
+}
+
 op::GRN::GRN(const Output<Node>& data, float bias)
     : FusedOp({data})
     , m_bias(bias)

View File

@@ -195,6 +195,17 @@ shared_ptr<Node> op::v1::GroupConvolution::clone_with_new_inputs(const OutputVec
 
 constexpr NodeTypeInfo op::v1::GroupConvolutionBackpropData::type_info;
 
+op::v1::GroupConvolutionBackpropData::GroupConvolutionBackpropData()
+    : FusedOp()
+    , m_strides()
+    , m_dilations()
+    , m_pads_begin()
+    , m_pads_end()
+    , m_auto_pad()
+    , m_output_padding()
+{
+}
+
 op::v1::GroupConvolutionBackpropData::GroupConvolutionBackpropData(
     const Output<Node>& data,
     const Output<Node>& filters,

View File

@@ -32,6 +32,11 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 constexpr NodeTypeInfo op::HardSigmoid::type_info;
 
+op::HardSigmoid::HardSigmoid()
+    : FusedOp()
+{
+}
+
 op::HardSigmoid::HardSigmoid(const Output<Node>& data,
                              const Output<Node>& alpha,
                              const Output<Node>& beta)

View File

@@ -33,7 +33,88 @@ using namespace std;
 NGRAPH_RTTI_DEFINITION(op::v0::LSTMSequence, "LSTMSequence", 0);
 NGRAPH_RTTI_DEFINITION(op::v5::LSTMSequence, "LSTMSequence", 5);
 
-bool ngraph::op::v0::LSTMSequence::visit_attributes(AttributeVisitor& visitor)
+op::v0::LSTMSequence::LSTMSequence()
+    : FusedOp()
+    , m_activations_alpha()
+    , m_activations_beta()
+    , m_activations()
+    , m_clip_threshold()
+    , m_direction()
+    , m_hidden_size()
+    , m_input_forget()
+    , m_weights_format()
+{
+}
+
+op::v0::LSTMSequence::LSTMSequence(const Output<Node>& X,
+                                   const Output<Node>& initial_hidden_state,
+                                   const Output<Node>& initial_cell_state,
+                                   const Output<Node>& sequence_lengths,
+                                   const Output<Node>& W,
+                                   const Output<Node>& R,
+                                   const Output<Node>& B,
+                                   const Output<Node>& P,
+                                   const std::int64_t hidden_size,
+                                   const LSTMSequence::direction lstm_direction,
+                                   LSTMWeightsFormat weights_format,
+                                   const std::vector<float> activations_alpha,
+                                   const std::vector<float> activations_beta,
+                                   const std::vector<std::string> activations,
+                                   const float clip_threshold,
+                                   const bool input_forget)
+    : FusedOp({X, initial_hidden_state, initial_cell_state, sequence_lengths, W, R, B, P})
+    , m_activations_alpha(activations_alpha)
+    , m_activations_beta(activations_beta)
+    , m_activations(activations)
+    , m_clip_threshold(clip_threshold)
+    , m_direction(lstm_direction)
+    , m_hidden_size(hidden_size)
+    , m_input_forget(input_forget)
+    , m_weights_format(weights_format)
+{
+    constructor_validate_and_infer_types();
+}
+
+op::v0::LSTMSequence::LSTMSequence(const Output<Node>& X,
+                                   const Output<Node>& initial_hidden_state,
+                                   const Output<Node>& initial_cell_state,
+                                   const Output<Node>& sequence_lengths,
+                                   const Output<Node>& W,
+                                   const Output<Node>& R,
+                                   const Output<Node>& B,
+                                   const std::int64_t hidden_size,
+                                   const LSTMSequence::direction lstm_direction,
+                                   LSTMWeightsFormat weights_format,
+                                   const std::vector<float>& activations_alpha,
+                                   const std::vector<float>& activations_beta,
+                                   const std::vector<std::string>& activations,
+                                   const float clip_threshold,
+                                   const bool input_forget)
+    : op::v0::LSTMSequence(
+          X,
+          initial_hidden_state,
+          initial_cell_state,
+          sequence_lengths,
+          W,
+          R,
+          B,
+          Constant::create(
+              element::f32,
+              Shape{(lstm_direction == LSTMSequence::direction::BIDIRECTIONAL ? 2UL : 1UL),
+                    3UL * static_cast<size_t>(hidden_size)},
+              std::vector<float>{0.f}),
+          hidden_size,
+          lstm_direction,
+          weights_format,
+          activations_alpha,
+          activations_beta,
+          activations,
+          clip_threshold,
+          input_forget)
+{
+}
+
+bool op::v0::LSTMSequence::visit_attributes(AttributeVisitor& visitor)
 {
     NGRAPH_OP_SCOPE(v0_LSTMSequence_visit_attributes);
     visitor.on_attribute("hidden_size", m_hidden_size);
@@ -47,6 +128,7 @@ bool ngraph::op::v0::LSTMSequence::visit_attributes(AttributeVisitor& visitor)
     visitor.on_attribute("weights_format", m_weights_format);
     return true;
 }
+
 OutputVector op::v0::LSTMSequence::decompose_op() const
 {
     OutputVector results;

View File

@@ -31,6 +31,12 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 constexpr NodeTypeInfo op::v1::Mod::type_info;
 
+op::v1::Mod::Mod()
+    : FusedOp()
+    , m_auto_broadcast()
+{
+}
+
 op::v1::Mod::Mod(const Output<Node>& A,
                  const Output<Node>& B,
                  const AutoBroadcastSpec& auto_broadcast)

View File

@@ -35,6 +35,14 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 NGRAPH_RTTI_DEFINITION(op::v0::MVN, "MVN", 0);
 
+op::MVN::MVN()
+    : FusedOp()
+    , m_across_channels()
+    , m_normalize_variance()
+    , m_reduction_axes()
+{
+}
+
 op::MVN::MVN(const Output<Node>& data, bool across_channels, bool normalize_variance, double eps)
     : FusedOp({data})
     , m_eps{eps}

View File

@@ -33,6 +33,13 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 NGRAPH_RTTI_DEFINITION(op::v0::NormalizeL2, "NormalizeL2", 0);
 
+op::NormalizeL2::NormalizeL2()
+    : FusedOp()
+    , m_eps()
+    , m_eps_mode()
+{
+}
+
 op::NormalizeL2::NormalizeL2(const Output<Node>& data,
                              const Output<Node>& axes,
                              float eps,

View File

@@ -34,6 +34,11 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 NGRAPH_RTTI_DEFINITION(op::PRelu, "PRelu", 0);
 
+op::PRelu::PRelu()
+    : FusedOp()
+{
+}
+
 op::PRelu::PRelu(const Output<Node>& data, const Output<Node>& slope)
     : FusedOp({data, slope})
 {

View File

@@ -31,6 +31,11 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 constexpr NodeTypeInfo op::v0::Selu::type_info;
 
+op::v0::Selu::Selu()
+    : FusedOp()
+{
+}
+
 op::v0::Selu::Selu(const Output<Node>& data, const Output<Node>& alpha, const Output<Node>& lambda)
     : FusedOp({data, alpha, lambda})
 {

View File

@@ -29,6 +29,12 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 constexpr NodeTypeInfo op::SquaredDifference::type_info;
 
+op::SquaredDifference::SquaredDifference()
+    : FusedOp()
+    , m_autobroadcast()
+{
+}
+
 op::SquaredDifference::SquaredDifference(const Output<Node>& x1,
                                          const Output<Node>& x2,
                                          const AutoBroadcastSpec& auto_broadcast)

View File

@@ -33,6 +33,11 @@ NGRAPH_SUPPRESS_DEPRECATED_START
 
 NGRAPH_RTTI_DEFINITION(op::v0::Squeeze, "Squeeze", 0);
 
+op::Squeeze::Squeeze()
+    : FusedOp()
+{
+}
+
 op::Squeeze::Squeeze(const Output<Node>& data, const Output<Node>& axes)
     : FusedOp({data, axes})
 {

View File

@@ -22,7 +22,6 @@
 #include "ngraph/op/util/fused_op.hpp"
 #include "ngraph/pass/pass.hpp"
 
-NGRAPH_SUPPRESS_DEPRECATED_START
 namespace ngraph
 {
     namespace pass
@@ -75,4 +74,3 @@ namespace ngraph
     };
     }
 }
-NGRAPH_SUPPRESS_DEPRECATED_END