[CPU] Fix QueryNetwork for ReferenceNode (#5809)

* Added has_evaluate() overrides to the nGraph operations
Maxim Andronov 2021-05-28 09:24:38 +03:00 committed by GitHub
parent 04f42d8442
commit 9424272cce
195 changed files with 1727 additions and 9 deletions
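
For context on the check introduced below: the CPU plugin's Reference node wraps an nGraph op and runs it through Node::evaluate(), so it first asks the op whether such a reference implementation exists via has_evaluate(). A minimal standalone sketch of that query, assuming only the public ngraph headers (this is illustrative code, not taken from the plugin); ops that do not override has_evaluate() report false, which is exactly the case the new constructor check turns into a NotImplemented error:

#include <iostream>
#include <memory>

#include <ngraph/op/abs.hpp>
#include <ngraph/op/parameter.hpp>

int main() {
    // Build a trivial nGraph op and ask whether it offers a reference
    // implementation that the Reference fallback node could call.
    auto arg = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32,
                                                       ngraph::Shape{2, 2});
    auto abs = std::make_shared<ngraph::op::v0::Abs>(arg);

    if (abs->has_evaluate()) {
        std::cout << "Abs can fall back on ngraph evaluate()" << std::endl;
    } else {
        std::cout << "no reference implementation for this element type" << std::endl;
    }
    return 0;
}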

View File

@@ -15,6 +15,9 @@ using namespace InferenceEngine::details;
 MKLDNNReferenceNode::MKLDNNReferenceNode(const std::shared_ptr<ngraph::Node>& op, const mkldnn::engine& eng, MKLDNNWeightsSharing::Ptr &cache,
                                          const std::string& errorMessage) :
         MKLDNNNode(op, eng, cache), ngraphOp(op), additionalErrorMessage(errorMessage) {
+    if (!op->has_evaluate()) {
+        IE_THROW(NotImplemented) << "Cannot fallback on ngraph reference implementation (Ngraph::Node::evaluate() is not implemented)";
+    }
     setType(Reference);
     setTypeStr("Reference");
 }
@@ -69,14 +72,7 @@ void MKLDNNReferenceNode::execute(mkldnn::stream strm) {
     }
     if (!ngraphOp->evaluate(outputs, inputs)) {
-        std::string errorDetails = "Unsupported operation of type: " + std::string(ngraphOp->get_type_name()) +
-                                   " name: " + std::string(ngraphOp->get_friendly_name());
-        errorDetails += "\nDetails: \n";
-        if (!additionalErrorMessage.empty()) {
-            errorDetails += additionalErrorMessage + "\n";
-        }
-        errorDetails += "Cannot fallback on ngraph reference implementation (Ngraph::Node::evaluate() is not implemented)";
-        IE_THROW(NotImplemented) << errorDetails;
+        IE_THROW() << "Evaluation failed on node of type: " << std::string(ngraphOp->get_type_name()) << " name: " << getName();
     }
 }
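
The effect of moving the check into the constructor is that a capability query can reject an op up front by catching the NotImplemented exception, instead of discovering the missing reference implementation at execute() time. A rough, self-contained illustration of that pattern (createReferenceNodeStub is a hypothetical stand-in for the node constructor, not the plugin's actual QueryNetwork code; only ie_common.h is assumed):

#include <ie_common.h>
#include <iostream>

// Stand-in for the MKLDNNReferenceNode constructor shown above: it refuses
// to build a fallback node when the op has no reference evaluate().
static void createReferenceNodeStub(bool opHasEvaluate) {
    if (!opHasEvaluate) {
        IE_THROW(NotImplemented) << "Cannot fallback on ngraph reference implementation "
                                    "(Ngraph::Node::evaluate() is not implemented)";
    }
}

int main() {
    for (bool hasEvaluate : {true, false}) {
        try {
            createReferenceNodeStub(hasEvaluate);
            std::cout << "op supported via Reference fallback" << std::endl;
        } catch (const InferenceEngine::NotImplemented&) {
            std::cout << "op reported as unsupported" << std::endl;
        }
    }
    return 0;
}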

View File

@ -38,6 +38,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Abs;

View File

@ -36,6 +36,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Acos;

View File

@ -37,6 +37,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override { return true; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v3
using v3::Acosh;

View File

@ -51,6 +51,7 @@ namespace ngraph
size_t get_version() const override { return 1; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -43,6 +43,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -37,6 +37,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override { return true; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Asin;

View File

@ -37,6 +37,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override { return true; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v3
using v3::Asinh;

View File

@ -84,6 +84,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs,
const EvaluationContext& evaluation_context) const override;
bool has_evaluate() const override;
bool constant_fold(OutputVector& output_values,
const OutputVector& inputs_values) override;
};

View File

@ -38,6 +38,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override { return true; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Atan;

View File

@ -37,6 +37,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override { return true; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v3
using v3::Atanh;

View File

@ -44,6 +44,7 @@ namespace ngraph
const Output<Node>& crops_end);
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
void validate_and_infer_types() override;
std::shared_ptr<Node>

View File

@ -70,6 +70,7 @@ namespace ngraph
std::pair<bool, AxisSet> get_broadcast_axes() const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
bool broadcast_evaluate(const HostTensorVector& outputs,
@ -132,6 +133,7 @@ namespace ngraph
void validate_and_infer_types() override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
AutoBroadcastSpec m_broadcast_spec;

View File

@ -30,6 +30,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Ceiling;

View File

@ -42,6 +42,7 @@ namespace ngraph
double get_max() const { return m_max; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
double m_min;

View File

@ -51,6 +51,7 @@ namespace ngraph
void set_axis(int64_t axis) { m_axis = axis; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& output_values) const override;
bool evaluate_upper(const HostTensorVector& output_values) const override;

View File

@ -171,6 +171,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& outputs) const override;
bool evaluate_upper(const HostTensorVector& outputs) const override;

View File

@ -44,6 +44,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& outputs) const override;
bool evaluate_upper(const HostTensorVector& outputs) const override;

View File

@ -30,6 +30,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Cos;

View File

@ -30,6 +30,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Cosh;

View File

@ -60,6 +60,7 @@ namespace ngraph
void validate_and_infer_types() override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
std::size_t m_blocksize;

View File

@ -53,6 +53,7 @@ namespace ngraph
size_t get_version() const override { return 1; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
bool m_pythondiv{true};

View File

@ -54,6 +54,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -25,6 +25,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Erf;

View File

@ -31,6 +31,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Exp;

View File

@ -29,6 +29,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Floor;

View File

@ -44,6 +44,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1

View File

@ -74,6 +74,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
std::shared_ptr<Node>
clone_with_new_inputs(const OutputVector& new_args) const override;

View File

@ -36,6 +36,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -37,6 +37,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -35,6 +35,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v5
} // namespace op

View File

@ -35,6 +35,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v4
} // namespace op

View File

@ -216,6 +216,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
const InterpolateAttrs& get_attrs() const { return m_attrs; }

View File

@ -36,6 +36,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -37,6 +37,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -30,6 +30,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Log;

View File

@ -72,6 +72,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
Loop(const Loop&);

View File

@ -38,6 +38,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool get_transpose_a() const { return m_transpose_a; }
bool get_transpose_b() const { return m_transpose_b; }

View File

@ -33,6 +33,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -73,6 +73,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
Shape m_kernel;

View File

@ -39,6 +39,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -33,6 +33,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& outputs) const override;
bool evaluate_upper(const HostTensorVector& outputs) const override;
};

View File

@ -34,6 +34,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v4
} // namespace op

View File

@ -39,6 +39,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -30,6 +30,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Negative;

View File

@ -60,6 +60,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
element::Type m_output_type = element::i64;

View File

@ -31,6 +31,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -37,6 +37,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool visit_attributes(AttributeVisitor& visitor) override;
};
} // namespace v1

View File

@ -42,6 +42,7 @@ namespace ngraph
virtual bool evaluate(const HostTensorVector& output_values,
const HostTensorVector& input_values) const override;
bool has_evaluate() const override;
/// \return The index of the one-hot axis.
int64_t get_axis() const { return m_axis; }

View File

@ -41,6 +41,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -74,6 +74,7 @@ namespace ngraph
void set_pad_mode(PadMode pad_mode) { m_pad_mode = pad_mode; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
PadMode m_pad_mode;

View File

@ -52,6 +52,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -37,6 +37,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::PRelu;

View File

@ -66,6 +66,7 @@ namespace ngraph
virtual bool visit_attributes(AttributeVisitor& visitor) override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
PriorBoxAttrs m_attrs;

View File

@ -54,6 +54,7 @@ namespace ngraph
virtual bool visit_attributes(AttributeVisitor& visitor) override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
PriorBoxClusteredAttrs m_attrs;

View File

@ -42,6 +42,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
element::Type m_output_type;
@ -77,6 +78,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Range;

View File

@ -94,6 +94,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs,
const EvaluationContext& evaluation_context) const override;
bool has_evaluate() const override;
bool constant_fold(OutputVector& output_values,
const OutputVector& inputs_values) override;

View File

@ -40,6 +40,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v4
} // namespace op

View File

@ -39,6 +39,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v4
} // namespace op

View File

@ -36,6 +36,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -36,6 +36,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -32,6 +32,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -39,6 +39,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& outputs) const override;
bool evaluate_upper(const HostTensorVector& outputs) const override;
};

View File

@ -87,6 +87,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
}
}

View File

@ -34,6 +34,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool visit_attributes(AttributeVisitor& visitor) override;
};
} // namespace v0

View File

@ -54,6 +54,7 @@ namespace ngraph
void set_special_zero(bool special_zero) { m_special_zero = special_zero; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& outputs) const override;
bool evaluate_upper(const HostTensorVector& outputs) const override;
bool constant_fold(OutputVector& output_values,

View File

@ -36,6 +36,7 @@ namespace ngraph
bool needs_default_layout() const { return m_needs_default_layout; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool constant_fold(OutputVector& output_values,
const OutputVector& inputs_values) override;

View File

@ -50,6 +50,7 @@ namespace ngraph
virtual size_t get_version() const override { return 1; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
Mode mode_from_string(const std::string& mode) const;

View File

@ -66,6 +66,7 @@ namespace ngraph
PoolingMode get_mode() const { return m_mode; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
PoolingMode mode_from_string(const std::string& mode) const;

View File

@ -47,6 +47,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
RoundMode get_mode() const { return m_mode; }

View File

@ -40,6 +40,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& inputs) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
bool evaluate_scatter_element_update(const HostTensorVector& outputs,

View File

@ -34,6 +34,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v3
using v3::ScatterNDUpdate;

View File

@ -41,6 +41,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
bool evaluate_scatter_update(const HostTensorVector& outputs,

View File

@ -67,6 +67,7 @@ namespace ngraph
const AutoBroadcastSpec& get_autob() const override { return m_auto_broadcast; }
virtual bool evaluate(const HostTensorVector& output_values,
const HostTensorVector& input_values) const override;
bool has_evaluate() const override;
private:
AutoBroadcastSpec m_auto_broadcast;

View File

@ -43,6 +43,7 @@ namespace ngraph
bool get_is_foldable() const { return m_is_foldable; }
bool evaluate(const HostTensorVector& output_values,
const HostTensorVector& input_values) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& output_values) const override;
bool evaluate_upper(const HostTensorVector& output_values) const override;
bool constant_fold(OutputVector& output_values,
@ -81,6 +82,7 @@ namespace ngraph
bool get_is_foldable() const { return m_is_foldable; }
bool evaluate(const HostTensorVector& output_values,
const HostTensorVector& input_values) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& output_values) const override;
bool evaluate_upper(const HostTensorVector& output_values) const override;
bool constant_fold(OutputVector& output_values,

View File

@ -48,6 +48,7 @@ namespace ngraph
int64_t get_group() const { return m_group; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
/// \brief Generates a shape required to permute the data

View File

@ -26,6 +26,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Sigmoid;

View File

@ -30,6 +30,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Sign;

View File

@ -43,6 +43,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Sin;

View File

@ -29,6 +29,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Sinh;

View File

@ -42,6 +42,7 @@ namespace ngraph
void set_axis(const size_t axis) { m_axis = axis; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
size_t m_axis;

View File

@ -34,6 +34,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v4
} // namespace op

View File

@ -51,6 +51,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
bool evaluate_space_to_batch(const HostTensorVector& outputs,

View File

@ -58,6 +58,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
std::size_t m_blocksize;

View File

@ -41,6 +41,7 @@ namespace ngraph
void set_num_splits(const size_t num_splits) { m_num_splits = num_splits; }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
size_t m_num_splits;

View File

@ -43,6 +43,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Sqrt;

View File

@ -29,6 +29,7 @@ namespace ngraph
void validate_and_infer_types() override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& outputs) const override;
bool evaluate_upper(const HostTensorVector& outputs) const override;
bool constant_fold(OutputVector& output_values,

View File

@ -93,6 +93,7 @@ namespace ngraph
size_t get_version() const override { return 1; }
bool evaluate(const HostTensorVector& output_values,
const HostTensorVector& input_values) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& outputs) const override;
bool evaluate_upper(const HostTensorVector& outputs) const override;

View File

@ -37,6 +37,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
} // namespace op

View File

@ -39,6 +39,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v4
} // namespace op

View File

@ -43,6 +43,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Tan;

View File

@ -29,6 +29,7 @@ namespace ngraph
clone_with_new_inputs(const OutputVector& new_args) const override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0
using v0::Tanh;

View File

@ -35,6 +35,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
bool evaluate_tile(const HostTensorVector& outputs,

View File

@ -86,6 +86,7 @@ namespace ngraph
size_t get_default_output_index() const override { return no_default_index(); }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
int64_t m_axis;
@ -149,6 +150,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
protected:
virtual size_t

View File

@ -40,6 +40,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
using v1::Transpose;

View File

@ -28,6 +28,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
bool evaluate_lower(const HostTensorVector& output_values) const override;
bool evaluate_upper(const HostTensorVector& output_values) const override;

View File

@ -44,6 +44,7 @@ namespace ngraph
size_t get_default_output_index() const override { return no_default_index(); }
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
private:
bool evaluate_variadic_split(const HostTensorVector& outputs,

View File

@ -42,6 +42,7 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override;
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v1
namespace v0
@ -73,6 +74,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool has_evaluate() const override;
};
} // namespace v0

View File

@ -65,3 +65,21 @@ bool op::Abs::evaluate(const HostTensorVector& outputs, const HostTensorVector&
NGRAPH_OP_SCOPE(v0_Abs_evaluate);
return absop::evaluate_abs(inputs[0], outputs[0], shape_size(get_output_shape(0)));
}
bool op::Abs::has_evaluate() const
{
NGRAPH_OP_SCOPE(v0_Abs_has_evaluate);
switch (get_input_element_type(0))
{
case ngraph::element::i32:
case ngraph::element::i64:
case ngraph::element::u32:
case ngraph::element::u64:
case ngraph::element::f16:
case ngraph::element::f32:
case ngraph::element::bf16:
case ngraph::element::boolean: return true;
default: break;
}
return false;
}
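
For illustration, these overrides pair with evaluate() as in the small standalone sketch below, which applies the same guard the plugin now uses. The HostTensor buffer handling here is a simplified assumption against the public ngraph API, not code from the plugin:

#include <memory>
#include <vector>

#include <ngraph/op/abs.hpp>
#include <ngraph/op/parameter.hpp>
#include <ngraph/runtime/host_tensor.hpp>

int main() {
    using namespace ngraph;

    std::vector<float> in{-1.f, 2.f, -3.f, 4.f};
    std::vector<float> out(in.size());

    auto param = std::make_shared<op::Parameter>(element::f32, Shape{4});
    auto abs = std::make_shared<op::v0::Abs>(param);

    // Mirror the constructor check added to MKLDNNReferenceNode: only take
    // the reference path when the op advertises it via has_evaluate().
    if (abs->has_evaluate()) {
        HostTensorVector inputs{
            std::make_shared<HostTensor>(element::f32, Shape{4}, in.data())};
        HostTensorVector outputs{
            std::make_shared<HostTensor>(element::f32, Shape{4}, out.data())};
        abs->evaluate(outputs, inputs);  // out now holds {1, 2, 3, 4}
    }
    return 0;
}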

View File

@ -73,3 +73,20 @@ bool op::Acos::evaluate(const HostTensorVector& outputs, const HostTensorVector&
NGRAPH_OP_SCOPE(v0_Acos_evaluate);
return acosop::evaluate_acos(inputs[0], outputs[0], shape_size(get_output_shape(0)));
}
bool op::Acos::has_evaluate() const
{
NGRAPH_OP_SCOPE(v0_Acos_has_evaluate);
switch (get_input_element_type(0))
{
case ngraph::element::i32:
case ngraph::element::i64:
case ngraph::element::u32:
case ngraph::element::u64:
case ngraph::element::f16:
case ngraph::element::f32:
case ngraph::element::boolean: return true;
default: break;
}
return false;
}

Some files were not shown because too many files have changed in this diff.