Modify padding checking logic to enable new model (#9310)

+ Modified the way padding is added in prepare_padding
+ Changed the condition of the assertion for oneDNN padding

Signed-off-by: Min, Byungil <byungil.min@intel.com>
This commit is contained in:
Min, Byungil 2021-12-22 18:17:08 +09:00 committed by GitHub
parent 0bbda24186
commit 0e3c4cc103
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 8 additions and 3 deletions

View File

@ -30,7 +30,12 @@ void prepare_padding::run(program& p) {
auto add_required_padding = [&p](program_node& node, padding& needed_padding) {
// Add extra reorder for cldnn primitive to handle required padding if needed
auto& input = node.get_dependency(0);
if (input.get_preferred_impl_type() == impl_types::onednn &&
bool is_usr_onednn = false;
for (auto& input_usr : input.get_users())
if (input_usr->get_preferred_impl_type() == impl_types::onednn)
is_usr_onednn = true;
if ((input.get_preferred_impl_type() == impl_types::onednn || is_usr_onednn) &&
node.get_preferred_impl_type() == impl_types::ocl &&
static_cast<bool>(needed_padding)) {
auto new_reorder = std::make_shared<reorder>(node.id() + "_padding_reorder_for_" + input.id(), input.id(), input.get_output_layout());

View File

@ -544,8 +544,8 @@ void remove_redundant_reorders::run(program& p) {
n->set_preferred_impl_type(preferred_impl);
}
// Validate fused layout when onednn is enable
if (n->get_preferred_impl_type() == impl_types::onednn && !lo.are_layouts_suitable_for_onednn(*n)) {
// Validate fused layout when onednn is enable in post_optimize_graph
if (!enable_reorder_fusing && n->get_preferred_impl_type() == impl_types::onednn && !lo.are_layouts_suitable_for_onednn(*n)) {
throw std::runtime_error("Onednn doesnot support padded input or output");
}
}