[IE TEST] Enable support of Dynamic shapes in IE TEST infra with examples (#7718)
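The core of the change: layer-test input shapes move from a flat InferenceEngine::SizeVector to a pair of an ngraph::PartialShape (the shape the test graph is built with, possibly dynamic) and a list of static target shapes to actually run inference with. A minimal sketch of the representation, with values borrowed from the SoftMax instances in the diffs below; the ShapeCase alias and variable names here are illustrative, not part of the PR:

#include <utility>
#include <vector>
#include <ngraph/ngraph.hpp>

// Illustrative alias: pairs the (possibly dynamic) shape the test graph is
// built with against the concrete shapes each inference request is run with.
using ShapeCase = std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>;

const std::vector<ShapeCase> dynamicCases = {
    // first dimension unbounded, second fixed to 10; run with batches 1, 2, 10
    {{ngraph::Dimension::dynamic(), 10}, {{1, 10}, {2, 10}, {10, 10}}},
    // first dimension bounded to the interval [1, 10]
    {{ngraph::Dimension(1, 10), 10}, {{1, 10}, {2, 10}, {10, 10}}},
};

const std::vector<ShapeCase> staticCases = {
    // an empty PartialShape marks a static case: the single target shape
    // is used to build the function, preserving the old test behavior
    {{}, {{1, 100}}},
};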

* Squashed commit of the following:

commit 69d82175a8
Merge: 4c5eb0f8f 068d31511
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Tue Sep 28 19:07:37 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit 4c5eb0f8f4
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Tue Sep 28 19:06:16 2021 +0900

    Build all (Multiple target shape inputs removed)

commit dd3d17c85f
Merge: 0870cc6cc 1d3df63d6
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 27 20:29:47 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit 0870cc6cc1
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 27 20:23:27 2021 +0900

    Applying multiple target shapes to templateFuncTests (work in progress; still failing)

commit e73a9741de
Merge: 20ea316b0 3bf34b116
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 27 16:54:16 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit 20ea316b0d
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 27 16:52:39 2021 +0900

    Apply Eltwise-style input shape to cpuFuncTests

commit 0802282972
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 27 14:59:00 2021 +0900

    Apply Eltwise input shape to templateFuncTests

    Modify GenerateInputs() to use functionRefs
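A hedged sketch of that GenerateInputs() change (the functionRefs member and the FuncTestUtils helpers exist in the test infra, but this body is illustrative, not the actual implementation): input blobs are sized from the parameters of the cloned reference function, which keep concrete shapes even when `function` itself was rebuilt with partial shapes.

// Illustrative only: one randomly filled blob per parameter of the cloned
// reference function, so blob sizes stay concrete for dynamic-shape tests.
void GenerateInputs() {
    inputs.clear();
    for (const auto& param : functionRefs->get_parameters()) {
        const auto& shape = param->get_shape();  // concrete by construction
        InferenceEngine::TensorDesc desc(InferenceEngine::Precision::FP32, shape,
                                         InferenceEngine::TensorDesc::getLayoutByDims(shape));
        inputs.push_back(FuncTestUtils::createAndFillBlob(desc));
    }
}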

commit 25c399d922
Merge: ab476a9ba b3050c268
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Sep 24 14:45:01 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit ab476a9ba3
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Sep 24 14:44:23 2021 +0900

    Add functionRefs for PrePostProcessTest

commit 451ce9096d
Merge: 5710141cd 846c9c91f
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Sep 24 12:20:34 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit 5710141cd8
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Sep 24 12:16:06 2021 +0900

    templateFuncTests

    * Remove StaticShape and add DynamicShape to make existing test cases static default

    * Fix typos

    cpuFuncTests

    * Add functionRefs to each case
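The net effect on suite registration, taken from the SoftMax instances in the diffs below: existing cases keep their static behavior under a `_static` suffix, and dynamic coverage is registered as a separate suite that plugins without dynamic-shape support can skip.

// Static cases remain the default path; dynamic cases get their own suite.
INSTANTIATE_TEST_SUITE_P(smoke_SoftMax2D_static, SoftMaxLayerTest,
                         params2D_static, SoftMaxLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_SoftMax2D_dynamic, SoftMaxLayerTest,
                         params2D_dynamic, SoftMaxLayerTest::getTestCaseName);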

commit c3b87f098d
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 23 12:51:40 2021 +0900

    Fix to use ngraph::clone_function()
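    This is the pattern the diffs repeat across the test SetUp() bodies: take a deep copy of the graph right after it is built, so reference results are computed from the original graph even after the plugin reshapes or transforms `function`.

    // End of a typical SetUp(): clone before the plugin can mutate 'function'.
    function = makeNgraphFunction(ngPrc, inputParams, convolutionNode, "Convolution");
    functionRefs = ngraph::clone_function(*function);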

commit fa7e87b146
Merge: a8890950d d7dfce209
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 23 10:47:15 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit a8890950d3
Merge: ceb802d97 5847b35ed
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 16 19:24:08 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit ceb802d978
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 16 19:24:00 2021 +0900

    templateFuncTests run, except for cases with multiple input shapes

commit 8adbaba71d
Merge: d11e3e917 f44369ce4
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 13 15:30:48 2021 +0900

    Merge remote-tracking branch 'upstream/master' into sy/test/ConvolutionLayerTest_dynamic_shape_case

commit d11e3e917e
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 13 15:30:27 2021 +0900

    ConvolutionLayerTest for dynamic shape case (Test only)

commit d8eaf21acd
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 13 14:41:38 2021 +0900

    ConvolutionLayerTest for dynamic shape case

commit fe57d0faa6
Merge: 39a691ab9 7a392253d
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 13 09:59:44 2021 +0900

    Merge branch 'master' into sy/ilyachur/draft_dynamic_plugin_rebased

commit 7a392253d7
Merge: b8966457b 021639a04
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 13 09:59:01 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 39a691ab95
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Sep 13 09:55:38 2021 +0900

    Remove redundant line

commit 109cafeffe
Merge: 0245e05cc b8966457b
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 9 10:25:36 2021 +0900

    Merge branch 'master' into sy/ilyachur/draft_dynamic_plugin_rebased

commit b8966457b9
Merge: 904384fee 1c1401b06
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 9 10:24:37 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 0245e05cc8
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 9 10:18:54 2021 +0900

    Remove comments used for individual testing

    Rename improperly named functions

    Remove unnecessary template<>

commit 0854f07291
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Sep 3 13:31:22 2021 +0900

    ConvolutionLayerTest and SoftMaxLayerTest on template_plugin for dynamic shapes rebased

commit 904384fee3
Merge: 4bf45de5d b78f228fa
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Sep 3 09:11:44 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 4bf45de5dc
Merge: 18476fe1b 07f7061f9
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Sep 2 08:54:23 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 18476fe1b9
Merge: e92f5928d f77d838e6
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Aug 23 09:48:34 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit e92f5928dc
Merge: ef937a5a5 2fefe1164
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Aug 20 14:40:00 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit ef937a5a52
Merge: 04fed4c2a 184b3cbe4
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Aug 20 13:32:53 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 04fed4c2af
Merge: 508c61615 39131968c
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Aug 13 13:27:26 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 508c616151
Merge: 0647e6827 273c7188a
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Aug 13 10:49:35 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 0647e68277
Merge: 89ba7c1bf 04ff7a6ed
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Wed Aug 11 13:39:51 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 89ba7c1bf1
Merge: ebdbea67c b11a2220b
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Aug 9 09:01:44 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit ebdbea67cb
Merge: 4880bd11d d921e7a9c
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Jul 26 11:57:49 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 4880bd11d4
Merge: eef72406c 1e1e3bfff
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Jul 22 14:58:56 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit eef72406cc
Merge: 9ccacedef 2a15e3e79
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Jul 16 12:58:14 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 9ccacedefe
Merge: 14020554d c14edd413
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Jul 15 13:48:24 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 14020554d2
Merge: 54182c03b 2a970a56d
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Jul 9 08:04:43 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 54182c03b2
Merge: 7654df0d9 35d9bd0f6
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Jul 8 08:15:46 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 7654df0d93
Merge: 8ab309328 64cbdb9c2
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Jul 5 13:00:33 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 8ab3093282
Merge: b77d127d7 db0145be1
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Wed Jun 30 13:27:58 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit b77d127d78
Merge: 4a5811623 632709012
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Jun 28 10:01:50 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 4a5811623d
Merge: 6ce8d8ce6 7e6641192
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Jun 21 09:31:00 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 6ce8d8ce66
Merge: 84bc851ec db67c1b2b
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Jun 17 08:28:23 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 84bc851ecf
Merge: 5a2cb750e fb7435a64
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Tue Jun 15 08:24:23 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 5a2cb750ec
Merge: 19ab2100a 05469e95e
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri Jun 11 13:30:59 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 19ab2100a5
Merge: b2bdc3976 4d9fe14ec
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Tue Jun 8 09:49:38 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit b2bdc39760
Merge: 0a6c3cc9b ac1803c3a
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Jun 3 08:46:13 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 0a6c3cc9bf
Merge: e07337d53 97a9a76ff
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Wed May 26 10:51:49 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit e07337d533
Merge: d4b251678 e41e25533
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Tue May 18 08:46:00 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit d4b251678e
Merge: f396091bd 1b8a0f7ae
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon May 10 08:11:10 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit f396091bd8
Merge: f569dbc73 8645c0839
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Fri May 7 13:49:43 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit f569dbc739
Merge: a6b2800be 31b161097
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Thu Apr 29 07:15:08 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit a6b2800be2
Merge: dc771f1c4 1ae1757a5
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Wed Apr 28 07:50:13 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit dc771f1c4f
Merge: 5320f7eae 5de5f4d7d
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Tue Apr 27 15:00:05 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 5320f7eae0
Merge: 47dedfde5 39e1a21c4
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Apr 26 08:32:06 2021 +0900

    Merge remote-tracking branch 'upstream/master'

commit 47dedfde57
Author: Steve Yoo <steve.yoo@intel.com>
Date:   Mon Apr 19 09:06:56 2021 +0900

    Add test cases for PReLU in cpu plugin

    * For case when slope is vector

* fix build

* remove extra

* temp

* Cpu

* All fixes for softmax

* ie + gna

* Remove extra

* fix crash

* eltwise

* Fix crash

* fix failures

* gpu myriad

* fix

* fix template

* fix

* tr

* gpu skip

* fix gpu

* fix template
Committed by: Irina Efode, 2021-10-04 14:34:50 +03:00 (via GitHub)
Parent: c687929c19
Commit: 996b15ccf2
333 files changed, 1583 insertions(+), 981 deletions(-)

View File

@@ -13,8 +13,8 @@ namespace {
// ! [test_convolution:declare_parameters]
const std::vector<InferenceEngine::Precision> netPrecisions = {
    InferenceEngine::Precision::FP32,
    InferenceEngine::Precision::FP16,
};

/* ============= 2D Convolution ============= */
@@ -58,41 +58,41 @@ const auto conv2DParams_AutoPadValid = ::testing::Combine(
// ! [test_convolution:instantiate]
INSTANTIATE_TEST_SUITE_P(Convolution2D_ExplicitPadding, ConvolutionLayerTest,
    ::testing::Combine(
        conv2DParams_ExplicitPadding,
        ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
        ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
    ConvolutionLayerTest::getTestCaseName);
// ! [test_convolution:instantiate]

INSTANTIATE_TEST_SUITE_P(Convolution2D_AutoPadValid, ConvolutionLayerTest,
    ::testing::Combine(
        conv2DParams_AutoPadValid,
        ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
        ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
    ConvolutionLayerTest::getTestCaseName);

/* ============= 3D Convolution ============= */
const std::vector<std::vector<size_t>> kernels3d = {{3, 3, 3},
                                                    {3, 5, 3}};
const std::vector<std::vector<ptrdiff_t>> paddings3d = {{0, 0, 0},
                                                        {0, 2, 0}};
const std::vector<std::vector<size_t>> strides3d = {{1, 1, 1},
                                                    {1, 2, 1}};
const std::vector<std::vector<size_t>> dilations3d = {{1, 1, 1},
                                                      {1, 2, 1}};

const auto conv3DParams_ExplicitPadding = ::testing::Combine(
    ::testing::ValuesIn(kernels3d),
@@ -114,27 +114,27 @@ const auto conv3DParams_AutoPadValid = ::testing::Combine(
);

INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_ExplicitPadding, ConvolutionLayerTest,
    ::testing::Combine(
        conv3DParams_ExplicitPadding,
        ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
        ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
    ConvolutionLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(nightly_Convolution3D_AutoPadValid, ConvolutionLayerTest,
    ::testing::Combine(
        conv3DParams_AutoPadValid,
        ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
        ::testing::Values(CommonTestUtils::DEVICE_TEMPLATE)),
    ConvolutionLayerTest::getTestCaseName);
} // namespace

View File

@@ -12,66 +12,117 @@ using namespace LayerTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {
    InferenceEngine::Precision::FP32,
};
const std::vector<InferenceEngine::Layout> inputLayouts2D = {
    InferenceEngine::Layout::NC,
};
-const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
-    InferenceEngine::SizeVector {1, 100},
-    InferenceEngine::SizeVector {100, 1},
-    InferenceEngine::SizeVector {10, 10},
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputStaticShape2D = {
+    {{}, {{1, 100}}},
+    {{}, {{100, 1}}},
+    {{}, {{10, 10}}},
};
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputDynamicShape2D = {
+    {{ngraph::Dimension::dynamic(), 10}, {{1, 10}, {2, 10}, {10, 10}}},
+    {{ngraph::Dimension(1, 10), 10}, {{1, 10}, {2, 10}, {10, 10}}},
+    {{10, ngraph::Dimension::dynamic()}, {{10, 1}, {10, 5}, {10, 10}}},
+    {{ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic()}, {{1, 10}, {2, 10}, {10, 10}}}
+};
const std::vector<size_t> axis2D = {
    0, 1
};
-const auto params2D = testing::Combine(
+const auto params2D_static = testing::Combine(
    testing::ValuesIn(netPrecisions),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::ValuesIn(inputLayouts2D),
    testing::Values(InferenceEngine::Layout::ANY),
-    testing::ValuesIn(inputShapes2D),
+    testing::ValuesIn(inputStaticShape2D),
    testing::ValuesIn(axis2D),
    testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
    testing::Values(std::map<std::string, std::string>())
);
+const auto params2D_dynamic = testing::Combine(
+    testing::ValuesIn(netPrecisions),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::ValuesIn(inputLayouts2D),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::ValuesIn(inputDynamicShape2D),
+    testing::ValuesIn(axis2D),
+    testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
+    testing::Values(std::map<std::string, std::string>())
+);
INSTANTIATE_TEST_SUITE_P(
-    smoke_SoftMax2D,
+    smoke_SoftMax2D_static,
    SoftMaxLayerTest,
-    params2D,
+    params2D_static,
    SoftMaxLayerTest::getTestCaseName
);
+INSTANTIATE_TEST_SUITE_P(
+    smoke_SoftMax2D_dynamic,
+    SoftMaxLayerTest,
+    params2D_dynamic,
+    SoftMaxLayerTest::getTestCaseName
+);
-const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
-    InferenceEngine::SizeVector {1, 100, 1, 1},
-    InferenceEngine::SizeVector {1, 3, 4, 3},
-    InferenceEngine::SizeVector {2, 3, 4, 5},
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputStaticShape4D = {
+    {{}, {{1, 100, 1, 1}}},
+    {{}, {{50, 100, 4, 1}}},
+    {{}, {{2, 100, 10, 1}}},
};
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputDynamicShape4D = {
+    {{ngraph::Dimension::dynamic(), 100, ngraph::Dimension(1, 10), 1}, {{1, 100, 1, 1}, {100, 100, 5, 1}}},
+    {{ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic()},
+     {{1, 100, 1, 1}, {50, 100, 4, 1}, {2, 100, 10, 1}}},
+};
const std::vector<size_t> axis4D = {0, 1, 2, 3};
-const auto params4D = testing::Combine(
+const auto params4Dstatic = testing::Combine(
    testing::ValuesIn(netPrecisions),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Layout::NCHW),
    testing::Values(InferenceEngine::Layout::ANY),
-    testing::ValuesIn(inputShapes4D),
+    testing::ValuesIn(inputStaticShape4D),
    testing::ValuesIn(axis4D),
    testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
    testing::Values(std::map<std::string, std::string>())
);
+const auto params4Ddynamic = testing::Combine(
+    testing::ValuesIn(netPrecisions),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Layout::NCHW),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::ValuesIn(inputDynamicShape4D),
+    testing::ValuesIn(axis4D),
+    testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
+    testing::Values(std::map<std::string, std::string>())
+);
INSTANTIATE_TEST_SUITE_P(
-    smoke_SoftMax4D,
+    smoke_SoftMax4D_static,
    SoftMaxLayerTest,
-    params4D,
+    params4Dstatic,
    SoftMaxLayerTest::getTestCaseName
);
+INSTANTIATE_TEST_SUITE_P(
+    smoke_SoftMax4D_dynamic,
+    SoftMaxLayerTest,
+    params4Ddynamic,
+    SoftMaxLayerTest::getTestCaseName
+);

View File

@@ -59,4 +59,4 @@ INSTANTIATE_TEST_SUITE_P(
        ::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    ConvolutionLayerTest::getTestCaseName);
} // namespace

View File

@@ -9,17 +9,20 @@
using namespace LayerTestsDefinitions;

namespace {
-TEST_P(EltwiseLayerTest, Serialize) { Serialize(); }
+TEST_P(EltwiseLayerTest, Serialize) {
+    Serialize();
+}

const std::vector<InferenceEngine::Precision> inputPrecisions = {
    InferenceEngine::Precision::FP32,
    InferenceEngine::Precision::FP16,
    InferenceEngine::Precision::I32,
};

-std::vector<std::vector<std::vector<size_t>>> inputShapes = {
-    {{2}},
-    {{1, 5, 50}},
-    {{2, 10, 1, 4}, {2, 10, 1, 1}}
+std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inputShapes = {
+    {{}, {{{2}}}},
+    {{}, {{{1, 5, 50}}}},
+    {{}, {{{2, 10, 1, 4}, {2, 10, 1, 1}}}}
};

std::vector<ngraph::helpers::InputLayerType> secondaryInputTypes = {

View File

@@ -70,6 +70,7 @@ protected:
                                          ngraph::NodeVector {bias_2},
                                          ngraph::ParameterVector {input},
                                          "SimpleNet");
+        functionRefs = ngraph::clone_function(*function);
    }
};

View File

@@ -76,5 +76,4 @@ INSTANTIATE_TEST_SUITE_P(smoke_BehaviorTests_2, InferRequestDynamicTests,
        ::testing::Values(CommonTestUtils::DEVICE_CPU),
        ::testing::ValuesIn(configs)),
    InferRequestDynamicTests::getTestCaseName);
} // namespace

View File

@@ -12,8 +12,8 @@ using namespace LayerTestsDefinitions;
namespace {
const std::vector<InferenceEngine::Precision> netPrecisions = {
    InferenceEngine::Precision::FP32, InferenceEngine::Precision::FP16,
    InferenceEngine::Precision::I32};

/* ============= 1D Convolution ============= */
const std::vector<std::vector<size_t>> kernels1D = {{3}, {5}};
@@ -24,40 +24,40 @@ const std::vector<std::vector<size_t>> dilations1D = {{1}, {3}};
const std::vector<size_t> numOutChannels1D = {1, 5};

const auto conv1DParams_ExplicitPadding = ::testing::Combine(
    ::testing::ValuesIn(kernels1D), ::testing::ValuesIn(strides1D),
    ::testing::ValuesIn(padBegins1D), ::testing::ValuesIn(padEnds1D),
    ::testing::ValuesIn(dilations1D), ::testing::ValuesIn(numOutChannels1D),
    ::testing::Values(ngraph::op::PadType::EXPLICIT));
const auto conv1DParams_AutoPadValid = ::testing::Combine(
    ::testing::ValuesIn(kernels1D), ::testing::ValuesIn(strides1D),
    ::testing::Values(std::vector<ptrdiff_t>({0})),
    ::testing::Values(std::vector<ptrdiff_t>({0})),
    ::testing::ValuesIn(dilations1D), ::testing::ValuesIn(numOutChannels1D),
    ::testing::Values(ngraph::op::PadType::VALID));

INSTANTIATE_TEST_SUITE_P(
    smoke_Convolution1D_ExplicitPadding, ConvolutionLayerTest,
    ::testing::Combine(
        conv1DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 30})),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    ConvolutionLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(
    smoke_Convolution1D_AutoPadValid, ConvolutionLayerTest,
    ::testing::Combine(
        conv1DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 30})),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    ConvolutionLayerTest::getTestCaseName);

/* ============= 2D Convolution ============= */
const std::vector<std::vector<size_t>> kernels = {{3, 3}, {3, 5}};
@@ -68,68 +68,68 @@ const std::vector<std::vector<size_t>> dilations = {{1, 1}, {3, 1}};
const std::vector<size_t> numOutChannels = {1, 5};

const auto conv2DParams_ExplicitPadding = ::testing::Combine(
    ::testing::ValuesIn(kernels), ::testing::ValuesIn(strides),
    ::testing::ValuesIn(padBegins), ::testing::ValuesIn(padEnds),
    ::testing::ValuesIn(dilations), ::testing::ValuesIn(numOutChannels),
    ::testing::Values(ngraph::op::PadType::EXPLICIT));
const auto conv2DParams_AutoPadValid = ::testing::Combine(
    ::testing::ValuesIn(kernels), ::testing::ValuesIn(strides),
    ::testing::Values(std::vector<ptrdiff_t>({0, 0})),
    ::testing::Values(std::vector<ptrdiff_t>({0, 0})),
    ::testing::ValuesIn(dilations), ::testing::ValuesIn(numOutChannels),
    ::testing::Values(ngraph::op::PadType::VALID));

INSTANTIATE_TEST_SUITE_P(
    smoke_Convolution2D_ExplicitPadding, ConvolutionLayerTest,
    ::testing::Combine(
        conv2DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    ConvolutionLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(
    smoke_Convolution2D_AutoPadValid, ConvolutionLayerTest,
    ::testing::Combine(
        conv2DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 30, 30})),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    ConvolutionLayerTest::getTestCaseName);

// weights for this convolution have the Acdb16a layout
// for the [96,1,7,7] shape, the strides of dimensions 1 and 3 are equal, but not in the default order
namespace specificWeightLayout {
const std::vector<size_t> kernels = {7, 7};
const std::vector<size_t> strides = {2, 2};
const std::vector<ptrdiff_t> padBegins = {1, 1};
const std::vector<ptrdiff_t> padEnds = {1, 1};
const std::vector<size_t> dilations = {1, 1};
const size_t numOutChannels = {96};
const auto conv2DParams_WeightLayout = ::testing::Combine(::testing::Values(kernels),
                                                          ::testing::Values(strides),
                                                          ::testing::Values(padBegins),
                                                          ::testing::Values(padEnds),
                                                          ::testing::Values(dilations),
                                                          ::testing::Values(numOutChannels),
                                                          ::testing::Values(ngraph::op::PadType::EXPLICIT));

INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_SpecificWeightLayout, ConvolutionLayerTest,
                         ::testing::Combine(conv2DParams_WeightLayout,
                                            ::testing::ValuesIn(netPrecisions),
                                            ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
                                            ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
                                            ::testing::Values(InferenceEngine::Layout::ANY),
                                            ::testing::Values(InferenceEngine::Layout::ANY),
                                            ::testing::Values(std::vector<size_t>({1, 1, 50, 75})),
                                            ::testing::Values(CommonTestUtils::DEVICE_CPU)),
                         ConvolutionLayerTest::getTestCaseName);
} // namespace specificWeightLayout

/* ============= 3D Convolution ============= */
@@ -140,39 +140,39 @@ const std::vector<std::vector<size_t>> dilations3d = {{1, 1, 1}, {1, 2, 1}};
const std::vector<size_t> numOutChannels3D = {1, 5};

const auto conv3DParams_ExplicitPadding = ::testing::Combine(
    ::testing::ValuesIn(kernels3d), ::testing::ValuesIn(strides3d),
    ::testing::ValuesIn(paddings3d), ::testing::ValuesIn(paddings3d),
    ::testing::ValuesIn(dilations3d), ::testing::ValuesIn(numOutChannels3D),
    ::testing::Values(ngraph::op::PadType::EXPLICIT));
const auto conv3DParams_AutoPadValid = ::testing::Combine(
    ::testing::ValuesIn(kernels3d), ::testing::ValuesIn(strides3d),
    ::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})),
    ::testing::Values(std::vector<ptrdiff_t>({0, 0, 0})),
    ::testing::ValuesIn(dilations3d), ::testing::ValuesIn(numOutChannels3D),
    ::testing::Values(ngraph::op::PadType::VALID));

INSTANTIATE_TEST_SUITE_P(
    smoke_Convolution3D_ExplicitPadding, ConvolutionLayerTest,
    ::testing::Combine(
        conv3DParams_ExplicitPadding, ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    ConvolutionLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(
    smoke_Convolution3D_AutoPadValid, ConvolutionLayerTest,
    ::testing::Combine(
        conv3DParams_AutoPadValid, ::testing::ValuesIn(netPrecisions),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(InferenceEngine::Layout::ANY),
        ::testing::Values(std::vector<size_t>({1, 3, 10, 10, 10})),
        ::testing::Values(CommonTestUtils::DEVICE_CPU)),
    ConvolutionLayerTest::getTestCaseName);
} // namespace

View File

@@ -9,20 +9,25 @@
using namespace LayerTestsDefinitions;

namespace {
-std::vector<std::vector<std::vector<size_t>>> inShapes = {
-    {{2}},
-    {{2, 200}},
-    {{10, 200}},
-    {{1, 10, 100}},
-    {{4, 4, 16}},
-    {{1, 1, 1, 3}},
-    {{2, 17, 5, 4}, {1, 17, 1, 1}},
-    {{2, 17, 5, 1}, {1, 17, 1, 4}},
-    {{1, 2, 4}},
-    {{1, 4, 4}},
-    {{1, 4, 4, 1}},
-    {{1, 1, 1, 1, 1, 1, 3}},
-    {{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}}
+std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapesStatic = {
+    {{}, {{{2}}}},
+    {{}, {{{2, 200}}}},
+    {{}, {{{10, 200}}}},
+    {{}, {{{1, 10, 100}}}},
+    {{}, {{{4, 4, 16}}}},
+    {{}, {{{1, 1, 1, 3}}}},
+    {{}, {{{2, 17, 5, 4}, {1, 17, 1, 1}}}},
+    {{}, {{{2, 17, 5, 1}, {1, 17, 1, 4}}}},
+    {{}, {{{1, 2, 4}}}},
+    {{}, {{{1, 4, 4}}}},
+    {{}, {{{1, 4, 4, 1}}}},
+    {{}, {{{1, 1, 1, 1, 1, 1, 3}}}},
+    {{}, {{{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}}}}
};
+std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapesDynamic = {
+    {{{ngraph::Dimension(1, 10), 200}, {ngraph::Dimension(1, 10), 200}},
+     {{{2, 200}, {2, 200}}, {{1, 200}, {5, 200}}}},
+};

std::vector<InferenceEngine::Precision> netPrecisions = {
@@ -36,11 +41,19 @@ std::vector<ngraph::helpers::InputLayerType> secondaryInputTypes = {
    ngraph::helpers::InputLayerType::PARAMETER,
};
+std::vector<ngraph::helpers::InputLayerType> secondaryInputTypesDynamic = {
+    ngraph::helpers::InputLayerType::PARAMETER,
+};

std::vector<CommonTestUtils::OpType> opTypes = {
    CommonTestUtils::OpType::SCALAR,
    CommonTestUtils::OpType::VECTOR,
};
+std::vector<CommonTestUtils::OpType> opTypesDynamic = {
+    CommonTestUtils::OpType::VECTOR,
+};

std::vector<ngraph::helpers::EltwiseTypes> eltwiseOpTypes = {
    ngraph::helpers::EltwiseTypes::ADD,
    ngraph::helpers::EltwiseTypes::MULTIPLY,
@@ -52,10 +65,16 @@ std::vector<ngraph::helpers::EltwiseTypes> eltwiseOpTypes = {
    ngraph::helpers::EltwiseTypes::MOD
};
+std::vector<ngraph::helpers::EltwiseTypes> eltwiseOpTypesDynamic = {
+    ngraph::helpers::EltwiseTypes::ADD,
+    ngraph::helpers::EltwiseTypes::MULTIPLY,
+    ngraph::helpers::EltwiseTypes::SUBTRACT,
+};

std::map<std::string, std::string> additional_config = {};

const auto multiply_params = ::testing::Combine(
-    ::testing::ValuesIn(inShapes),
+    ::testing::ValuesIn(inShapesStatic),
    ::testing::ValuesIn(eltwiseOpTypes),
    ::testing::ValuesIn(secondaryInputTypes),
    ::testing::ValuesIn(opTypes),
@@ -66,13 +85,26 @@ const auto multiply_params = ::testing::Combine(
    ::testing::Values(CommonTestUtils::DEVICE_CPU),
    ::testing::Values(additional_config));

+const auto multiply_params_dynamic = ::testing::Combine(
+    ::testing::ValuesIn(inShapesDynamic),
+    ::testing::ValuesIn(eltwiseOpTypesDynamic),
+    ::testing::ValuesIn(secondaryInputTypesDynamic),
+    ::testing::ValuesIn(opTypesDynamic),
+    ::testing::ValuesIn(netPrecisions),
+    ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    ::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    ::testing::Values(InferenceEngine::Layout::ANY),
+    ::testing::Values(CommonTestUtils::DEVICE_CPU),
+    ::testing::Values(additional_config));

-INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs, EltwiseLayerTest, multiply_params, EltwiseLayerTest::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_static, EltwiseLayerTest, multiply_params, EltwiseLayerTest::getTestCaseName);
+INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_dynamic, EltwiseLayerTest, multiply_params_dynamic, EltwiseLayerTest::getTestCaseName);

-std::vector<std::vector<std::vector<size_t>>> inShapesSingleThread = {
-    {{1, 2, 3, 4}},
-    {{2, 2, 2, 2}},
-    {{2, 1, 2, 1, 2, 2}}
+std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapesSingleThread = {
+    {{}, {{{1, 2, 3, 4}}}},
+    {{}, {{{2, 2, 2, 2}}}},
+    {{}, {{{2, 1, 2, 1, 2, 2}}}}
};

std::vector<ngraph::helpers::EltwiseTypes> eltwiseOpTypesSingleThread = {

View File

@@ -19,59 +19,110 @@ const std::vector<InferenceEngine::Layout> inputLayouts2D = {
    InferenceEngine::Layout::NC,
};

-const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
-    InferenceEngine::SizeVector {1, 100},
-    InferenceEngine::SizeVector {100, 1},
-    InferenceEngine::SizeVector {10, 10},
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputStaticShape2D = {
+    {{}, {{1, 100}}},
+    {{}, {{100, 1}}},
+    {{}, {{10, 10}}},
};
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputDynamicShape2D = {
+    {{ngraph::Dimension::dynamic(), 10}, {{1, 10}, {2, 10}, {10, 10}}},
+    {{ngraph::Dimension(1, 10), 10}, {{1, 10}, {2, 10}, {10, 10}}},
+    {{10, ngraph::Dimension::dynamic()}, {{10, 1}, {10, 5}, {10, 10}}},
+    {{ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic()}, {{1, 10}, {2, 10}, {10, 10}}}
+};

const std::vector<size_t> axis2D = {
    0, 1
};

-const auto params2D = testing::Combine(
+const auto params2D_static = testing::Combine(
    testing::ValuesIn(netPrecisions),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::ValuesIn(inputLayouts2D),
    testing::Values(InferenceEngine::Layout::ANY),
-    testing::ValuesIn(inputShapes2D),
+    testing::ValuesIn(inputStaticShape2D),
    testing::ValuesIn(axis2D),
    testing::Values(CommonTestUtils::DEVICE_CPU),
    testing::Values(std::map<std::string, std::string>())
);
+const auto params2D_dynamic = testing::Combine(
+    testing::ValuesIn(netPrecisions),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::ValuesIn(inputLayouts2D),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::ValuesIn(inputDynamicShape2D),
+    testing::ValuesIn(axis2D),
+    testing::Values(CommonTestUtils::DEVICE_CPU),
+    testing::Values(std::map<std::string, std::string>())
+);

INSTANTIATE_TEST_SUITE_P(
-    smoke_SoftMax2D,
+    smoke_SoftMax2D_static,
    SoftMaxLayerTest,
-    params2D,
+    params2D_static,
    SoftMaxLayerTest::getTestCaseName
);
+INSTANTIATE_TEST_SUITE_P(
+    smoke_SoftMax2D_dynamic,
+    SoftMaxLayerTest,
+    params2D_dynamic,
+    SoftMaxLayerTest::getTestCaseName
+);

-const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
-    InferenceEngine::SizeVector {1, 100, 1, 1},
-    InferenceEngine::SizeVector {1, 3, 4, 3},
-    InferenceEngine::SizeVector {2, 3, 4, 5},
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputStaticShape4D = {
+    {{}, {{1, 100, 1, 1}}},
+    {{}, {{50, 100, 4, 1}}},
+    {{}, {{2, 100, 10, 1}}},
};
+const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputDynamicShape4D = {
+    {{ngraph::Dimension::dynamic(), 100, ngraph::Dimension(1, 10), 1}, {{1, 100, 1, 1}, {100, 100, 5, 1}}},
+    {{ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic(), ngraph::Dimension::dynamic()},
+     {{1, 100, 1, 1}, {50, 100, 4, 1}, {2, 100, 10, 1}}},
+};

const std::vector<size_t> axis4D = {0, 1, 2, 3};

-const auto params4D = testing::Combine(
+const auto params4Dstatic = testing::Combine(
    testing::ValuesIn(netPrecisions),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
    testing::Values(InferenceEngine::Layout::NCHW),
    testing::Values(InferenceEngine::Layout::ANY),
-    testing::ValuesIn(inputShapes4D),
+    testing::ValuesIn(inputStaticShape4D),
    testing::ValuesIn(axis4D),
    testing::Values(CommonTestUtils::DEVICE_CPU),
    testing::Values(std::map<std::string, std::string>())
);
+const auto params4Ddynamic = testing::Combine(
+    testing::ValuesIn(netPrecisions),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Layout::NCHW),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::ValuesIn(inputDynamicShape4D),
+    testing::ValuesIn(axis4D),
+    testing::Values(CommonTestUtils::DEVICE_CPU),
+    testing::Values(std::map<std::string, std::string>())
+);

INSTANTIATE_TEST_SUITE_P(
-    smoke_SoftMax4D,
+    smoke_SoftMax4D_static,
    SoftMaxLayerTest,
-    params4D,
+    params4Dstatic,
    SoftMaxLayerTest::getTestCaseName
);
+INSTANTIATE_TEST_SUITE_P(
+    smoke_SoftMax4D_dynamic,
+    SoftMaxLayerTest,
+    params4Ddynamic,
+    SoftMaxLayerTest::getTestCaseName
+);

View File

@@ -92,6 +92,7 @@ protected:
        ngraph::ResultVector outputs;
        outputs.push_back(std::make_shared<ngraph::opset1::Result>(outputNode));
        function = std::make_shared<ngraph::Function>(outputs, inputs);
+        functionRefs = ngraph::clone_function(*function);
    }
};

View File

@@ -71,6 +71,7 @@ protected:
        auto activation = ngraph::builder::makeActivation(params[0], ngPrc, activationType, shapes.second, constantsValue);
        activation->get_rt_info() = getCPUInfo();
        function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params, "Activation");
+        functionRefs = ngraph::clone_function(*function);
    }

    InferenceEngine::Precision netPrecision;

View File

@@ -81,6 +81,7 @@ protected:
        threshold = 1e-2;
        function = (mode == "max" ? std::make_shared<ngraph::Function>(adapoolMax->outputs(), params, "AdaPoolMax") :
                    std::make_shared<ngraph::Function>(adapoolAvg->outputs(), params, "AdaPoolAvg"));
+        functionRefs = ngraph::clone_function(*function);
    }
};

View File

@@ -55,6 +55,7 @@ protected:
        b2s->get_rt_info() = getCPUInfo();
        ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(b2s)};
        function = std::make_shared<ngraph::Function>(results, params, "BatchToSpace");
+        functionRefs = ngraph::clone_function(*function);
    }
};

View File

@@ -56,6 +56,7 @@ protected:
        auto concat = std::make_shared<ngraph::opset1::Concat>(paramOuts, axis);

        function = makeNgraphFunction(ngPrc, params, concat, "concat");
+        functionRefs = ngraph::clone_function(*function);
    }
};

View File

@@ -63,6 +63,7 @@ protected:
        auto powerStatic = ngraph::builder::makeEltwise(inputs[0], inputs[1], nodeType);

        function = std::make_shared<ngraph::Function>(powerStatic, ParameterVector{param}, "ConvertToPluginSpecificNode");
+        functionRefs = ngraph::clone_function(*function);
    }
};

View File

@ -18,13 +18,13 @@ using LayerTestsDefinitions::convSpecificParams;
using LayerTestsDefinitions::convLayerTestParamsSet;
typedef std::tuple<
convLayerTestParamsSet,
CPUSpecificParams,
fusingSpecificParams,
std::map<std::string, std::string> > convLayerCPUTestParamsSet;
class ConvolutionLayerCPUTest : public testing::WithParamInterface<convLayerCPUTestParamsSet>,
virtual public LayerTestsUtils::LayerTestsCommon, public CpuTestWithFusing {
public:
static std::string getTestCaseName(const testing::TestParamInfo<convLayerCPUTestParamsSet>& obj) {
convLayerTestParamsSet basicParamsSet;
@ -35,7 +35,7 @@ public:
std::ostringstream result;
result << LayerTestsDefinitions::ConvolutionLayerTest::getTestCaseName(testing::TestParamInfo<convLayerTestParamsSet>(
basicParamsSet, 0));
result << CPUTestsBase::getTestCaseName(cpuParams);
result << CpuTestWithFusing::getTestCaseName(fusingParams);
@ -116,9 +116,11 @@ protected:
auto paramOuts = ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(inputParams));
auto convolutionNode = ngraph::builder::makeConvolution(paramOuts.front(), ngPrc, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels);
function = makeNgraphFunction(ngPrc, inputParams, convolutionNode, "Convolution");
functionRefs = ngraph::clone_function(*function);
functionRefs->set_friendly_name("convolutionRefs");
}
};
@ -206,13 +208,13 @@ const std::vector<SizeVector> inputShapesPlain2Blocked3d = { {1, 1, 7, 7, 7}, {1
/* INSTANCES */
/* ============= Convolution (Gemm 2D) ============= */
const auto convParams_ExplicitPadding_GEMM_2D = ::testing::Combine(
::testing::ValuesIn(kernels2d),
::testing::ValuesIn(strides2d),
::testing::ValuesIn(padBegins2d),
::testing::ValuesIn(padEnds2d),
::testing::ValuesIn(dilations2d),
::testing::ValuesIn(numOutChannels_Gemm),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const std::vector<CPUSpecificParams> CPUParams_GEMM_2D = {
@ -221,62 +223,62 @@ const std::vector<CPUSpecificParams> CPUParams_GEMM_2D = {
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_GEMM_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_GEMM_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_2D)),
::testing::ValuesIn(fusingParamsSet),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_GEMM_BF16, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_GEMM_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::BF16),
::testing::Values(Precision::BF16),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_2D)),
::testing::ValuesIn(fusingParamsSetBF16),
::testing::Values(cpuBF16PluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_GEMM_I8, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_GEMM_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::I8),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7 })),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_2D)),
::testing::Values(fusingSum),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= Convolution (GEMM 3D) ============= */
const auto convParams_ExplicitPadding_GEMM_3D = ::testing::Combine(
::testing::ValuesIn(kernels3d),
::testing::ValuesIn(strides3d),
::testing::ValuesIn(padBegins3d),
::testing::ValuesIn(padEnds3d),
::testing::ValuesIn(dilations3d),
::testing::ValuesIn(numOutChannels_Gemm),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const std::vector<CPUSpecificParams> CPUParams_GEMM_3D = {
@ -285,62 +287,62 @@ const std::vector<CPUSpecificParams> CPUParams_GEMM_3D = {
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_GEMM_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_GEMM_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_3D)),
::testing::ValuesIn(fusingParamsSet),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_GEMM_BF16, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_GEMM_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::BF16),
::testing::Values(Precision::BF16),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_3D)),
::testing::ValuesIn(fusingParamsSetBF16),
::testing::Values(cpuBF16PluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_GEMM_I8, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_GEMM_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::I8),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 12, 7, 7, 7 })),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_GEMM_3D)),
::testing::Values(fusingSum),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= Convolution (2D) ============= */
const auto convParams_ExplicitPadding_2D = ::testing::Combine(
::testing::ValuesIn(kernels2d),
::testing::ValuesIn(strides2d),
::testing::ValuesIn(padBegins2d),
::testing::ValuesIn(padEnds2d),
::testing::ValuesIn(dilations2d),
::testing::ValuesIn(numOutChannels),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const std::vector<CPUSpecificParams> CPUParams_2D = {
@ -353,52 +355,52 @@ const std::vector<CPUSpecificParams> CPUParams_2D = {
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_2D)),
::testing::ValuesIn(fusingParamsSet),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_BF16, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::BF16),
::testing::Values(Precision::BF16),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D, conv_avx512_2D_nspc})),
::testing::ValuesIn(fusingParamsSetBF16),
::testing::Values(cpuBF16PluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_I8, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::I8),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_2D)),
::testing::Values(fusingSum),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
const std::vector<CPUSpecificParams> CPUParams_2D_plain_to_blocked = {
conv_sse42_plain_to_blocked_2D,
@ -407,103 +409,103 @@ const std::vector<CPUSpecificParams> CPUParams_2D_plain_to_blocked = {
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_2D_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapesPlain2Blocked2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_2D_plain_to_blocked)),
::testing::Values(emptyFusingSpec),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_2D_BF16, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::BF16),
::testing::Values(Precision::BF16, Precision::FP32),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapesPlain2Blocked2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_plain_to_blocked_2D})),
::testing::Values(emptyFusingSpec),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= Convolution (3D) ============= */
const auto convParams_ExplicitPadding_3D = ::testing::Combine(
::testing::ValuesIn(kernels3d),
::testing::ValuesIn(strides3d),
::testing::ValuesIn(padBegins3d),
::testing::ValuesIn(padEnds3d),
::testing::ValuesIn(dilations3d),
::testing::ValuesIn(numOutChannels),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const std::vector<CPUSpecificParams> CPUParams_3D = {
//conv_sse42_3D, // not supported jit_sse42 for 3d
conv_avx2_3D,
conv_avx512_3D,
conv_avx2_3D_nspc,
conv_avx512_3D_nspc
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes3d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_3D)),
::testing::ValuesIn(fusingParamsSet),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_BF16, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::BF16),
::testing::Values(Precision::BF16),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes3d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_3D, conv_avx512_3D_nspc})),
::testing::ValuesIn(fusingParamsSetBF16),
::testing::Values(cpuBF16PluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_3D_I8, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::I8),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes3d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_3D)),
::testing::Values(fusingSum),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
const std::vector<CPUSpecificParams> CPUParams_3D_plain_to_blocked = {
conv_avx2_plain_to_blocked_3D,
@ -511,36 +513,36 @@ const std::vector<CPUSpecificParams> CPUParams_3D_plain_to_blocked = {
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_3D_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapesPlain2Blocked3d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_3D_plain_to_blocked)),
::testing::Values(emptyFusingSpec),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_PlainToBlocked_3D_BF16, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::BF16),
::testing::Values(Precision::BF16, Precision::FP32),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapesPlain2Blocked3d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_plain_to_blocked_3D})),
::testing::Values(emptyFusingSpec),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= Kernel_1x1 (2D) ============= */
@ -555,61 +557,61 @@ const auto convParams_ExplicitPadding_1x1_2D = ::testing::Combine(
);
const std::vector<CPUSpecificParams> CPUParams_1x1_2D = {
conv_sse42_2D_1x1,
conv_avx2_2D_1x1,
conv_avx512_2D_1x1,
conv_sse42_2D_1x1_nspc,
conv_avx2_2D_1x1_nspc,
conv_avx512_2D_1x1_nspc
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_1x1_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_1x1_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_1x1_2D)),
::testing::ValuesIn(fusingParamsSet),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_1x1_BF16, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_1x1_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::BF16),
::testing::Values(Precision::BF16),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice({conv_avx512_2D_1x1, conv_avx512_2D_1x1_nspc})),
::testing::ValuesIn(fusingParamsSetBF16),
::testing::Values(cpuBF16PluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_2D_1x1_I8, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_ExplicitPadding_1x1_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::I8),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_1x1_2D)),
::testing::Values(fusingSum),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= Convolution (1D) ============= */
/* ============= Convolution params (1D) ============= */
@ -620,13 +622,13 @@ const std::vector<std::vector<ptrdiff_t>> padEnds1d = { {0} };
const std::vector<SizeVector> dilations1d = { {1}, {2} };
const auto convParams_1D = ::testing::Combine(
::testing::ValuesIn(kernels1d),
::testing::ValuesIn(strides1d),
::testing::ValuesIn(padBegins1d),
::testing::ValuesIn(padEnds1d),
::testing::ValuesIn(dilations1d),
::testing::ValuesIn(numOutChannels),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const std::vector<CPUSpecificParams> CPUParams_1D = {
@ -636,20 +638,20 @@ const std::vector<CPUSpecificParams> CPUParams_1D = {
};
INSTANTIATE_TEST_SUITE_P(smoke_Conv_1D, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_1D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 2, 64, 7})),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_1D)),
::testing::Values(fusingAddPerChannel),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= Jit Planar ============= */
@ -671,20 +673,20 @@ const auto convParams_Planar_ExplicitPadding_2D = ::testing::Combine(
);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_Jit_Planar_2D_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_Planar_ExplicitPadding_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes2d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_Jit_Planar_2D)),
::testing::Values(emptyFusingSpec, fusingRelu),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= Convolution planar params (3D) ============= */
const std::vector<CPUSpecificParams> CPUParams_Jit_Planar_3D = {
@ -704,20 +706,20 @@ const auto convParams_Planar_ExplicitPadding_3D = ::testing::Combine(
);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_Jit_Planar_3D_FP32, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_Planar_ExplicitPadding_3D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::ValuesIn(inputShapes3d),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(CPUParams_Jit_Planar_3D)),
::testing::Values(emptyFusingSpec, fusingRelu),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
/* ============= */
@ -743,31 +745,31 @@ const std::vector<std::vector<ptrdiff_t>> padEnds2d = { {0, 0} };
const std::vector<SizeVector> dilations2d = { {1, 1} };
const auto convParams_2D = ::testing::Combine(
::testing::ValuesIn(kernels2d),
::testing::ValuesIn(strides2d),
::testing::ValuesIn(padBegins2d),
::testing::ValuesIn(padEnds2d),
::testing::ValuesIn(dilations2d),
::testing::ValuesIn(numOutChannels),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
INSTANTIATE_TEST_SUITE_P(smoke_Conv_winograd, ConvolutionLayerCPUTest,
::testing::Combine(
::testing::Combine(
convParams_2D,
::testing::Values(Precision::FP32),
::testing::Values(Precision::FP32),
::testing::Values(Precision::UNSPECIFIED),
::testing::Values(Layout::ANY),
::testing::Values(Layout::ANY),
::testing::Values(std::vector<size_t >({ 1, 16, 10, 10 })),
::testing::Values(CommonTestUtils::DEVICE_CPU)),
::testing::ValuesIn(filterCPUInfoForDevice(std::vector<CPUSpecificParams>{conv_winograd})),
::testing::ValuesIn(fusingParamsSet),
::testing::Values(cpuEmptyPluginConfig)),
ConvolutionLayerCPUTest::getTestCaseName);
} // namespace winograd
} // namespace CPULayerTestsDefinitions


@ -95,6 +95,7 @@ protected:
}
function = makeNgraphFunction(ngPrc, inputParams, deconvolutionNode, "convolutionBackpropData");
functionRefs = ngraph::clone_function(*function);
}
};


@ -55,6 +55,7 @@ protected:
d2s->get_rt_info() = getCPUInfo();
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(d2s)};
function = std::make_shared<ngraph::Function>(results, params, "DepthToSpace");
functionRefs = ngraph::clone_function(*function);
}
};


@ -16,7 +16,7 @@ typedef std::tuple<
CPUSpecificParams> EltwiseLayerCPUTestParamsSet;
class EltwiseLayerCPUTest : public testing::WithParamInterface<EltwiseLayerCPUTestParamsSet>,
virtual public LayerTestsUtils::LayerTestsCommon, public CPUTestsBase {
public:
static std::string getTestCaseName(testing::TestParamInfo<EltwiseLayerCPUTestParamsSet> obj) {
LayerTestsDefinitions::EltwiseTestParams basicParamsSet;
@ -37,24 +37,26 @@ protected:
CPUSpecificParams cpuParams;
std::tie(basicParamsSet, cpuParams) = this->GetParam();
std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>> shapes;
InferenceEngine::Precision netPrecision;
ngraph::helpers::InputLayerType secondaryInputType;
CommonTestUtils::OpType opType;
ngraph::helpers::EltwiseTypes eltwiseType;
std::map<std::string, std::string> additional_config;
std::tie(shapes, eltwiseType, secondaryInputType, opType, netPrecision, inPrc, outPrc, inLayout, targetDevice, additional_config) = basicParamsSet;
targetStaticShapes = shapes.second;
inputDynamicShapes = shapes.first;
std::tie(inFmts, outFmts, priority, selectedType) = cpuParams;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
selectedType = getPrimitiveType() + "_" + netPrecision.name();
ngraph::Shape inputShape1 = targetStaticShapes.front().front(), inputShape2 = targetStaticShapes.front().back();
if (targetStaticShapes.front().size() == 1) {
inputShape1 = inputShape2 = targetStaticShapes.front().front();
} else if (targetStaticShapes.front().size() == 2) {
inputShape1 = targetStaticShapes.front().front();
inputShape2 = targetStaticShapes.front().back();
} else {
IE_THROW() << "Incorrect number of input shapes";
}
@ -100,6 +102,8 @@ protected:
auto eltwise = ngraph::builder::makeEltwise(input[0], secondaryInput, eltwiseType);
function = makeNgraphFunction(ngPrc, input, eltwise, "Eltwise");
functionRefs = ngraph::clone_function(*function);
functionRefs->set_friendly_name("EltwiseRefs");
}
};
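Every shape container below now uses the same pair layout as the shapes tuple element unpacked in SetUp(): the first member holds the dynamic (partial) input shapes, the second holds groups of target static shapes, and an empty first member, as in all entries of this file, marks a purely static case. A sketch of the convention, in which the alias and the dynamic entry are illustrative assumptions:

#include <ngraph/ngraph.hpp>
#include <utility>
#include <vector>

using InputShapesPair = std::pair<std::vector<ngraph::PartialShape>,
                                  std::vector<std::vector<ngraph::Shape>>>;

std::vector<InputShapesPair> inShapes_sketch = {
    // Static case: no dynamic shapes, a single group with one target shape.
    {{}, {{{2, 4, 4, 1}}}},
    // Hypothetical dynamic case: unknown batch; two target-shape groups.
    {{{ngraph::Dimension::dynamic(), 17, 5, 4}},
     {{{2, 17, 5, 4}}, {{1, 17, 5, 4}}}},
};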
@ -140,11 +144,11 @@ std::map<std::string, std::string> additional_config;
std::vector<Precision> netPrc = {Precision::BF16, Precision::FP32};
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_4D = {
{{}, {{{2, 4, 4, 1}}}},
{{}, {{{2, 17, 5, 4}}}},
{{}, {{{2, 17, 5, 4}, {1, 17, 1, 1}}}},
{{}, {{{2, 17, 5, 1}, {1, 17, 1, 4}}}},
};
std::vector<CPUSpecificParams> cpuParams_4D = {
@ -185,11 +189,11 @@ const auto params_4D_emptyCPUSpec = ::testing::Combine(
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_4D_emptyCPUSpec, EltwiseLayerCPUTest, params_4D_emptyCPUSpec, EltwiseLayerCPUTest::getTestCaseName);
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_5D = {
{{}, {{{2, 4, 3, 4, 1}}}},
{{}, {{{2, 17, 7, 5, 4}}}},
{{}, {{{2, 17, 6, 5, 4}, {1, 17, 6, 1, 1}}}},
{{}, {{{2, 17, 6, 5, 1}, {1, 17, 1, 1, 4}}}},
};
std::vector<CPUSpecificParams> cpuParams_5D = {
@ -230,9 +234,9 @@ const auto params_5D_emptyCPUSpec = ::testing::Combine(
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_5D, EltwiseLayerCPUTest, params_5D_emptyCPUSpec, EltwiseLayerCPUTest::getTestCaseName);
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_4D_Blocked_Planar = {
{{}, {{{2, 17, 31, 3}, {2, 1, 31, 3}}}},
{{}, {{{2, 17, 5, 1}, {2, 1, 1, 4}}}},
};
std::vector<CPUSpecificParams> cpuParams_4D_Blocked_Planar = {
@ -256,9 +260,9 @@ const auto params_4D_Blocked_Planar = ::testing::Combine(
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_4D_Blocked_Planar, EltwiseLayerCPUTest, params_4D_Blocked_Planar, EltwiseLayerCPUTest::getTestCaseName);
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_4D_Planar_Blocked = {
{{}, {{{2, 1, 31, 3}, {2, 17, 31, 3}}}},
{{}, {{{2, 1, 1, 4}, {2, 17, 5, 1}}}},
};
std::vector<CPUSpecificParams> cpuParams_4D_Planar_Blocked = {
@ -282,9 +286,9 @@ const auto params_4D_Planar_Blocked = ::testing::Combine(
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_4D_Planar_Blocked, EltwiseLayerCPUTest, params_4D_Planar_Blocked, EltwiseLayerCPUTest::getTestCaseName);
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_5D_Blocked_Planar = {
{{}, {{{2, 17, 31, 4, 3}, {2, 1, 31, 1, 3}}}},
{{}, {{{2, 17, 5, 3, 1}, {2, 1, 1, 3, 4}}}},
};
std::vector<CPUSpecificParams> cpuParams_5D_Blocked_Planar = {
@ -308,9 +312,9 @@ const auto params_5D_Blocked_Planar = ::testing::Combine(
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_5D_Blocked_Planar, EltwiseLayerCPUTest, params_5D_Blocked_Planar, EltwiseLayerCPUTest::getTestCaseName);
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_5D_Planar_Blocked = {
{{}, {{{2, 1, 31, 1, 3}, {2, 17, 31, 4, 3}}}},
{{}, {{{2, 1, 1, 3, 4}, {2, 17, 5, 3, 1}}}},
};
std::vector<CPUSpecificParams> cpuParams_5D_Planar_Blocked = {
@ -334,9 +338,9 @@ const auto params_5D_Planar_Blocked = ::testing::Combine(
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_5D_Planar_Blocked, EltwiseLayerCPUTest, params_5D_Planar_Blocked, EltwiseLayerCPUTest::getTestCaseName);
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_4D_1D = {
{{}, {{{2, 17, 5, 4}, {4}}}},
{{}, {{{1, 3, 3, 3}, {3}}}},
};
std::vector<CPUSpecificParams> cpuParams_4D_1D = {
@ -361,9 +365,9 @@ const auto params_4D_1D = ::testing::Combine(
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_4D_1D, EltwiseLayerCPUTest, params_4D_1D, EltwiseLayerCPUTest::getTestCaseName);
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes_5D_1D = {
{{}, {{{2, 17, 5, 4, 10}, {10}}}},
{{}, {{{1, 3, 3, 3, 3}, {3}}}},
};
std::vector<CPUSpecificParams> cpuParams_5D_1D = {


@ -53,6 +53,7 @@ protected:
inputNode, ngraph::Shape(kernel), ngraph::Strides(strides), ngraph::Shape(rates), pad_type);
ngraph::ResultVector results{std::make_shared<ngraph::opset6::Result>(extImgPatches)};
function = std::make_shared<ngraph::Function>(results, params, "ExtractImagePatches");
functionRefs = ngraph::clone_function(*function);
}
};


@ -123,6 +123,7 @@ protected:
fq->get_rt_info() = getCPUInfo();
function = std::make_shared<Function>(fq, params, "FakeQuantizeCPU");
functionRefs = ngraph::clone_function(*function);
}
private:


@ -60,6 +60,7 @@ protected:
auto activation = ngraph::builder::makeGatherElements(params[0], indicesShape, ngIPrc, axis);
activation->get_rt_info() = getCPUInfo();
function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params, "GatherElements");
functionRefs = ngraph::clone_function(*function);
}
};


@ -104,6 +104,7 @@ protected:
ngraph::builder::makeGroupConvolution(paramOuts[0], ngPrc, kernel, stride, padBegin,
padEnd, dilation, padType, convOutChannels, numGroups));
function = makeNgraphFunction(ngPrc, params, groupConv, "groupConvolution");
functionRefs = ngraph::clone_function(*function);
}
};


@ -94,6 +94,7 @@ protected:
padEnd, dilation, padType, convOutChannels, numGroups, false, outputPadding));
}
function = makeNgraphFunction(ngPrc, params, groupConv, "groupConvolutionBackpropData");
functionRefs = ngraph::clone_function(*function);
}
};


@ -88,6 +88,7 @@ protected:
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(gru_cell->output(0))};
function = makeNgraphFunction(ngPrc, params, gru_cell, "gru_cell");
functionRefs = ngraph::clone_function(*function);
}
};


@ -137,6 +137,8 @@ protected:
bool ti_found = ngraph::helpers::is_tensor_iterator_exist(function);
EXPECT_EQ(ti_found, false);
}
functionRefs = ngraph::clone_function(*function);
}
void GenerateInputs() override {


@ -107,6 +107,7 @@ protected:
selectedType += "BF16";
else
selectedType += netPrecision.name();
functionRefs = ngraph::clone_function(*function);
}
};


@ -72,6 +72,7 @@ protected:
logicalNode->get_rt_info() = getCPUInfo();
function = std::make_shared<ngraph::Function>(logicalNode, inputs, "Logical");
functionRefs = ngraph::clone_function(*function);
}
};


@ -87,6 +87,7 @@ protected:
std::make_shared<ngraph::opset1::Result>(lstm_cell->output(1))};
function = makeNgraphFunction(ngPrc, params, lstm_cell, "lstm_cell");
functionRefs = ngraph::clone_function(*function);
}
};


@ -144,6 +144,8 @@ protected:
bool ti_found = ngraph::helpers::is_tensor_iterator_exist(function);
EXPECT_EQ(ti_found, false);
}
functionRefs = ngraph::clone_function(*function);
}
void GenerateInputs() override {


@ -98,6 +98,7 @@ protected:
auto paramOuts = helpers::convert2OutputVector(helpers::castOps2Nodes<opset1::Parameter>(params));
auto matMul = builder::makeMatMul(paramOuts[0], matrixB, transpA, transpB);
function = makeNgraphFunction(ngPrec, params, matMul, cpuNodeType);
functionRefs = ngraph::clone_function(*function);
checkFusingPosition = false;
}
};


@ -71,6 +71,7 @@ protected:
threshold = 0.015f;
function = makeNgraphFunction(netPrc, param, mvn, "mvn");
functionRefs = ngraph::clone_function(*function);
}
};


@ -57,6 +57,7 @@ protected:
auto normalize = builder::makeNormalizeL2(paramOuts[0], axes, eps, eps_mode);
function = makeNgraphFunction(netPrc, params, normalize, "Normalize");
functionRefs = ngraph::clone_function(*function);
selectedType = "unknown_" + std::string(inPrc.name());
threshold = 0.015f;


@ -73,6 +73,7 @@ protected:
auto oneHot = std::make_shared<ngraph::opset5::OneHot>(inputParams.front(), depthConst, onConst, offConst, axis);
function = makeNgraphFunction(ngPrc, inputParams, oneHot, "OneHot");
functionRefs = ngraph::clone_function(*function);
}
};


@ -58,6 +58,7 @@ protected:
pad->get_rt_info() = getCPUInfo();
ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(pad)};
function = std::make_shared<ngraph::Function>(results, params, "pad");
functionRefs = ngraph::clone_function(*function);
}
};


@ -86,6 +86,7 @@ protected:
function = makeNgraphFunction(ngPrc, params, pooling, "Pooling");
functionRefs = ngraph::clone_function(*function);
}
};


@ -97,6 +97,7 @@ protected:
threshold = 1e-2;
const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(psroi)};
function = std::make_shared<ngraph::Function>(results, params, "PSROIPooling");
functionRefs = ngraph::clone_function(*function);
}
};


@ -74,6 +74,7 @@ protected:
const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(reduce)};
function = std::make_shared<ngraph::Function>(results, params, "Reduce");
functionRefs = ngraph::clone_function(*function);
}
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override {
if (ngraph::helpers::ReductionType::Prod == reductionType) {


@ -77,6 +77,7 @@ protected:
attributes.do_softmax, mask, attributes.start_axis, attributes.end_axis);
function = makeNgraphFunction(ngPrc, paramRegionYolo, region_yolo, "RegionYolo");
functionRefs = ngraph::clone_function(*function);
}
};


@ -82,6 +82,7 @@ protected:
WRB, hidden_size, activations, {}, {}, clip);
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(rnn_cell)};
function = makeNgraphFunction(ngPrc, params, rnn_cell, "rnn_cell");
functionRefs = ngraph::clone_function(*function);
}
};


@ -117,6 +117,7 @@ protected:
bool ti_found = ngraph::helpers::is_tensor_iterator_exist(function);
EXPECT_EQ(ti_found, false);
}
functionRefs = ngraph::clone_function(*function);
}
void GenerateInputs() override {


@ -144,6 +144,7 @@ protected:
ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(roi_pooling)};
function = makeNgraphFunction(ngPrc, params, roi_pooling, "roi_pooling");
functionRefs = ngraph::clone_function(*function);
selectedType += "_";
selectedType += netPrecision.name();


@ -98,6 +98,7 @@ protected:
threshold = 1e-2;
const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(roialign)};
function = std::make_shared<ngraph::Function>(results, params, "ROIAlign");
functionRefs = ngraph::clone_function(*function);
}
};


@ -63,6 +63,8 @@ protected:
}
selectedType.push_back('_');
selectedType += netPrecision.name();
functionRefs = ngraph::clone_function(*function);
}
};


@ -67,6 +67,7 @@ protected:
const auto softMax = std::make_shared<ngraph::opset1::Softmax>(paramOuts.at(0), config.axis);
function = makeNgraphFunction(ngPrc, params, softMax, "SoftMax");
functionRefs = ngraph::clone_function(*function);
}
};


@ -60,6 +60,7 @@ protected:
s2b->get_rt_info() = getCPUInfo();
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(s2b)};
function = std::make_shared<ngraph::Function>(results, params, "SpaceToBatch");
functionRefs = ngraph::clone_function(*function);
}
};


@ -55,6 +55,7 @@ protected:
d2s->get_rt_info() = getCPUInfo();
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(d2s)};
function = std::make_shared<ngraph::Function>(results, params, "SpaceToDepth");
functionRefs = ngraph::clone_function(*function);
}
};


@ -79,6 +79,7 @@ protected:
}
split->get_rt_info() = getCPUInfo();
function = std::make_shared<ngraph::Function>(results, params, "split");
functionRefs = ngraph::clone_function(*function);
}
};


@ -71,6 +71,7 @@ protected:
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(ss)};
function = std::make_shared<ngraph::Function>(results, params, "StridedSlice");
functionRefs = ngraph::clone_function(*function);
}
};


@ -67,6 +67,7 @@ protected:
transpose->get_rt_info() = getCPUInfo();
const ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(transpose)};
function = std::make_shared<ngraph::Function>(results, params, "Transpose");
functionRefs = ngraph::clone_function(*function);
}
};


@ -35,6 +35,7 @@ public:
auto gather = std::make_shared<ngraph::opset3::Gather>(paramOuts[0], indicesNode, axisNode);
ngraph::ResultVector results{std::make_shared<ngraph::opset3::Result>(gather)};
function = std::make_shared<ngraph::Function>(results, params, "gather");
functionRefs = ngraph::clone_function(*function);
}
std::vector<std::pair<ngraph::element::Type, std::vector<std::uint8_t>>> CalculateRefs() override {
// Convert the second input constant precision to i64 to run the reference function


@ -42,6 +42,7 @@ public:
ngraph::ResultVector results{std::make_shared<ngraph::opset8::Result>(concat)};
function = std::make_shared<ngraph::Function>(results, inputParams, "ConcatConstantInPlace");
functionRefs = ngraph::clone_function(*function);
}
};


@ -75,6 +75,7 @@ protected:
}
function = std::make_shared<ngraph::Function>(results, inputParams, "Conv3dReshape");
functionRefs = ngraph::clone_function(*function);
}
};


@ -109,6 +109,7 @@ void ConvConcatSubgraphTest::SetUp() {
ngraph::ResultVector results{std::make_shared<ngraph::opset4::Result>(concat)};
function = std::make_shared<ngraph::Function>(results, inputParams, "convolutionConcat");
functionRefs = ngraph::clone_function(*function);
}
TEST_P(ConvConcatSubgraphTest, CompareWithRefs) {


@ -59,6 +59,7 @@ protected:
}
function = makeNgraphFunction(element::f32, inputParams, pooling, "ConvPoolActiv");
functionRefs = ngraph::clone_function(*function);
}
};


@ -105,6 +105,7 @@ protected:
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(eltwiseOps[eltwiseOps.size() - 1])};
function = std::make_shared<ngraph::Function>(results, ngraphParam, "eltwise_chain");
}
functionRefs = ngraph::clone_function(*function);
}
};


@ -29,6 +29,7 @@ void FuseMulAddAndEwSimpleTest::SetUp() {
std::tie(inputShape, inPrec) = this->GetParam();
CreateGraph();
functionRefs = ngraph::clone_function(*function);
}
const auto mulAddAndEwSimpleCommonParams = ::testing::Combine(


@ -76,6 +76,7 @@ protected:
quantizeIntervals[3]);
ngraph::ResultVector results{std::make_shared<ngraph::opset6::Result>(quantize)};
function = std::make_shared<ngraph::Function>(results, ngraph::ParameterVector{param}, "FuseScaleShiftAndQuantize");
functionRefs = ngraph::clone_function(*function);
}
};


@ -48,6 +48,7 @@ void FuseTransposeAndReorderTest::SetUp() {
std::tie(inputShape, inPrec) = this->GetParam();
CreateGraph();
functionRefs = ngraph::clone_function(*function);
}
const auto fuseTransposeAndReorderCommonParams = ::testing::Combine(


@ -33,6 +33,7 @@ protected:
auto eltwise = ngraph::builder::makeEltwise(input[0], secondaryInput, eltwiseType);
function = makeNgraphFunction(ngPrc, input, eltwise, "Eltwise");
functionRefs = ngraph::clone_function(*function);
}
};


@ -36,6 +36,7 @@ protected:
NodeVector results{postOpCandidate, secondConsumpt};
function = std::make_shared<ngraph::Function>(results, inputParams, "NotFusedConvSimpleOp");
functionRefs = ngraph::clone_function(*function);
}
};


@ -60,6 +60,7 @@ protected:
auto matMul = builder::makeMatMul(reshape, matrixB, false, transpB);
function = makeNgraphFunction(element::f32, inputParams, matMul, "ReshapeFC");
functionRefs = ngraph::clone_function(*function);
}
};


@ -29,6 +29,7 @@ protected:
NodeVector results{add1, add2};
function = std::make_shared<ngraph::Function>(results, inputParams, "TileWithTwoOutputEdges");
functionRefs = ngraph::clone_function(*function);
}
};


@ -131,6 +131,7 @@ protected:
auto relu2 = std::make_shared<ngraph::opset1::Relu>(conv);
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(relu2)};
function = std::make_shared<ngraph::Function>(results, params, "ExportImportNetwork");
functionRefs = ngraph::clone_function(*function);
}
private:


@ -96,6 +96,7 @@ protected:
relu->add_control_dependency(mem_w);
ngraph::ResultVector results{std::make_shared<ngraph::opset1::Result>(relu)};
function = std::make_shared<ngraph::Function>(results, params, "ExportImportNetwork");
functionRefs = ngraph::clone_function(*function);
}
private:


@ -72,6 +72,7 @@ class Eltwise4dBroadcast : public testing::WithParamInterface<eltwiseParams>,
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
function = std::make_shared<ngraph::Function>(results, params, "Eltwise4dBroadcast");
functionRefs = ngraph::clone_function(*function);
}
};
@ -120,6 +121,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3) };
function = std::make_shared<ngraph::Function>(results, params, "Eltwise4dMultipleInput");
functionRefs = ngraph::clone_function(*function);
}
};


@ -90,6 +90,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(maxpool)};
function = std::make_shared<ngraph::Function>(results, inputVector, "ActMaxpoolReordering");
functionRefs = ngraph::clone_function(*function);
}
};


@ -64,6 +64,7 @@ protected:
auto add = std::make_shared<ngraph::opset1::Add>(fakeQuantize1, fakeQuantize2);
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(add)};
function = std::make_shared<ngraph::Function>(results, params, "BroadcastConstWithFq");
functionRefs = ngraph::clone_function(*function);
}
};


@ -118,6 +118,7 @@ protected:
auto result = std::make_shared<Result>(lastOp);
function = std::make_shared<Function>(ResultVector{result}, ParameterVector{input});
functionRefs = ngraph::clone_function(*function);
}
};


@ -200,6 +200,7 @@ protected:
auto result = std::make_shared<Result>(lastOp);
function = std::make_shared<Function>(ResultVector{result}, ParameterVector{input});
functionRefs = ngraph::clone_function(*function);
}
};


@ -199,6 +199,7 @@ protected:
auto result = std::make_shared<Result>(lastOp);
function = std::make_shared<Function>(ResultVector{result}, ParameterVector{input});
functionRefs = ngraph::clone_function(*function);
}
};


@ -56,6 +56,7 @@ protected:
auto mul = ngraph::builder::makeEltwise(params[0], const_mult2, ngraph::helpers::EltwiseTypes::MULTIPLY);
function = std::make_shared<ngraph::Function>(mul, params, "EltwiseSplitOverChannelsPassTest");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -87,6 +87,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reluFQNode) };
function = std::make_shared<ngraph::Function>(results, inputVector, "FQActivation");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -99,6 +99,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset7::Result>(add3)};
function = std::make_shared<ngraph::Function>(results, params, "FQFusionWithMultipleWeights");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -41,6 +41,7 @@ protected:
auto mul3 = ngraph::builder::makeEltwise(mul2, fake3, ngraph::helpers::EltwiseTypes::ADD);
auto result = std::make_shared<ngraph::opset7::Result>(mul3);
function = std::make_shared<ngraph::Function>(ngraph::ResultVector{result}, input, "fq_fusion_with_sigmoid");
functionRefs = ngraph::clone_function(*function);
}
public:
static std::string getTestCaseName(const testing::TestParamInfo<fqFusionWithSigmoidParams> &obj) {


@@ -104,6 +104,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(maxpool)};
function = std::make_shared<ngraph::Function>(results, inputVector, "FQMaxPoolReorder");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -89,6 +89,7 @@ protected:
results.push_back(std::make_shared<ngraph::opset8::Result>(reluFQNode));
}
function = std::make_shared<ngraph::Function>(results, inputVector, "FQOutputsActivation");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -77,6 +77,7 @@ protected:
ngraph::ResultVector results{std::make_shared<ngraph::opset8::Result>(reshape2),
std::make_shared<ngraph::opset8::Result>(reshape3)};
function = std::make_shared<ngraph::Function>(results, params, "FQFusionWithMultipleWeights");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -80,6 +80,7 @@ protected:
results.push_back(std::make_shared<ngraph::opset1::Result>(relu));
}
function = std::make_shared<ngraph::Function>(results, params, "InsertCopyBeforeSelfConcat");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -91,6 +91,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(matmul)};
function = std::make_shared<ngraph::Function>(results, params, "InsertTransposeBeforeMatmul");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -100,6 +100,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3)};
function = std::make_shared<ngraph::Function>(results, params, "InsertTransposeBetweenConvs");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -178,6 +179,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3)};
function = std::make_shared<ngraph::Function>(results, params, "InsertTransposeBetweenConvs");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -75,6 +75,7 @@ protected:
InferenceEngine::Precision netPrecision;
std::tie(netPrecision, configuration, targetDevice) = this->GetParam();
function = T::createTopology(netPrecision);
functionRefs = ngraph::clone_function(*function);
}
};


@@ -102,6 +102,7 @@ class RemovePermutationsNHWCToNCHWPassTest : public testing::WithParamInterface<
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -145,6 +146,7 @@ protected:
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(permute2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass4DOutput");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -240,6 +242,7 @@ class RemovePermutationsWithPoolAndActTest : public testing::WithParamInterface<
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -330,6 +333,7 @@ class RemovePermutationsWithTwoConvTest : public testing::WithParamInterface<rem
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape2) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};
@@ -428,6 +432,7 @@ class RemovePermutationsWithEltwiseTest : public testing::WithParamInterface<rem
ngraph::ResultVector results{ std::make_shared<ngraph::opset1::Result>(reshape3) };
function = std::make_shared<ngraph::Function>(results, params, "RemovePermutationPass");
functionRefs = ngraph::clone_function(*function);
}
};


@@ -66,16 +66,16 @@ const std::vector<std::vector<size_t>> inputShapesW1 = {{1, 1, 32, 1},
const std::vector<size_t> numOutCannels = {4, 8, 12};
const std::vector<std::vector<size_t >> kernels2D = {
{5, 1},
{4, 1},
{1, 3},
{1, 2},
{2, 2},
{7, 1},
{3, 3},
{5, 1},
{4, 1},
{1, 3},
{1, 2},
{2, 2},
{7, 1},
{3, 3},
};
const std::vector<std::vector<size_t >> strides2D = {
{1, 1},
{1, 1},
};
const std::vector<std::vector<ptrdiff_t>> padBegins2D = { {0, 0},
};
@@ -92,13 +92,13 @@ const std::vector<std::vector<size_t>> inputShapesMapTo1d = {{1, 1, 56, 5},
{1, 2, 64, 5}};
const auto conv2DParams_Kernels2D = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutCannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutCannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const auto conv2DParams_ExplicitPadding_Height1 = ::testing::Combine(
::testing::ValuesIn(kernelsH1),
@@ -148,74 +148,74 @@ const auto conv2DParams_AutoPadValid_MapTo1d = ::testing::Combine(
// TODO: padding isn't currently supported in GNA
INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Convolution2D_ExplicitPadding_Height1, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_ExplicitPadding_Height1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesH1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_ExplicitPadding_Height1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesH1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_Convolution2D_ExplicitPadding_Width1, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_ExplicitPadding_Width1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesW1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_ExplicitPadding_Width1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesW1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_AutoPadValid_Height1, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_AutoPadValid_Height1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesH1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_AutoPadValid_Height1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesH1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_AutoPadValid_Width1, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_AutoPadValid_Width1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesW1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_AutoPadValid_Width1,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesW1),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_AutoPadValid_MapTo1d, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_AutoPadValid_MapTo1d,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesMapTo1d),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_AutoPadValid_MapTo1d,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::ValuesIn(inputShapesMapTo1d),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_Kernels2D, GnaConvolutionLayerTest,
::testing::Combine(
conv2DParams_Kernels2D,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(input2DNCHW),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
GnaConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_Kernels2D,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(input2DNCHW),
::testing::Values(CommonTestUtils::DEVICE_GNA)),
GnaConvolutionLayerTest::getTestCaseName);
} // namespace


@@ -19,24 +19,24 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
/* ============= 2D Convolution ============= */
const std::vector<std::vector<size_t >> kernels2D = {
{1, 3},
{7, 1},
{3, 3},
{1, 3},
{7, 1},
{3, 3},
};
const std::vector<std::vector<size_t >> kernels2DInvalid = {
{1, 4},
{2, 3},
{3, 2},
{8, 1},
{4, 4},
{1, 4},
{2, 3},
{3, 2},
{8, 1},
{4, 4},
};
const std::vector<std::vector<size_t >> strides2D = {
{1, 1},
{1, 1},
};
const std::vector<std::vector<size_t >> strides2DInvalid = {
{4, 4}, {1, 4}
{4, 4}, {1, 4}
};
const std::vector<std::vector<ptrdiff_t>> padBegins2D = { {0, 0},
};
@@ -56,65 +56,65 @@ const std::vector<size_t> numOutChannels2DInvalid = { 1, 7, 9, 400 };
const std::vector<std::vector<size_t>> input2DNCHWFine = { { 1, 8, 20, 16 } };
const std::vector<std::vector<size_t>> input2DNCHWInvalidInputC = {
{ 1, 7, 20, 16 },
{ 1, 9, 20, 16 },
{ 1, 400, 20, 16 } };
{ 1, 7, 20, 16 },
{ 1, 9, 20, 16 },
{ 1, 400, 20, 16 } };
const std::vector<std::vector<size_t>> input2DNCHWInvalidInputH = { { 1, 8, 15, 16 }, { 1, 8, 400, 16 } };
const std::vector<std::vector<size_t>> input2DNCHWInvalidInputW = { { 1, 8, 20, 14 }, { 1, 8, 20, 400 } };
const auto conv2DParametersFine = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const auto conv2DParametersInvalidKernel = ::testing::Combine(
::testing::ValuesIn(kernels2DInvalid),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::ValuesIn(kernels2DInvalid),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const auto conv2DParametersInvalidFilterNumber = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2DInvalid),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2DInvalid),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const auto conv2DParametersInvalidPadding = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2DInvalid),
::testing::ValuesIn(padEnds2DInvalid),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2DInvalid),
::testing::ValuesIn(padEnds2DInvalid),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const auto conv2DParametersInvalidStride = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2DInvalid),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2DInvalid),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2D),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
const auto conv2DParametersInvalidDilation = ::testing::Combine(
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2DInvalid),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
::testing::ValuesIn(kernels2D),
::testing::ValuesIn(strides2D),
::testing::ValuesIn(padBegins2D),
::testing::ValuesIn(padEnds2D),
::testing::ValuesIn(dilations2DInvalid),
::testing::ValuesIn(numOutChannels2D),
::testing::Values(ngraph::op::PadType::EXPLICIT)
);
class GnaConv2DNegativeTest : public ConvolutionLayerTest, protected GnaLayerTestCheck {
@@ -133,7 +133,7 @@ protected:
const auto expected = expectedSubstring();
ASSERT_STR_CONTAINS(errorMsg, expected);
EXPECT_TRUE(errorMsg.find(expected) != std::string::npos) << "Wrong error message, actual error message: " << errorMsg <<
", expected: " << expected;
", expected: " << expected;
}
}
}
@@ -172,4 +172,4 @@ GNA_NEG_INSTANTIATE(Padding, InvalidPadding, Fine, "Convolution's input padding
GNA_NEG_INSTANTIATE(Stride, InvalidStride, Fine, "Unsupported convolution stride shape")
GNA_NEG_INSTANTIATE(Dilation, InvalidDilation, Fine, "dilation is not supported on GNA")
} // namespace
} // namespace


@@ -9,14 +9,14 @@
using namespace LayerTestsDefinitions;
namespace {
std::vector<std::vector<std::vector<size_t>>> inShapes = {
{{2}},
{{8}},
{{1, 200}},
{{1, 1, 1, 3}},
{{1, 2, 4}},
{{1, 4, 4}},
{{1, 4, 4, 1}}
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes = {
{{}, {{{2}}}},
{{}, {{{8}}}},
{{}, {{{1, 200}}}},
{{}, {{{1, 1, 1, 3}}}},
{{}, {{{1, 2, 4}}}},
{{}, {{{1, 4, 4}}}},
{{}, {{{1, 4, 4, 1}}}}
};
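The element type of inShapes changes here from a plain list of static shape vectors to a pair: the first member carries one (possibly dynamic) ngraph::PartialShape per input, the second the static target shapes actually inferred on. An empty first member, as in every entry above, keeps the case fully static. Purely for illustration, a dynamic entry could look like the following; the concrete dimensions are hypothetical, not taken from this PR:

std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>> dynamicCase = {
    {{ngraph::Dimension::dynamic(), 200}},  // one input with a dynamic batch dimension
    {{{1, 200}}, {{2, 200}}}                // two static target shapes to run against
};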


@@ -17,15 +17,14 @@ const std::vector<FuncTestUtils::BlobType> BlobTypes = {
FuncTestUtils::BlobType::NV12
};
const std::map<std::string, std::string> cpuConfig{}; //nothing special
const std::map<std::string, std::string> autoConfig{};
const std::map<std::string, std::string> gpuConfig{}; //nothing special
const std::map<std::string, std::string> multiConfig{{ MULTI_CONFIG_KEY(DEVICE_PRIORITIES) , CommonTestUtils::DEVICE_GPU}};
const std::map<std::string, std::string> heteroConfig{{ "TARGET_FALLBACK", CommonTestUtils::DEVICE_GPU }};
INSTANTIATE_TEST_SUITE_P(smoke_Behavior, InferRequestSetBlobByType,
::testing::Combine(::testing::ValuesIn(BlobTypes),
::testing::Values(CommonTestUtils::DEVICE_GPU),
::testing::Values(cpuConfig)),
::testing::Values(gpuConfig)),
InferRequestSetBlobByType::getTestCaseName);
@@ -37,8 +36,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_Behavior_Multi, InferRequestSetBlobByType,
INSTANTIATE_TEST_SUITE_P(smoke_Behavior_Auto, InferRequestSetBlobByType,
::testing::Combine(::testing::ValuesIn(BlobTypes),
::testing::Values(CommonTestUtils::DEVICE_AUTO + std::string(":") + CommonTestUtils::DEVICE_CPU),
::testing::Values(autoConfig)),
::testing::Values(CommonTestUtils::DEVICE_AUTO),
::testing::Values(multiConfig)),
InferRequestSetBlobByType::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Behavior_Hetero, InferRequestSetBlobByType,


@@ -17,15 +17,15 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
/* ============= 2D Convolution ============= */
const std::vector<std::vector<size_t >> kernels = {{3, 3},
{3, 5}};
{3, 5}};
const std::vector<std::vector<size_t >> strides = {{1, 1},
{1, 3}};
{1, 3}};
const std::vector<std::vector<ptrdiff_t>> padBegins = {{0, 0},
{0, 3}};
const std::vector<std::vector<ptrdiff_t>> padEnds = {{0, 0},
{0, 3}};
const std::vector<std::vector<size_t >> dilations = {{1, 1},
{3, 1}};
{3, 1}};
const std::vector<size_t> numOutChannels = {1, 5};
const std::vector<ngraph::op::PadType> padTypes = {
ngraph::op::PadType::EXPLICIT,
@@ -51,37 +51,37 @@ const auto conv2DParams_AutoPadValid = ::testing::Combine(
);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_ExplicitPadding, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_AutoPadValid, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
ConvolutionLayerTest::getTestCaseName);
/* ============= 3D Convolution ============= */
const std::vector<std::vector<size_t >> kernels3d = {{3, 3, 3},
{3, 5, 3}};
{3, 5, 3}};
const std::vector<std::vector<ptrdiff_t>> paddings3d = {{0, 0, 0},
{0, 2, 0}};
const std::vector<std::vector<size_t >> strides3d = {{1, 1, 1},
{1, 2, 1}};
{1, 2, 1}};
const std::vector<std::vector<size_t >> dilations3d = { {1, 1, 1} };
@@ -98,14 +98,14 @@ const auto conv3DParams = ::testing::Combine(
);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution3D_Basic1, ConvolutionLayerTest,
::testing::Combine(
conv3DParams,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv3DParams,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 10, 10, 10})),
::testing::Values(CommonTestUtils::DEVICE_GPU)),
ConvolutionLayerTest::getTestCaseName);
} // namespace


@@ -9,20 +9,20 @@
using namespace LayerTestsDefinitions;
namespace {
std::vector<std::vector<std::vector<size_t>>> inShapes = {
{{2}},
{{2, 200}},
{{10, 200}},
{{1, 10, 100}},
{{4, 4, 16}},
{{1, 1, 1, 3}},
{{2, 17, 5, 4}, {1, 17, 1, 1}},
{{2, 17, 5, 1}, {1, 17, 1, 4}},
{{1, 2, 4}},
{{1, 4, 4}},
{{1, 4, 4, 1}},
{{1, 4, 3, 2, 1, 3}},
{{1, 3, 1, 1, 1, 3}, {1, 3, 1, 1, 1, 1}},
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes = {
{{}, {{{2}}}},
{{}, {{{2, 200}}}},
{{}, {{{10, 200}}}},
{{}, {{{1, 10, 100}}}},
{{}, {{{4, 4, 16}}}},
{{}, {{{1, 1, 1, 3}}}},
{{}, {{{2, 17, 5, 4}, {1, 17, 1, 1}}}},
{{}, {{{2, 17, 5, 1}, {1, 17, 1, 4}}}},
{{}, {{{1, 2, 4}}}},
{{}, {{{1, 4, 4}}}},
{{}, {{{1, 4, 4, 1}}}},
{{}, {{{1, 4, 3, 2, 1, 3}}}},
{{}, {{{1, 3, 1, 1, 1, 3}, {1, 3, 1, 1, 1, 1}}}},
};
std::vector<InferenceEngine::Precision> netPrecisions = {


@@ -19,10 +19,10 @@ const std::vector<InferenceEngine::Layout> inputLayouts2D = {
InferenceEngine::Layout::NC,
};
const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
InferenceEngine::SizeVector {1, 100},
InferenceEngine::SizeVector {100, 1},
InferenceEngine::SizeVector {10, 10},
const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputShapes2D = {
{{}, {{1, 100}}},
{{}, {{100, 1}}},
{{}, {{10, 10}}},
};
const std::vector<size_t> axis2D = {
@@ -48,10 +48,10 @@ INSTANTIATE_TEST_SUITE_P(
SoftMaxLayerTest::getTestCaseName
);
const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
InferenceEngine::SizeVector {1, 100, 1, 1},
InferenceEngine::SizeVector {1, 3, 4, 3},
InferenceEngine::SizeVector {2, 3, 4, 5},
const std::vector<std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>>> inputShapes4D = {
{{}, {{1, 100, 1, 1}}},
{{}, {{1, 3, 4, 3}}},
{{}, {{2, 3, 4, 5}}},
};
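For single-input tests such as SoftMax, the shape type becomes a pair of one ngraph::PartialShape and the list of static target shapes; again an empty PartialShape, as used above, leaves the case static. A hypothetical dynamic entry (illustrative values only) might read:

const std::pair<ngraph::PartialShape, std::vector<ngraph::Shape>> dynamicShape4D = {
    {ngraph::Dimension::dynamic(), 3, 4, 3},  // dynamic batch dimension
    {{1, 3, 4, 3}, {2, 3, 4, 3}}              // target shapes exercised at runtime
};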
const std::vector<size_t> axis4D = {0, 1, 2, 3};


@@ -68,11 +68,9 @@ std::vector<std::string> disabledTestPatterns() {
R"(.*Behavior.*InferRequestIOBBlobSetLayoutTest.*layout=(95|OIHW).*)",
R"(.*Behavior.*InferRequestIOBBlobSetLayoutTest.*CanSetInBlobWithDifferentLayouts.*layout=NHWC.*)",
R"(.*Behavior.*InferRequestIOBBlobSetLayoutTest.*CanSetOutBlobWithDifferentLayouts.*layout=(CN|HW).*)",
R"(.*Behavior_Multi.*InferRequestSetBlobByType.*Batched.*)",
R"(.*Behavior.*(Multi|Auto).*InferRequestSetBlobByType.*Batched.*)",
R"(.*(Multi|Auto).*Behavior.*InferRequestIOBBlobTest.*canProcessDeallocatedOutputBlobAfterGetAndSetBlob.*)",
// TODO: until issue xxx-59670 is resolved
R"(.*Gather8LayerTest.*)",
// TODO: Issue 66516
R"(.*smoke_PrePostProcess_GPU.*convert_element_type_and_mean.*)"
R"(.*Gather8LayerTest.*)"
};
}
}


@@ -256,6 +256,7 @@ protected:
break;
}
}
functionRefs = ngraph::clone_function(*function);
}
};


@@ -18,15 +18,15 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
/* ============= 2D Convolution ============= */
// Layer specific
const std::vector<std::vector<size_t >> kernels = {{3, 3},
{3, 5}};
{3, 5}};
const std::vector<std::vector<size_t >> strides = {{1, 1},
{1, 3}};
{1, 3}};
const std::vector<std::vector<ptrdiff_t>> padBegins = {{0, 0},
{0, 3}};
const std::vector<std::vector<ptrdiff_t>> padEnds = {{0, 0},
{0, 3}};
const std::vector<std::vector<size_t >> dilations = {{1, 1},
{3, 1}};
{3, 1}};
const std::vector<size_t> numOutCannels = {1, 5};
const std::vector<ngraph::op::PadType> padTypes = {
ngraph::op::PadType::EXPLICIT,
@@ -61,39 +61,39 @@ const auto conv2DParams_BigDimensionValid = ::testing::Combine(
);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_ExplicitPadding, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_ExplicitPadding,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_AutoPadValid, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_AutoPadValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 30, 30})),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
ConvolutionLayerTest::getTestCaseName);
INSTANTIATE_TEST_SUITE_P(smoke_Convolution2D_BigDimensionValid, ConvolutionLayerTest,
::testing::Combine(
conv2DParams_BigDimensionValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 1, 2500})),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
ConvolutionLayerTest::getTestCaseName);
::testing::Combine(
conv2DParams_BigDimensionValid,
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(InferenceEngine::Layout::ANY),
::testing::Values(std::vector<size_t >({1, 3, 1, 2500})),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)),
ConvolutionLayerTest::getTestCaseName);
/* ============= 3D Convolution ============= */
// TODO: 3D convolution fails with SIGABRT


@@ -15,14 +15,14 @@ namespace {
typedef std::map<std::string, std::string> Config;
std::vector<std::vector<std::vector<size_t>>> inShapes = {
{{2}},
{{1, 1, 1, 3}},
{{1, 2, 4}},
{{1, 4, 4}},
{{1, 4, 4, 1}},
{{16, 16, 96}, {96}},
{{52, 1, 52, 3, 2}, {2}}
std::vector<std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>>> inShapes = {
{{}, {{{2}}}},
{{}, {{{1, 1, 1, 3}}}},
{{}, {{{1, 2, 4}}}},
{{}, {{{1, 4, 4}}}},
{{}, {{{1, 4, 4, 1}}}},
{{}, {{{16, 16, 96}, {96}}}},
{{}, {{{52, 1, 52, 3, 2}, {2}}}}
};
std::vector<InferenceEngine::Precision> fpTypes = {
@@ -84,5 +84,4 @@ INSTANTIATE_TEST_SUITE_P(smoke_EltwiseMathInt,
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
EltwiseLayerTest::getTestCaseName);
} // namespace


@@ -39,7 +39,7 @@ std::vector<std::string> disabledTestPatterns() {
// TODO: Issue 54163
R"(.*ActivationLayerTest.*SoftPlus.*)",
// TODO: Issue 54722
R"(.*IS=\(16\.16\.96\)\(96\)_eltwiseOpType=FloorMod_secondaryInputType=PARAMETER_opType=VECTOR_netPRC=FP32.*)",
R"(.*TS=\(\(16\.16\.96\)_\(96\)_\).*eltwiseOpType=FloorMod_secondaryInputType=PARAMETER_opType=VECTOR_netPRC=FP32.*)",
// TODO: Issue 57108
R"(.*QueryNetworkHETEROWithMULTINoThrow_V10.*)",
R"(.*QueryNetworkMULTIWithHETERONoThrow_V10.*)",


@@ -107,13 +107,14 @@ public:
function = make_ngraph(false);
reference_function = make_ngraph(true); //use extra ops to mimic the preprocessing
functionRefs = ngraph::clone_function(*function);
}
void Validate() override {
// w/a: copy of original function is required to provide correct op coverage report (overflow of convert counter issue)
auto copyOriginalFunction = function;
//force the reference implementation to use graph with extra Convert operation
function = reference_function;
functionRefs = ngraph::clone_function(*reference_function);
LayerTestsUtils::LayerTestsCommon::Validate();
function = copyOriginalFunction;
}


@@ -69,6 +69,7 @@ TEST_P(TrivialLoopTest, PassThroughBody) {
function = std::make_shared<ngraph::Function>(
ngraph::OutputVector {loop},
ngraph::ParameterVector {start});
functionRefs = ngraph::clone_function(*function);
// Precalculated ref blobs
auto blob = make_blob_with_precision({iePrc, ieShape, InferenceEngine::TensorDesc::getLayoutByDims(ieShape)});
@@ -113,6 +114,7 @@ TEST_P(TrivialLoopTest, UnusedInputBody) {
function = std::make_shared<ngraph::Function>(
ngraph::OutputVector {loop},
ngraph::ParameterVector {start});
functionRefs = ngraph::clone_function(*function);
// Precalculated ref blobs
auto blob = make_blob_with_precision({iePrc, ieShape, InferenceEngine::TensorDesc::getLayoutByDims(ieShape)});


@@ -141,6 +141,7 @@ void LoadNetworkCacheTestBase::SetUp() {
} catch (...) {
GTEST_SKIP();
}
functionRefs = ngraph::clone_function(*function);
std::stringstream ss;
auto hash = std::hash<std::string>()(GetTestName());


@@ -61,18 +61,21 @@ void DetectNetworkBatch::LoadNetwork() {
TEST_P(DetectNetworkBatch, InferWithOneInput) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
function = ngraph::builder::subgraph::makeSplitConvConcat();
functionRefs = ngraph::clone_function(*function);
Run();
};
TEST_P(DetectNetworkBatch, InferWithMultipleInputs_DiffDims) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
function = makeNNWithMultipleInputsDiffDims();
functionRefs = ngraph::clone_function(*function);
Run();
};
TEST_P(DetectNetworkBatch, InferWithMultipleInputs_SameDims) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
function = makeNNWithMultipleInputsSameDims();
functionRefs = ngraph::clone_function(*function);
Run();
};


@@ -47,6 +47,7 @@ namespace ConfigurationTestsDefinitions {
max_batch_size = *std::max_element(batch_sizes.begin(), batch_sizes.end());
function = ngraph::builder::subgraph::makeSingleConv();
functionRefs = ngraph::clone_function(*function);
}
void DynamicBatchTest::LoadNetwork() {
@@ -71,6 +72,7 @@
inputs.push_back(blob);
}
reference_inputs.push_back(inputs);
functionRefs = ngraph::clone_function(*function);
reference_outputs.push_back(CalculateRefs());
}
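Unlike the SetUp() cases above, this clone sits inside the per-batch-size reference loop, so each CalculateRefs() call starts from a fresh copy of the network and a reshape performed for one batch size cannot leak into the next iteration. This reading is inferred from the surrounding loop; the PR does not state it explicitly.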
