diff --git a/src/plugins/intel_cpu/src/config.h b/src/plugins/intel_cpu/src/config.h
index 26c882adf4d..a027844d9fe 100644
--- a/src/plugins/intel_cpu/src/config.h
+++ b/src/plugins/intel_cpu/src/config.h
@@ -82,7 +82,7 @@ struct Config {
     std::map _config;
 
-    bool isNewApi = true;
+    bool isLegacyApi = false;
 
 #ifdef CPU_DEBUG_CAPS
     DebugCapsConfig debugCaps;
diff --git a/src/plugins/intel_cpu/src/exec_network.cpp b/src/plugins/intel_cpu/src/exec_network.cpp
index c17a16c5bce..80687369d85 100644
--- a/src/plugins/intel_cpu/src/exec_network.cpp
+++ b/src/plugins/intel_cpu/src/exec_network.cpp
@@ -79,8 +79,12 @@ ExecNetwork::ExecNetwork(const InferenceEngine::CNNNetwork &network,
     }
     bool isFloatModel = !ov::op::util::has_op_with_type(function);
 
-    _cfg.isNewApi = !isLegacyAPI();
     _mutex = std::make_shared();
+    const auto& core = _plugin->GetCore();
+    if (!core)
+        IE_THROW() << "Unable to get API version. Core is unavailable";
+    _cfg.isLegacyApi = !core->isNewAPI();
+
     if (cfg.exclusiveAsyncRequests) {
         // special case when all InferRequests are muxed into a single queue
@@ -209,14 +213,6 @@ std::shared_ptr ExecNetwork::GetExecGraphInfo() {
     return GetGraph()._graph.dump();
 }
 
-bool ExecNetwork::isLegacyAPI() const {
-    const auto& core = _plugin->GetCore();
-    if (!core)
-        IE_THROW() << "Unable to get API version. Core is unavailable";
-
-    return !core->isNewAPI();
-}
-
 Parameter ExecNetwork::GetConfigLegacy(const std::string &name) const {
     if (_graphs.empty())
         IE_THROW() << "No graph was found";
@@ -279,7 +275,7 @@ InferenceEngine::Parameter ExecNetwork::GetMetric(const std::string &name) const
     const auto& graph = graphLock._graph;
     const auto& config = graph.getConfig();
 
-    if (isLegacyAPI()) {
+    if (_cfg.isLegacyApi) {
         return GetMetricLegacy(name, graph);
     }
diff --git a/src/plugins/intel_cpu/src/exec_network.h b/src/plugins/intel_cpu/src/exec_network.h
index 757cec5f22e..506dc524cd5 100644
--- a/src/plugins/intel_cpu/src/exec_network.h
+++ b/src/plugins/intel_cpu/src/exec_network.h
@@ -76,8 +76,6 @@ protected:
      */
     GraphGuard::Lock GetGraph() const;
 
-    bool isLegacyAPI() const;
-
     InferenceEngine::Parameter GetConfigLegacy(const std::string &name) const;
     InferenceEngine::Parameter GetMetricLegacy(const std::string &name, const GraphGuard& graph) const;
diff --git a/src/plugins/intel_cpu/src/graph.cpp b/src/plugins/intel_cpu/src/graph.cpp
index 69ccef79861..4801918ce14 100644
--- a/src/plugins/intel_cpu/src/graph.cpp
+++ b/src/plugins/intel_cpu/src/graph.cpp
@@ -961,9 +961,9 @@ void Graph::PullOutputData(BlobMap &out) {
         auto srcPrec = actualDesc.getPrecision();
         auto dstPrec = expectedDesc.getPrecision();
-        if (getConfig().isNewApi && srcPrec == dstPrec && ext_blob->byteSize() != intr_blob.GetSize())
-            IE_THROW() << "Output blob byte size is not equal network output byte size ("
-                       << ext_blob->byteSize() << "!=" << intr_blob.GetSize() << ").";
+        if (!getConfig().isLegacyApi && srcPrec == dstPrec && ext_blob->byteSize() != intr_blob.GetSize())
+            IE_THROW() << "Output blob byte size is not equal network output byte size (" << ext_blob->byteSize()
+                       << "!=" << intr_blob.GetSize() << ").";
 
         void *ext_blob_ptr = ext_blob->buffer();
         void *intr_blob_ptr = intr_blob.GetData();
diff --git a/src/plugins/template/tests/functional/skip_tests_config.cpp b/src/plugins/template/tests/functional/skip_tests_config.cpp
index c9448805ea4..6e09d190da0 100644
--- a/src/plugins/template/tests/functional/skip_tests_config.cpp
+++ b/src/plugins/template/tests/functional/skip_tests_config.cpp
@@ -139,4 +139,4 @@ std::vector disabledTestPatterns() {
         R"(.*ReferenceConversionLayerTest.CompareWithHardcodedRefs/conversionType=(Convert|ConvertLike)_shape=.*_iType=(f16|f32|bf16)_oType=u4.*)");
 #endif
     return retVector;
-}
+}
\ No newline at end of file
diff --git a/src/tests/functional/plugin/shared/include/behavior/compiled_model/compiled_model_base.hpp b/src/tests/functional/plugin/shared/include/behavior/compiled_model/compiled_model_base.hpp
index dabb77226f0..2078e555ed6 100644
--- a/src/tests/functional/plugin/shared/include/behavior/compiled_model/compiled_model_base.hpp
+++ b/src/tests/functional/plugin/shared/include/behavior/compiled_model/compiled_model_base.hpp
@@ -157,7 +157,83 @@ TEST_P(OVCompiledModelBaseTest, canCompileModelFromMemory) {
 
 
     )V0G0N";
-    EXPECT_NO_THROW(auto execNet = core->compile_model(model, ov::Tensor(), target_device, configuration));
+    EXPECT_NO_THROW(auto execNet = core ->compile_model(model, ov::Tensor(), target_device, configuration));
+}
+
+TEST_P(OVCompiledModelBaseTest, canCompileModelwithBrace) {
+    std::string model = R"V0G0N(
+
+
+
+
+
+
+ 1
+ 3
+ 22
+ 22
+
+
+
+
+
+
+
+ 1
+ 3
+ 22
+ 22
+
+
+
+
+
+
+ 1
+ 3
+ 22
+ 22
+
+
+ 1
+ 3
+ 22
+ 22
+
+
+
+
+ 1
+ 6
+ 22
+ 22
+
+
+
+
+
+
+ 1
+ 6
+ 22
+ 22
+
+
+
+
+
+
+
+
+
+
+    )V0G0N";
+    ov::CompiledModel compiled_model;
+    {
+        ov::Core tmp_core = createCoreWithTemplate();
+        compiled_model = tmp_core.compile_model(model, ov::Tensor(), target_device, configuration);
+    }
+    EXPECT_NO_THROW(compiled_model.get_property(ov::optimal_number_of_infer_requests));
 }
 
 TEST(OVCompiledModelBaseTest, canCompileModelToDefaultDevice) {