diff --git a/inference-engine/src/auto_batch/auto_batch.cpp b/inference-engine/src/auto_batch/auto_batch.cpp
index 0f151e5a1af..84f305b343b 100644
--- a/inference-engine/src/auto_batch/auto_batch.cpp
+++ b/inference-engine/src/auto_batch/auto_batch.cpp
@@ -599,7 +599,7 @@ InferenceEngine::IExecutableNetworkInternal::Ptr AutoBatchInferencePlugin::LoadN
                 const size_t footprint = report_footprint(GetCore(), deviceName, "After BATCHED");
                 if (footprint > total_mem) {  // WA for inaccurate footprint estimations
                     std::cout << "!!!! Total on-device mem is " << total_mem << " less than :" << footprint << std::endl;
-                    throw NETWORK_NOT_LOADED;
+                    // throw NETWORK_NOT_LOADED;
                 }
             }
         } catch (...) {