From 5007cba70a930e9423de9e7efecc7060a2f4052b Mon Sep 17 00:00:00 2001
From: Edward Shogulin
Date: Mon, 26 Oct 2020 16:02:11 +0300
Subject: [PATCH] [LPT] bfloat enabling fix (#2819)

---
 inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp
index e7bad8e156c..2d0ca6e275f 100644
--- a/inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp
+++ b/inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp
@@ -54,8 +54,8 @@ MKLDNNExecNetwork::MKLDNNExecNetwork(const InferenceEngine::ICNNNetwork &network
     // we are cloning network if we have statistics and we can transform network.
     _clonedNetwork = cloneNet(network);
 
-#ifdef USE_CNNNETWORK_LPT
     if (_cfg.lpTransformsMode == Config::LPTransformsMode::On) {
+#ifdef USE_CNNNETWORK_LPT
         auto params = LayerTransformation::Params(true,  // updatePrecisions
                                                   true,  // quantizeOutputs
                                                   true,  // weightsToConst
@@ -70,6 +70,7 @@ MKLDNNExecNetwork::MKLDNNExecNetwork(const InferenceEngine::ICNNNetwork &network
             LayerTransformation::Params(params).setPrecisionsOnActivations({ Precision::U8 }), "ScaleShift"));
 
         transformer.transform(*_clonedNetwork);
+#endif
 
         // Check if network is INT8 or Binary.
         // BF16 transformations were disabled since CPU plug-in doesn't support mixed precision execution:
@@ -98,7 +99,6 @@ MKLDNNExecNetwork::MKLDNNExecNetwork(const InferenceEngine::ICNNNetwork &network
             bf16Transformer.convertToFloat(cnnetwork);
         }
     }
-#endif
 
     MKLDNNGraph::ApplyUnrollPasses(static_cast<ICNNNetwork&>(*_clonedNetwork));
 