diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.cpp
index b6f8a046c71..3bcdec92619 100644
--- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.cpp
+++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.cpp
@@ -393,7 +393,7 @@ void MKLDNNInputNode::initSupportedPrimitiveDescriptors() {
     std::vector outPortConfs;
 
     if (getType() == Input || getType() == MemoryInput) {
-        precision = getOriginalOutputPrecisionAtPort(0);
+        auto precision = getOriginalOutputPrecisionAtPort(0);
         if (precision == Precision::U16 || isMeanImage) {
             precision = Precision::FP32;
         }
@@ -403,7 +403,7 @@ void MKLDNNInputNode::initSupportedPrimitiveDescriptors() {
             inPortConfs.push_back({LayoutType::ncsp, precision, true});
         }
     } else if (getType() == Output) {
-        precision = getOriginalInputPrecisionAtPort(0);
+        auto precision = getOriginalInputPrecisionAtPort(0);
         if (precision == Precision::U16) precision = Precision::FP32;
 
         inPortConfs.push_back({LayoutType::ncsp, precision});
diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.h b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.h
index f5f209e96cc..a930fadde61 100644
--- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.h
+++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_input_node.h
@@ -38,7 +38,6 @@ private:
 
 private:
     std::shared_ptr constOp;
-    InferenceEngine::Precision precision;
     MKLDNNMemoryCPtr memoryPtr;
     bool isMeanImage = false;
 };