Fixed addOutput behavior for experimental ops (#2138)
parent d78fd196e8
commit 27ca6be728
@@ -130,7 +130,7 @@ void ngraph::op::GenericIE::validate_and_infer_types() {
     }
 
     // WA: shape infer has to know number of outputs
-    if ((type == "Proposal" || type == "ExperimentalDetectronROIFeatureExtractor" || type == "ExperimentalDetectronDetectionOutput")
+    if ((type == "ExperimentalDetectronROIFeatureExtractor" || type == "ExperimentalDetectronDetectionOutput")
         && parameters.find("num_outputs") == parameters.end()) {
         parameters["num_outputs"] = std::to_string(outputs.size());
     }
@@ -149,6 +149,13 @@ void ngraph::op::GenericIE::validate_and_infer_types() {
     // Extensions are not loaded when we create nGraph function
     // First call: create node
     if (initialized < 1) {
+        if ((type == "ExperimentalDetectronROIFeatureExtractor" || type == "ExperimentalDetectronDetectionOutput")
+            && outputs.size() < 2) {
+            // Add fake port
+            PortIE port;
+            port.precision = InferenceEngine::Precision::FP32;
+            outputs.emplace_back(port);
+        }
         if (outputs.size())
             set_output_size(outputs.size());
         for (size_t output_index = 0; output_index < outputs.size(); output_index++) {
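Aside: a minimal sketch of the padding idea used in the hunk above, written with stand-in names rather than the real PortIE/GenericIE types (PortStub, pad_outputs, and the min_outputs parameter are illustration-only inventions). The actual change simply appends one fake FP32 port so the op reports at least two outputs before set_output_size() is called.

// Sketch only, not the commit's code: stand-in types for illustration.
#include <string>
#include <vector>

struct PortStub {                   // stands in for PortIE
    std::string precision = "UNSPECIFIED";
};

// Append fake FP32 ports until the op reports at least `min_outputs` outputs,
// so a later addOutput() call can reference an output index the IR omitted.
void pad_outputs(std::vector<PortStub>& outputs, std::size_t min_outputs) {
    while (outputs.size() < min_outputs) {
        PortStub fake;
        fake.precision = "FP32";    // mirrors the fake FP32 port added in the diff
        outputs.emplace_back(fake);
    }
}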
@@ -880,4 +880,79 @@ TEST(CNNNGraphImplTests, addOutputForParameter) {
     }
 }
 
+TEST(CNNNGraphImplTests, AddOutputToExperimentalOp) {
+    std::string model = R"V0G0N(
+<net name="Activation" version="10">
+    <layers>
+        <layer name="in1" type="Parameter" id="0" version="opset1">
+            <data shape="1,3,22,22" element_type="f32"/>
+            <output>
+                <port id="0" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </output>
+        </layer>
+        <layer name="exp" id="1" type="ExperimentalDetectronROIFeatureExtractor" version="experimental">
+            <input>
+                <port id="1" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </output>
+        </layer>
+        <layer name="activation" id="2" type="ReLU" version="opset1">
+            <input>
+                <port id="1" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </input>
+            <output>
+                <port id="2" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </output>
+        </layer>
+        <layer name="output" type="Result" id="3" version="opset1">
+            <input>
+                <port id="0" precision="FP32">
+                    <dim>1</dim>
+                    <dim>3</dim>
+                    <dim>22</dim>
+                    <dim>22</dim>
+                </port>
+            </input>
+        </layer>
+    </layers>
+    <edges>
+        <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
+        <edge from-layer="1" from-port="2" to-layer="2" to-port="1"/>
+        <edge from-layer="2" from-port="2" to-layer="3" to-port="0"/>
+    </edges>
+</net>
+)V0G0N";
+    InferenceEngine::Core core;
+    CNNNetwork network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr());
+    network.addOutput("exp");
+    auto outputs = network.getOutputsInfo();
+    ASSERT_NE(outputs.find("exp.0"), outputs.end());
+}
 IE_SUPPRESS_DEPRECATED_END
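For context, a sketch of how the fixed behavior is exercised through the public InferenceEngine API, assuming a model.xml/model.bin pair that contains a layer named "exp" like the IR in the test above and an available CPU plugin; the output name "exp.0" follows the "<layer-name>.<output-index>" convention checked by the test.

// Sketch only, under the assumptions stated above.
#include <inference_engine.hpp>

int main() {
    InferenceEngine::Core core;
    // Assumption: the model files declare a layer named "exp", as in the test IR above.
    auto network = core.ReadNetwork("model.xml", "model.bin");
    network.addOutput("exp");                     // attach an extra network output to the experimental op
    auto executable = core.LoadNetwork(network, "CPU");
    auto request = executable.CreateInferRequest();
    request.Infer();
    auto blob = request.GetBlob("exp.0");         // fetch it by the "<layer>.<index>" output name
    return blob ? 0 : 1;
}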