[POT] Update tests with new data (#8209)
* Update POT tests with new data
* Revert package changes
* Remove mobilenet-ssd from POT tests
* Update benchmark.py IE Python API usage
parent 7ef4ff6385
commit ce9a968030
6 .gitattributes vendored
@@ -67,9 +67,11 @@
#POT attributes
tools/pot/tests/data/test_cases_refs/* filter=lfs diff=lfs merge=lfs -text
tools/pot/tests/data/models/*/* filter=lfs diff=lfs merge=lfs -text
tools/pot/tests/data/reference_models/* filter=lfs diff=lfs merge=lfs -text
tools/pot/tests/data/video/* filter=lfs diff=lfs merge=lfs -text
tools/pot/tests/data/reference_fake_quantize_conf/* filter=lfs diff=lfs merge=lfs -text
/tools/pot/tests/** -pot_package
/configs/accuracy_checker/** -pot_package
/configs/quantization/** -pot_package
/tools/pot/tools/auxilary/** -pot_package
/tools/pot/tools/run_series_experiments.py -pot_package
/tools/pot/.pylintrc -pot_package
@@ -95,7 +95,7 @@ def benchmark_embedded_python_api(path_to_model_file):
 for key, value in input_info.items():
     m = []
     dt = np_d_type[value.precision]
-    for x in value.shape:
+    for x in value.input_data.shape:
         m.append(x)
     m[0] = m[0] * batch_size
     input_data[key] = np.empty(tuple(m), dtype=dt)
@@ -122,7 +122,7 @@ def benchmark_embedded_python_api(path_to_model_file):
 infer_requests = exe_network.requests
 batch_size = ie_network.batch_size
 request_queue = InferRequestsQueue(infer_requests)
-requests_input_data = get_dummy_inputs(batch_size, ie_network.inputs, infer_requests)
+requests_input_data = get_dummy_inputs(batch_size, ie_network.input_info, infer_requests)
 infer_request = request_queue.get_idle_request()

 # For warming up
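The two hunks above are the "benchmark.py IE Python API usage" part of the commit: the Inference Engine Python bindings deprecated IENetwork.inputs in favour of IENetwork.input_info, and the input shape is now reached through the .input_data attribute. A minimal sketch of the updated pattern, assuming the deprecated openvino.inference_engine package (the helper name is illustrative, not code from this commit):

import numpy as np
from openvino.inference_engine import IECore

def make_dummy_inputs(net, batch_size=1):
    """Build zero-filled input blobs keyed by input name (mirrors the hunks above)."""
    input_data = {}
    # old API: net.inputs[name].shape -- new API: net.input_info[name].input_data.shape
    for key, value in net.input_info.items():
        m = list(value.input_data.shape)
        m[0] = m[0] * batch_size  # scale the batch dimension
        input_data[key] = np.empty(tuple(m), dtype=np.float32)
    return input_data

# Usage sketch:
#   ie = IECore()
#   net = ie.read_network(model="model.xml", weights="model.bin")
#   feed = make_dummy_inputs(net, batch_size=2)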
3 tools/pot/tests/data/frame_extractor/0.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8dcf3768e706a5585d9ac32b58f991bc9a872b0ca2a8dbf320919f8dc6ae65d0
size 67197
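This stanza, and every other version/oid/size entry added in this diff, is a Git LFS pointer file: under the .gitattributes rules added above, the actual PNG and model binaries go to LFS storage and only these small pointers are committed. An illustrative parser for such a stanza (not part of this commit):

def parse_lfs_pointer(text):
    """Split a Git LFS pointer file into its version, sha256 oid and byte size."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "oid": fields["oid"].split(":", 1)[1],  # drop the "sha256:" prefix
        "size": int(fields["size"]),
    }

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:8dcf3768e706a5585d9ac32b58f991bc9a872b0ca2a8dbf320919f8dc6ae65d0\n"
    "size 67197"
)
print(parse_lfs_pointer(pointer))  # {'version': '...', 'oid': '8dcf37...', 'size': 67197}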
3 tools/pot/tests/data/frame_extractor/1.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:760fa936c7d6f734e19daf8134a4ea7408e3475c6fc9707d9399b36e4734eb29
size 33765

3 tools/pot/tests/data/frame_extractor/2.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7da11f06a96ce498d819a839ea1ceb3f85608b5a98c1f07d60e96e8c003e187a
size 96789

3 tools/pot/tests/data/frame_extractor_ref/0.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8dcf3768e706a5585d9ac32b58f991bc9a872b0ca2a8dbf320919f8dc6ae65d0
size 67197

3 tools/pot/tests/data/frame_extractor_ref/1.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:760fa936c7d6f734e19daf8134a4ea7408e3475c6fc9707d9399b36e4734eb29
size 33765

3 tools/pot/tests/data/frame_extractor_ref/2.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7da11f06a96ce498d819a839ea1ceb3f85608b5a98c1f07d60e96e8c003e187a
size 96789
3 tools/pot/tests/data/image_data/0.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e8c0887a2215dad87c2c04becf50ae94c413432299c2c056804a12acd0ed75dd
size 37708

3 tools/pot/tests/data/image_data/1.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e1ad0a169e6772410316e99bd63d8240f1d35b48c82ab26a611c02c9317be116
size 9097

3 tools/pot/tests/data/image_data/2.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dfa77cefe13067555ace1e12e383f93cacac0137893dfbb02f85dd78f81015c0
size 135769

3 tools/pot/tests/data/image_data/3.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c2559b04de51a9576e5be0764fd19662d01698bd035deebbd390543b1b4163bb
size 80215

3 tools/pot/tests/data/image_data/4.png Normal file
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e3a9d6eba3484991ea3d9f9d4237ef0406a91af0cedc8f5324073739d1e4c3ed
size 22335
5 tools/pot/tests/data/image_loading/image_files.txt Normal file
@@ -0,0 +1,5 @@
./images/0.JPEG
./images/1.JPEG
./images/2.JPEG
./images/3.JPEG
./images/4.JPEG

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:776976f67763b4eea488fb8ceceeaef7009f0399ac8b5f67dd156ef6f866f09b
size 159

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:32ec7873e536aeb9e735d3de44c032c1cb33cb305c3a5a032ca50c4d5d2d0223
size 28116312

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9600e8bcd272e35438d2634ef16af7acf0042fddad7c9964acace5850c5c1533
size 171

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:837c05c2e45374a71ad00539d288e75c83a11462a13d2db7ce3de497557e46df
size 15967759
@@ -1,7 +1,3 @@
-{
-"name": "1_input_model",
-"framework": "onnx",
-"mo_args": {
-"input_model": "$model_dir/first_convs_test_nets/1_input_model.onnx"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad965f49cea59413aef6d0b61b06d35d17d53b875dde3f2f25eacfdc2fe4670c
+size 146

Binary file not shown.
@@ -1,7 +1,3 @@
-{
-"name": "3_inputs_model",
-"framework": "onnx",
-"mo_args": {
-"input_model": "$model_dir/first_convs_test_nets/3_inputs_model.onnx"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:0433858efb3ca6ed53dca5b7e3324fdc31579e6f58c6dddb3f88fafa80e285f4
+size 148

Binary file not shown.
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f3ef0a49a165cbf1f910295287f09c14664566a09bd7ebed6b37b0b1019b6168
size 153

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d3be84455194bfd343bada5c846a519e9374037026bc7c581c0d642f5052a264
size 13086189

Binary file not shown.
@@ -1,8 +1,3 @@
-{
-"name": "matmul_divide_const",
-"framework": "dldt",
-"mo_args": {
-"model": "$model_dir/matmul_divide_const/matmul_divide_const.xml",
-"weights": "$model_dir/matmul_divide_const/matmul_divide_const.bin"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ed7eef3415ccb1cd1b8606bdb8348e124979bb78b02e50badda92aa2cd3dc4d
+size 222

@@ -1,2 +1,3 @@
-<?xml version="1.0" ?>
-<mapping/>
+version https://git-lfs.github.com/spec/v1
+oid sha256:43d82f63bfea7d3ae9d641169b9693596199d78171e94b918e09497a4f08a012
+size 34
@ -1,640 +1,3 @@
|
||||
<?xml version="1.0" ?>
|
||||
<net name="matmul_divide_const" version="10">
|
||||
<layers>
|
||||
<layer id="0" name="decoder_0" type="Parameter" version="opset1">
|
||||
<data element_type="f32" shape="1,16,512"/>
|
||||
<output>
|
||||
<port id="0" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="1" name="2491117/Output_0/Data_/copy_const284_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="0" shape="1,16,512" size="32768"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="2" name="decoder_250" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="3" name="264/3D_Tile_Unsqueeze_dim6178_const286_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32768" shape="1" size="8"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="4" name="decoder_264/3D_Tile_Unsqueeze" type="Unsqueeze" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="5" name="61806182_const288_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32776" shape="4" size="32"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>4</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="6" name="decoder_264" type="Tile" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>4</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="7" name="264/3D_Tile_Squeeze_dim6185_const290_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32768" shape="1" size="8"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="8" name="decoder_264/3D_Tile_Squeeze" type="Squeeze" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="9" name="271/Cast_19331_const292_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32808" shape="3" size="24"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="10" name="decoder_271" type="Reshape" version="opset1">
|
||||
<data special_zero="True"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="11" name="onnx_initializer_node_218/Output_0/Data__const294_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="32832" shape="8,64,512" size="1048576"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>64</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="12" name="decoder_290" type="MatMul" version="opset1">
|
||||
<data transpose_a="False" transpose_b="True"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>8</dim>
|
||||
<dim>64</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="13" name="297/Cast_19315_const296_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="1081408" shape="3" size="24"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="14" name="decoder_297" type="Reshape" version="opset1">
|
||||
<data special_zero="True"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="15" name="273/3D_Tile_Unsqueeze_dim6274_const298_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32768" shape="1" size="8"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="16" name="decoder_273/3D_Tile_Unsqueeze" type="Unsqueeze" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="17" name="62766278_const300_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32776" shape="4" size="32"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>4</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="18" name="decoder_273" type="Tile" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>4</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="19" name="273/3D_Tile_Squeeze_dim6281_const302_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32768" shape="1" size="8"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="20" name="decoder_273/3D_Tile_Squeeze" type="Squeeze" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="21" name="280/Cast_19279_const304_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="32808" shape="3" size="24"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="22" name="decoder_280" type="Reshape" version="opset1">
|
||||
<data special_zero="True"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="23" name="onnx_initializer_node_219/Output_0/Data__const306_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="1081432" shape="8,64,512" size="1048576"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>64</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="24" name="decoder_298" type="MatMul" version="opset1">
|
||||
<data transpose_a="False" transpose_b="True"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>8</dim>
|
||||
<dim>64</dim>
|
||||
<dim>512</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="25" name="305/Cast_19367_const308_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="1081408" shape="3" size="24"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="26" name="decoder_305" type="Reshape" version="opset1">
|
||||
<data special_zero="True"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="27" name="314/Cast_19273_const310_const" type="Const" version="opset1">
|
||||
<data element_type="i64" offset="2130008" shape="3" size="24"/>
|
||||
<output>
|
||||
<port id="1" precision="I64">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="28" name="decoder_314" type="Transpose" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>64</dim>
|
||||
<dim>16</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="29" name="decoder_315" type="MatMul" version="opset1">
|
||||
<data transpose_a="False" transpose_b="False"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>64</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>8</dim>
|
||||
<dim>64</dim>
|
||||
<dim>16</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>16</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="30" name="316/Output_0/Data_/copy_const313_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="2130032" shape="1,1,1" size="4"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="31" name="decoder_317" type="Divide" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>16</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>16</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="32" name="decoder_674/sink_port_0" type="Result" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>8</dim>
|
||||
<dim>16</dim>
|
||||
<dim>16</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
</layers>
|
||||
<edges>
|
||||
<edge from-layer="0" from-port="0" to-layer="2" to-port="0"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="2" to-port="1"/>
|
||||
<edge from-layer="2" from-port="2" to-layer="4" to-port="0"/>
|
||||
<edge from-layer="3" from-port="1" to-layer="4" to-port="1"/>
|
||||
<edge from-layer="4" from-port="2" to-layer="6" to-port="0"/>
|
||||
<edge from-layer="5" from-port="1" to-layer="6" to-port="1"/>
|
||||
<edge from-layer="6" from-port="2" to-layer="8" to-port="0"/>
|
||||
<edge from-layer="7" from-port="1" to-layer="8" to-port="1"/>
|
||||
<edge from-layer="8" from-port="2" to-layer="10" to-port="0"/>
|
||||
<edge from-layer="9" from-port="1" to-layer="10" to-port="1"/>
|
||||
<edge from-layer="10" from-port="2" to-layer="12" to-port="0"/>
|
||||
<edge from-layer="11" from-port="1" to-layer="12" to-port="1"/>
|
||||
<edge from-layer="12" from-port="2" to-layer="14" to-port="0"/>
|
||||
<edge from-layer="13" from-port="1" to-layer="14" to-port="1"/>
|
||||
<edge from-layer="2" from-port="2" to-layer="16" to-port="0"/>
|
||||
<edge from-layer="15" from-port="1" to-layer="16" to-port="1"/>
|
||||
<edge from-layer="16" from-port="2" to-layer="18" to-port="0"/>
|
||||
<edge from-layer="17" from-port="1" to-layer="18" to-port="1"/>
|
||||
<edge from-layer="18" from-port="2" to-layer="20" to-port="0"/>
|
||||
<edge from-layer="19" from-port="1" to-layer="20" to-port="1"/>
|
||||
<edge from-layer="20" from-port="2" to-layer="22" to-port="0"/>
|
||||
<edge from-layer="21" from-port="1" to-layer="22" to-port="1"/>
|
||||
<edge from-layer="22" from-port="2" to-layer="24" to-port="0"/>
|
||||
<edge from-layer="23" from-port="1" to-layer="24" to-port="1"/>
|
||||
<edge from-layer="24" from-port="2" to-layer="26" to-port="0"/>
|
||||
<edge from-layer="25" from-port="1" to-layer="26" to-port="1"/>
|
||||
<edge from-layer="26" from-port="2" to-layer="28" to-port="0"/>
|
||||
<edge from-layer="27" from-port="1" to-layer="28" to-port="1"/>
|
||||
<edge from-layer="14" from-port="2" to-layer="29" to-port="0"/>
|
||||
<edge from-layer="28" from-port="2" to-layer="29" to-port="1"/>
|
||||
<edge from-layer="29" from-port="2" to-layer="31" to-port="0"/>
|
||||
<edge from-layer="30" from-port="1" to-layer="31" to-port="1"/>
|
||||
<edge from-layer="31" from-port="2" to-layer="32" to-port="0"/>
|
||||
</edges>
|
||||
<meta_data>
|
||||
<MO_version value="unknown version"/>
|
||||
<cli_parameters>
|
||||
<blobs_as_inputs value="True"/>
|
||||
<caffe_parser_path value="DIR"/>
|
||||
<data_type value="FP32"/>
|
||||
<disable_nhwc_to_nchw value="False"/>
|
||||
<disable_omitting_optional value="False"/>
|
||||
<disable_resnet_optimization value="False"/>
|
||||
<disable_weights_compression value="False"/>
|
||||
<enable_concat_optimization value="False"/>
|
||||
<enable_flattening_nested_params value="False"/>
|
||||
<enable_ssd_gluoncv value="False"/>
|
||||
<extensions value="DIR"/>
|
||||
<framework value="onnx"/>
|
||||
<freeze_placeholder_with_value value="{}"/>
|
||||
<generate_deprecated_IR_V2 value="False"/>
|
||||
<generate_deprecated_IR_V7 value="False"/>
|
||||
<generate_experimental_IR_V10 value="True"/>
|
||||
<input value="0"/>
|
||||
<input_model value="DIR/action_recognition_0001_decoder.onnx"/>
|
||||
<input_model_is_text value="False"/>
|
||||
<input_shape value="[1,16,512]"/>
|
||||
<k value="DIR/CustomLayersMapping.xml"/>
|
||||
<keep_quantize_ops_in_IR value="True"/>
|
||||
<keep_shape_ops value="False"/>
|
||||
<legacy_mxnet_model value="False"/>
|
||||
<log_level value="ERROR"/>
|
||||
<mean_scale_values value="{}"/>
|
||||
<mean_values value="()"/>
|
||||
<model_name value="action-recognition-0001-decoder"/>
|
||||
<move_to_preprocess value="False"/>
|
||||
<output value="['674']"/>
|
||||
<output_dir value="DIR"/>
|
||||
<placeholder_data_types value="{}"/>
|
||||
<placeholder_shapes value="{'0': array([ 1, 16, 512])}"/>
|
||||
<progress value="False"/>
|
||||
<remove_memory value="False"/>
|
||||
<remove_output_softmax value="False"/>
|
||||
<reverse_input_channels value="False"/>
|
||||
<save_params_from_nd value="False"/>
|
||||
<scale_values value="()"/>
|
||||
<silent value="False"/>
|
||||
<stream_output value="False"/>
|
||||
<unset unset_cli_parameters="batch, counts, disable_fusing, disable_gfusing, finegrain_fusing, input_checkpoint, input_meta_graph, input_proto, input_symbol, mean_file, mean_file_offsets, nd_prefix_name, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_use_custom_operations_config, transformations_config"/>
|
||||
</cli_parameters>
|
||||
</meta_data>
|
||||
<quantization_parameters>
|
||||
<config>{
|
||||
'compression': {
|
||||
'target_device': 'CPU',
|
||||
'algorithms': [
|
||||
{
|
||||
'name': 'DefaultQuantization',
|
||||
'params': {
|
||||
'stat_subset_size': 1,
|
||||
'preset': 'performance'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
'engine': {
|
||||
'evaluations': [
|
||||
{
|
||||
'name': 'action-recognition-0001-encoder',
|
||||
'module': 'custom_evaluators.sequential_action_recognition_evaluator.SequentialActionRecognitionEvaluator',
|
||||
'module_config': {
|
||||
'network_info': {
|
||||
'encoder': {},
|
||||
'decoder': {
|
||||
'num_processing_frames': 16,
|
||||
'adapter': 'classification'
|
||||
}
|
||||
},
|
||||
'launchers': [
|
||||
{
|
||||
'framework': 'dlsdk',
|
||||
'tags': [
|
||||
'FP32'
|
||||
]
|
||||
}
|
||||
],
|
||||
'datasets': [
|
||||
{
|
||||
'name': 'kinetics-400',
|
||||
'data_source': 'PATH',
|
||||
'annotation_conversion': {
|
||||
'converter': 'driver_action_recognition',
|
||||
'annotation_file': 'PATH',
|
||||
'data_dir': 'PATH'
|
||||
},
|
||||
'annotation': 'kinetics_action_recognition.pickle',
|
||||
'dataset_meta': 'kinetics_action_recognition.json',
|
||||
'preprocessing': [
|
||||
{
|
||||
'type': 'resize',
|
||||
'size': 224,
|
||||
'aspect_ratio_scale': 'fit_to_window'
|
||||
},
|
||||
{
|
||||
'type': 'crop',
|
||||
'size': 224
|
||||
}
|
||||
],
|
||||
'metrics': [
|
||||
{
|
||||
'type': 'clip_accuracy',
|
||||
'presenter': 'print_vector'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
'models': [],
|
||||
'stat_requests_number': null,
|
||||
'eval_requests_number': null,
|
||||
'type': 'accuracy_checker'
|
||||
}
|
||||
}</config>
|
||||
<version value="1.0"/>
|
||||
<cli_params value="{'evaluate': True, 'output_dir': 'PATH', 'direct_dump': False, 'log_level': 'DEBUG', 'pbar': False, 'keep_uncompressed_weights': False}"/>
|
||||
</quantization_parameters>
|
||||
</net>
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:5d678641136d8356fca47d4db4f39cb98e59426a9e1113754811d9e12e47463a
|
||||
size 17387
|
||||
|
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:993a01f598da6e29da4c104b80fb6928ee9a74e859e08e4e654b37351e2e23e0
size 180

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4da69916b5eae4d8985355ce8e1325fed0c2560d1b7e3e11b3083bf901478df2
size 3700526

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9b6b02a81088574e09387e7c634712e739c2f4a5a6f93452304373aecb047e56
size 159

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cc114deb9c089de3043ef6dae9ccdd961e2a328c9e1b26abcf77107bcddc9df8
size 9800899

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8f63794cc55d623e6a5d4990d12255dcd0dd3b5386330d016af935b39efb9357
size 171

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:26335e55efba71945ea9f39142e150b44e68be1acd955f440c6c7da527f38391
size 7670107
Binary file not shown.
Binary file not shown.
@@ -1,7 +1,3 @@
-{
-"name": "test_multibranch_propogation_with_fq_moving",
-"framework": "pytorch",
-"mo_args": {
-"input_model": "$model_dir/multibranch_propogation_test_nets/test_ig_border_case_with_fq_moving.onnx"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:03db5199e326aee2e68164fb64388f54d2d058fd2e80c8918c3b65c58ea1f843
+size 212

@@ -1,7 +1,3 @@
-{
-"name": "test_multibranch_propogation_without_fq_moving",
-"framework": "pytorch",
-"mo_args": {
-"input_model": "$model_dir/multibranch_propogation_test_nets/test_ig_border_case_without_fq_moving.onnx"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7a22758b328fcbe4860ff8b9381845cab814e77f2fe1d8435d36a774d5749cd
+size 218

@@ -1,7 +1,3 @@
-{
-"name": "multiple_out_ports_net",
-"framework": "tf",
-"mo_args": {
-"input_model": "$model_dir/multiple_out_ports_net/multiple_out_ports_net.pb"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:6394338eaa4b275cd74c61c9e25a90fda50ad0d8e946a02d72c14bc5c3d9a36f
+size 161

Binary file not shown.

@@ -1,7 +1,3 @@
-{
-"name": "outlier_channel_splitting_example",
-"framework": "onnx",
-"mo_args": {
-"input_model": "$model_dir/outlier_channel_splitting_example/outlier_channel_splitting_example.onnx"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:c7c69c314159743ee73d08be43b473891865440a7bc8092295bcb5bbb1d99f79
+size 198

Binary file not shown.
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cb72e6af63106ec6ae1624491d8ea3b39d5aab128bd7a66168cb1636ff77393d
size 144

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:950226c0d702bb8b688701434c3846e72c848dc9bd059d194ab132f781be486f
size 40224953

Binary file not shown.
@@ -1,8 +1,3 @@
-{
-"name": "scaleshift_fuse",
-"framework": "dldt",
-"mo_args": {
-"model": "$model_dir/scaleshift_fuse/scaleshift_fuse.xml",
-"weights": "$model_dir/scaleshift_fuse/scaleshift_fuse.bin"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:54e12e3df889dff95bfbe949a8e98facdfd5464bb746686ecc7b48d0da78e154
+size 202

@@ -1,2 +1,3 @@
-<?xml version="1.0" ?>
-<mapping/>
+version https://git-lfs.github.com/spec/v1
+oid sha256:43d82f63bfea7d3ae9d641169b9693596199d78171e94b918e09497a4f08a012
+size 34
@ -1,393 +1,3 @@
|
||||
<?xml version="1.0" ?>
|
||||
<net name="scaleshift_fuse" version="10">
|
||||
<layers>
|
||||
<layer id="0" name="data" type="Parameter" version="opset1">
|
||||
<data element_type="f32" shape="1,3,60,60"/>
|
||||
<output>
|
||||
<port id="0" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="1" name="data_mul_22172221/copy_const110_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="0" shape="1,3,1,1" size="12"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="2" name="Mul_/Fused_Mul_" type="Multiply" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="3" name="data_add_22182223/copy_const112_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="12" shape="1,3,1,1" size="12"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="4" name="Add_/Fused_Add_" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="5" name="106/Output_0/Data__const114_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="24" shape="32,3,3,3" size="3456"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="6" name="conv1/WithoutBiases" type="Convolution" version="opset1">
|
||||
<data dilations="1,1" output_padding="0,0" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="7" name="conv1/Dims1358/copy_const116_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3480" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="8" name="conv1" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="9" name="relu_conv1" type="ReLU" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="10" name="data_mul_22252229/copy_const119_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3608" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="11" name="Mul1_1975/Fused_Mul_" type="Multiply" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="12" name="data_add_22262231/copy_const121_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3736" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="13" name="Add1_1976/Fused_Add_" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="14" name="108/Output_0/Data__const123_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3864" shape="16,32,3,3" size="18432"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>16</dim>
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="15" name="pool1/WithoutBiases" type="Convolution" version="opset1">
|
||||
<data dilations="1,1" output_padding="0,0" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>16</dim>
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="16" name="pool1/Dims1394/copy_const125_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="22296" shape="1,16,1,1" size="64"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="17" name="pool1" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="18" name="angle_p_fc/sink_port_0" type="Result" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
</layers>
|
||||
<edges>
|
||||
<edge from-layer="0" from-port="0" to-layer="2" to-port="0"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="2" to-port="1"/>
|
||||
<edge from-layer="2" from-port="2" to-layer="4" to-port="0"/>
|
||||
<edge from-layer="3" from-port="1" to-layer="4" to-port="1"/>
|
||||
<edge from-layer="4" from-port="2" to-layer="6" to-port="0"/>
|
||||
<edge from-layer="5" from-port="1" to-layer="6" to-port="1"/>
|
||||
<edge from-layer="6" from-port="2" to-layer="8" to-port="0"/>
|
||||
<edge from-layer="7" from-port="1" to-layer="8" to-port="1"/>
|
||||
<edge from-layer="8" from-port="2" to-layer="9" to-port="0"/>
|
||||
<edge from-layer="9" from-port="1" to-layer="11" to-port="0"/>
|
||||
<edge from-layer="10" from-port="1" to-layer="11" to-port="1"/>
|
||||
<edge from-layer="11" from-port="2" to-layer="13" to-port="0"/>
|
||||
<edge from-layer="12" from-port="1" to-layer="13" to-port="1"/>
|
||||
<edge from-layer="13" from-port="2" to-layer="15" to-port="0"/>
|
||||
<edge from-layer="14" from-port="1" to-layer="15" to-port="1"/>
|
||||
<edge from-layer="15" from-port="2" to-layer="17" to-port="0"/>
|
||||
<edge from-layer="16" from-port="1" to-layer="17" to-port="1"/>
|
||||
<edge from-layer="17" from-port="2" to-layer="18" to-port="0"/>
|
||||
</edges>
|
||||
<meta_data>
|
||||
<MO_version value="unknown version"/>
|
||||
<cli_parameters>
|
||||
<blobs_as_inputs value="True"/>
|
||||
<caffe_parser_path value="DIR"/>
|
||||
<data_type value="FP32"/>
|
||||
<disable_nhwc_to_nchw value="False"/>
|
||||
<disable_omitting_optional value="False"/>
|
||||
<disable_resnet_optimization value="False"/>
|
||||
<enable_concat_optimization value="False"/>
|
||||
<enable_flattening_nested_params value="False"/>
|
||||
<enable_ssd_gluoncv value="False"/>
|
||||
<extensions value="DIR"/>
|
||||
<framework value="caffe"/>
|
||||
<freeze_placeholder_with_value value="{}"/>
|
||||
<generate_deprecated_IR_V2 value="False"/>
|
||||
<generate_deprecated_IR_V7 value="False"/>
|
||||
<generate_experimental_IR_V10 value="True"/>
|
||||
<input value="data"/>
|
||||
<input_model value="DIR/headpos_net.caffemodel"/>
|
||||
<input_model_is_text value="False"/>
|
||||
<input_proto value="DIR/headpos_net.prototxt"/>
|
||||
<input_shape value="[1,3,60,60]"/>
|
||||
<k value="DIR/CustomLayersMapping.xml"/>
|
||||
<keep_quantize_ops_in_IR value="True"/>
|
||||
<keep_shape_ops value="False"/>
|
||||
<legacy_mxnet_model value="False"/>
|
||||
<log_level value="ERROR"/>
|
||||
<mean_scale_values value="{'data': {'mean': array([120., 110., 104.]), 'scale': array([256.41025641])}}"/>
|
||||
<mean_values value="data[120.0,110.0,104.0]"/>
|
||||
<model_name value="headpos_net"/>
|
||||
<move_to_preprocess value="False"/>
|
||||
<output value="['angle_r_fc', 'angle_p_fc', 'angle_y_fc']"/>
|
||||
<output_dir value="DIR"/>
|
||||
<placeholder_data_types value="{}"/>
|
||||
<placeholder_shapes value="{'data': array([ 1, 3, 60, 60])}"/>
|
||||
<progress value="False"/>
|
||||
<remove_memory value="False"/>
|
||||
<remove_output_softmax value="False"/>
|
||||
<reverse_input_channels value="False"/>
|
||||
<save_params_from_nd value="False"/>
|
||||
<scale_values value="data[256.4102564102564]"/>
|
||||
<silent value="False"/>
|
||||
<stream_output value="False"/>
|
||||
<unset unset_cli_parameters="batch, counts, disable_fusing, disable_gfusing, finegrain_fusing, input_checkpoint, input_meta_graph, input_symbol, mean_file, mean_file_offsets, nd_prefix_name, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_operation_patterns, tensorflow_subgraph_patterns, tensorflow_use_custom_operations_config, transformations_config"/>
|
||||
</cli_parameters>
|
||||
</meta_data>
|
||||
</net>
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f3dcb45d8f69aa643c3e8038b92fdee885418d17d052470b9ef921039a8c3822
|
||||
size 10919
|
||||
|
Binary file not shown.
@@ -1,8 +1,3 @@
-{
-"name": "scaleshift_no_fuse_1",
-"framework": "dldt",
-"mo_args": {
-"model": "$model_dir/scaleshift_fuse/scaleshift_no_fuse_1.xml",
-"weights": "$model_dir/scaleshift_fuse/scaleshift_no_fuse_1.bin"
-}
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:58d6366c3dbb3c1a0f4f9c5ce8c4569260a0d65e355dc0d16b8aec3432a72d7a
+size 217

@@ -1,2 +1,3 @@
-<?xml version="1.0" ?>
-<mapping/>
+version https://git-lfs.github.com/spec/v1
+oid sha256:43d82f63bfea7d3ae9d641169b9693596199d78171e94b918e09497a4f08a012
+size 34
@ -1,404 +1,3 @@
|
||||
<?xml version="1.0" ?>
|
||||
<net name="scaleshift_no_fuse_1" version="10">
|
||||
<layers>
|
||||
<layer id="0" name="data" type="Parameter" version="opset1">
|
||||
<data element_type="f32" shape="1,3,60,60"/>
|
||||
<output>
|
||||
<port id="0" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="1" name="data_mul_22172221/copy_const110_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="0" shape="1,3,1,1" size="12"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="2" name="Mul_/Fused_Mul_" type="Multiply" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="3" name="data_add_22182223/copy_const112_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="12" shape="1,3,1,1" size="12"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="4" name="Add_/Fused_Add_" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="5" name="106/Output_0/Data__const114_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="24" shape="32,3,3,3" size="3456"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="6" name="conv1/WithoutBiases" type="Convolution" version="opset1">
|
||||
<data dilations="1,1" output_padding="0,0" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="7" name="conv1/Dims1358/copy_const116_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3480" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="8" name="conv1" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="9" name="relu_conv1" type="ReLU" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="10" name="data_mul_22252229/copy_const119_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3608" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="11" name="Mul1_1975/Fused_Mul_" type="Multiply" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="12" name="data_add_22262231/copy_const121_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3736" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="13" name="Add1_1976/Fused_Add_" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="14" name="108/Output_0/Data__const123_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3864" shape="16,32,3,3" size="18432"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>16</dim>
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="15" name="pool1/WithoutBiases" type="Convolution" version="opset1">
|
||||
<data dilations="1,1" output_padding="0,0" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>16</dim>
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="16" name="pool1/Dims1394/copy_const125_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="22296" shape="1,16,1,1" size="64"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="17" name="pool1" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="18" name="angle_p_fc/sink_port_0" type="Result" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
<layer id="19" name="angle_r_fc/sink_port_0" type="Result" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
</layers>
|
||||
<edges>
|
||||
<edge from-layer="0" from-port="0" to-layer="2" to-port="0"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="2" to-port="1"/>
|
||||
<edge from-layer="2" from-port="2" to-layer="4" to-port="0"/>
|
||||
<edge from-layer="3" from-port="1" to-layer="4" to-port="1"/>
|
||||
<edge from-layer="4" from-port="2" to-layer="6" to-port="0"/>
|
||||
<edge from-layer="5" from-port="1" to-layer="6" to-port="1"/>
|
||||
<edge from-layer="6" from-port="2" to-layer="8" to-port="0"/>
|
||||
<edge from-layer="7" from-port="1" to-layer="8" to-port="1"/>
|
||||
<edge from-layer="8" from-port="2" to-layer="9" to-port="0"/>
|
||||
<edge from-layer="9" from-port="1" to-layer="11" to-port="0"/>
|
||||
<edge from-layer="10" from-port="1" to-layer="11" to-port="1"/>
|
||||
<edge from-layer="11" from-port="2" to-layer="13" to-port="0"/>
|
||||
<edge from-layer="12" from-port="1" to-layer="13" to-port="1"/>
|
||||
<edge from-layer="13" from-port="2" to-layer="15" to-port="0"/>
|
||||
<edge from-layer="14" from-port="1" to-layer="15" to-port="1"/>
|
||||
<edge from-layer="15" from-port="2" to-layer="17" to-port="0"/>
|
||||
<edge from-layer="16" from-port="1" to-layer="17" to-port="1"/>
|
||||
<edge from-layer="17" from-port="2" to-layer="18" to-port="0"/>
|
||||
<edge from-layer="8" from-port="2" to-layer="19" to-port="0"/>
|
||||
</edges>
|
||||
<meta_data>
|
||||
<MO_version value="unknown version"/>
|
||||
<cli_parameters>
|
||||
<blobs_as_inputs value="True"/>
|
||||
<caffe_parser_path value="DIR"/>
|
||||
<data_type value="FP32"/>
|
||||
<disable_nhwc_to_nchw value="False"/>
|
||||
<disable_omitting_optional value="False"/>
|
||||
<disable_resnet_optimization value="False"/>
|
||||
<enable_concat_optimization value="False"/>
|
||||
<enable_flattening_nested_params value="False"/>
|
||||
<enable_ssd_gluoncv value="False"/>
|
||||
<extensions value="DIR"/>
|
||||
<framework value="caffe"/>
|
||||
<freeze_placeholder_with_value value="{}"/>
|
||||
<generate_deprecated_IR_V2 value="False"/>
|
||||
<generate_deprecated_IR_V7 value="False"/>
|
||||
<generate_experimental_IR_V10 value="True"/>
|
||||
<input value="data"/>
|
||||
<input_model value="DIR/headpos_net.caffemodel"/>
|
||||
<input_model_is_text value="False"/>
|
||||
<input_proto value="DIR/headpos_net.prototxt"/>
|
||||
<input_shape value="[1,3,60,60]"/>
|
||||
<k value="DIR/CustomLayersMapping.xml"/>
|
||||
<keep_quantize_ops_in_IR value="True"/>
|
||||
<keep_shape_ops value="False"/>
|
||||
<legacy_mxnet_model value="False"/>
|
||||
<log_level value="ERROR"/>
|
||||
<mean_scale_values value="{'data': {'mean': array([120., 110., 104.]), 'scale': array([256.41025641])}}"/>
|
||||
<mean_values value="data[120.0,110.0,104.0]"/>
|
||||
<model_name value="headpos_net"/>
|
||||
<move_to_preprocess value="False"/>
|
||||
<output value="['angle_r_fc', 'angle_p_fc', 'angle_y_fc']"/>
|
||||
<output_dir value="DIR"/>
|
||||
<placeholder_data_types value="{}"/>
|
||||
<placeholder_shapes value="{'data': array([ 1, 3, 60, 60])}"/>
|
||||
<progress value="False"/>
|
||||
<remove_memory value="False"/>
|
||||
<remove_output_softmax value="False"/>
|
||||
<reverse_input_channels value="False"/>
|
||||
<save_params_from_nd value="False"/>
|
||||
<scale_values value="data[256.4102564102564]"/>
|
||||
<silent value="False"/>
|
||||
<stream_output value="False"/>
|
||||
<unset unset_cli_parameters="batch, counts, disable_fusing, disable_gfusing, finegrain_fusing, input_checkpoint, input_meta_graph, input_symbol, mean_file, mean_file_offsets, nd_prefix_name, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_operation_patterns, tensorflow_subgraph_patterns, tensorflow_use_custom_operations_config, transformations_config"/>
|
||||
</cli_parameters>
|
||||
</meta_data>
|
||||
</net>
|
||||
version https://git-lfs.github.com/spec/v1
oid sha256:b816b8316b12511fa5a1b46e637179c86677b9b2b8608b671e6fa7c10c68484e
size 11207
Binary file not shown.
@ -1,8 +1,3 @@
{
"name": "scaleshift_no_fuse_2",
"framework": "dldt",
"mo_args": {
"model": "$model_dir/scaleshift_fuse/scaleshift_no_fuse_2.xml",
"weights": "$model_dir/scaleshift_fuse/scaleshift_no_fuse_2.bin"
}
}
version https://git-lfs.github.com/spec/v1
oid sha256:b1e0e6b4866c66ee6040110408d8e717f4b253948a623f45b8c427a175f18e62
size 217
@ -1,2 +1,3 @@
<?xml version="1.0" ?>
<mapping/>
version https://git-lfs.github.com/spec/v1
oid sha256:43d82f63bfea7d3ae9d641169b9693596199d78171e94b918e09497a4f08a012
size 34
@ -1,404 +1,3 @@
|
||||
<?xml version="1.0" ?>
|
||||
<net name="scaleshift_no_fuse_2" version="10">
|
||||
<layers>
|
||||
<layer id="0" name="data" type="Parameter" version="opset1">
|
||||
<data element_type="f32" shape="1,3,60,60"/>
|
||||
<output>
|
||||
<port id="0" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="1" name="data_mul_22172221/copy_const110_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="0" shape="1,3,1,1" size="12"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="2" name="Mul_/Fused_Mul_" type="Multiply" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="3" name="data_add_22182223/copy_const112_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="12" shape="1,3,1,1" size="12"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="4" name="Add_/Fused_Add_" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="5" name="106/Output_0/Data__const114_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="24" shape="32,3,3,3" size="3456"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="6" name="conv1/WithoutBiases" type="Convolution" version="opset1">
|
||||
<data dilations="1,1" output_padding="0,0" pads_begin="1,1" pads_end="1,1" strides="1,1"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>3</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="7" name="conv1/Dims1358/copy_const116_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3480" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="8" name="conv1" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="9" name="relu_conv1" type="ReLU" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="10" name="data_mul_22252229/copy_const119_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3608" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="11" name="Mul1_1975/Fused_Mul_" type="Multiply" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="12" name="data_add_22262231/copy_const121_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3736" shape="1,32,1,1" size="128"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="13" name="Add1_1976/Fused_Add_" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="14" name="108/Output_0/Data__const123_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="3864" shape="16,32,3,3" size="18432"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>16</dim>
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="15" name="pool1/WithoutBiases" type="Convolution" version="opset1">
|
||||
<data dilations="1,1" output_padding="0,0" pads_begin="1,1" pads_end="1,1" strides="2,2"/>
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>16</dim>
|
||||
<dim>32</dim>
|
||||
<dim>3</dim>
|
||||
<dim>3</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="16" name="pool1/Dims1394/copy_const125_const" type="Const" version="opset1">
|
||||
<data element_type="f32" offset="22296" shape="1,16,1,1" size="64"/>
|
||||
<output>
|
||||
<port id="1" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="17" name="pool1" type="Add" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
<port id="1">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>1</dim>
|
||||
<dim>1</dim>
|
||||
</port>
|
||||
</input>
|
||||
<output>
|
||||
<port id="2" precision="FP32">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</output>
|
||||
</layer>
|
||||
<layer id="18" name="angle_p_fc/sink_port_0" type="Result" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>16</dim>
|
||||
<dim>30</dim>
|
||||
<dim>30</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
<layer id="19" name="angle_r_fc/sink_port_0" type="Result" version="opset1">
|
||||
<input>
|
||||
<port id="0">
|
||||
<dim>1</dim>
|
||||
<dim>32</dim>
|
||||
<dim>60</dim>
|
||||
<dim>60</dim>
|
||||
</port>
|
||||
</input>
|
||||
</layer>
|
||||
</layers>
|
||||
<edges>
|
||||
<edge from-layer="0" from-port="0" to-layer="2" to-port="0"/>
|
||||
<edge from-layer="1" from-port="1" to-layer="2" to-port="1"/>
|
||||
<edge from-layer="2" from-port="2" to-layer="4" to-port="0"/>
|
||||
<edge from-layer="3" from-port="1" to-layer="4" to-port="1"/>
|
||||
<edge from-layer="4" from-port="2" to-layer="6" to-port="0"/>
|
||||
<edge from-layer="5" from-port="1" to-layer="6" to-port="1"/>
|
||||
<edge from-layer="6" from-port="2" to-layer="8" to-port="0"/>
|
||||
<edge from-layer="7" from-port="1" to-layer="8" to-port="1"/>
|
||||
<edge from-layer="8" from-port="2" to-layer="9" to-port="0"/>
|
||||
<edge from-layer="9" from-port="1" to-layer="11" to-port="0"/>
|
||||
<edge from-layer="10" from-port="1" to-layer="11" to-port="1"/>
|
||||
<edge from-layer="11" from-port="2" to-layer="13" to-port="0"/>
|
||||
<edge from-layer="12" from-port="1" to-layer="13" to-port="1"/>
|
||||
<edge from-layer="13" from-port="2" to-layer="15" to-port="0"/>
|
||||
<edge from-layer="14" from-port="1" to-layer="15" to-port="1"/>
|
||||
<edge from-layer="15" from-port="2" to-layer="17" to-port="0"/>
|
||||
<edge from-layer="16" from-port="1" to-layer="17" to-port="1"/>
|
||||
<edge from-layer="17" from-port="2" to-layer="18" to-port="0"/>
|
||||
<edge from-layer="9" from-port="1" to-layer="19" to-port="0"/>
|
||||
</edges>
|
||||
<meta_data>
|
||||
<MO_version value="unknown version"/>
|
||||
<cli_parameters>
|
||||
<blobs_as_inputs value="True"/>
|
||||
<caffe_parser_path value="DIR"/>
|
||||
<data_type value="FP32"/>
|
||||
<disable_nhwc_to_nchw value="False"/>
|
||||
<disable_omitting_optional value="False"/>
|
||||
<disable_resnet_optimization value="False"/>
|
||||
<enable_concat_optimization value="False"/>
|
||||
<enable_flattening_nested_params value="False"/>
|
||||
<enable_ssd_gluoncv value="False"/>
|
||||
<extensions value="DIR"/>
|
||||
<framework value="caffe"/>
|
||||
<freeze_placeholder_with_value value="{}"/>
|
||||
<generate_deprecated_IR_V2 value="False"/>
|
||||
<generate_deprecated_IR_V7 value="False"/>
|
||||
<generate_experimental_IR_V10 value="True"/>
|
||||
<input value="data"/>
|
||||
<input_model value="DIR/headpos_net.caffemodel"/>
|
||||
<input_model_is_text value="False"/>
|
||||
<input_proto value="DIR/headpos_net.prototxt"/>
|
||||
<input_shape value="[1,3,60,60]"/>
|
||||
<k value="DIR/CustomLayersMapping.xml"/>
|
||||
<keep_quantize_ops_in_IR value="True"/>
|
||||
<keep_shape_ops value="False"/>
|
||||
<legacy_mxnet_model value="False"/>
|
||||
<log_level value="ERROR"/>
|
||||
<mean_scale_values value="{'data': {'mean': array([120., 110., 104.]), 'scale': array([256.41025641])}}"/>
|
||||
<mean_values value="data[120.0,110.0,104.0]"/>
|
||||
<model_name value="headpos_net"/>
|
||||
<move_to_preprocess value="False"/>
|
||||
<output value="['angle_r_fc', 'angle_p_fc', 'angle_y_fc']"/>
|
||||
<output_dir value="DIR"/>
|
||||
<placeholder_data_types value="{}"/>
|
||||
<placeholder_shapes value="{'data': array([ 1, 3, 60, 60])}"/>
|
||||
<progress value="False"/>
|
||||
<remove_memory value="False"/>
|
||||
<remove_output_softmax value="False"/>
|
||||
<reverse_input_channels value="False"/>
|
||||
<save_params_from_nd value="False"/>
|
||||
<scale_values value="data[256.4102564102564]"/>
|
||||
<silent value="False"/>
|
||||
<stream_output value="False"/>
|
||||
<unset unset_cli_parameters="batch, counts, disable_fusing, disable_gfusing, finegrain_fusing, input_checkpoint, input_meta_graph, input_symbol, mean_file, mean_file_offsets, nd_prefix_name, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_operation_patterns, tensorflow_subgraph_patterns, tensorflow_use_custom_operations_config, transformations_config"/>
|
||||
</cli_parameters>
|
||||
</meta_data>
|
||||
</net>
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:7e3c4e8d1a0d0a3569dc97c4f6f4660e46c73dd6cfd528f47a710fbc47937bcb
|
||||
size 11207
|
||||
|
@ -1,7 +1,3 @@
{
"name": "sparsity_example",
"framework": "tf",
"mo_args": {
"input_model": "$model_dir/sparsity_example/sparsity_example.pb"
}
}
version https://git-lfs.github.com/spec/v1
oid sha256:14baef7789d2897a9325d380d1fa0b3c077910484f66ff0af9319ee9d9bc2c23
size 143
Binary file not shown.
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:9203d23507fd107be3aa3cdf40c519f8a4c55d41233e2cdd64df6878b646e09a
|
||||
size 165
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:5ac0f1d329683e99f74e640b3540f8694ab8d83fc5badb96487bbc43b92a14e5
|
||||
size 2897132
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:ad7053835f5e9dad1e03fc9d14945a77cba40df1d1fef958d6793c424a9c1f9c
|
||||
size 1085622
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:df841d3e81aa6476e54787bcfec5e334bee27b80bbad7c8446b0f7f120ca5e7e
|
||||
size 570777
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:7c0f1c9825c4702d799f718e8dd988657f2c44358cd463585f4ca1bf0d90a6e6
|
||||
size 264440
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:04d5d451ee94f09bea1bc69afdaf13c497a39797384675a702777ff6a1970506
|
||||
size 266073
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:28d8dd7ce7ef796fc94bd94d974986f693d6194de00bab1f6497d1361551c89e
|
||||
size 400586
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6a5bb771b397abb967e156f0deffb3f4ee7fa5f1dcdd13fe36226817c3f11a16
|
||||
size 138625
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f4091df7f399097a4ecb046014df916b26c79bdef30ff564d349f314a61de217
|
||||
size 365966
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6f2cea1cb0c85f3b3b5d360ffa383f91cebadb2cd6696a524bbec16e1e780fe4
|
||||
size 200424
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:72be47cf42c9771bd4fa3d0f42837c2f54cc8f619c0ca29c732f25a871a1fbb0
|
||||
size 249198
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:5e081799bb7929f865b836e413c74c2e4fd4c7ac4242db7969b632274e121631
|
||||
size 262139
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6848630335a85c7f3d80100f866da8a8b3ae987a04cef90637ad0ef229508622
|
||||
size 102370
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:add70f121123664a48002a9f0673e0dd9ec3984fd5bd0e293d219f5f97af8c4c
|
||||
size 437836
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3118d1ef6a1ec4e0cec1e68de7d04f9462fd1ccdc7601ad01ea5208ab6b9cf2b
|
||||
size 325360
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:aff0bb1f34ad541699d5ec229a49fc8d090acbf2bee320d3ad17592b14d46309
|
||||
size 97200
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:486ad7c7bca603eec7a940a655c9aa53bbbf23e1ef026d01961bbaf43696179a
|
||||
size 115127
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1ab4a16206b62c9a52cedaf26e696cd53bcaab943e30c1edf4f9a6dede83b633
|
||||
size 54186
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:d53c5dc4302f036a65d73b4d5d54df138a480f071a2e7da2959d85f175c1e4c1
|
||||
size 100908
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:35e2f90b0f4f3758947ab0d51bd9fef292674518fc1adf15df5e757416372867
|
||||
size 103255
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:fb176f5a1b2e380aa42bd87a5600dd917078a190bac4fd226975fd10b4aa99cf
|
||||
size 93195
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b725d959c41a95d8028dc68b7622deb1c11a9ab845a86dfa9b78ba5f5be4dc9c
|
||||
size 69444
|
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:7c8d6b23f61ad3585ed1b7184260d6be725c96d6a1b3c3834890e72f0311706b
|
||||
size 48053
|
Binary file not shown.
Binary file not shown.
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b6d022b9d82f84e4d1a019175fd246aa69a4b0eb5f169130c39323af6365cc5d
|
||||
size 113049
@ -1 +1 @@
{"Conv_142/WithoutBiases/fq_input_0": [0.0, 0.2242812712987264], "Conv_109/WithoutBiases/fq_input_0": [-1.8664333624963545e-06, 0.052320354198194446], "Conv_28/WithoutBiases/fq_input_0": [-0.6761601765950521, 0.7606801986694336], "Conv_133/WithoutBiases/fq_input_0": [0.0, 0.17377585172653198], "Conv_138/WithoutBiases/fq_input_0": [-5.599300087489063e-07, 0.038324688244406775], "Conv_123/WithoutBiases/fq_input_0": [0.0, 0.20735913515090942], "Conv_57/WithoutBiases/fq_input_0": [-0.7855395362490699, 0.6873470942179362], "Conv_94/WithoutBiases/fq_input_0": [0.0, 0.2643519441286723], "Conv_51/WithoutBiases/fq_input_0": [-6.99912510936133e-07, 0.07398068032526514], "Conv_45/WithoutBiases/fq_input_0": [0.0, 0.2704688310623169], "Conv_96/WithoutBiases/fq_input_0": [-0.46407422703944395, 0.49027196566263836], "Conv_65/WithoutBiases/fq_input_0": [0.0, 0.40045098463694256], "Add_114/fq_input_0": [-0.2957377831141154, 0.39612583793266837], "Add_114/fq_input_1": [-0.20504931608835855, 0.16843336678686596], "Add_75/fq_input_0": [-0.33282386845555795, 0.4387223720550537], "Add_75/fq_input_1": [-0.2811157777905464, 0.2498806913693746], "Conv_162/WithoutBiases/fq_input_0": [0.0, 0.19471615552902222], "Add_95/fq_input_0": [-0.5081917842229208, 0.451726030420374], "Add_95/fq_input_1": [-0.2702499142824075, 0.1861323912938436], "Conv_13/WithoutBiases/fq_input_0": [0.0, 0.2555986753044029], "Conv_90/WithoutBiases/fq_input_0": [-1.7497812773403325e-06, 0.049696102097811584], "Conv_4/WithoutBiases/fq_input_0": [-1.6797900262467193e-05, 0.12491567412913622], "Conv_0/WithoutBiases/fq_input_0": [-0.2776877482732137, 0.42340508152549416], "Conv_84/WithoutBiases/fq_input_0": [0.0, 0.20490833123524985], "Add_27/fq_input_0": [-0.45871689029642054, 0.6344869136810303], "Add_27/fq_input_1": [-0.5353115544174657, 0.5744806925455729], "Conv_61/WithoutBiases/fq_input_0": [0.0, 0.10879192669462], "Conv_70/WithoutBiases/fq_input_0": [-6.99912510936133e-07, 0.05212856566480833], "Conv_55/WithoutBiases/fq_input_0": [0.0, 0.24343103170394897], "Add_56/fq_input_0": [-0.47849833965301514, 0.538310632109642], "Add_56/fq_input_1": [-0.3224060734113057, 0.30042384113326215], "Add_153/fq_input_0": [-0.16854696046738396, 0.14747859040896097], "Add_153/fq_input_1": [-0.14878487214446068, 0.13225321968396506], "Conv_80/WithoutBiases/fq_input_0": [-1.0498687664041996e-06, 0.04881053143390195], "Conv_36/WithoutBiases/fq_input_0": [0.0, 0.3457832336425781], "Conv_17/WithoutBiases/fq_input_0": [0.0, 0.7615942160288492], "Add_124/fq_input_0": [-0.34435376035215715, 0.43272773424784344], "Add_124/fq_input_1": [-0.18676529611860002, 0.22736644744873047], "Conv_32/WithoutBiases/fq_input_0": [0.0, 0.14368237382559865], "Conv_158/WithoutBiases/fq_input_0": [-1.007426508515174e-05, 0.014825364361810207], "Conv_163/WithoutBiases/fq_input_0": [-0.2511186701313296, 0.27376051743825275], "Conv_26/WithoutBiases/fq_input_0": [0.0, 0.4776722590128581], "Conv_152/WithoutBiases/fq_input_0": [0.0, 0.12657655278841654], "Conv_100/WithoutBiases/fq_input_0": [0.0, 0.06863055285389742], "Add_85/fq_input_0": [-0.46573809782663983, 0.42055455102256284], "Add_85/fq_input_1": [-0.1863194237584653, 0.21976137161254883], "Conv_129/WithoutBiases/fq_input_0": [-9.332166812481773e-07, 0.05778365740125063], "Conv_113/WithoutBiases/fq_input_0": [0.0, 0.24311176935831705], "Conv_9/WithoutBiases/fq_input_0": [-0.7567498683929443, 0.812121809982672], "Conv_148/WithoutBiases/fq_input_0": [-2.2397200349956254e-06, 0.03194333815664984], 
"GlobalAveragePool_167/reduce/fq_input_0": [0.0, 1.0], "Conv_119/WithoutBiases/fq_input_0": [-1.8664333624963547e-06, 0.05056043888575707], "Conv_104/WithoutBiases/fq_input_0": [0.0, 0.2517488996187846], "Conv_22/WithoutBiases/fq_input_0": [0.0, 0.1020818330106084], "Add_143/fq_input_0": [-0.17097491853766975, 0.1256142258644104], "Add_143/fq_input_1": [-0.08282462934342523, 0.08347679177920024], "Conv_41/WithoutBiases/fq_input_0": [0.0, 0.07427590073914163], "Add_46/fq_input_0": [-0.4115632568917624, 0.38350212574005127], "Add_46/fq_input_1": [-0.36867761611938477, 0.32259291410446167], "Conv_8/WithoutBiases/fq_input_0": [0.0, 1.0], "Conv_125/WithoutBiases/fq_input_0": [-0.38285431117876195, 0.4735303322474162], "Conv_154/WithoutBiases/fq_input_0": [-0.2496981308573768, 0.19556776682535806], "Conv_74/WithoutBiases/fq_input_0": [0.0, 0.22831233342488608], "Reshape_173/fq_input_0": [0.0, 0.8365342617034912]}
{"Conv_74/WithoutBiases/fq_input_0": [0.0, 0.1986770341905644], "Add_124/fq_input_0": [-0.272860113301509, 0.30696762746419765], "Add_124/fq_input_1": [-0.32439105371346183, 0.26227361789599035], "Conv_125/WithoutBiases/fq_input_0": [-0.41168129966033096, 0.48557281498398014], "Conv_70/WithoutBiases/fq_input_0": [0.0, 0.04836913998273462], "Add_27/fq_input_0": [-0.4858114130019989, 0.4820160113379209], "Add_27/fq_input_1": [-0.7506775992739116, 0.756588446512289], "Conv_9/WithoutBiases/fq_input_0": [-1.0, 0.8613138686131386], "Add_75/fq_input_0": [-0.43236630570979506, 0.32800202502122383], "Add_75/fq_input_1": [-0.26767741089219377, 0.34176669426414025], "Conv_80/WithoutBiases/fq_input_0": [-9.331298663654041e-07, 0.04795441368156403], "Conv_8/WithoutBiases/fq_input_0": [0.0, 0.8888061977130475], "Add_95/fq_input_0": [-0.4670480196746606, 0.3776132925029171], "Add_95/fq_input_1": [-0.5320417328378549, 0.2902045815479209], "Conv_154/WithoutBiases/fq_input_0": [-0.5123423996720231, 0.6336866522259234], "Conv_152/WithoutBiases/fq_input_0": [0.0, 0.28360465669046603], "Conv_55/WithoutBiases/fq_input_0": [0.0, 0.26970554618804865], "Conv_142/WithoutBiases/fq_input_0": [0.0, 0.37551385019509304], "Add_143/fq_input_0": [-0.26211320360754853, 0.2725977317518505], "Add_143/fq_input_1": [-0.12865026386419867, 0.14936513685928152], "Conv_41/WithoutBiases/fq_input_0": [0.0, 0.0730270750799249], "Conv_65/WithoutBiases/fq_input_0": [0.0, 0.2538664893986357], "Conv_104/WithoutBiases/fq_input_0": [0.0, 0.32660769410782287], "GlobalAveragePool_167/reduce/fq_input_0": [0.0, 0.8888061977130475], "Add_46/fq_input_0": [-0.4630984623750088, 0.41164307766667446], "Add_46/fq_input_1": [-0.4644157525842346, 0.4128140022970974], "Add_85/fq_input_0": [-0.39777751548287493, 0.3824783802719951], "Add_85/fq_input_1": [-0.23387001130811247, 0.2046362598945984], "Conv_84/WithoutBiases/fq_input_0": [0.0, 0.20072268720704173], "Conv_32/WithoutBiases/fq_input_0": [0.0, 0.16312493181298865], "Conv_17/WithoutBiases/fq_input_0": [0.0, 0.5136048763683335], "Conv_109/WithoutBiases/fq_input_0": [-1.4515353476795174e-06, 0.0488219123411795], "Conv_57/WithoutBiases/fq_input_0": [-0.63127367215193, 0.7807858576615976], "Add_56/fq_input_0": [-0.5720002764330961, 0.6641020158587643], "Add_56/fq_input_1": [-0.29116690834763886, 0.3174196623789834], "Conv_22/WithoutBiases/fq_input_0": [0.0, 0.1054715493335459], "Conv_94/WithoutBiases/fq_input_0": [0.0, 0.500169455692634], "Conv_138/WithoutBiases/fq_input_0": [-2.4883463103077446e-07, 0.041876295983191626], "Conv_129/WithoutBiases/fq_input_0": [-4.147243850512907e-07, 0.08542537264190668], "Conv_4/WithoutBiases/fq_input_0": [-1.4930077861846466e-05, 0.14306248555199863], "Conv_90/WithoutBiases/fq_input_0": [-1.8662597327308082e-06, 0.04625857273261655], "Conv_0/WithoutBiases/fq_input_0": [-0.29686057572878516, 0.3136188340360553], "Conv_96/WithoutBiases/fq_input_0": [-0.5538940426317897, 0.44782922595761715], "Conv_61/WithoutBiases/fq_input_0": [0.0, 0.10988586790625982], "Conv_158/WithoutBiases/fq_input_0": [-1.8907486646365963e-05, 0.012060893794179912], "Conv_163/WithoutBiases/fq_input_0": [-0.22149533099768434, 0.17626036903336853], "Conv_119/WithoutBiases/fq_input_0": [-1.4515353476795176e-06, 0.04821581800256121], "Conv_51/WithoutBiases/fq_input_0": [-6.220865775769361e-07, 0.07665421540265335], "Conv_113/WithoutBiases/fq_input_0": [0.0, 0.36954189677216925], "Add_114/fq_input_0": [-0.2459926094997045, 0.28560158899541965], "Add_114/fq_input_1": [-0.1701828436690771, 
0.22433193029105622], "Conv_148/WithoutBiases/fq_input_0": [-4.976692620615489e-07, 0.04610769558002592], "Conv_13/WithoutBiases/fq_input_0": [0.0, 0.27532813837889425], "Conv_162/WithoutBiases/fq_input_0": [0.0, 0.18792196604203984], "Add_153/fq_input_0": [-0.25017974292631573, 0.3143851636773171], "Add_153/fq_input_1": [-0.4105879150203398, 0.4010393588570761], "Conv_133/WithoutBiases/fq_input_0": [0.0, 0.3768387337711209], "Conv_28/WithoutBiases/fq_input_0": [-0.8976801240198111, 0.9335873289806036], "Conv_45/WithoutBiases/fq_input_0": [0.0, 0.268333901599714], "Conv_100/WithoutBiases/fq_input_0": [0.0, 0.06661587036129482], "Conv_123/WithoutBiases/fq_input_0": [0.0, 0.30598569918273477], "Conv_26/WithoutBiases/fq_input_0": [0.0, 0.412934844745316], "Conv_36/WithoutBiases/fq_input_0": [0.0, 0.368357190279106], "Reshape_173/fq_input_0": [0.0, 0.6060830426133388]}
@ -1 +1 @@
{"Conv_142/WithoutBiases/fq_input_0": [0.0, 0.2755489305605197], "Conv_109/WithoutBiases/fq_input_0": [0.0, 0.06430231181257642], "Conv_28/WithoutBiases/fq_input_0": [-0.9369239559344311, 0.6142932242289382], "Conv_133/WithoutBiases/fq_input_0": [0.0, 0.21349865650044245], "Conv_138/WithoutBiases/fq_input_0": [0.0, 0.047094490479181655], "Conv_123/WithoutBiases/fq_input_0": [0.0, 0.2547586234103526], "Conv_57/WithoutBiases/fq_input_0": [-0.9705570862387501, 0.6142932242289382], "Conv_94/WithoutBiases/fq_input_0": [0.0, 0.3247792161799804], "Conv_51/WithoutBiases/fq_input_0": [0.0, 0.09090055108477514], "Conv_45/WithoutBiases/fq_input_0": [0.0, 0.33229434057340523], "Conv_96/WithoutBiases/fq_input_0": [-0.6070843394740638, 0.6023414930719228], "Conv_65/WithoutBiases/fq_input_0": [0.0, 0.4919886529965608], "Add_114/fq_input_0": [-0.48748908408243213, 0.4836805756130381], "Add_114/fq_input_1": [-0.253904439444876, 0.25192081101171293], "Add_75/fq_input_0": [-0.5432525212645125, 0.5390083609421334], "Add_75/fq_input_1": [-0.3456265869534882, 0.3429263792429141], "Conv_162/WithoutBiases/fq_input_0": [0.0, 0.23922562997877286], "Add_95/fq_input_0": [-0.6292737404108696, 0.6142932242289382], "Add_95/fq_input_1": [-0.3327225799137669, 0.33012318475819064], "Conv_13/WithoutBiases/fq_input_0": [0.0, 0.2935925959882982], "Conv_90/WithoutBiases/fq_input_0": [0.0, 0.06107350902584563], "Conv_4/WithoutBiases/fq_input_0": [0.0, 0.15410890631916244], "Conv_0/WithoutBiases/fq_input_0": [-0.5232526511631934, 0.519164739825981], "Conv_84/WithoutBiases/fq_input_0": [0.0, 0.2517475989317457], "Add_27/fq_input_0": [-0.7856599925642845, 0.6142932242289382], "Add_27/fq_input_1": [-0.711356667728818, 0.6142932242289382], "Conv_61/WithoutBiases/fq_input_0": [0.0, 0.1336603000288348], "Conv_70/WithoutBiases/fq_input_0": [0.0, 0.06405321570589022], "Conv_55/WithoutBiases/fq_input_0": [0.0, 0.29907606668559134], "Add_56/fq_input_0": [-0.6654038284290348, 0.6142932242289382], "Add_56/fq_input_1": [-0.3992226597227293, 0.39610373269364546], "Add_153/fq_input_0": [-0.20742287807060308, 0.20580238683567648], "Add_153/fq_input_1": [-0.18306217639729155, 0.18163200314418773], "Conv_80/WithoutBiases/fq_input_0": [0.0, 0.05997703213378154], "Conv_36/WithoutBiases/fq_input_0": [0.0, 0.4248245949572151], "Conv_17/WithoutBiases/fq_input_0": [0.0, 0.6142932242289382], "Add_124/fq_input_0": [-0.5358295989102682, 0.5316434301687817], "Add_124/fq_input_1": [-0.28153885850156357, 0.2793393361695201], "Conv_32/WithoutBiases/fq_input_0": [0.0, 0.1765262133808422], "Conv_158/WithoutBiases/fq_input_0": [0.0, 0.018292064328992032], "Conv_163/WithoutBiases/fq_input_0": [-0.338986796192709, 0.3363384618474535], "Conv_26/WithoutBiases/fq_input_0": [0.0, 0.5868616642274581], "Conv_152/WithoutBiases/fq_input_0": [0.0, 0.15551023744836157], "Conv_100/WithoutBiases/fq_input_0": [0.0, 0.08431856527548029], "Add_85/fq_input_0": [-0.5767050235165817, 0.5721995155203583], "Add_85/fq_input_1": [-0.2721217945778322, 0.2699958430576929], "Conv_129/WithoutBiases/fq_input_0": [0.0, 0.07100889144362615], "Conv_113/WithoutBiases/fq_input_0": [0.0, 0.2986838252942451], "Conv_9/WithoutBiases/fq_input_0": [-1.0, 0.6142932242289382], "Conv_148/WithoutBiases/fq_input_0": [0.0, 0.03926168143718818], "GlobalAveragePool_167/reduce/fq_input_0": [0.0, 0.6142932242289382], "Conv_119/WithoutBiases/fq_input_0": [0.0, 0.06214195124008526], "Conv_104/WithoutBiases/fq_input_0": [0.0, 0.30929528648582094], "Conv_22/WithoutBiases/fq_input_0": [0.0, 0.12541635944634277], 
"Add_143/fq_input_0": [-0.21034385164940816, 0.2087005403083972], "Add_143/fq_input_1": [-0.10336600203153663, 0.10255845514066526], "Conv_41/WithoutBiases/fq_input_0": [0.0, 0.0912543592151422], "Add_46/fq_input_0": [-0.5058592177776973, 0.501907192638809], "Add_46/fq_input_1": [-0.45651887673855107, 0.4529523230140312], "Conv_8/WithoutBiases/fq_input_0": [0.0, 0.6142932242289382], "Conv_125/WithoutBiases/fq_input_0": [-0.5863538384961826, 0.5817729491329312], "Conv_154/WithoutBiases/fq_input_0": [-0.3077924152160166, 0.3053877869721415], "Conv_74/WithoutBiases/fq_input_0": [0.0, 0.2805014388616113], "Reshape_173/fq_input_0": [0.0, 0.6142932242289382]}
{"Conv_74/WithoutBiases/fq_input_0": [0.0, 0.19712486986095062], "Add_124/fq_input_0": [-0.30510741152862386, 0.3027237598760565], "Add_124/fq_input_1": [-0.32193653739507117, 0.3194214081966722], "Conv_125/WithoutBiases/fq_input_0": [-0.48557281498398014, 0.44093119964670713], "Conv_70/WithoutBiases/fq_input_0": [0.0, 0.04799125778908363], "Add_27/fq_input_0": [-0.4857995917664735, 0.44093119964670713], "Add_27/fq_input_1": [-0.756588446512289, 0.44093119964670713], "Conv_9/WithoutBiases/fq_input_0": [-1.0, 0.44093119964670713], "Add_75/fq_input_0": [-0.4323663057097951, 0.4289884439464373], "Add_75/fq_input_1": [-0.3401223683421448, 0.3374651623394718], "Conv_80/WithoutBiases/fq_input_0": [0.0, 0.0475885850586743], "Conv_8/WithoutBiases/fq_input_0": [0.0, 0.44093119964670713], "Add_95/fq_input_0": [-0.46598874500577203, 0.44093119964670713], "Add_95/fq_input_1": [-0.5275738105273449, 0.44093119964670713], "Conv_154/WithoutBiases/fq_input_0": [-0.630203460555073, 0.44093119964670713], "Conv_152/WithoutBiases/fq_input_0": [0.0, 0.2813889953100718], "Conv_55/WithoutBiases/fq_input_0": [0.0, 0.2675984716084545], "Conv_142/WithoutBiases/fq_input_0": [0.0, 0.3725801482404439], "Add_143/fq_input_0": [-0.27090319672266056, 0.26878676549826475], "Add_143/fq_input_1": [-0.14877220909688169, 0.14760992621331231], "Conv_41/WithoutBiases/fq_input_0": [0.0, 0.07245654888854787], "Conv_65/WithoutBiases/fq_input_0": [0.0, 0.2518831574502089], "Conv_104/WithoutBiases/fq_input_0": [0.0, 0.3240560714976055], "GlobalAveragePool_167/reduce/fq_input_0": [0.0, 0.44093119964670713], "Add_46/fq_input_0": [-0.4630984623750088, 0.44093119964670713], "Add_46/fq_input_1": [-0.4644157525842347, 0.44093119964670713], "Add_85/fq_input_0": [-0.3973987456323131, 0.3942940679320606], "Add_85/fq_input_1": [-0.2338700113081125, 0.23204290184476786], "Conv_84/WithoutBiases/fq_input_0": [0.0, 0.19915454121323672], "Conv_32/WithoutBiases/fq_input_0": [0.0, 0.16185053017681666], "Conv_17/WithoutBiases/fq_input_0": [0.0, 0.44093119964670713], "Conv_109/WithoutBiases/fq_input_0": [0.0, 0.048461754433139985], "Conv_57/WithoutBiases/fq_input_0": [-0.7807858576615976, 0.44093119964670713], "Add_56/fq_input_0": [-0.6641020158587643, 0.44093119964670713], "Add_56/fq_input_1": [-0.3157671917532787, 0.31330026056770627], "Conv_22/WithoutBiases/fq_input_0": [0.0, 0.104647550557784], "Conv_94/WithoutBiases/fq_input_0": [0.0, 0.44093119964670713], "Conv_138/WithoutBiases/fq_input_0": [0.0, 0.04155202021016073], "Conv_129/WithoutBiases/fq_input_0": [0.0, 0.08476486777412125], "Conv_4/WithoutBiases/fq_input_0": [0.0, 0.14246885253362926], "Conv_90/WithoutBiases/fq_input_0": [0.0, 0.04591175662268951], "Conv_0/WithoutBiases/fq_input_0": [-0.3136188340360553, 0.3111686868951486], "Conv_96/WithoutBiases/fq_input_0": [-0.5527886268741754, 0.44093119964670713], "Conv_61/WithoutBiases/fq_input_0": [0.0, 0.10902738797391175], "Conv_158/WithoutBiases/fq_input_0": [0.0, 0.012117864176108687], "Conv_163/WithoutBiases/fq_input_0": [-0.22149533099768434, 0.21976489872426494], "Conv_119/WithoutBiases/fq_input_0": [0.0, 0.0478593288007644], "Conv_51/WithoutBiases/fq_input_0": [0.0, 0.07606394972161658], "Conv_113/WithoutBiases/fq_input_0": [0.0, 0.3666548507036367], "Add_114/fq_input_0": [-0.2833707103569921, 0.2811568766823281], "Add_114/fq_input_1": [-0.22433193029105625, 0.22257933708565736], "Conv_148/WithoutBiases/fq_input_0": [0.0, 0.04575276640093503], "Conv_13/WithoutBiases/fq_input_0": [0.0, 0.2537086036571915], 
"Conv_162/WithoutBiases/fq_input_0": [0.0, 0.18645382568233643], "Add_153/fq_input_0": [-0.3143851636773171, 0.31192902958608804], "Add_153/fq_input_1": [-0.4100139414214486, 0.40681070750409354], "Conv_133/WithoutBiases/fq_input_0": [0.0, 0.3738946811635341], "Conv_28/WithoutBiases/fq_input_0": [-0.9303551230468007, 0.44093119964670713], "Conv_45/WithoutBiases/fq_input_0": [0.0, 0.26623754299346625], "Conv_100/WithoutBiases/fq_input_0": [0.0, 0.06609543320964402], "Conv_123/WithoutBiases/fq_input_0": [0.0, 0.30359518590786966], "Conv_26/WithoutBiases/fq_input_0": [0.0, 0.4097087912707432], "Conv_36/WithoutBiases/fq_input_0": [0.0, 0.3654793997300505], "Reshape_173/fq_input_0": [0.0, 0.44093119964670713]}
@ -1 +1 @@
{"Conv_142/WithoutBiases/fq_input_0": [0.0, 0.2242812712987264], "Conv_109/WithoutBiases/fq_input_0": [0.0, 0.05233845114707947], "Conv_28/WithoutBiases/fq_input_0": [-0.7626031990752132, 0.7566453615824381], "Conv_133/WithoutBiases/fq_input_0": [0.0, 0.17377585172653198], "Conv_138/WithoutBiases/fq_input_0": [0.0, 0.038332256178061165], "Conv_123/WithoutBiases/fq_input_0": [0.0, 0.20735913515090942], "Conv_57/WithoutBiases/fq_input_0": [-0.7899786681327919, 0.7838069597880045], "Conv_94/WithoutBiases/fq_input_0": [0.0, 0.2643519441286723], "Conv_51/WithoutBiases/fq_input_0": [0.0, 0.0739879161119461], "Conv_45/WithoutBiases/fq_input_0": [0.0, 0.2704688310623169], "Conv_96/WithoutBiases/fq_input_0": [-0.49413237484108435, 0.49027196566263836], "Conv_65/WithoutBiases/fq_input_0": [0.0, 0.40045098463694256], "Add_114/fq_input_0": [-0.39678858959768704, 0.3936886787414551], "Add_114/fq_input_1": [-0.20666387763236138, 0.20504931608835855], "Add_75/fq_input_0": [-0.44217687892162894, 0.4387223720550537], "Add_75/fq_input_1": [-0.28132052684393455, 0.2791227102279663], "Conv_162/WithoutBiases/fq_input_0": [0.0, 0.19471615552902222], "Add_95/fq_input_0": [-0.5121932943349122, 0.5081917842229208], "Add_95/fq_input_1": [-0.2708173937059137, 0.26870163281758624], "Conv_13/WithoutBiases/fq_input_0": [0.0, 0.25559868415196735], "Conv_90/WithoutBiases/fq_input_0": [0.0, 0.0497103879849116], "Conv_4/WithoutBiases/fq_input_0": [0.0, 0.1254359483718872], "Conv_0/WithoutBiases/fq_input_0": [-0.4258981139015338, 0.42257078488667804], "Conv_84/WithoutBiases/fq_input_0": [0.0, 0.20490833123524985], "Add_27/fq_input_0": [-0.6394828736312747, 0.6344869136810303], "Add_27/fq_input_1": [-0.5790041625656168, 0.5744806925455729], "Conv_61/WithoutBiases/fq_input_0": [0.0, 0.10879193743069966], "Conv_70/WithoutBiases/fq_input_0": [0.0, 0.05213570098082224], "Conv_55/WithoutBiases/fq_input_0": [0.0, 0.24343103170394897], "Add_56/fq_input_0": [-0.5416011459871227, 0.5373698870340983], "Add_56/fq_input_1": [-0.32494470391060737, 0.3224060734113057], "Add_153/fq_input_0": [-0.16883051113819514, 0.16751152276992798], "Add_153/fq_input_1": [-0.14900227544188813, 0.14783819516499838], "Conv_80/WithoutBiases/fq_input_0": [0.0, 0.048817917704582214], "Conv_36/WithoutBiases/fq_input_0": [0.0, 0.3457832336425781], "Conv_17/WithoutBiases/fq_input_0": [0.0, 0.7615942160288492], "Add_124/fq_input_0": [-0.43613503924192093, 0.43272773424784344], "Add_124/fq_input_1": [-0.22915673443651574, 0.22736644744873047], "Conv_32/WithoutBiases/fq_input_0": [0.0, 0.14368237058321634], "Conv_158/WithoutBiases/fq_input_0": [0.0, 0.014888707548379898], "Conv_163/WithoutBiases/fq_input_0": [-0.27591611206375083, 0.27376051743825275], "Conv_26/WithoutBiases/fq_input_0": [0.0, 0.4776722590128581], "Conv_152/WithoutBiases/fq_input_0": [0.0, 0.12657655278841654], "Conv_100/WithoutBiases/fq_input_0": [0.0, 0.06863055129845937], "Add_85/fq_input_0": [-0.46940532694338505, 0.46573809782663983], "Add_85/fq_input_1": [-0.22149177611343504, 0.21976137161254883], "Conv_129/WithoutBiases/fq_input_0": [0.0, 0.0577972282965978], "Conv_113/WithoutBiases/fq_input_0": [0.0, 0.24311176935831705], "Conv_9/WithoutBiases/fq_input_0": [-0.8139435375143537, 0.8075846036275228], "Conv_148/WithoutBiases/fq_input_0": [0.0, 0.03195679187774658], "GlobalAveragePool_167/reduce/fq_input_0": [0.0, 1.0], "Conv_119/WithoutBiases/fq_input_0": [0.0, 0.050580039620399475], "Conv_104/WithoutBiases/fq_input_0": [0.0, 0.2517488996187846], "Conv_22/WithoutBiases/fq_input_0": [0.0, 
0.10208183526992798], "Add_143/fq_input_0": [-0.1712080187059137, 0.16987045605977377], "Add_143/fq_input_1": [-0.0841340893522648, 0.08347679177920024], "Conv_41/WithoutBiases/fq_input_0": [0.0, 0.0742758959531784], "Add_46/fq_input_0": [-0.41174084120222276, 0.4085241158803304], "Add_46/fq_input_1": [-0.3715805894746555, 0.36867761611938477], "Conv_8/WithoutBiases/fq_input_0": [0.0, 1.0], "Conv_125/WithoutBiases/fq_input_0": [-0.47725891754070293, 0.4735303322474162], "Conv_154/WithoutBiases/fq_input_0": [-0.2505256472610113, 0.24856841564178467], "Conv_74/WithoutBiases/fq_input_0": [0.0, 0.22831233342488608], "Reshape_173/fq_input_0": [0.0, 0.8365342617034912]}
{"Conv_74/WithoutBiases/fq_input_0": [0.0, 0.19712486986095062], "Add_124/fq_input_0": [-0.30510741152862386, 0.3027237598760565], "Add_124/fq_input_1": [-0.32193653739507117, 0.3194214081966722], "Conv_125/WithoutBiases/fq_input_0": [-0.48557281498398014, 0.4817792773669178], "Conv_70/WithoutBiases/fq_input_0": [0.0, 0.04799125778908363], "Add_27/fq_input_0": [-0.4857995917664735, 0.48200428245579796], "Add_27/fq_input_1": [-0.756588446512289, 0.7506775992739118], "Conv_9/WithoutBiases/fq_input_0": [-1.0, 0.9921875], "Add_75/fq_input_0": [-0.4323663057097951, 0.4289884439464373], "Add_75/fq_input_1": [-0.3401223683421448, 0.3374651623394718], "Conv_80/WithoutBiases/fq_input_0": [0.0, 0.0475885850586743], "Conv_8/WithoutBiases/fq_input_0": [0.0, 0.8818623992934143], "Add_95/fq_input_0": [-0.46598874500577203, 0.4623482079354144], "Add_95/fq_input_1": [-0.5275738105273449, 0.5234521401326], "Conv_154/WithoutBiases/fq_input_0": [-0.630203460555073, 0.6252799960194865], "Conv_152/WithoutBiases/fq_input_0": [0.0, 0.2813889953100718], "Conv_55/WithoutBiases/fq_input_0": [0.0, 0.2675984716084545], "Conv_142/WithoutBiases/fq_input_0": [0.0, 0.3725801482404439], "Add_143/fq_input_0": [-0.27090319672266056, 0.26878676549826475], "Add_143/fq_input_1": [-0.14877220909688169, 0.14760992621331231], "Conv_41/WithoutBiases/fq_input_0": [0.0, 0.07245654888854787], "Conv_65/WithoutBiases/fq_input_0": [0.0, 0.2518831574502089], "Conv_104/WithoutBiases/fq_input_0": [0.0, 0.3240560714976055], "GlobalAveragePool_167/reduce/fq_input_0": [0.0, 0.8818623992934143], "Add_46/fq_input_0": [-0.4630984623750088, 0.45948050563770404], "Add_46/fq_input_1": [-0.4644157525842347, 0.4607875045171703], "Add_85/fq_input_0": [-0.3973987456323131, 0.3942940679320606], "Add_85/fq_input_1": [-0.2338700113081125, 0.23204290184476786], "Conv_84/WithoutBiases/fq_input_0": [0.0, 0.19915454121323672], "Conv_32/WithoutBiases/fq_input_0": [0.0, 0.16185053017681666], "Conv_17/WithoutBiases/fq_input_0": [0.0, 0.5095923382717059], "Conv_109/WithoutBiases/fq_input_0": [0.0, 0.048461754433139985], "Conv_57/WithoutBiases/fq_input_0": [-0.7807858576615976, 0.7746859681486165], "Add_56/fq_input_0": [-0.6641020158587643, 0.6589137188598677], "Add_56/fq_input_1": [-0.3157671917532787, 0.31330026056770627], "Conv_22/WithoutBiases/fq_input_0": [0.0, 0.104647550557784], "Conv_94/WithoutBiases/fq_input_0": [0.0, 0.4962618818200354], "Conv_138/WithoutBiases/fq_input_0": [0.0, 0.04155202021016073], "Conv_129/WithoutBiases/fq_input_0": [0.0, 0.08476486777412125], "Conv_4/WithoutBiases/fq_input_0": [0.0, 0.14246885253362926], "Conv_90/WithoutBiases/fq_input_0": [0.0, 0.04591175662268951], "Conv_0/WithoutBiases/fq_input_0": [-0.3136188340360553, 0.3111686868951486], "Conv_96/WithoutBiases/fq_input_0": [-0.5527886268741754, 0.5484699657267209], "Conv_61/WithoutBiases/fq_input_0": [0.0, 0.10902738797391175], "Conv_158/WithoutBiases/fq_input_0": [0.0, 0.012117864176108687], "Conv_163/WithoutBiases/fq_input_0": [-0.22149533099768434, 0.21976489872426494], "Conv_119/WithoutBiases/fq_input_0": [0.0, 0.0478593288007644], "Conv_51/WithoutBiases/fq_input_0": [0.0, 0.07606394972161658], "Conv_113/WithoutBiases/fq_input_0": [0.0, 0.3666548507036367], "Add_114/fq_input_0": [-0.2833707103569921, 0.2811568766823281], "Add_114/fq_input_1": [-0.22433193029105625, 0.22257933708565736], "Conv_148/WithoutBiases/fq_input_0": [0.0, 0.04575276640093503], "Conv_13/WithoutBiases/fq_input_0": [0.0, 0.2731771330544343], "Conv_162/WithoutBiases/fq_input_0": [0.0, 
0.18645382568233643], "Add_153/fq_input_0": [-0.3143851636773171, 0.31192902958608804], "Add_153/fq_input_1": [-0.4100139414214486, 0.40681070750409354], "Conv_133/WithoutBiases/fq_input_0": [0.0, 0.3738946811635341], "Conv_28/WithoutBiases/fq_input_0": [-0.9303551230468007, 0.9230867236479976], "Conv_45/WithoutBiases/fq_input_0": [0.0, 0.26623754299346625], "Conv_100/WithoutBiases/fq_input_0": [0.0, 0.06609543320964402], "Conv_123/WithoutBiases/fq_input_0": [0.0, 0.30359518590786966], "Conv_26/WithoutBiases/fq_input_0": [0.0, 0.4097087912707432], "Conv_36/WithoutBiases/fq_input_0": [0.0, 0.3654793997300505], "Reshape_173/fq_input_0": [0.0, 0.6013480188429221]}
File diff suppressed because one or more lines are too long
3
tools/pot/tests/data/reference_scale/test_data/0.png
Normal file
3
tools/pot/tests/data/reference_scale/test_data/0.png
Normal file
@ -0,0 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e8c0887a2215dad87c2c04becf50ae94c413432299c2c056804a12acd0ed75dd
|
||||
size 37708
|
Some files were not shown because too many files have changed in this diff