[GPU] Fixed typo in cache.json (#8756)

This commit is contained in:
Vladimir Paramuzov 2021-11-27 15:06:54 +03:00 committed by GitHub
parent 5719192687
commit ab3a892d48
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 8 additions and 8 deletions

View File

@@ -1436,7 +1436,7 @@
"F32_BFYX_v1_p0_0_v1_p0_0_v2048_p0_0_v300_p0_0;F32_FB_v300_p0_0_v21_p0_0": ["fully_connected_gpu_fb_oi_ref", 0],
"F32_BFYX_v1_p0_0_v1_p0_0_v2048_p0_0_v300_p0_0;F32_FB_v300_p0_0_v84_p0_0": ["fully_connected_gpu_fb_oi_ref", 0],
"F32_BFYX_v1792_p0_0_v1_p0_0_v1_p0_0_v1_p0_0;F32_BF_v512_p0_0_v1_p0_0": ["fully_connected_gpu_bf_io_input_spatial", 2]
},
}
},
"64": {
"CONVOLUTION": {
@@ -2320,7 +2320,7 @@
"F16_YXFB_v300_p0_0_v4096_p0_0_v1_p0_0_v1_p0_0;F16_FB_v300_p0_0_v6565_p0_0": ["fully_connected_gpu_yxfb_ref", 0],
"F16_YXFB_v300_p0_0_v4096_p0_0_v1_p0_0_v1_p0_0;F16_FB_v300_p0_0_v101_p0_0": ["fully_connected_gpu_fb_oi_ref", 2],
"F16_BFYX_v71_p0_0_v88_p0_0_v1_p0_0_v1_p0_0;F16_BF_v128_p0_0_v1_p0_0": ["fully_connected_gpu_bf_io_gemm", 1]
},
}
}
},
"version_1": {

View File

@@ -11,11 +11,11 @@
<model path="public/mobilenet-ssd/FP32/mobilenet-ssd.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="1667380" vmpeak="1667380" vmrss="472123" vmhwm="753719" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="create_exenetwork" device="CPU" vmsize="908018" vmpeak="908018" vmrss="41142" vmhwm="41142" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="2108095" vmpeak="2230602" vmrss="215618" vmhwm="281486" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="2108095" vmpeak="2230602" vmrss="235618" vmhwm="301486" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="inference_with_streams" device="CPU" vmsize="1008342" vmpeak="1093008" vmrss="43602" vmhwm="43602" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="inference_with_streams" device="GPU" vmsize="2203952" vmpeak="2241272" vmrss="214328" vmhwm="280103" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="inference_with_streams" device="GPU" vmsize="2203952" vmpeak="2241272" vmrss="244328" vmhwm="300103" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="infer_request_inference" device="CPU" vmsize="908549" vmpeak="908549" vmrss="43492" vmhwm="43492" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="2108100" vmpeak="2230612" vmrss="193330" vmhwm="280550" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="2108100" vmpeak="2230612" vmrss="223996" vmhwm="300550" />
<model path="public/ssd300/FP32/ssd300.xml" precision="FP32" test="create_exenetwork" device="CPU" vmsize="1054892" vmpeak="1054892" vmrss="292962" vmhwm="292962" />
<model path="public/ssd300/FP32/ssd300.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="1887948" vmpeak="1925471" vmrss="792849" vmhwm="1081917" />
@@ -40,11 +40,11 @@
<model path="public/mobilenet-ssd/FP16/mobilenet-ssd.xml" precision="FP16" test="infer_request_inference" device="GPU" vmsize="1623206" vmpeak="1708402" vmrss="459144" vmhwm="762455" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="create_exenetwork" device="CPU" vmsize="908169" vmpeak="908169" vmrss="42021" vmhwm="42021" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="create_exenetwork" device="GPU" vmsize="2107846" vmpeak="2145161" vmrss="192961" vmhwm="280217" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="create_exenetwork" device="GPU" vmsize="2107846" vmpeak="2145161" vmrss="236984" vmhwm="303448" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="inference_with_streams" device="CPU" vmsize="1008113" vmpeak="1088786" vmrss="44028" vmhwm="44028" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="inference_with_streams" device="GPU" vmsize="2203697" vmpeak="2222500" vmrss="193143" vmhwm="279931" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="inference_with_streams" device="GPU" vmsize="2203697" vmpeak="2222500" vmrss="237996" vmhwm="303904" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="infer_request_inference" device="CPU" vmsize="908700" vmpeak="908700" vmrss="43227" vmhwm="43227" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="infer_request_inference" device="GPU" vmsize="2107846" vmpeak="2145161" vmrss="194516" vmhwm="281372" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="infer_request_inference" device="GPU" vmsize="2107846" vmpeak="2145161" vmrss="235248" vmhwm="305996" />
<model path="public/ssd300/FP16/ssd300.xml" precision="FP16" test="create_exenetwork" device="CPU" vmsize="1120184" vmpeak="1120184" vmrss="359200" vmhwm="359200" />
<model path="public/ssd300/FP16/ssd300.xml" precision="FP16" test="create_exenetwork" device="GPU" vmsize="1630569" vmpeak="1752530" vmrss="546364" vmhwm="874426" />