Change thresholds for MemCheck nightly (#9815)

* add performance hint to time infer

* fix threshold values for memcheck nightly
This commit is contained in:
Victor Kuznetsov
2022-01-21 11:54:29 +03:00
committed by GitHub
parent 01096508f9
commit 0f4f2ebade

View File

@@ -50,7 +50,7 @@
<model path="intel/image-retrieval-0001/FP16-INT8/image-retrieval-0001.xml" precision="FP16-INT8" test="infer_request_inference" device="GPU" vmsize="2010569" vmpeak="2095766" vmrss="603959" vmhwm="982862" />
<model path="intel/image-retrieval-0001/FP16-INT8/image-retrieval-0001.xml" precision="FP16-INT8" test="inference_with_streams" device="CPU" vmsize="1077174" vmpeak="1077944" vmrss="57236" vmhwm="57236" />
<model path="intel/image-retrieval-0001/FP16-INT8/image-retrieval-0001.xml" precision="FP16-INT8" test="inference_with_streams" device="GPU" vmsize="2110050" vmpeak="2195247" vmrss="605082" vmhwm="977121" />
<model path="intel/landmarks-regression-retail-0009/FP16-INT8/landmarks-regression-retail-0009.xml" precision="FP16-INT8" test="create_exenetwork" device="CPU" vmsize="898024" vmpeak="898024" vmrss="26774" vmhwm="26774" />
<model path="intel/landmarks-regression-retail-0009/FP16-INT8/landmarks-regression-retail-0009.xml" precision="FP16-INT8" test="create_exenetwork" device="CPU" vmsize="905855" vmpeak="991052" vmrss="35001" vmhwm="35001" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="intel/landmarks-regression-retail-0009/FP16-INT8/landmarks-regression-retail-0009.xml" precision="FP16-INT8" test="create_exenetwork" device="GPU" vmsize="1845022" vmpeak="1871490" vmrss="229418" vmhwm="344151" />
<model path="intel/landmarks-regression-retail-0009/FP16-INT8/landmarks-regression-retail-0009.xml" precision="FP16-INT8" test="infer_request_inference" device="CPU" vmsize="903156" vmpeak="974376" vmrss="27279" vmhwm="27279" />
<model path="intel/landmarks-regression-retail-0009/FP16-INT8/landmarks-regression-retail-0009.xml" precision="FP16-INT8" test="infer_request_inference" device="GPU" vmsize="1930765" vmpeak="2015962" vmrss="230198" vmhwm="344370" />
@@ -279,9 +279,9 @@
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP16/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP16" test="inference_with_streams" device="CPU" vmsize="4607044" vmpeak="4610559" vmrss="2155514" vmhwm="2155514" />
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP16/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP16" test="inference_with_streams" device="GPU" vmsize="9583225" vmpeak="9668422" vmrss="8279850" vmhwm="8279850" />
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP32/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP32" test="create_exenetwork" device="CPU" vmsize="2632042" vmpeak="2632042" vmrss="670566" vmhwm="670566" />
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP32/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="5374428" vmpeak="5374428" vmrss="4190154" vmhwm="4190154" />
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP32/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="9593402" vmpeak="9593402" vmrss="8055205" vmhwm="8055205" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP32/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP32" test="infer_request_inference" device="CPU" vmsize="3011106" vmpeak="3088722" vmrss="1648244" vmhwm="1648244" />
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP32/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="5464877" vmpeak="5550074" vmrss="4217454" vmhwm="4217454" />
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP32/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="9599023" vmpeak="9599023" vmrss="8026049" vmhwm="8026049" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/faster_rcnn_inception_resnet_v2_atrous_coco/FP32/faster_rcnn_inception_resnet_v2_atrous_coco.xml" precision="FP32" test="inference_with_streams" device="CPU" vmsize="4363980" vmpeak="4435730" vmrss="2004262" vmhwm="2004262" />
<model path="public/googlenet-v1-tf/FP16/googlenet-v1-tf.xml" precision="FP16" test="create_exenetwork" device="CPU" vmsize="1020562" vmpeak="1024306" vmrss="119329" vmhwm="119329" />
<model path="public/googlenet-v1-tf/FP16/googlenet-v1-tf.xml" precision="FP16" test="create_exenetwork" device="GPU" vmsize="1904156" vmpeak="1934992" vmrss="474572" vmhwm="807986" />
@@ -420,24 +420,36 @@
<model path="public/mtcnn/mtcnn-o/FP16/mtcnn-o.xml" precision="FP16" test="infer_request_inference" device="GPU" vmsize="1943931" vmpeak="2029128" vmrss="293878" vmhwm="437933" />
<model path="public/mtcnn/mtcnn-o/FP16/mtcnn-o.xml" precision="FP16" test="inference_with_streams" device="CPU" vmsize="1002414" vmpeak="1168414" vmrss="32047" vmhwm="32047" />
<model path="public/mtcnn/mtcnn-o/FP16/mtcnn-o.xml" precision="FP16" test="inference_with_streams" device="GPU" vmsize="2039954" vmpeak="2125151" vmrss="294184" vmhwm="437283" />
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="create_exenetwork" device="CPU" vmsize="898060" vmpeak="898060" vmrss="27092" vmhwm="27092" />
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="create_exenetwork" device="CPU" vmsize="906594" vmpeak="906594" vmrss="36088" vmhwm="36088" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="1877002" vmpeak="1899934" vmrss="273171" vmhwm="373313" />
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="infer_request_inference" device="CPU" vmsize="898461" vmpeak="898461" vmrss="29276" vmhwm="29276" />
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="infer_request_inference" device="CPU" vmsize="907124" vmpeak="907124" vmrss="38651" vmhwm="38651" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="1962100" vmpeak="2047297" vmrss="297538" vmhwm="397456" />
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="inference_with_streams" device="CPU" vmsize="1001873" vmpeak="1081012" vmrss="31730" vmhwm="31730" />
<model path="public/mtcnn/mtcnn-o/FP32/mtcnn-o.xml" precision="FP32" test="inference_with_streams" device="GPU" vmsize="2057952" vmpeak="2143148" vmrss="296623" vmhwm="396120" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="create_exenetwork" device="CPU" vmsize="893391" vmpeak="893391" vmrss="26540" vmhwm="26540" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="create_exenetwork" device="CPU" vmsize="901539" vmpeak="901539" vmrss="34528" vmhwm="34528" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="create_exenetwork" device="GPU" vmsize="1871490" vmpeak="1895717" vmrss="286785" vmhwm="382896" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="infer_request_inference" device="CPU" vmsize="893791" vmpeak="893791" vmrss="27773" vmhwm="27773" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="infer_request_inference" device="GPU" vmsize="1954758" vmpeak="2039954" vmrss="286015" vmhwm="382257" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="inference_with_streams" device="CPU" vmsize="993314" vmpeak="1070123" vmrss="27118" vmhwm="27118" />
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="inference_with_streams" device="CPU" vmsize="1001962" vmpeak="1001962" vmrss="36218" vmhwm="36218" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mtcnn/mtcnn-r/FP16/mtcnn-r.xml" precision="FP16" test="inference_with_streams" device="GPU" vmsize="2052585" vmpeak="2052585" vmrss="287752" vmhwm="383385" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="create_exenetwork" device="CPU" vmsize="892860" vmpeak="973684" vmrss="26197" vmhwm="26197" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="1878266" vmpeak="1898499" vmrss="262189" vmhwm="337875" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="infer_request_inference" device="CPU" vmsize="893261" vmpeak="893261" vmrss="25828" vmhwm="25828" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="infer_request_inference" device="CPU" vmsize="901732" vmpeak="901732" vmrss="34086" vmhwm="34086" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="1963353" vmpeak="1963353" vmrss="260572" vmhwm="336034" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="inference_with_streams" device="CPU" vmsize="1456457" vmpeak="1541654" vmrss="28574" vmhwm="28574" />
<model path="public/mtcnn/mtcnn-r/FP32/mtcnn-r.xml" precision="FP32" test="inference_with_streams" device="GPU" vmsize="2059200" vmpeak="2144396" vmrss="263166" vmhwm="337578" />
<model path="public/mask_rcnn_resnet50_atrous_coco/FP16/mask_rcnn_resnet50_atrous_coco.xml" precision="FP16" test="create_exenetwork" device="CPU" vmsize="2793830" vmpeak="2793830" vmrss="685063" vmhwm="685063" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP16/mask_rcnn_resnet50_atrous_coco.xml" precision="FP16" test="create_exenetwork" device="GPU" vmsize="3952738" vmpeak="3952738" vmrss="2415951" vmhwm="2415951" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP16/mask_rcnn_resnet50_atrous_coco.xml" precision="FP16" test="inference_with_streams" device="CPU" vmsize="4760704" vmpeak="4837279" vmrss="2300220" vmhwm="2300220" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP16/mask_rcnn_resnet50_atrous_coco.xml" precision="FP16" test="inference_with_streams" device="GPU" vmsize="5623264" vmpeak="5722886" vmrss="4014436" vmhwm="4113943" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP16/mask_rcnn_resnet50_atrous_coco.xml" precision="FP16" test="infer_request_inference" device="CPU" vmsize="3049919" vmpeak="3126494" vmrss="2015208" vmhwm="2015208" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP16/mask_rcnn_resnet50_atrous_coco.xml" precision="FP16" test="infer_request_inference" device="GPU" vmsize="3984978" vmpeak="4084600" vmrss="2446657" vmhwm="2546122" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP32/mask_rcnn_resnet50_atrous_coco.xml" precision="FP32" test="create_exenetwork" device="CPU" vmsize="2641449" vmpeak="2641449" vmrss="500333" vmhwm="500333" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP32/mask_rcnn_resnet50_atrous_coco.xml" precision="FP32" test="create_exenetwork" device="GPU" vmsize="4967180" vmpeak="5213473" vmrss="3420466" vmhwm="3666821" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP32/mask_rcnn_resnet50_atrous_coco.xml" precision="FP32" test="inference_with_streams" device="CPU" vmsize="4566224" vmpeak="4632654" vmrss="2124101" vmhwm="2124101" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP32/mask_rcnn_resnet50_atrous_coco.xml" precision="FP32" test="inference_with_streams" device="GPU" vmsize="7386906" vmpeak="7610553" vmrss="5782452" vmhwm="5999718" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP32/mask_rcnn_resnet50_atrous_coco.xml" precision="FP32" test="infer_request_inference" device="CPU" vmsize="2896103" vmpeak="2974114" vmrss="1829848" vmhwm="1829848" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/mask_rcnn_resnet50_atrous_coco/FP32/mask_rcnn_resnet50_atrous_coco.xml" precision="FP32" test="infer_request_inference" device="GPU" vmsize="4985754" vmpeak="5215402" vmrss="3446508" vmhwm="3675848" /> # Values from {"commit_id": "403339f8f470c90dee6f6d94ed58644b2787f66b", "commit_date": "2022-01-19 14:13"} and *= 1.3
<model path="public/se-inception/FP16/se-inception.xml" precision="FP16" test="create_exenetwork" device="CPU" vmsize="1114391" vmpeak="1114391" vmrss="202155" vmhwm="202155" />
<model path="public/se-inception/FP16/se-inception.xml" precision="FP16" test="create_exenetwork" device="GPU" vmsize="1995806" vmpeak="2045851" vmrss="683181" vmhwm="1193899" />
<model path="public/se-inception/FP16/se-inception.xml" precision="FP16" test="infer_request_inference" device="CPU" vmsize="1115426" vmpeak="1115426" vmrss="208135" vmhwm="208135" />