* Partial progress * Finish v1 * Cleanup * Remove useless files * Fix path to pdpd * Fix onnx path * Minor change * Rework MO * Minor change * Remove some costraints * Add MO constraints * Update gitignore for MO * Minor change * Apply tech sync discussion * Cleanup * CR comment * Debug ONNX FE * simplify ONNX FE * Update cmake * Hardcode ONNX requirement * Add dependency resolver to cmake * Add constraints for openvino/tests * Add missing pytest-html * Fix -c path * Revert debug changes to path * Add cmake to copy constraints.txt * Update dependabot * Remove slash * Remove cmake * Debug prints * Minor changes * Move reqs check to separate file * Add requirements parser to benchmark_tool * Fix smoke tests constraints * Minor fixes * Minor change * My fixes were apparently wrong * Debug - self.executable_path * Debug - add singledispatch to tests and tools * Debug - print IE_APP_PATHs * Revert "Debug - print IE_APP_PATHs" This reverts commit67ccb6d3f5. * Revert "Debug - add singledispatch to tests and tools" This reverts commit3b945931e2. * Revert "Debug - self.executable_path" This reverts commit3aa724eff6. * update dependabot * update dependabot * Skip benchmark_app tests * Use CMAKE_CURRENT_BINARY_DIR in cmake * Remove debug prints * minor change --------- Signed-off-by: p-wysocki <przemyslaw.wysocki@intel.com>
93 lines
2.9 KiB
Python
93 lines
2.9 KiB
Python
"""
|
|
Copyright (C) 2018-2023 Intel Corporation
|
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
you may not use this file except in compliance with the License.
|
|
You may obtain a copy of the License at
|
|
http://www.apache.org/licenses/LICENSE-2.0
|
|
Unless required by applicable law or agreed to in writing, software
|
|
distributed under the License is distributed on an "AS IS" BASIS,
|
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
See the License for the specific language governing permissions and
|
|
limitations under the License.
|
|
"""
|
|
import os
|
|
import pytest
|
|
import logging as log
|
|
import sys
|
|
from common.samples_common_test_class import SamplesCommonTestClass
|
|
from common.samples_common_test_class import get_tests
|
|
|
|
# Route test logging to stdout so pytest captures it alongside sample output.
log.basicConfig(stream=sys.stdout, level=log.INFO, format="[ %(levelname)s ] %(message)s")
|
|
|
|
# Parameter matrix for the asynchronous-API benchmark_app runs:
# squeezenet1.1 FP32 on CPU, both C++ and Python sample variants.
test_data_fp32_async = get_tests(
    cmd_params={
        'i': [os.path.join('227x227', 'dog.bmp')],
        'm': [os.path.join('squeezenet1.1', 'FP32', 'squeezenet1.1.xml')],
        'batch': [1],
        'sample_type': ['C++', 'Python'],
        'd': ['CPU'],
        'api': ['async'],
        'nireq': ['4'],
        'niter': ['10'],
    },
    use_device=['d'],
)
|
|
|
|
# Parameter matrix for the synchronous-API benchmark_app runs:
# same model/device matrix as the async set, minus the request count.
test_data_fp32_sync = get_tests(
    cmd_params={
        'i': [os.path.join('227x227', 'dog.bmp')],
        'm': [os.path.join('squeezenet1.1', 'FP32', 'squeezenet1.1.xml')],
        'batch': [1],
        'sample_type': ['C++', 'Python'],
        'd': ['CPU'],
        'niter': ['10'],
        'api': ['sync'],
    },
    use_device=['d'],
)
|
|
|
|
|
|
|
|
class TestBenchmarkApp(SamplesCommonTestClass):
    """Smoke tests for the benchmark_app sample (C++ and Python variants).

    Both tests are currently skipped — see Ticket 106850 referenced in the
    skip markers below.
    """

    @classmethod
    def setup_class(cls):
        # Tell the shared base class which sample to locate and run.
        cls.sample_name = 'benchmark_app'
        super().setup_class()

    @pytest.mark.parametrize("param", test_data_fp32_async)
    @pytest.mark.skip("Ticket: 106850")
    def test_benchmark_app_sample_fp32_async(self, param):
        # Run the sample in asynchronous API mode and validate its stdout.
        _check_output(self, param)

    @pytest.mark.parametrize("param", test_data_fp32_sync)
    @pytest.mark.skip("Ticket: 106850")
    def test_benchmark_app_fp32_sync(self, param):
        # Run the sample in synchronous API mode and validate its stdout.
        _check_output(self, param)
|
|
|
|
|
|
def _check_output(self, param):
    """Run the benchmark_app sample and validate its stdout.

    benchmark_app has functional and accuracy testing. For accuracy the
    check is that both 'FPS' and 'Latency' appear somewhere in the
    captured stdout; if either marker is missing, the test fails.

    :param self: test-class instance providing ``_test`` (runs the sample)
    :param param: parameter set forwarded to ``self._test``
    :return: 0 when the sample produced no output (nothing to check),
             otherwise None
    :raises AssertionError: when 'FPS' or 'Latency' is absent from stdout
    """
    # _test returns the sample's stdout, or a falsy value when there is
    # nothing to validate (mirrors the historical early-exit behavior).
    stdout = self._test(param)
    if not stdout:
        return 0

    lines = stdout.split('\n')
    # Collect every required marker that never occurs on any output line.
    missing = [marker for marker in ('FPS', 'Latency')
               if not any(marker in line for line in lines)]
    for marker in missing:
        # Lazy %-formatting keeps the message identical to the original
        # "No FPS in output" / "No Latency in output" log lines.
        log.error("No %s in output", marker)
    assert not missing, "Wrong output of this sample"
    log.info('Accuracy passed')
|