Move all utils to common folder (#7303)
* move all utils to common folder
* add utils install in CMakeLists.txt
* revert plugin utils
* add log info: test
* add logging info: test
* test imports
* change imports: test
* fix file name
This commit is contained in:
parent d5e063d435
commit 4d377901bf
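The common thread across the hunks below is a single import pattern: each script computes the shared tests/utils directory, prepends it to sys.path, and then imports the relocated helpers. A minimal sketch of that pattern, assuming a script that lives under tests/time_tests/test_runner/ (the exact number of parent hops varies per file in the diff):

# Sketch of the shared-utils import pattern used across the updated scripts.
# UTILS_DIR resolves to <repo>/tests/utils when the caller sits three levels below tests/.
import os
import sys

UTILS_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), "utils")
sys.path.insert(0, UTILS_DIR)

from path_utils import check_positive_int, expand_env_vars            # relocated path helpers
from platform_utils import get_os_name, get_os_version, get_cpu_info  # relocated platform helpers
from utils import upload_data, metadata_from_manifest, DB_COLLECTIONS # relocated DB helpers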
@@ -11,8 +11,8 @@ import sys
from inspect import getsourcefile
from pathlib import Path

# add ../lib to imports
sys.path.insert(0, str((Path(getsourcefile(lambda: 0)) / ".." / ".." / "lib").resolve(strict=True)))
# add utils folder to imports
sys.path.insert(0, str((Path(getsourcefile(lambda: 0)) / ".." / ".." / "utils").resolve(strict=True)))

import yaml
import pytest
@@ -12,19 +12,19 @@ Usage: ./scrips/compare_memcheck_2_runs.py cur_source ref_source \

import argparse
import json
import logging as log
import os
import sys
from collections import OrderedDict
from glob import glob
from operator import itemgetter
from pathlib import Path
import logging as log

from pymongo import MongoClient
from memcheck_upload import create_memcheck_records

# Database arguments
from memcheck_upload import DATABASE, DB_COLLECTIONS
from memcheck_upload import create_memcheck_records


class HashableDict(dict):
@@ -9,13 +9,12 @@ Usage: ./scrips/get_testdata.py
# pylint:disable=line-too-long

import argparse
import json
import logging as log
import os
import shutil
import subprocess
import sys
import json

from distutils.dir_util import copy_tree
from inspect import getsourcefile
from pathlib import Path
@@ -10,18 +10,18 @@ Usage: ./scrips/memcheck_upload.py https://ci.intel.com/job/memchek/1234/ \
    --artifact_root ./gtest-parallel-logs --dryrun
"""

import argparse
import hashlib
import json
import logging
from types import SimpleNamespace
import os
import re
import sys
import argparse
from inspect import getsourcefile
from glob import glob
import xml.etree.ElementTree as ET
import hashlib
from pathlib import Path
from glob import glob
from inspect import getsourcefile
from types import SimpleNamespace

import yaml
from pymongo import MongoClient
@@ -25,4 +25,6 @@ endif()
add_subdirectory(src)

install(DIRECTORY test_runner/ DESTINATION tests/time_tests/test_runner COMPONENT tests EXCLUDE_FROM_ALL)
install(DIRECTORY .automation/ DESTINATION tests/time_tests/test_runner/.automation COMPONENT tests EXCLUDE_FROM_ALL)
install(DIRECTORY scripts/ DESTINATION tests/time_tests/scripts COMPONENT tests EXCLUDE_FROM_ALL)
install(DIRECTORY ../utils/ DESTINATION tests/utils COMPONENT tests EXCLUDE_FROM_ALL)
@@ -12,7 +12,6 @@ collected statistics.

import statistics
import tempfile
import subprocess
import logging
import argparse
import sys
@@ -27,35 +26,11 @@ sys.path.append(TIME_TESTS_DIR)

from test_runner.utils import filter_timetest_result

UTILS_DIR = os.path.join(Path(__file__).parent.parent.parent, "utils")
sys.path.insert(0, str(UTILS_DIR))

def run_cmd(args: list, log=None, verbose=True):
    """ Run command
    """
    if log is None:
        log = logging.getLogger('run_cmd')
    log_out = log.info if verbose else log.debug

    log.info(f'========== cmd: {" ".join(args)}') # pylint: disable=logging-fstring-interpolation

    proc = subprocess.Popen(args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            encoding='utf-8',
                            universal_newlines=True)
    output = []
    for line in iter(proc.stdout.readline, ''):
        log_out(line.strip('\n'))
        output.append(line)
        if line or proc.poll() is None:
            continue
        break
    outs = proc.communicate()[0]

    if outs:
        log_out(outs.strip('\n'))
        output.append(outs)
    log.info('========== Completed. Exit code: %d', proc.returncode)
    return proc.returncode, ''.join(output)
from proc_utils import cmd_exec
from path_utils import check_positive_int


def parse_stats(stats: list, res: dict):
@@ -100,7 +75,7 @@ def run_timetest(args: dict, log=None):
    stats = {}
    for run_iter in range(args["niter"]):
        tmp_stats_path = tempfile.NamedTemporaryFile().name
        retcode, msg = run_cmd(cmd_common + ["-s", str(tmp_stats_path)], log=log)
        retcode, msg = cmd_exec(cmd_common + ["-s", str(tmp_stats_path)], log=log)
        if retcode != 0:
            log.error("Run of executable '{}' failed with return code '{}'. Error: {}\n"
                      "Statistics aggregation is skipped.".format(args["executable"], retcode, msg))
@@ -132,15 +107,6 @@ def run_timetest(args: dict, log=None):
    return 0, "", aggregated_stats, stats


def check_positive_int(val):
    """Check argsparse argument is positive integer and return it"""
    value = int(val)
    if value < 1:
        msg = "%r is less than 1" % val
        raise argparse.ArgumentTypeError(msg)
    return value


def cli_parser():
    """parse command-line arguments"""
    parser = argparse.ArgumentParser(description='Run timetest executable')
3 tests/time_tests/test_runner/__init__.py Normal file
@@ -0,0 +1,3 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
@@ -3,12 +3,9 @@
#
"""
Basic high-level plugin file for pytest.

See [Writing plugins](https://docs.pytest.org/en/latest/writing_plugins.html)
for more information.

This plugin adds the following command-line options:

* `--test_conf` - Path to test configuration file. Used to parametrize tests.
  Format: YAML file.
* `--exe` - Path to a timetest binary to execute.
@@ -23,19 +20,19 @@ import os
import shutil
import sys
import tempfile
from pathlib import Path

import pytest
import yaml

from pathlib import Path
from jsonschema import validate, ValidationError

TIME_TESTS_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(TIME_TESTS_DIR)

from scripts.run_timetest import check_positive_int
from test_runner.utils import upload_timetest_data, metadata_from_manifest, get_os_name, get_os_version, \
    get_cpu_info, DATABASE, DB_COLLECTIONS
# add utils folder to imports
UTILS_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), "utils")
sys.path.insert(0, str(UTILS_DIR))

from path_utils import check_positive_int
from platform_utils import get_os_name, get_os_version, get_cpu_info
from utils import upload_data, metadata_from_manifest, DB_COLLECTIONS

# -------------------- CLI options --------------------
@@ -82,7 +79,7 @@ def pytest_addoption(parser):
        '--db_collection',
        type=str,
        required=is_db_used,
        help='collection name in "{}" database'.format(DATABASE),
        help='collection name in database',
        choices=DB_COLLECTIONS
    )
    db_args_parser.addoption(
@@ -117,7 +114,6 @@ def niter(request):

# -------------------- CLI options --------------------


@pytest.fixture(scope="function")
def temp_dir(pytestconfig):
    """Create temporary directory for test purposes.
@@ -131,7 +127,6 @@ def temp_dir(pytestconfig):
@pytest.fixture(scope="function")
def cl_cache_dir(pytestconfig, instance):
    """Generate directory to save OpenCL cache before test run and clean up after run.

    Folder `cl_cache` should be created in a directory where tests were run. In this case
    cache will be saved correctly. This behaviour is OS independent.
    More: https://github.com/intel/compute-runtime/blob/master/opencl/doc/FAQ.md#how-can-cl_cache-be-enabled
@@ -171,7 +166,6 @@ def model_cache_dir(pytestconfig, instance):
@pytest.fixture(scope="function")
def test_info(request, pytestconfig):
    """Fixture for collecting timetests information.

    Current fixture fills in `request` and `pytestconfig` global
    fixtures with timetests information which will be used for
    internal purposes.
@@ -186,7 +180,6 @@ def test_info(request, pytestconfig):
@pytest.fixture(scope="function")
def validate_test_case(request, test_info):
    """Fixture for validating test case on correctness.

    Fixture checks current test case contains all fields required for
    a correct work.
    """
@@ -226,7 +219,6 @@ def validate_test_case(request, test_info):
@pytest.fixture(scope="function")
def prepare_db_info(request, test_info, executable, niter, manifest_metadata):
    """Fixture for preparing and validating data to submit to a database.

    Fixture prepares data and metadata to submit to a database. One of the steps
    is parsing of build information from build manifest. After preparation,
    it checks if data contains required properties.
@@ -345,7 +337,6 @@ def manifest_metadata(request):

def pytest_generate_tests(metafunc):
    """Pytest hook for test generation.

    Generate parameterized tests from discovered modules and test config
    parameters.
    """
@@ -376,7 +367,6 @@ def pytest_make_parametrize_id(config, val, argname):
@pytest.mark.hookwrapper
def pytest_runtest_makereport(item, call):
    """Pytest hook for report preparation.

    Submit tests' data to a database.
    """
@@ -403,5 +393,5 @@ def pytest_runtest_makereport(item, call):

    db_url = item.config.getoption("db_url")
    db_collection = item.config.getoption("db_collection")
    logging.info("Upload data to {}/{}.{}. Data: {}".format(db_url, DATABASE, db_collection, data))
    upload_timetest_data(data, db_url, db_collection)
    logging.info("Upload data to {}/{}.{}. Data: {}".format(db_url, 'timetests', db_collection, data))
    upload_data(data, db_url, 'timetests', db_collection)
@@ -18,9 +18,18 @@ from pathlib import Path
import logging
import os
import shutil
import sys

# add utils folder to imports
UTILS_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), "utils")
sys.path.insert(0, str(UTILS_DIR))

from path_utils import expand_env_vars

TIME_TESTS_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(TIME_TESTS_DIR)

from scripts.run_timetest import run_timetest
from test_runner.utils import expand_env_vars

REFS_FACTOR = 1.2 # 120%
@@ -3,66 +3,13 @@

"""Utility module."""

import os
import platform
import subprocess
import sys
import distro
import yaml
import numpy as np

from enum import Enum
from pathlib import Path
from pymongo import MongoClient

# constants
DATABASE = 'timetests' # database name for timetests results
DB_COLLECTIONS = ["commit", "nightly", "weekly"]
PRODUCT_NAME = 'dldt' # product name from build manifest

# Define a range to cut outliers which are < Q1 − IQR_CUTOFF * IQR, and > Q3 + IQR_CUTOFF * IQR
# https://en.wikipedia.org/wiki/Interquartile_range
IQR_CUTOFF = 1.5


def expand_env_vars(obj):
    """Expand environment variables in provided object."""

    if isinstance(obj, list):
        for i, value in enumerate(obj):
            obj[i] = expand_env_vars(value)
    elif isinstance(obj, dict):
        for name, value in obj.items():
            obj[name] = expand_env_vars(value)
    else:
        obj = os.path.expandvars(obj)
    return obj


def upload_timetest_data(data, db_url, db_collection):
    """ Upload timetest data to database."""
    client = MongoClient(db_url)
    collection = client[DATABASE][db_collection]
    collection.replace_one({'_id': data['_id']}, data, upsert=True)


def metadata_from_manifest(manifest: Path):
    """ Extract commit metadata from manifest."""
    with open(manifest, 'r') as manifest_file:
        manifest = yaml.safe_load(manifest_file)
    repo_trigger = next(
        repo for repo in manifest['components'][PRODUCT_NAME]['repository'] if repo['trigger'])
    return {
        'product_type': manifest['components'][PRODUCT_NAME]['product_type'],
        'commit_sha': repo_trigger['revision'],
        'commit_date': repo_trigger['commit_time'],
        'repo_url': repo_trigger['url'],
        'branch': repo_trigger['branch'],
        'target_branch': repo_trigger['target_branch'] if repo_trigger["target_branch"] else repo_trigger["branch"],
        'version': manifest['components'][PRODUCT_NAME]['version']
    }


def calculate_iqr(stats: list):
    """IQR is calculated as the difference between the 3th and the 1th quantile of the data."""
    q1 = np.quantile(stats, 0.25)
@@ -80,98 +27,3 @@ def filter_timetest_result(stats: dict):
        upd_time_results = [x for x in time_results if (q1 - cut_off < x < q3 + cut_off)]
        filtered_stats.update({step_name: upd_time_results if upd_time_results else time_results})
    return filtered_stats


class UnsupportedOsError(Exception):
    """Exception for unsupported OS type."""
    def __init__(self, *args, **kwargs):
        error_message = f'OS type "{get_os_type()}" is not currently supported'
        if args or kwargs:
            super().__init__(*args, **kwargs)
        else:
            super().__init__(error_message)


class OsType(Enum):
    """Container for supported os types."""
    WINDOWS = 'Windows'
    LINUX = 'Linux'
    DARWIN = 'Darwin'


def get_os_type():
    """
    Get OS type

    :return: OS type
    :rtype: String | Exception if it is not supported
    """
    return platform.system()


def os_type_is_windows():
    """Returns True if OS type is Windows. Otherwise returns False."""
    return get_os_type() == OsType.WINDOWS.value


def os_type_is_linux():
    """Returns True if OS type is Linux. Otherwise returns False."""
    return get_os_type() == OsType.LINUX.value


def os_type_is_darwin():
    """Returns True if OS type is Darwin. Otherwise returns False."""
    return get_os_type() == OsType.DARWIN.value


def get_os_name():
    """
    Check OS type and return OS name

    :return: OS name
    :rtype: String | Exception if it is not supported
    """
    if os_type_is_linux():
        return distro.id().lower()
    if os_type_is_windows() or os_type_is_darwin():
        return get_os_type().lower()
    raise UnsupportedOsError()


def get_os_version():
    """
    Check OS version and return it

    :return: OS version
    :rtype: tuple | Exception if it is not supported
    """
    if os_type_is_linux():
        return distro.major_version(), distro.minor_version()
    if os_type_is_windows():
        return sys.getwindowsversion().major, sys.getwindowsversion().minor
    if os_type_is_darwin():
        return tuple(platform.mac_ver()[0].split(".")[:2])
    raise UnsupportedOsError()


def get_cpu_info():
    """
    Check OS version and returns name and frequency of cpu

    :return: CPU name and frequency
    :rtype: str
    """
    model = ''
    if os_type_is_linux():
        command = r"lscpu | sed -n 's/Model name:[ \t]*//p'"
        model = subprocess.check_output(command, shell=True)
    elif os_type_is_windows():
        command = 'wmic cpu get name | find /v "Name"'
        model = subprocess.check_output(command, shell=True)
    elif os_type_is_darwin():
        command = ['/usr/sbin/sysctl', "-n", "machdep.cpu.brand_string"]
        model = subprocess.check_output(command)
    else:
        raise UnsupportedOsError()
    info = model.decode('utf-8').strip()
    return info
3 tests/utils/__init__.py Normal file
@@ -0,0 +1,3 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
@@ -6,10 +6,17 @@
""" Common utilities for working with paths
"""

import argparse
import os
import sys
from pathlib import Path

# add utils folder to imports
UTILS_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, str(UTILS_DIR))

from platform_utils import get_os_name


def expand_env_vars(obj):
    """Expand environment variables in provided object."""
@@ -25,15 +32,6 @@ def expand_env_vars(obj):
    return obj


def get_os_name():
    """Function for getting OS name"""
    if sys.platform == "win32":
        os_name = 'Windows'
    else:
        os_name = 'Linux'
    return os_name


def get_lib_path(lib_name):
    """Function for getting absolute path in OpenVINO directory to specific lib"""
    os_name = get_os_name()
@@ -49,3 +47,12 @@ def get_lib_path(lib_name):
                 'Linux': Path('deployment_tools/ngraph/lib/libngraph.so')}
    }
    return all_libs[lib_name][os_name]


def check_positive_int(val):
    """Check argsparse argument is positive integer and return it"""
    value = int(val)
    if value < 1:
        msg = "%r is less than 1" % val
        raise argparse.ArgumentTypeError(msg)
    return value
106 tests/utils/platform_utils.py Normal file
@@ -0,0 +1,106 @@
#!/usr/bin/env python3

# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

""" Common utilities for working with OSes or platforms
"""

import platform
import subprocess
import sys
from enum import Enum

import distro


class UnsupportedOsError(Exception):
    """Exception for unsupported OS type."""

    def __init__(self, *args, **kwargs):
        error_message = f'OS type "{get_os_type()}" is not currently supported'
        if args or kwargs:
            super().__init__(*args, **kwargs)
        else:
            super().__init__(error_message)


class OsType(Enum):
    """Container for supported os types."""
    WINDOWS = 'Windows'
    LINUX = 'Linux'
    DARWIN = 'Darwin'


def get_os_type():
    """
    Get OS type
    :return: OS type
    :rtype: String | Exception if it is not supported
    """
    return platform.system()


def os_type_is_windows():
    """Returns True if OS type is Windows. Otherwise returns False."""
    return get_os_type() == OsType.WINDOWS.value


def os_type_is_linux():
    """Returns True if OS type is Linux. Otherwise returns False."""
    return get_os_type() == OsType.LINUX.value


def os_type_is_darwin():
    """Returns True if OS type is Darwin. Otherwise returns False."""
    return get_os_type() == OsType.DARWIN.value


def get_os_name():
    """
    Check OS type and return OS name
    :return: OS name
    :rtype: String | Exception if it is not supported
    """
    if os_type_is_linux():
        return distro.id().lower()
    if os_type_is_windows() or os_type_is_darwin():
        return get_os_type().lower()
    raise UnsupportedOsError()


def get_os_version():
    """
    Check OS version and return it
    :return: OS version
    :rtype: tuple | Exception if it is not supported
    """
    if os_type_is_linux():
        return distro.major_version(), distro.minor_version()
    if os_type_is_windows():
        return sys.getwindowsversion().major, sys.getwindowsversion().minor
    if os_type_is_darwin():
        return tuple(platform.mac_ver()[0].split(".")[:2])
    raise UnsupportedOsError()


def get_cpu_info():
    """
    Check OS version and returns name and frequency of cpu
    :return: CPU name and frequency
    :rtype: str
    """
    model = ''
    if os_type_is_linux():
        command = r"lscpu | sed -n 's/Model name:[ \t]*//p'"
        model = subprocess.check_output(command, shell=True)
    elif os_type_is_windows():
        command = 'wmic cpu get name | find /v "Name"'
        model = subprocess.check_output(command, shell=True)
    elif os_type_is_darwin():
        command = ['/usr/sbin/sysctl', "-n", "machdep.cpu.brand_string"]
        model = subprocess.check_output(command)
    else:
        raise UnsupportedOsError()
    info = model.decode('utf-8').strip()
    return info
38 tests/utils/utils.py Normal file
@@ -0,0 +1,38 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

"""Utility module."""

from pathlib import Path

import yaml
from pymongo import MongoClient

# constants
DATABASES = ['timetests', 'memcheck']
DB_COLLECTIONS = ["commit", "nightly", "weekly"]
PRODUCT_NAME = 'dldt' # product name from build manifest


def upload_data(data, db_url, db_name, db_collection):
    """ Upload timetest data to database."""
    client = MongoClient(db_url)
    collection = client[db_name][db_collection]
    collection.replace_one({'_id': data['_id']}, data, upsert=True)


def metadata_from_manifest(manifest: Path):
    """ Extract commit metadata from manifest."""
    with open(manifest, 'r') as manifest_file:
        manifest = yaml.safe_load(manifest_file)
    repo_trigger = next(
        repo for repo in manifest['components'][PRODUCT_NAME]['repository'] if repo['trigger'])
    return {
        'product_type': manifest['components'][PRODUCT_NAME]['product_type'],
        'commit_sha': repo_trigger['revision'],
        'commit_date': repo_trigger['commit_time'],
        'repo_url': repo_trigger['url'],
        'branch': repo_trigger['branch'],
        'target_branch': repo_trigger['target_branch'] if repo_trigger["target_branch"] else repo_trigger["branch"],
        'version': manifest['components'][PRODUCT_NAME]['version']
    }
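For orientation only, a hedged usage sketch of the relocated shared helpers, mirroring the calls the updated conftest.py makes; the db_url, manifest path, and record contents below are placeholders, not values from this commit:

# Hypothetical caller of the new tests/utils module (assumes tests/utils is on sys.path).
from utils import upload_data, metadata_from_manifest, DB_COLLECTIONS

record = {"_id": "example-run-id", "status": "passed"}      # placeholder test record
record.update(metadata_from_manifest("manifest.yml"))       # pull commit metadata from a build manifest
assert "commit" in DB_COLLECTIONS                           # collections allowed by the shared constants
upload_data(record, "mongodb://localhost:27017", "timetests", "commit")  # db_url is a placeholder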