Modify time tests so that data can be pushed to OVVP (#13636)

* Set db_api_handler for pushing to new database in timetests

* Change the function used to push data to the database.

Co-authored-by: Daria Ilina <daria.krupnova@intel.com>
This commit is contained in:
Smirnova Maria 2022-11-10 16:17:39 +02:00 committed by GitHub
parent 0942c6617b
commit 8f97ee55fc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 69 additions and 5 deletions

View File

@ -75,6 +75,7 @@ def run_timetest(args: dict, log=None):
# Run executable and collect statistics
stats = {}
logs = []
for run_iter in range(args["niter"]):
tmp_stats_path = tempfile.NamedTemporaryFile().name
retcode, msg = cmd_exec(cmd_common + ["-s", str(tmp_stats_path)], log=log)
@ -87,6 +88,9 @@ def run_timetest(args: dict, log=None):
with open(tmp_stats_path, "r") as file:
raw_data = list(yaml.load_all(file, Loader=yaml.SafeLoader))
with open(tmp_stats_path, "r") as file:
logs.append(file.read())
os.unlink(tmp_stats_path)
# Parse raw data
@ -106,7 +110,7 @@ def run_timetest(args: dict, log=None):
aggregated_stats = aggregate_stats(filtered_stats)
log.debug(f"Aggregated statistics after full run: {aggregated_stats}")
return 0, "", aggregated_stats, stats
return 0, "", aggregated_stats, stats, logs
def cli_parser():
@ -150,7 +154,7 @@ if __name__ == "__main__":
logging.basicConfig(format="[ %(levelname)s ] %(message)s",
level=logging.DEBUG, stream=sys.stdout)
exit_code, _, aggr_stats, _ = run_timetest(
exit_code, _, aggr_stats, _, _ = run_timetest(
dict(args._get_kwargs()), log=logging) # pylint: disable=protected-access
if args.stats_path:
# Save aggregated results to a file

View File

@ -32,7 +32,7 @@ sys.path.insert(0, str(UTILS_DIR))
from path_utils import check_positive_int
from platform_utils import get_os_name, get_os_version, get_cpu_info
from utils import upload_data, metadata_from_manifest, DB_COLLECTIONS
from utils import upload_data, metadata_from_manifest, push_to_db_facade, modify_data_for_push_to_new_db, DB_COLLECTIONS
# -------------------- CLI options --------------------
@ -99,6 +99,12 @@ def pytest_addoption(parser):
required=is_db_used,
help='Path to build manifest to extract commit information'
)
db_args_parser.addoption(
'--db_api_handler',
type=str,
help='API handler url for push data to database',
default=''
)
@pytest.fixture(scope="session")
@ -413,3 +419,9 @@ def pytest_runtest_makereport(item, call):
logging.info(f"Upload data to {db_url}/{'timetests'}.{db_collection}. "
f"Data: {data}")
upload_data(data, db_url, 'timetests', db_collection)
db_api_handler = item.config.getoption("db_api_handler")
if db_api_handler and call.when == "call":
new_format_records = modify_data_for_push_to_new_db(data)
new_format_records["log"] = item._request.test_info["logs"]
push_to_db_facade(new_format_records, db_api_handler)

View File

@ -70,14 +70,15 @@ def test_timetest(instance, executable, niter, cl_cache_dir, model_cache, model_
"model_cache": model_cache,
}
logging.info("Run timetest once to generate any cache")
retcode, msg, _, _ = run_timetest({**exe_args, "niter": 1}, log=logging)
retcode, msg, _, _, _ = run_timetest({**exe_args, "niter": 1}, log=logging)
assert retcode == 0, f"Run of executable for warm up failed: {msg}"
if cl_cache_dir:
assert os.listdir(cl_cache_dir), "cl_cache isn't generated"
if model_cache_dir:
assert os.listdir(model_cache_dir), "model_cache isn't generated"
retcode, msg, aggr_stats, raw_stats = run_timetest(exe_args, log=logging)
retcode, msg, aggr_stats, raw_stats, logs = run_timetest(exe_args, log=logging)
test_info["logs"] = "\n".join(logs)
assert retcode == 0, f"Run of executable failed: {msg}"
# Add timetest results to submit to database and save in new test conf as references

View File

@ -7,6 +7,10 @@ from pathlib import Path
import yaml
from pymongo import MongoClient
from copy import deepcopy
import requests
import logging
import re
# constants
DATABASES = ['timetests', 'memorytests']
@ -21,6 +25,49 @@ def upload_data(data, db_url, db_name, db_collection):
collection.replace_one({'_id': data['_id']}, data, upsert=True)
def push_to_db_facade(data, db_api_handler):
    """Upload one record to the new database through its REST facade.

    :param data: JSON-serializable dict with the record to upload
    :param db_api_handler: URL of the database facade API endpoint
    :raises ConnectionError: if the facade responds with a non-2xx status
    """
    headers = {"Content-Type": "application/json", "accept": "application/json"}
    # Bound the request so a stalled facade cannot hang the test run forever.
    response = requests.post(db_api_handler, json=data, headers=headers, timeout=60)
    if response.ok:
        logging.info("Uploaded records by API url {}".format(db_api_handler))
    else:
        raise ConnectionError("Failed to upload records by API url {}".format(db_api_handler))
def modify_data_for_push_to_new_db(data):
    """Convert a record in the old (MongoDB) schema into the new DB schema.

    The input is not mutated; a transformed deep copy is returned. Renames
    ``run_id`` -> ``build_url``, flattens the ``os``, ``model`` and ``device``
    sub-structures, extracts ``dldt_version`` from ``version``, drops fields
    the new schema does not use (``test_name``, ``commit_sha``, ``repo_url``,
    ``product_type``) and moves per-step raw results under
    ``results[<step>]['raw_results']``.

    :param data: record dict in the old schema
    :return: new dict in the new schema
    :raises IndexError: if ``version`` does not contain a recognizable
        ``YYYY.x.y-NNN-hash`` substring
    :raises ValueError: if ``os`` is not of the form ``platform_major_minor``
    """
    new_data = deepcopy(data)
    if 'run_id' in new_data:
        del new_data['run_id']
        new_data['build_url'] = data['run_id']
    if 'os' in new_data:
        # Old format is e.g. "ubuntu_20_04"; new format is "ubuntu 20.04".
        platform, os_version_major, os_version_minor = data['os'].split("_")
        new_data['os'] = "{} {}.{}".format(platform, os_version_major, os_version_minor)
    if 'model' in new_data:
        new_data['model_name'] = data['model']['name']
        new_data['model'] = data['model']['path']
        new_data['precision'] = data['model']['precision']
        new_data['framework'] = data['model']['framework']
    if 'device' in new_data:
        new_data['device'] = data['device']['name']
    if 'test_name' in new_data:
        del new_data['test_name']
    if 'commit_sha' in new_data:
        del new_data['commit_sha']
    if 'repo_url' in new_data:
        del new_data['repo_url']
    if 'product_type' in new_data:
        del new_data['product_type']
    if 'version' in new_data:
        del new_data['version']
        # Escape the dots so "2022a3b0-..." cannot match; only a literal
        # version like "2022.3.0-9000-<hash>" is accepted.
        new_data['dldt_version'] = re.findall(r"\d{4}\.\d+\.\d-\d+-\w+", data['version'])[0]
    if 'raw_results' in new_data:
        del new_data['raw_results']
        # Attach each step's raw samples to the matching aggregated result.
        for raw_result_name, raw_result in data['raw_results'].items():
            new_data['results'][raw_result_name]['raw_results'] = raw_result
    return new_data
def metadata_from_manifest(manifest: Path):
""" Extract commit metadata from manifest."""
with open(manifest, 'r') as manifest_file: