2014-10-23 12:17:09 -05:00
|
|
|
# Authors:
|
|
|
|
# Petr Viktorin <pviktori@redhat.com>
|
|
|
|
#
|
|
|
|
# Copyright (C) 2011 Red Hat
|
|
|
|
# see file 'COPYING' for use and warranty information
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
"""Pytest plugin for IPA Integration tests"""
|
|
|
|
|
2018-04-05 02:21:16 -05:00
|
|
|
from __future__ import print_function, absolute_import
|
2015-08-12 06:44:11 -05:00
|
|
|
|
2017-05-25 05:42:54 -05:00
|
|
|
import logging
|
2014-10-23 13:56:15 -05:00
|
|
|
import os
|
|
|
|
import tempfile
|
|
|
|
import shutil
|
2017-05-22 11:27:44 -05:00
|
|
|
import re
|
2014-10-23 13:56:15 -05:00
|
|
|
|
2014-10-23 12:17:09 -05:00
|
|
|
import pytest
|
2014-11-13 09:23:56 -06:00
|
|
|
from pytest_multihost import make_multihost_fixture
|
2014-10-23 12:17:09 -05:00
|
|
|
|
2014-10-23 13:56:15 -05:00
|
|
|
from ipapython import ipautil
|
2018-02-07 10:18:07 -06:00
|
|
|
from ipaplatform.paths import paths
|
2017-05-29 06:41:18 -05:00
|
|
|
from ipatests.test_util import yield_fixture
|
2017-03-20 05:26:45 -05:00
|
|
|
from .config import Config
|
2017-03-20 05:29:10 -05:00
|
|
|
from .env_config import get_global_config
|
2017-03-20 05:34:17 -05:00
|
|
|
from . import tasks
|
2014-10-23 12:17:09 -05:00
|
|
|
|
2017-05-25 05:42:54 -05:00
|
|
|
logger = logging.getLogger(__name__)
|
2014-10-23 13:56:15 -05:00
|
|
|
|
|
|
|
|
|
|
|
def pytest_addoption(parser):
    """Register the integration-test command line options with pytest.

    :param parser: The pytest command line parser
    """
    group = parser.getgroup("IPA integration tests")
    group.addoption(
        '--logfile-dir',
        dest="logfile_dir",
        default=None,
        help="Directory to store integration test logs in.",
    )
|
|
|
|
|
|
|
|
|
2017-05-22 11:27:44 -05:00
|
|
|
def _get_logname_from_node(node):
|
|
|
|
name = node.nodeid
|
2019-09-23 16:30:22 -05:00
|
|
|
name = re.sub(r'\(\)/', '', name) # remove ()/
|
|
|
|
name = re.sub(r'[()]', '', name) # and standalone brackets
|
|
|
|
name = re.sub(r'(/|::)', '-', name)
|
2017-05-22 11:27:44 -05:00
|
|
|
return name
|
|
|
|
|
|
|
|
|
2014-10-23 13:56:15 -05:00
|
|
|
def collect_test_logs(node, logs_dict, test_config):
    """Collect the registered logs for a single test.

    Thin wrapper around collect_logs: derives the log name from the
    pytest node and reads the destination directory and BeakerLib
    plugin from the pytest configuration.

    :param node: The pytest collection node (request.node)
    :param logs_dict: Mapping of host to list of log filenames to collect
    :param test_config: Pytest configuration
    """
    beakerlib = test_config.pluginmanager.getplugin('BeakerLibPlugin')
    log_dir = test_config.getoption('logfile_dir')
    collect_logs(
        name=_get_logname_from_node(node),
        logs_dict=logs_dict,
        logfile_dir=log_dir,
        beakerlib_plugin=beakerlib,
    )
|
|
|
|
|
|
|
|
|
2017-05-22 11:33:49 -05:00
|
|
|
def collect_systemd_journal(node, hosts, test_config):
    """Fetch the systemd journal from each remote host and store it locally.

    Does nothing unless the --logfile-dir option was given.

    :param node: The pytest collection node (request.node)
    :param hosts: List of hosts from which to collect journal
    :param test_config: Pytest configuration
    """
    logfile_dir = test_config.getoption('logfile_dir')
    if logfile_dir is None:
        # Journal collection is only enabled when a log directory is set.
        return

    name = _get_logname_from_node(node)
    for host in hosts:
        logger.info("Collecting journal from: %s", host.hostname)

        dest_dir = os.path.join(logfile_dir, name, host.hostname)
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)

        # Dump the journal since the configured start time; a failure is
        # logged but does not abort collection for the remaining hosts.
        result = host.run_command(
            ['journalctl', '--since', host.config.log_journal_since],
            log_stdout=False, raiseonerr=False)
        if result.returncode:
            logger.error('An error occurred while collecting journal')
            continue

        # Store the captured journal next to the other logs of this host.
        with open(os.path.join(dest_dir, "journal"), 'w') as journal_file:
            journal_file.write(result.stdout_text)
|
|
|
|
|
|
|
|
|
2014-10-23 13:56:15 -05:00
|
|
|
def collect_logs(name, logs_dict, logfile_dir=None, beakerlib_plugin=None):
    """Collect logs from remote hosts

    For each host, the requested log files are tarred up remotely,
    fetched, and unpacked under ``logfile_dir/name/<hostname>``.
    Optionally the files are also submitted through BeakerLib.

    :param name: Name under which logs are collected, e.g. name of the test
    :param logs_dict: Mapping of host to list of log filenames to collect
    :param logfile_dir: Directory to log to
    :param beakerlib_plugin:
        BeakerLibProcess or BeakerLibPlugin used to collect tests for BeakerLib

    If neither logfile_dir nor beakerlib_plugin is given, no logs are
    collected.
    """
    if logs_dict and (logfile_dir or beakerlib_plugin):

        # With no explicit destination, stage into a temp dir that is
        # removed again once BeakerLib submission (if any) is done.
        if logfile_dir:
            remove_dir = False
        else:
            logfile_dir = tempfile.mkdtemp()
            remove_dir = True

        topdirname = os.path.join(logfile_dir, name)

        for host, logs in logs_dict.items():
            logger.info('Collecting logs from: %s', host.hostname)
            dirname = os.path.join(topdirname, host.hostname)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            tarname = os.path.join(dirname, 'logs.tar.xz')
            # get temporary file name on the remote host
            cmd = host.run_command(['mktemp'])
            tmpname = cmd.stdout_text.strip()
            # Tar up the logs on the remote server; --ignore-failed-read
            # lets missing log files be skipped instead of failing
            cmd = host.run_command(
                ['tar', 'cJvf', tmpname, '--ignore-failed-read'] + logs,
                log_stdout=False, raiseonerr=False)
            if cmd.returncode:
                logger.warning('Could not collect all requested logs')
            # fetch tar file
            with open(tarname, 'wb') as f:
                f.write(host.get_file_contents(tmpname))
            # delete from remote
            host.run_command(['rm', '-f', tmpname])
            # Unpack on the local side
            ipautil.run([paths.TAR, 'xJvf', 'logs.tar.xz'], cwd=dirname,
                        raiseonerr=False)
            os.unlink(tarname)

        if beakerlib_plugin:
            # Use BeakerLib's rlFileSubmit on the individual files
            # The resulting submitted filename will be
            # $HOSTNAME-$FILENAME (with '/' replaced by '-')
            beakerlib_plugin.run_beakerlib_command(['pushd', topdirname])
            try:
                for dirpath, _dirnames, filenames in os.walk(topdirname):
                    for filename in filenames:
                        fullname = os.path.relpath(
                            os.path.join(dirpath, filename), topdirname)
                        logger.debug('Submitting file: %s', fullname)
                        beakerlib_plugin.run_beakerlib_command(
                            ['rlFileSubmit', fullname])
            finally:
                beakerlib_plugin.run_beakerlib_command(['popd'])

        if remove_dir:
            if beakerlib_plugin:
                # The BeakerLib process runs asynchronously, let it clean up
                # after it's done with the directory
                beakerlib_plugin.run_beakerlib_command(
                    ['rm', '-rvf', topdirname])
            else:
                shutil.rmtree(topdirname)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope='class')
def class_integration_logs():
    """Internal fixture providing the class-level logs_dict.

    Shared by all tests in a class; hosts register log files into it.
    """
    logs = {}
    return logs
|
|
|
|
|
|
|
|
|
2017-05-29 06:41:18 -05:00
|
|
|
@yield_fixture
def integration_logs(class_integration_logs, request):
    """Provide access to integration logs; collect them after each test.

    Yields the shared class-level logs_dict, then gathers the registered
    log files and the systemd journal once the test has finished.
    """
    yield class_integration_logs
    collected_hosts = class_integration_logs.keys()
    collect_test_logs(request.node, class_integration_logs, request.config)
    collect_systemd_journal(request.node, collected_hosts, request.config)
|
2014-10-23 13:56:15 -05:00
|
|
|
|
|
|
|
|
2017-05-29 06:41:18 -05:00
|
|
|
@yield_fixture(scope='class')
def mh(request, class_integration_logs):
    """IPA's multihost fixture object

    Builds a domain description from the test class attributes
    (num_replicas, num_clients, num_ad_domains, ...), provisions hosts
    via pytest_multihost, prepares each host, installs the deployment,
    and yields the multihost object.  Logs and the systemd journal are
    collected when the class is done.
    """
    cls = request.cls

    # The primary IPA domain: one master plus the counts requested by
    # the test class.
    domain_description = {
        'type': 'IPA',
        'hosts': {
            'master': 1,
            'replica': cls.num_replicas,
            'client': cls.num_clients,
        },
    }
    # One host for every extra role (e.g. 'nfs') the class requires.
    domain_description['hosts'].update(
        {role: 1 for role in cls.required_extra_roles})

    domain_descriptions = [domain_description]
    # Each AD (sub/tree) domain is a separate single-host domain.
    for _i in range(cls.num_ad_domains):
        domain_descriptions.append({
            'type': 'AD',
            'hosts': {'ad': 1}
        })
    for _i in range(cls.num_ad_subdomains):
        domain_descriptions.append({
            'type': 'AD_SUBDOMAIN',
            'hosts': {'ad_subdomain': 1}
        })
    for _i in range(cls.num_ad_treedomains):
        domain_descriptions.append({
            'type': 'AD_TREEDOMAIN',
            'hosts': {'ad_treedomain': 1}
        })

    mh = make_multihost_fixture(
        request,
        domain_descriptions,
        config_class=Config,
        _config=get_global_config(),
    )

    # Convenience attributes: the first domain is the IPA one.
    mh.domain = mh.config.domains[0]
    [mh.master] = mh.domain.hosts_by_role('master')
    mh.replicas = mh.domain.hosts_by_role('replica')
    mh.clients = mh.domain.hosts_by_role('client')
    ad_domains = mh.config.ad_domains
    if ad_domains:
        # Flatten AD hosts across all AD domains, grouped by role.
        mh.ads = []
        for domain in ad_domains:
            mh.ads.extend(domain.hosts_by_role('ad'))
        mh.ad_subdomains = []
        for domain in ad_domains:
            mh.ad_subdomains.extend(domain.hosts_by_role('ad_subdomain'))
        mh.ad_treedomains = []
        for domain in ad_domains:
            mh.ad_treedomains.extend(domain.hosts_by_role('ad_treedomain'))

    cls.logs_to_collect = class_integration_logs

    def collect_log(host, filename):
        # Registered with each host; records files to fetch at teardown.
        logger.info('Adding %s:%s to list of logs to collect',
                    host.external_hostname, filename)
        class_integration_logs.setdefault(host, []).append(filename)

    print(mh.config)
    for host in mh.config.get_all_hosts():
        host.add_log_collector(collect_log)
        logger.info('Preparing host %s', host.hostname)
        tasks.prepare_host(host)

    # Legacy class-attribute API; see setup_class/teardown_class.
    setup_class(cls, mh)
    mh._pytestmh_request.addfinalizer(lambda: teardown_class(cls))

    try:
        yield mh.install()
    finally:
        # Always detach collectors and gather logs/journal, even if
        # install or the tests failed.
        hosts = list(cls.get_all_hosts())
        for host in hosts:
            host.remove_log_collector(collect_log)
        collect_test_logs(
            request.node, class_integration_logs, request.config
        )
        collect_systemd_journal(request.node, hosts, request.config)
|
2014-10-23 13:56:15 -05:00
|
|
|
|
2014-11-13 09:23:56 -06:00
|
|
|
|
2014-12-15 11:42:45 -06:00
|
|
|
def setup_class(cls, mh):
    """Add convenience attributes to the test class

    This is deprecated in favor of the mh fixture.
    To be removed when no more tests using this.
    """
    for attr in ('domain', 'master', 'replicas', 'clients'):
        setattr(cls, attr, getattr(mh, attr))
    cls.ad_domains = mh.config.ad_domains
    if cls.ad_domains:
        # AD host lists only exist on mh when AD domains are configured.
        for attr in ('ads', 'ad_subdomains', 'ad_treedomains'):
            setattr(cls, attr, getattr(mh, attr))
|
2014-11-13 09:23:56 -06:00
|
|
|
|
|
|
|
|
|
|
|
def teardown_class(cls):
    """Remove convenience attributes from the test class

    This is deprecated in favor of the mh fixture.
    To be removed when no more tests using this.
    """
    for attr in ('master', 'replicas', 'clients', 'domain'):
        delattr(cls, attr)
    if cls.ad_domains:
        # These were only set when AD domains were configured.
        for attr in ('ads', 'ad_subdomains', 'ad_treedomains'):
            delattr(cls, attr)
    del cls.ad_domains
|