Mirror of https://salsa.debian.org/freeipa-team/freeipa.git
logging: do not use ipa_log_manager to create module-level loggers

Replace all `ipa_log_manager.log_mgr.get_logger` calls that create module-level loggers with `logging.getLogger` calls, and deprecate `ipa_log_manager.log_mgr.get_logger`.

Reviewed-By: Martin Basti <mbasti@redhat.com>
Parent: 7a482b7c72
Commit: 07229c8ff6
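The conversion below follows the standard library idiom: each module creates one logger named after itself with `logging.getLogger(__name__)`, and log calls pass format arguments separately instead of pre-formatting the message. A minimal sketch of the resulting pattern (the helper function and message are illustrative, not taken from this commit):

    import logging
    import os

    logger = logging.getLogger(__name__)

    def remove_file(path):
        # Hypothetical helper, used only to illustrate the logging style.
        try:
            os.remove(path)
        except OSError as e:
            # Arguments are passed separately; the message is rendered lazily.
            logger.error("Could not remove %s: %s", path, e)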
@@ -6,6 +6,7 @@ import base64
 import collections
 import errno
 import json
+import logging
 import os
 import os.path
 import pipes
@@ -31,7 +32,6 @@ import six
 from ipalib import api
 from ipalib import errors
 from ipalib.text import _
-from ipapython.ipa_log_manager import log_mgr
 
 if six.PY3:
     unicode = str
@@ -41,7 +41,7 @@ Routines for constructing certificate signing requests using IPA data and
 stored templates.
 """)
 
-logger = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 class IndexableUndefined(jinja2.Undefined):
@@ -164,7 +164,7 @@ class Formatter(object):
                 'Template error when formatting certificate data'))
 
         logger.debug(
-            'Formatting with template: %s' % combined_template_source)
+            'Formatting with template: %s', combined_template_source)
        combined_template = self.jinja2.from_string(combined_template_source)
 
        return combined_template
@@ -190,7 +190,7 @@ class Formatter(object):
 
     def _prepare_syntax_rule(
             self, syntax_rule, data_rules, description, data_sources):
-        logger.debug('Syntax rule template: %s' % syntax_rule.template)
+        logger.debug('Syntax rule template: %s', syntax_rule.template)
         template = self.jinja2.from_string(
             syntax_rule.template, globals=self.passthrough_globals)
         is_required = syntax_rule.options.get('required', False)

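Besides swapping the logger factory, the hunks above also replace eager `%` interpolation with lazy formatting, so the message string is only built when the log level is enabled. A short before/after sketch (the template value is a placeholder, not from the diff):

    import logging

    logger = logging.getLogger(__name__)
    template = "<template source>"  # illustrative value

    # Before: the string is built even if DEBUG is disabled.
    logger.debug('Formatting with template: %s' % template)

    # After: formatting is deferred to the logging framework.
    logger.debug('Formatting with template: %s', template)
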
@@ -23,6 +23,7 @@ import base64
 import errno
 import io
 import json
+import logging
 import os
 import tempfile
 
@@ -46,9 +47,8 @@ from ipalib import Bytes, Flag, Str
 from ipalib.plugable import Registry
 from ipalib import _
 from ipapython.dnsutil import DNSName
-from ipapython.ipa_log_manager import log_mgr
 
-logger = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def validated_read(argname, filename, mode='r', encoding=None):

@@ -6,6 +6,7 @@ import collections
 import errno
 import json
 import locale
+import logging
 import os
 import time
 
@@ -14,9 +15,8 @@ from . import schema
 from ipaclient.plugins.rpcclient import rpcclient
 from ipalib.constants import USER_CACHE_PATH
 from ipapython.dnsutil import DNSName
-from ipapython.ipa_log_manager import log_mgr
 
-logger = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 class ServerInfo(collections.MutableMapping):
@@ -50,7 +50,7 @@ class ServerInfo(collections.MutableMapping):
                 pass
             else:
                 # warn that the file is unreadable, probably corrupted
-                logger.warning('Failed to read server info: {}'.format(e))
+                logger.warning('Failed to read server info: %s', e)
 
     def _write(self):
         try:
@@ -62,7 +62,7 @@ class ServerInfo(collections.MutableMapping):
             with open(self._path, 'w') as sc:
                 json.dump(self._dict, sc)
         except EnvironmentError as e:
-            logger.warning('Failed to write server info: {}'.format(e))
+            logger.warning('Failed to write server info: %s', e)
 
     def __getitem__(self, key):
         return self._dict[key]

@@ -5,6 +5,7 @@
 import collections
 import errno
 import json
+import logging
 import os
 import sys
 import tempfile
@@ -23,7 +24,8 @@ from ipalib.parameters import DefaultFrom, Flag, Password, Str
 from ipapython.ipautil import fsdecode
 from ipapython.dn import DN
 from ipapython.dnsutil import DNSName
-from ipapython.ipa_log_manager import log_mgr
 
+logger = logging.getLogger(__name__)
+
 FORMAT = '1'
 
@@ -57,8 +59,6 @@ _PARAMS = {
     'str': parameters.Str,
 }
 
-logger = log_mgr.get_logger(__name__)
-
 
 class _SchemaCommand(ClientCommand):
     pass
@@ -377,7 +377,7 @@ class Schema(object):
             # Failed to read the schema from cache. There may be a lot of
             # causes and not much we can do about it. Just ensure we will
             # ignore the cache and fetch the schema from server.
-            logger.warning("Failed to read schema: {}".format(e))
+            logger.warning("Failed to read schema: %s", e)
             fingerprint = None
             read_failed = True
 
@@ -387,7 +387,7 @@ class Schema(object):
             try:
                 self._write_schema(fingerprint)
             except Exception as e:
-                logger.warning("Failed to write schema: {}".format(e))
+                logger.warning("Failed to write schema: %s", e)
 
         self.fingerprint = fingerprint
         self.ttl = ttl

@@ -22,13 +22,14 @@
 This module contains default platform-specific implementations of system tasks.
 '''
 
+import logging
+
 from pkg_resources import parse_version
 
 from ipaplatform.paths import paths
-from ipapython.ipa_log_manager import log_mgr
 from ipapython import ipautil
 
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 class BaseTaskNamespace(object):
@@ -219,10 +220,10 @@ class BaseTaskNamespace(object):
         return False
 
     def add_user_to_group(self, user, group):
-        log.debug('Adding user %s to group %s', user, group)
+        logger.debug('Adding user %s to group %s', user, group)
         args = [paths.USERMOD, '-a', '-G', group, user]
         try:
             ipautil.run(args)
-            log.debug('Done adding user to group')
+            logger.debug('Done adding user to group')
         except ipautil.CalledProcessError as e:
-            log.debug('Failed to add user to group: %s', e)
+            logger.debug('Failed to add user to group: %s', e)

@@ -25,6 +25,7 @@ system tasks.
 '''
 from __future__ import print_function
 
+import logging
 import os
 import pwd
 import shutil
@@ -41,7 +42,6 @@ from cffi import FFI
 from pyasn1.error import PyAsn1Error
 from six.moves import urllib
 
-from ipapython.ipa_log_manager import log_mgr
 from ipapython import ipautil
 import ipapython.errors
 
@@ -54,6 +54,8 @@ from ipaplatform.base.tasks import BaseTaskNamespace
 from ipalib.constants import IPAAPI_USER
 # pylint: enable=ipa-forbidden-import
 
+logger = logging.getLogger(__name__)
+
 _ffi = FFI()
 _ffi.cdef("""
 int rpmvercmp (const char *a, const char *b);
@@ -63,8 +65,6 @@ int rpmvercmp (const char *a, const char *b);
 # https://cffi.readthedocs.org/en/latest/overview.html#id8
 _librpm = _ffi.dlopen(find_library("rpm"))
 
-log = log_mgr.get_logger(__name__)
-
 
 def selinux_enabled():
     """
@@ -229,11 +229,11 @@ class RedHatTaskNamespace(BaseTaskNamespace):
         try:
             ipautil.run([paths.UPDATE_CA_TRUST])
         except CalledProcessError as e:
-            log.error(
+            logger.error(
                 "Could not update systemwide CA trust database: %s", e)
             return False
         else:
-            log.info("Systemwide CA database updated.")
+            logger.info("Systemwide CA database updated.")
             return True
 
     def insert_ca_certs_into_systemwide_ca_store(self, ca_certs):
@@ -248,7 +248,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
         try:
             os.remove(new_cacert_path)
         except OSError as e:
-            log.error(
+            logger.error(
                 "Could not remove %s: %s", new_cacert_path, e)
             return False
 
@@ -257,7 +257,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
         try:
             f = open(new_cacert_path, 'w')
         except IOError as e:
-            log.info("Failed to open %s: %s", new_cacert_path, e)
+            logger.info("Failed to open %s: %s", new_cacert_path, e)
            return False
 
        f.write("# This file was created by IPA. Do not edit.\n"
@@ -271,7 +271,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
                serial_number = x509.get_der_serial_number(cert, x509.DER)
                public_key_info = x509.get_der_public_key_info(cert, x509.DER)
            except (PyAsn1Error, ValueError, CertificateError) as e:
-               log.warning(
+               logger.warning(
                    "Failed to decode certificate \"%s\": %s", nickname, e)
                continue
 
@@ -311,7 +311,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
                try:
                    ext_key_usage = x509.encode_ext_key_usage(ext_key_usage)
                except PyAsn1Error as e:
-                   log.warning(
+                   logger.warning(
                        "Failed to encode extended key usage for \"%s\": %s",
                        nickname, e)
                    continue
@@ -348,7 +348,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
            try:
                os.remove(new_cacert_path)
            except OSError as e:
-               log.error(
+               logger.error(
                    "Could not remove %s: %s", new_cacert_path, e)
                result = False
            else:
@@ -376,8 +376,8 @@ class RedHatTaskNamespace(BaseTaskNamespace):
         try:
             self.set_hostname(old_hostname)
         except ipautil.CalledProcessError as e:
-            log.debug("%s", traceback.format_exc())
-            log.error(
+            logger.debug("%s", traceback.format_exc())
+            logger.error(
                 "Failed to restore this machine hostname to %s (%s).",
                 old_hostname, e
             )
@@ -414,7 +414,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
                if original_state != state:
                    updated_vars[setting] = state
            except ipautil.CalledProcessError as e:
-               log.error("Cannot get SELinux boolean '%s': %s", setting, e)
+               logger.error("Cannot get SELinux boolean '%s': %s", setting, e)
               failed_vars[setting] = state
 
        if updated_vars:
@@ -481,12 +481,12 @@ class RedHatTaskNamespace(BaseTaskNamespace):
            os.unlink(paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF)
        except OSError as e:
            if e.errno == errno.ENOENT:
-               log.debug(
+               logger.debug(
                    'Trying to remove %s but file does not exist',
                    paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF
                )
            else:
-               log.error(
+               logger.error(
                    'Error removing %s: %s',
                    paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF, e
                )

@@ -94,6 +94,11 @@ class _DeprecatedLogger(object):
 
 def get_logger(who, bind_logger_names=False):
     if isinstance(who, six.string_types):
+        warnings.warn(
+            "{}.log_mgr.get_logger is deprecated, use "
+            "logging.getLogger".format(__name__),
+            DeprecationWarning)
+
         logger_name = who
     else:
         caller_globals = sys._getframe(1).f_globals

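The hunk above makes the legacy helper emit a DeprecationWarning when it is called with a module name, while the caller still gets a logger back. A hypothetical check of that behaviour, assuming the module layout shown in the diff (the `catch_warnings` block and the logger name passed in are illustrative, not part of the commit):

    import warnings
    from ipapython.ipa_log_manager import log_mgr

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        log_mgr.get_logger("ipaclient.csrgen")  # deprecated code path
    # At least one DeprecationWarning should have been recorded.
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
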
@@ -17,13 +17,13 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 
+import logging
 import pprint
 
 import ldap.schema
 
 import ipapython.version
 from ipalib import api
-from ipapython.ipa_log_manager import log_mgr
 from ipapython.dn import DN
 from ipaserver.install.ldapupdate import connect
 from ipaserver.install import installutils
@@ -38,7 +38,7 @@ SCHEMA_ELEMENT_CLASSES = (
 
 ORIGIN = 'IPA v%s' % ipapython.version.VERSION
 
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def _get_oid_dependency_order(schema, cls):
@@ -124,7 +124,7 @@ def update_schema(schema_files, ldapi=False, dm_password=None,):
                        for attr in schema_entry[attrname]}
 
     for filename in schema_files:
-        log.debug('Processing schema LDIF file %s', filename)
+        logger.debug('Processing schema LDIF file %s', filename)
         url = "file://{}".format(filename)
         _dn, new_schema = ldap.schema.subentry.urlfetch(url)
 
@@ -147,10 +147,10 @@ def update_schema(schema_files, ldapi=False, dm_password=None,):
 
                 if old_obj:
                     old_attr = old_entries_by_oid.get(oid)
-                    log.debug('Replace: %s', old_attr)
-                    log.debug(' with: %s', value)
+                    logger.debug('Replace: %s', old_attr)
+                    logger.debug(' with: %s', value)
                 else:
-                    log.debug('Add: %s', value)
+                    logger.debug('Add: %s', value)
 
                 new_elements.append(value.encode('utf-8'))
 
@@ -161,11 +161,12 @@ def update_schema(schema_files, ldapi=False, dm_password=None,):
         # so updates must be executed with groups of independent OIDs
         if new_elements:
             modlist = schema_entry.generate_modlist()
-            log.debug("Schema modlist:\n%s", pprint.pformat(modlist))
+            logger.debug("Schema modlist:\n%s",
+                         pprint.pformat(modlist))
             conn.update_entry(schema_entry)
 
     if not modified:
-        log.debug('Not updating schema')
+        logger.debug('Not updating schema')
 
     return modified

@@ -21,11 +21,12 @@
 
 from __future__ import print_function
 
+import logging
 import sys
 import os
 import argparse
 
-from ipapython.ipa_log_manager import log_mgr, standard_logging_setup
+from ipapython.ipa_log_manager import standard_logging_setup
 from ipatests.pytest_plugins.integration import config
 from ipatests.pytest_plugins.integration import tasks
 from ipatests.pytest_plugins.integration.host import Host
@@ -36,8 +37,7 @@ try:
 except ImportError:
     BeakerLibProcess = None
 
 
-log = log_mgr.get_logger(__name__)
-
+logger = logging.getLogger(os.path.basename(__file__))
 
 class TaskRunner(object):
@@ -333,36 +333,37 @@ class TaskRunner(object):
 
     def install_master(self, args):
         master = self.get_host(args.host, default=args.domain.master)
-        log.info('Installing master %s', master.hostname)
+        logger.info('Installing master %s', master.hostname)
         tasks.install_master(master)
 
     def install_replica(self, args):
         replica = self.get_host(args.replica)
         master = self.get_host(args.master, default=args.domain.master)
-        log.info('Installing replica %s from %s',
-                 replica.hostname, master.hostname)
+        logger.info('Installing replica %s from %s',
+                    replica.hostname, master.hostname)
         tasks.install_replica(master, replica)
 
     def install_client(self, args):
         client = self.get_host(args.client)
         master = self.get_host(args.master, default=args.domain.master)
-        log.info('Installing client %s on %s', client.hostname, master.hostname)
+        logger.info('Installing client %s on %s',
+                    client.hostname, master.hostname)
         tasks.install_client(master, client)
 
     def uninstall_master(self, args):
         default_hosts = [args.domain.master] + args.domain.replicas
         hosts = self.get_hosts(args.host, default=default_hosts)
-        log.info('Uninstalling masters: %s', [h.hostname for h in hosts])
+        logger.info('Uninstalling masters: %s', [h.hostname for h in hosts])
         for master in hosts:
-            log.info('Uninstalling %s', master.hostname)
+            logger.info('Uninstalling %s', master.hostname)
             tasks.uninstall_master(master)
 
     def uninstall_client(self, args):
         default_hosts = args.domain.clients
         hosts = self.get_hosts(args.host, default=default_hosts)
-        log.info('Uninstalling clients: %s', [h.hostname for h in hosts])
+        logger.info('Uninstalling clients: %s', [h.hostname for h in hosts])
         for client in hosts:
-            log.info('Uninstalling %s', client.hostname)
+            logger.info('Uninstalling %s', client.hostname)
             tasks.uninstall_client(client)
 
     def uninstall_all(self, args):
@@ -372,9 +373,9 @@ class TaskRunner(object):
     def cleanup(self, args):
         default_hosts = args.domain.hosts
         hosts = self.get_hosts(args.host, default=default_hosts)
-        log.info('Cleaning up hosts: %s', [h.hostname for h in hosts])
+        logger.info('Cleaning up hosts: %s', [h.hostname for h in hosts])
         for host in hosts:
-            log.info('Cleaning up %s', host.hostname)
+            logger.info('Cleaning up %s', host.hostname)
             tasks.unapply_fixes(host)
 
     def connect_replica(self, args):
@@ -402,7 +403,7 @@ class TaskRunner(object):
 
     def install_adtrust(self, args):
         master = self.get_host(args.host, default=args.domain.master)
-        log.info('Configuring AD trust support on %s', master.hostname)
+        logger.info('Configuring AD trust support on %s', master.hostname)
         tasks.install_adtrust(master)
 
     def configure_dns_for_trust(self, args):

@@ -21,6 +21,7 @@
 
 from __future__ import print_function
 
+import logging
 import os
 import tempfile
 import shutil
@@ -30,13 +31,12 @@ import pytest
 from pytest_multihost import make_multihost_fixture
 
 from ipapython import ipautil
-from ipapython.ipa_log_manager import log_mgr
 from ipatests.test_util import yield_fixture
 from .config import Config
 from .env_config import get_global_config
 from . import tasks
 
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def pytest_addoption(parser):
@@ -86,7 +86,7 @@ def collect_systemd_journal(node, hosts, test_config):
         return
 
     for host in hosts:
-        log.info("Collecting journal from: %s", host.hostname)
+        logger.info("Collecting journal from: %s", host.hostname)
 
         topdirname = os.path.join(logfile_dir, name, host.hostname)
         if not os.path.exists(topdirname):
@@ -97,7 +97,7 @@ def collect_systemd_journal(node, hosts, test_config):
             ['journalctl', '--since', host.config.log_journal_since],
             log_stdout=False, raiseonerr=False)
         if cmd.returncode:
-            log.error('An error occurred while collecting journal')
+            logger.error('An error occurred while collecting journal')
             continue
 
         # Write journal to file
@@ -130,14 +130,14 @@ def collect_logs(name, logs_dict, logfile_dir=None, beakerlib_plugin=None):
         topdirname = os.path.join(logfile_dir, name)
 
         for host, logs in logs_dict.items():
-            log.info('Collecting logs from: %s', host.hostname)
+            logger.info('Collecting logs from: %s', host.hostname)
 
             # Tar up the logs on the remote server
             cmd = host.run_command(
                 ['tar', '-c', '--ignore-failed-read', '-J', '-v'] + logs,
                 log_stdout=False, raiseonerr=False)
             if cmd.returncode:
-                log.warning('Could not collect all requested logs')
+                logger.warning('Could not collect all requested logs')
 
             # Unpack on the local side
             dirname = os.path.join(topdirname, host.hostname)
@@ -162,7 +162,7 @@ def collect_logs(name, logs_dict, logfile_dir=None, beakerlib_plugin=None):
                 for filename in filenames:
                     fullname = os.path.relpath(
                         os.path.join(dirpath, filename), topdirname)
-                    log.debug('Submitting file: %s', fullname)
+                    logger.debug('Submitting file: %s', fullname)
                     beakerlib_plugin.run_beakerlib_command(
                         ['rlFileSubmit', fullname])
         finally:
@@ -235,14 +235,14 @@ def mh(request, class_integration_logs):
     cls.logs_to_collect = class_integration_logs
 
     def collect_log(host, filename):
-        log.info('Adding %s:%s to list of logs to collect' %
-                 (host.external_hostname, filename))
+        logger.info('Adding %s:%s to list of logs to collect',
+                    host.external_hostname, filename)
         class_integration_logs.setdefault(host, []).append(filename)
 
     print(mh.config)
     for host in mh.config.get_all_hosts():
         host.add_log_collector(collect_log)
-        log.info('Preparing host %s', host.hostname)
+        logger.info('Preparing host %s', host.hostname)
         tasks.prepare_host(host)
 
     setup_class(cls, mh)

@@ -20,12 +20,12 @@
 
 """Utilities for configuration of multi-master tests"""
 
+import logging
 import random
 
 import pytest_multihost.config
 
 from ipapython.dn import DN
-from ipapython.ipa_log_manager import log_mgr
 from ipalib.constants import MAX_DOMAIN_LEVEL
 
 
@@ -71,7 +71,7 @@ class Config(pytest_multihost.config.Config):
         return Domain
 
     def get_logger(self, name):
-        return log_mgr.get_logger(name)
+        return logging.getLogger(name)
 
     @property
     def ad_domains(self):

@@ -19,6 +19,7 @@
 
 """Common tasks for FreeIPA integration tests"""
 
+import logging
 import os
 import textwrap
 import re
@@ -35,7 +36,6 @@ from six import StringIO
 from ipapython import ipautil
 from ipaplatform.paths import paths
 from ipapython.dn import DN
-from ipapython.ipa_log_manager import log_mgr
 from ipalib import errors
 from ipalib.util import get_reverse_zone_default, verify_host_resolvable
 from ipalib.constants import (
@@ -44,8 +44,7 @@ from ipalib.constants import (
 from .env_config import env_to_script
 from .host import Host
 
-
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def setup_server_logs_collecting(host):
@@ -115,7 +114,7 @@ def prepare_reverse_zone(host, ip):
         "dnszone-add",
         zone], raiseonerr=False)
     if result.returncode > 0:
-        log.warning(result.stderr_text)
+        logger.warning("%s", result.stderr_text)
     return zone, result.returncode
 
 def prepare_host(host):
@@ -231,14 +230,14 @@ def restore_hostname(host):
     try:
         hostname = host.get_file_contents(backupname)
     except IOError:
-        log.debug('No hostname backed up on %s' % host.hostname)
+        logger.debug('No hostname backed up on %s', host.hostname)
     else:
         host.run_command(['hostname', hostname.strip()])
         host.run_command(['rm', backupname])
 
 
 def enable_replication_debugging(host):
-    log.info('Enable LDAP replication logging')
+    logger.info('Enable LDAP replication logging')
     logging_ldif = textwrap.dedent("""
         dn: cn=config
         changetype: modify
@@ -1020,10 +1019,10 @@ def install_topo(topo, master, replicas, clients, domain_level=None,
 
     for parent, child in get_topo(topo)(master, replicas):
         if child in installed:
-            log.info('Connecting replica %s to %s' % (parent, child))
+            logger.info('Connecting replica %s to %s', parent, child)
             connect_replica(parent, child)
         else:
-            log.info('Installing replica %s from %s' % (parent, child))
+            logger.info('Installing replica %s from %s', parent, child)
             install_replica(
                 parent, child,
                 setup_ca=setup_replica_cas,
@@ -1037,7 +1036,7 @@ def install_clients(servers, clients):
     """Install IPA clients, distributing them among the given servers"""
     izip = getattr(itertools, 'izip', zip)
    for server, client in izip(itertools.cycle(servers), clients):
-        log.info('Installing client %s on %s' % (server, client))
+        logger.info('Installing client %s on %s', server, client)
        install_client(server, client)
 
 
@@ -1060,7 +1059,7 @@ def wait_for_replication(ldap, timeout=30):
     Note that this waits for updates originating on this host, not those
     coming from other hosts.
     """
-    log.debug('Waiting for replication to finish')
+    logger.debug('Waiting for replication to finish')
     for i in range(timeout):
         time.sleep(1)
         status_attr = 'nsds5replicaLastUpdateStatus'
@@ -1069,7 +1068,7 @@ def wait_for_replication(ldap, timeout=30):
             DN(('cn', 'mapping tree'), ('cn', 'config')),
             filter='(objectclass=nsds5replicationagreement)',
             attrs_list=[status_attr, progress_attr])
-        log.debug('Replication agreements: \n%s', _entries_to_ldif(entries))
+        logger.debug('Replication agreements: \n%s', _entries_to_ldif(entries))
         if any(
             not (
                 # older DS format
@@ -1079,16 +1078,16 @@ def wait_for_replication(ldap, timeout=30):
             )
             for e in entries
         ):
-            log.error('Replication error')
+            logger.error('Replication error')
             continue
         if any(e.single_value[progress_attr] == 'TRUE' for e in entries):
-            log.debug('Replication in progress (waited %s/%ss)',
-                      i, timeout)
+            logger.debug('Replication in progress (waited %s/%ss)',
+                         i, timeout)
         else:
-            log.debug('Replication finished')
+            logger.debug('Replication finished')
             break
     else:
-        log.error('Giving up wait for replication to finish')
+        logger.error('Giving up wait for replication to finish')
 
 
 def add_a_records_for_hosts_in_master_domain(master):
@@ -1097,10 +1096,11 @@ def add_a_records_for_hosts_in_master_domain(master):
         # domain
        try:
            verify_host_resolvable(host.hostname)
-           log.debug("The host (%s) is resolvable." % host.domain.name)
+           logger.debug("The host (%s) is resolvable.", host.domain.name)
        except errors.DNSNotARecordError:
-           log.debug("Hostname (%s) does not have A/AAAA record. Adding new one.",
-                     master.hostname)
+           logger.debug("Hostname (%s) does not have A/AAAA record. Adding "
+                        "new one.",
+                        master.hostname)
            add_a_record(master, host)
 
 

@@ -21,12 +21,9 @@
 
 import pytest
 
-from ipapython.ipa_log_manager import log_mgr
 from ipatests.pytest_plugins.integration import tasks
 from pytest_sourceorder import ordered
 
-log = log_mgr.get_logger(__name__)
-
 
 @ordered
 @pytest.mark.usefixtures('mh')

@@ -19,18 +19,18 @@
 
 from __future__ import print_function
 
+import logging
 import os
 import re
 import contextlib
 
-from ipapython.ipa_log_manager import log_mgr
 from ipapython.dn import DN
 from ipatests.test_integration.base import IntegrationTest
 from ipatests.pytest_plugins.integration import tasks
 from ipatests.test_integration.test_dnssec import wait_until_record_is_signed
 from ipatests.util import assert_deepequal
 
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def assert_entries_equal(a, b):
@@ -109,13 +109,13 @@ def restore_checker(host):
 
     results = []
     for check, assert_func in CHECKS:
-        log.info('Storing result for %s', check)
+        logger.info('Storing result for %s', check)
         results.append(check(host))
 
     yield
 
     for (check, assert_func), expected in zip(CHECKS, results):
-        log.info('Checking result for %s', check)
+        logger.info('Checking result for %s', check)
         got = check(host)
         assert_func(expected, got)
 
@@ -130,7 +130,7 @@ def backup(host):
                   'INFO: Backed up to ')
        if line.startswith(prefix):
            backup_path = line[len(prefix):].strip()
-           log.info('Backup path for %s is %s', host, backup_path)
+           logger.info('Backup path for %s is %s', host, backup_path)
            return backup_path
    else:
        raise AssertionError('Backup directory not found in output')
@@ -158,7 +158,7 @@ class TestBackupAndRestore(IntegrationTest):
         with restore_checker(self.master):
             backup_path = backup(self.master)
 
-            log.info('Backup path for %s is %s', self.master, backup_path)
+            logger.info('Backup path for %s is %s', self.master, backup_path)
 
             self.master.run_command(['ipa-server-install',
                                      '--uninstall',
@@ -181,7 +181,7 @@ class TestBackupAndRestore(IntegrationTest):
         with restore_checker(self.master):
             backup_path = backup(self.master)
 
-            log.info('Backup path for %s is %s', self.master, backup_path)
+            logger.info('Backup path for %s is %s', self.master, backup_path)
 
             self.master.run_command(['ipa-server-install',
                                      '--uninstall',

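With every module obtaining its logger from `logging.getLogger(__name__)`, output is controlled centrally by configuring handlers once at start-up. A generic stand-in using only the standard library; this is a sketch of the idea, not the FreeIPA setup helper (`standard_logging_setup`):

    import logging

    # Configure the root logger once, e.g. in the program entry point.
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s %(name)s: %(levelname)s: %(message)s",
    )

    # Module-level loggers created with getLogger(__name__) propagate here.
    logging.getLogger("ipaplatform.base.tasks").debug("Done adding user to group")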