Mirror of https://salsa.debian.org/freeipa-team/freeipa.git (synced 2025-02-25 18:55:28 -06:00)
logging: do not use ipa_log_manager to create module-level loggers
Replace all `ipa_log_manager.log_mgr.get_logger` calls that create module-level loggers with `logging.getLogger` calls, and deprecate `ipa_log_manager.log_mgr.get_logger`.

Reviewed-By: Martin Basti <mbasti@redhat.com>
commit 07229c8ff6
parent 7a482b7c72
committed by Martin Basti
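Every hunk below applies the same two-step pattern: create the module-level logger with the standard library instead of IPA's log manager, and hand message arguments to the logging call instead of pre-formatting them. A minimal, self-contained sketch of that pattern (the `template_source` value is illustrative and not taken from any single file in this diff; the old-style calls are shown as comments so the snippet runs without ipapython installed):

    import logging

    logging.basicConfig(level=logging.DEBUG)

    # Before: module-level logger from IPA's log manager, message eagerly
    # formatted with the % operator:
    #   from ipapython.ipa_log_manager import log_mgr
    #   logger = log_mgr.get_logger(__name__)
    #   logger.debug('Formatting with template: %s' % template_source)

    # After: standard library logging; the argument is passed to the call,
    # so the message is only rendered when the record is actually emitted.
    logger = logging.getLogger(__name__)
    template_source = 'example template'  # illustrative value
    logger.debug('Formatting with template: %s', template_source)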
@@ -6,6 +6,7 @@ import base64
 import collections
 import errno
 import json
+import logging
 import os
 import os.path
 import pipes
@@ -31,7 +32,6 @@ import six
 from ipalib import api
 from ipalib import errors
 from ipalib.text import _
-from ipapython.ipa_log_manager import log_mgr
 
 if six.PY3:
     unicode = str
@@ -41,7 +41,7 @@ Routines for constructing certificate signing requests using IPA data and
 stored templates.
 """)
 
-logger = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 class IndexableUndefined(jinja2.Undefined):
@@ -164,7 +164,7 @@ class Formatter(object):
                 'Template error when formatting certificate data'))
 
         logger.debug(
-            'Formatting with template: %s' % combined_template_source)
+            'Formatting with template: %s', combined_template_source)
         combined_template = self.jinja2.from_string(combined_template_source)
 
         return combined_template
@@ -190,7 +190,7 @@ class Formatter(object):
 
     def _prepare_syntax_rule(
            self, syntax_rule, data_rules, description, data_sources):
-        logger.debug('Syntax rule template: %s' % syntax_rule.template)
+        logger.debug('Syntax rule template: %s', syntax_rule.template)
         template = self.jinja2.from_string(
             syntax_rule.template, globals=self.passthrough_globals)
         is_required = syntax_rule.options.get('required', False)
@@ -23,6 +23,7 @@ import base64
 import errno
 import io
 import json
+import logging
 import os
 import tempfile
 
@@ -46,9 +47,8 @@ from ipalib import Bytes, Flag, Str
 from ipalib.plugable import Registry
 from ipalib import _
 from ipapython.dnsutil import DNSName
-from ipapython.ipa_log_manager import log_mgr
 
-logger = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def validated_read(argname, filename, mode='r', encoding=None):
@@ -6,6 +6,7 @@ import collections
 import errno
 import json
 import locale
+import logging
 import os
 import time
 
@@ -14,9 +15,8 @@ from . import schema
 from ipaclient.plugins.rpcclient import rpcclient
 from ipalib.constants import USER_CACHE_PATH
 from ipapython.dnsutil import DNSName
-from ipapython.ipa_log_manager import log_mgr
 
-logger = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 class ServerInfo(collections.MutableMapping):
@@ -50,7 +50,7 @@ class ServerInfo(collections.MutableMapping):
                 pass
             else:
                 # warn that the file is unreadable, probably corrupted
-                logger.warning('Failed to read server info: {}'.format(e))
+                logger.warning('Failed to read server info: %s', e)
 
     def _write(self):
         try:
@@ -62,7 +62,7 @@ class ServerInfo(collections.MutableMapping):
             with open(self._path, 'w') as sc:
                 json.dump(self._dict, sc)
         except EnvironmentError as e:
-            logger.warning('Failed to write server info: {}'.format(e))
+            logger.warning('Failed to write server info: %s', e)
 
     def __getitem__(self, key):
         return self._dict[key]
@@ -5,6 +5,7 @@
 import collections
 import errno
 import json
+import logging
 import os
 import sys
 import tempfile
@@ -23,7 +24,8 @@ from ipalib.parameters import DefaultFrom, Flag, Password, Str
 from ipapython.ipautil import fsdecode
 from ipapython.dn import DN
 from ipapython.dnsutil import DNSName
-from ipapython.ipa_log_manager import log_mgr
+
+logger = logging.getLogger(__name__)
 
 FORMAT = '1'
 
@@ -57,8 +59,6 @@ _PARAMS = {
     'str': parameters.Str,
 }
 
-logger = log_mgr.get_logger(__name__)
-
 
 class _SchemaCommand(ClientCommand):
     pass
@@ -377,7 +377,7 @@ class Schema(object):
                 # Failed to read the schema from cache. There may be a lot of
                 # causes and not much we can do about it. Just ensure we will
                 # ignore the cache and fetch the schema from server.
-                logger.warning("Failed to read schema: {}".format(e))
+                logger.warning("Failed to read schema: %s", e)
                 fingerprint = None
                 read_failed = True
 
@@ -387,7 +387,7 @@ class Schema(object):
             try:
                 self._write_schema(fingerprint)
             except Exception as e:
-                logger.warning("Failed to write schema: {}".format(e))
+                logger.warning("Failed to write schema: %s", e)
 
         self.fingerprint = fingerprint
         self.ttl = ttl
|
|||||||
This module contains default platform-specific implementations of system tasks.
|
This module contains default platform-specific implementations of system tasks.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
from pkg_resources import parse_version
|
from pkg_resources import parse_version
|
||||||
|
|
||||||
from ipaplatform.paths import paths
|
from ipaplatform.paths import paths
|
||||||
from ipapython.ipa_log_manager import log_mgr
|
|
||||||
from ipapython import ipautil
|
from ipapython import ipautil
|
||||||
|
|
||||||
log = log_mgr.get_logger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class BaseTaskNamespace(object):
|
class BaseTaskNamespace(object):
|
||||||
@@ -219,10 +220,10 @@ class BaseTaskNamespace(object):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
def add_user_to_group(self, user, group):
|
def add_user_to_group(self, user, group):
|
||||||
log.debug('Adding user %s to group %s', user, group)
|
logger.debug('Adding user %s to group %s', user, group)
|
||||||
args = [paths.USERMOD, '-a', '-G', group, user]
|
args = [paths.USERMOD, '-a', '-G', group, user]
|
||||||
try:
|
try:
|
||||||
ipautil.run(args)
|
ipautil.run(args)
|
||||||
log.debug('Done adding user to group')
|
logger.debug('Done adding user to group')
|
||||||
except ipautil.CalledProcessError as e:
|
except ipautil.CalledProcessError as e:
|
||||||
log.debug('Failed to add user to group: %s', e)
|
logger.debug('Failed to add user to group: %s', e)
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ system tasks.
 '''
 from __future__ import print_function
 
+import logging
 import os
 import pwd
 import shutil
@@ -41,7 +42,6 @@ from cffi import FFI
 from pyasn1.error import PyAsn1Error
 from six.moves import urllib
 
-from ipapython.ipa_log_manager import log_mgr
 from ipapython import ipautil
 import ipapython.errors
 
@@ -54,6 +54,8 @@ from ipaplatform.base.tasks import BaseTaskNamespace
 from ipalib.constants import IPAAPI_USER
 # pylint: enable=ipa-forbidden-import
 
+logger = logging.getLogger(__name__)
+
 _ffi = FFI()
 _ffi.cdef("""
 int rpmvercmp (const char *a, const char *b);
@@ -63,8 +65,6 @@ int rpmvercmp (const char *a, const char *b);
 # https://cffi.readthedocs.org/en/latest/overview.html#id8
 _librpm = _ffi.dlopen(find_library("rpm"))
 
-log = log_mgr.get_logger(__name__)
-
 
 def selinux_enabled():
     """
@@ -229,11 +229,11 @@ class RedHatTaskNamespace(BaseTaskNamespace):
         try:
             ipautil.run([paths.UPDATE_CA_TRUST])
         except CalledProcessError as e:
-            log.error(
+            logger.error(
                 "Could not update systemwide CA trust database: %s", e)
             return False
         else:
-            log.info("Systemwide CA database updated.")
+            logger.info("Systemwide CA database updated.")
             return True
 
     def insert_ca_certs_into_systemwide_ca_store(self, ca_certs):
@@ -248,7 +248,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
             try:
                 os.remove(new_cacert_path)
             except OSError as e:
-                log.error(
+                logger.error(
                     "Could not remove %s: %s", new_cacert_path, e)
                 return False
 
@@ -257,7 +257,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
         try:
             f = open(new_cacert_path, 'w')
         except IOError as e:
-            log.info("Failed to open %s: %s", new_cacert_path, e)
+            logger.info("Failed to open %s: %s", new_cacert_path, e)
             return False
 
         f.write("# This file was created by IPA. Do not edit.\n"
@@ -271,7 +271,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
                 serial_number = x509.get_der_serial_number(cert, x509.DER)
                 public_key_info = x509.get_der_public_key_info(cert, x509.DER)
             except (PyAsn1Error, ValueError, CertificateError) as e:
-                log.warning(
+                logger.warning(
                     "Failed to decode certificate \"%s\": %s", nickname, e)
                 continue
 
@@ -311,7 +311,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
                 try:
                     ext_key_usage = x509.encode_ext_key_usage(ext_key_usage)
                 except PyAsn1Error as e:
-                    log.warning(
+                    logger.warning(
                         "Failed to encode extended key usage for \"%s\": %s",
                         nickname, e)
                     continue
@@ -348,7 +348,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
             try:
                 os.remove(new_cacert_path)
             except OSError as e:
-                log.error(
+                logger.error(
                     "Could not remove %s: %s", new_cacert_path, e)
                 result = False
             else:
@@ -376,8 +376,8 @@ class RedHatTaskNamespace(BaseTaskNamespace):
             try:
                 self.set_hostname(old_hostname)
             except ipautil.CalledProcessError as e:
-                log.debug("%s", traceback.format_exc())
-                log.error(
+                logger.debug("%s", traceback.format_exc())
+                logger.error(
                     "Failed to restore this machine hostname to %s (%s).",
                     old_hostname, e
                 )
@@ -414,7 +414,7 @@ class RedHatTaskNamespace(BaseTaskNamespace):
                 if original_state != state:
                     updated_vars[setting] = state
             except ipautil.CalledProcessError as e:
-                log.error("Cannot get SELinux boolean '%s': %s", setting, e)
+                logger.error("Cannot get SELinux boolean '%s': %s", setting, e)
                 failed_vars[setting] = state
 
         if updated_vars:
@@ -481,12 +481,12 @@ class RedHatTaskNamespace(BaseTaskNamespace):
             os.unlink(paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF)
         except OSError as e:
             if e.errno == errno.ENOENT:
-                log.debug(
+                logger.debug(
                     'Trying to remove %s but file does not exist',
                     paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF
                 )
             else:
-                log.error(
+                logger.error(
                     'Error removing %s: %s',
                     paths.SYSTEMD_SYSTEM_HTTPD_IPA_CONF, e
                 )
@@ -94,6 +94,11 @@ class _DeprecatedLogger(object):
 
 def get_logger(who, bind_logger_names=False):
     if isinstance(who, six.string_types):
+        warnings.warn(
+            "{}.log_mgr.get_logger is deprecated, use "
+            "logging.getLogger".format(__name__),
+            DeprecationWarning)
+
         logger_name = who
     else:
         caller_globals = sys._getframe(1).f_globals
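A hypothetical illustration of the deprecation added in the hunk above; it assumes ipapython is importable in the current environment, and the module name passed to get_logger is arbitrary:

    import warnings

    from ipapython.ipa_log_manager import log_mgr

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # Calling get_logger with a string name now also emits a
        # DeprecationWarning that points callers at logging.getLogger.
        log_mgr.get_logger('some.illustrative.module')

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)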
@@ -17,13 +17,13 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 
+import logging
 import pprint
 
 import ldap.schema
 
 import ipapython.version
 from ipalib import api
-from ipapython.ipa_log_manager import log_mgr
 from ipapython.dn import DN
 from ipaserver.install.ldapupdate import connect
 from ipaserver.install import installutils
@@ -38,7 +38,7 @@ SCHEMA_ELEMENT_CLASSES = (
 
 ORIGIN = 'IPA v%s' % ipapython.version.VERSION
 
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def _get_oid_dependency_order(schema, cls):
@@ -124,7 +124,7 @@ def update_schema(schema_files, ldapi=False, dm_password=None,):
                  for attr in schema_entry[attrname]}
 
     for filename in schema_files:
-        log.debug('Processing schema LDIF file %s', filename)
+        logger.debug('Processing schema LDIF file %s', filename)
         url = "file://{}".format(filename)
         _dn, new_schema = ldap.schema.subentry.urlfetch(url)
 
@@ -147,10 +147,10 @@ def update_schema(schema_files, ldapi=False, dm_password=None,):
 
             if old_obj:
                 old_attr = old_entries_by_oid.get(oid)
-                log.debug('Replace: %s', old_attr)
-                log.debug(' with: %s', value)
+                logger.debug('Replace: %s', old_attr)
+                logger.debug(' with: %s', value)
             else:
-                log.debug('Add: %s', value)
+                logger.debug('Add: %s', value)
 
             new_elements.append(value.encode('utf-8'))
 
@@ -161,11 +161,12 @@ def update_schema(schema_files, ldapi=False, dm_password=None,):
         # so updates must be executed with groups of independent OIDs
        if new_elements:
             modlist = schema_entry.generate_modlist()
-            log.debug("Schema modlist:\n%s", pprint.pformat(modlist))
+            logger.debug("Schema modlist:\n%s",
+                         pprint.pformat(modlist))
             conn.update_entry(schema_entry)
 
     if not modified:
-        log.debug('Not updating schema')
+        logger.debug('Not updating schema')
 
     return modified
@@ -21,11 +21,12 @@
 
 from __future__ import print_function
 
+import logging
 import sys
 import os
 import argparse
 
-from ipapython.ipa_log_manager import log_mgr, standard_logging_setup
+from ipapython.ipa_log_manager import standard_logging_setup
 from ipatests.pytest_plugins.integration import config
 from ipatests.pytest_plugins.integration import tasks
 from ipatests.pytest_plugins.integration.host import Host
@@ -36,8 +37,7 @@ try:
 except ImportError:
     BeakerLibProcess = None
 
-
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(os.path.basename(__file__))
 
 
 class TaskRunner(object):
@@ -333,36 +333,37 @@ class TaskRunner(object):
 
     def install_master(self, args):
         master = self.get_host(args.host, default=args.domain.master)
-        log.info('Installing master %s', master.hostname)
+        logger.info('Installing master %s', master.hostname)
         tasks.install_master(master)
 
     def install_replica(self, args):
         replica = self.get_host(args.replica)
         master = self.get_host(args.master, default=args.domain.master)
-        log.info('Installing replica %s from %s',
-                 replica.hostname, master.hostname)
+        logger.info('Installing replica %s from %s',
+                    replica.hostname, master.hostname)
         tasks.install_replica(master, replica)
 
     def install_client(self, args):
         client = self.get_host(args.client)
         master = self.get_host(args.master, default=args.domain.master)
-        log.info('Installing client %s on %s', client.hostname, master.hostname)
+        logger.info('Installing client %s on %s',
+                    client.hostname, master.hostname)
         tasks.install_client(master, client)
 
     def uninstall_master(self, args):
         default_hosts = [args.domain.master] + args.domain.replicas
         hosts = self.get_hosts(args.host, default=default_hosts)
-        log.info('Uninstalling masters: %s', [h.hostname for h in hosts])
+        logger.info('Uninstalling masters: %s', [h.hostname for h in hosts])
         for master in hosts:
-            log.info('Uninstalling %s', master.hostname)
+            logger.info('Uninstalling %s', master.hostname)
             tasks.uninstall_master(master)
 
     def uninstall_client(self, args):
         default_hosts = args.domain.clients
         hosts = self.get_hosts(args.host, default=default_hosts)
-        log.info('Uninstalling clients: %s', [h.hostname for h in hosts])
+        logger.info('Uninstalling clients: %s', [h.hostname for h in hosts])
        for client in hosts:
-            log.info('Uninstalling %s', client.hostname)
+            logger.info('Uninstalling %s', client.hostname)
             tasks.uninstall_client(client)
 
     def uninstall_all(self, args):
@@ -372,9 +373,9 @@ class TaskRunner(object):
     def cleanup(self, args):
         default_hosts = args.domain.hosts
         hosts = self.get_hosts(args.host, default=default_hosts)
-        log.info('Cleaning up hosts: %s', [h.hostname for h in hosts])
+        logger.info('Cleaning up hosts: %s', [h.hostname for h in hosts])
         for host in hosts:
-            log.info('Cleaning up %s', host.hostname)
+            logger.info('Cleaning up %s', host.hostname)
             tasks.unapply_fixes(host)
 
     def connect_replica(self, args):
@@ -402,7 +403,7 @@ class TaskRunner(object):
 
     def install_adtrust(self, args):
         master = self.get_host(args.host, default=args.domain.master)
-        log.info('Configuring AD trust support on %s', master.hostname)
+        logger.info('Configuring AD trust support on %s', master.hostname)
         tasks.install_adtrust(master)
 
     def configure_dns_for_trust(self, args):
@@ -21,6 +21,7 @@
 
 from __future__ import print_function
 
+import logging
 import os
 import tempfile
 import shutil
@@ -30,13 +31,12 @@ import pytest
 from pytest_multihost import make_multihost_fixture
 
 from ipapython import ipautil
-from ipapython.ipa_log_manager import log_mgr
 from ipatests.test_util import yield_fixture
 from .config import Config
 from .env_config import get_global_config
 from . import tasks
 
-log = log_mgr.get_logger(__name__)
+logger = logging.getLogger(__name__)
 
 
 def pytest_addoption(parser):
@@ -86,7 +86,7 @@ def collect_systemd_journal(node, hosts, test_config):
         return
 
     for host in hosts:
-        log.info("Collecting journal from: %s", host.hostname)
+        logger.info("Collecting journal from: %s", host.hostname)
 
         topdirname = os.path.join(logfile_dir, name, host.hostname)
         if not os.path.exists(topdirname):
@@ -97,7 +97,7 @@ def collect_systemd_journal(node, hosts, test_config):
             ['journalctl', '--since', host.config.log_journal_since],
             log_stdout=False, raiseonerr=False)
         if cmd.returncode:
-            log.error('An error occurred while collecting journal')
+            logger.error('An error occurred while collecting journal')
             continue
 
         # Write journal to file
@@ -130,14 +130,14 @@ def collect_logs(name, logs_dict, logfile_dir=None, beakerlib_plugin=None):
         topdirname = os.path.join(logfile_dir, name)
 
         for host, logs in logs_dict.items():
-            log.info('Collecting logs from: %s', host.hostname)
+            logger.info('Collecting logs from: %s', host.hostname)
 
             # Tar up the logs on the remote server
             cmd = host.run_command(
                 ['tar', '-c', '--ignore-failed-read', '-J', '-v'] + logs,
                 log_stdout=False, raiseonerr=False)
             if cmd.returncode:
-                log.warning('Could not collect all requested logs')
+                logger.warning('Could not collect all requested logs')
 
             # Unpack on the local side
             dirname = os.path.join(topdirname, host.hostname)
@@ -162,7 +162,7 @@ def collect_logs(name, logs_dict, logfile_dir=None, beakerlib_plugin=None):
                 for filename in filenames:
                     fullname = os.path.relpath(
                         os.path.join(dirpath, filename), topdirname)
-                    log.debug('Submitting file: %s', fullname)
+                    logger.debug('Submitting file: %s', fullname)
                     beakerlib_plugin.run_beakerlib_command(
                         ['rlFileSubmit', fullname])
         finally:
@@ -235,14 +235,14 @@ def mh(request, class_integration_logs):
     cls.logs_to_collect = class_integration_logs
 
     def collect_log(host, filename):
-        log.info('Adding %s:%s to list of logs to collect' %
-                 (host.external_hostname, filename))
+        logger.info('Adding %s:%s to list of logs to collect',
+                    host.external_hostname, filename)
         class_integration_logs.setdefault(host, []).append(filename)
 
     print(mh.config)
     for host in mh.config.get_all_hosts():
         host.add_log_collector(collect_log)
-        log.info('Preparing host %s', host.hostname)
+        logger.info('Preparing host %s', host.hostname)
         tasks.prepare_host(host)
 
     setup_class(cls, mh)
|
|||||||
|
|
||||||
"""Utilities for configuration of multi-master tests"""
|
"""Utilities for configuration of multi-master tests"""
|
||||||
|
|
||||||
|
import logging
|
||||||
import random
|
import random
|
||||||
|
|
||||||
import pytest_multihost.config
|
import pytest_multihost.config
|
||||||
|
|
||||||
from ipapython.dn import DN
|
from ipapython.dn import DN
|
||||||
from ipapython.ipa_log_manager import log_mgr
|
|
||||||
from ipalib.constants import MAX_DOMAIN_LEVEL
|
from ipalib.constants import MAX_DOMAIN_LEVEL
|
||||||
|
|
||||||
|
|
||||||
@@ -71,7 +71,7 @@ class Config(pytest_multihost.config.Config):
|
|||||||
return Domain
|
return Domain
|
||||||
|
|
||||||
def get_logger(self, name):
|
def get_logger(self, name):
|
||||||
return log_mgr.get_logger(name)
|
return logging.getLogger(name)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def ad_domains(self):
|
def ad_domains(self):
|
||||||
|
|||||||
@@ -19,6 +19,7 @@
|
|||||||
|
|
||||||
"""Common tasks for FreeIPA integration tests"""
|
"""Common tasks for FreeIPA integration tests"""
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import textwrap
|
import textwrap
|
||||||
import re
|
import re
|
||||||
@@ -35,7 +36,6 @@ from six import StringIO
|
|||||||
from ipapython import ipautil
|
from ipapython import ipautil
|
||||||
from ipaplatform.paths import paths
|
from ipaplatform.paths import paths
|
||||||
from ipapython.dn import DN
|
from ipapython.dn import DN
|
||||||
from ipapython.ipa_log_manager import log_mgr
|
|
||||||
from ipalib import errors
|
from ipalib import errors
|
||||||
from ipalib.util import get_reverse_zone_default, verify_host_resolvable
|
from ipalib.util import get_reverse_zone_default, verify_host_resolvable
|
||||||
from ipalib.constants import (
|
from ipalib.constants import (
|
||||||
@@ -44,8 +44,7 @@ from ipalib.constants import (
|
|||||||
from .env_config import env_to_script
|
from .env_config import env_to_script
|
||||||
from .host import Host
|
from .host import Host
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
log = log_mgr.get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def setup_server_logs_collecting(host):
|
def setup_server_logs_collecting(host):
|
||||||
@@ -115,7 +114,7 @@ def prepare_reverse_zone(host, ip):
|
|||||||
"dnszone-add",
|
"dnszone-add",
|
||||||
zone], raiseonerr=False)
|
zone], raiseonerr=False)
|
||||||
if result.returncode > 0:
|
if result.returncode > 0:
|
||||||
log.warning(result.stderr_text)
|
logger.warning("%s", result.stderr_text)
|
||||||
return zone, result.returncode
|
return zone, result.returncode
|
||||||
|
|
||||||
def prepare_host(host):
|
def prepare_host(host):
|
||||||
@@ -231,14 +230,14 @@ def restore_hostname(host):
|
|||||||
try:
|
try:
|
||||||
hostname = host.get_file_contents(backupname)
|
hostname = host.get_file_contents(backupname)
|
||||||
except IOError:
|
except IOError:
|
||||||
log.debug('No hostname backed up on %s' % host.hostname)
|
logger.debug('No hostname backed up on %s', host.hostname)
|
||||||
else:
|
else:
|
||||||
host.run_command(['hostname', hostname.strip()])
|
host.run_command(['hostname', hostname.strip()])
|
||||||
host.run_command(['rm', backupname])
|
host.run_command(['rm', backupname])
|
||||||
|
|
||||||
|
|
||||||
def enable_replication_debugging(host):
|
def enable_replication_debugging(host):
|
||||||
log.info('Enable LDAP replication logging')
|
logger.info('Enable LDAP replication logging')
|
||||||
logging_ldif = textwrap.dedent("""
|
logging_ldif = textwrap.dedent("""
|
||||||
dn: cn=config
|
dn: cn=config
|
||||||
changetype: modify
|
changetype: modify
|
||||||
@@ -1020,10 +1019,10 @@ def install_topo(topo, master, replicas, clients, domain_level=None,
|
|||||||
|
|
||||||
for parent, child in get_topo(topo)(master, replicas):
|
for parent, child in get_topo(topo)(master, replicas):
|
||||||
if child in installed:
|
if child in installed:
|
||||||
log.info('Connecting replica %s to %s' % (parent, child))
|
logger.info('Connecting replica %s to %s', parent, child)
|
||||||
connect_replica(parent, child)
|
connect_replica(parent, child)
|
||||||
else:
|
else:
|
||||||
log.info('Installing replica %s from %s' % (parent, child))
|
logger.info('Installing replica %s from %s', parent, child)
|
||||||
install_replica(
|
install_replica(
|
||||||
parent, child,
|
parent, child,
|
||||||
setup_ca=setup_replica_cas,
|
setup_ca=setup_replica_cas,
|
||||||
@@ -1037,7 +1036,7 @@ def install_clients(servers, clients):
|
|||||||
"""Install IPA clients, distributing them among the given servers"""
|
"""Install IPA clients, distributing them among the given servers"""
|
||||||
izip = getattr(itertools, 'izip', zip)
|
izip = getattr(itertools, 'izip', zip)
|
||||||
for server, client in izip(itertools.cycle(servers), clients):
|
for server, client in izip(itertools.cycle(servers), clients):
|
||||||
log.info('Installing client %s on %s' % (server, client))
|
logger.info('Installing client %s on %s', server, client)
|
||||||
install_client(server, client)
|
install_client(server, client)
|
||||||
|
|
||||||
|
|
||||||
@@ -1060,7 +1059,7 @@ def wait_for_replication(ldap, timeout=30):
|
|||||||
Note that this waits for updates originating on this host, not those
|
Note that this waits for updates originating on this host, not those
|
||||||
coming from other hosts.
|
coming from other hosts.
|
||||||
"""
|
"""
|
||||||
log.debug('Waiting for replication to finish')
|
logger.debug('Waiting for replication to finish')
|
||||||
for i in range(timeout):
|
for i in range(timeout):
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
status_attr = 'nsds5replicaLastUpdateStatus'
|
status_attr = 'nsds5replicaLastUpdateStatus'
|
||||||
@@ -1069,7 +1068,7 @@ def wait_for_replication(ldap, timeout=30):
|
|||||||
DN(('cn', 'mapping tree'), ('cn', 'config')),
|
DN(('cn', 'mapping tree'), ('cn', 'config')),
|
||||||
filter='(objectclass=nsds5replicationagreement)',
|
filter='(objectclass=nsds5replicationagreement)',
|
||||||
attrs_list=[status_attr, progress_attr])
|
attrs_list=[status_attr, progress_attr])
|
||||||
log.debug('Replication agreements: \n%s', _entries_to_ldif(entries))
|
logger.debug('Replication agreements: \n%s', _entries_to_ldif(entries))
|
||||||
if any(
|
if any(
|
||||||
not (
|
not (
|
||||||
# older DS format
|
# older DS format
|
||||||
@@ -1079,16 +1078,16 @@ def wait_for_replication(ldap, timeout=30):
|
|||||||
)
|
)
|
||||||
for e in entries
|
for e in entries
|
||||||
):
|
):
|
||||||
log.error('Replication error')
|
logger.error('Replication error')
|
||||||
continue
|
continue
|
||||||
if any(e.single_value[progress_attr] == 'TRUE' for e in entries):
|
if any(e.single_value[progress_attr] == 'TRUE' for e in entries):
|
||||||
log.debug('Replication in progress (waited %s/%ss)',
|
logger.debug('Replication in progress (waited %s/%ss)',
|
||||||
i, timeout)
|
i, timeout)
|
||||||
else:
|
else:
|
||||||
log.debug('Replication finished')
|
logger.debug('Replication finished')
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
log.error('Giving up wait for replication to finish')
|
logger.error('Giving up wait for replication to finish')
|
||||||
|
|
||||||
|
|
||||||
def add_a_records_for_hosts_in_master_domain(master):
|
def add_a_records_for_hosts_in_master_domain(master):
|
||||||
@@ -1097,10 +1096,11 @@ def add_a_records_for_hosts_in_master_domain(master):
|
|||||||
# domain
|
# domain
|
||||||
try:
|
try:
|
||||||
verify_host_resolvable(host.hostname)
|
verify_host_resolvable(host.hostname)
|
||||||
log.debug("The host (%s) is resolvable." % host.domain.name)
|
logger.debug("The host (%s) is resolvable.", host.domain.name)
|
||||||
except errors.DNSNotARecordError:
|
except errors.DNSNotARecordError:
|
||||||
log.debug("Hostname (%s) does not have A/AAAA record. Adding new one.",
|
logger.debug("Hostname (%s) does not have A/AAAA record. Adding "
|
||||||
master.hostname)
|
"new one.",
|
||||||
|
master.hostname)
|
||||||
add_a_record(master, host)
|
add_a_record(master, host)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -21,12 +21,9 @@
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from ipapython.ipa_log_manager import log_mgr
|
|
||||||
from ipatests.pytest_plugins.integration import tasks
|
from ipatests.pytest_plugins.integration import tasks
|
||||||
from pytest_sourceorder import ordered
|
from pytest_sourceorder import ordered
|
||||||
|
|
||||||
log = log_mgr.get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@ordered
|
@ordered
|
||||||
@pytest.mark.usefixtures('mh')
|
@pytest.mark.usefixtures('mh')
|
||||||
|
|||||||
@@ -19,18 +19,18 @@
|
|||||||
|
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import contextlib
|
import contextlib
|
||||||
|
|
||||||
from ipapython.ipa_log_manager import log_mgr
|
|
||||||
from ipapython.dn import DN
|
from ipapython.dn import DN
|
||||||
from ipatests.test_integration.base import IntegrationTest
|
from ipatests.test_integration.base import IntegrationTest
|
||||||
from ipatests.pytest_plugins.integration import tasks
|
from ipatests.pytest_plugins.integration import tasks
|
||||||
from ipatests.test_integration.test_dnssec import wait_until_record_is_signed
|
from ipatests.test_integration.test_dnssec import wait_until_record_is_signed
|
||||||
from ipatests.util import assert_deepequal
|
from ipatests.util import assert_deepequal
|
||||||
|
|
||||||
log = log_mgr.get_logger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def assert_entries_equal(a, b):
|
def assert_entries_equal(a, b):
|
||||||
@@ -109,13 +109,13 @@ def restore_checker(host):
|
|||||||
|
|
||||||
results = []
|
results = []
|
||||||
for check, assert_func in CHECKS:
|
for check, assert_func in CHECKS:
|
||||||
log.info('Storing result for %s', check)
|
logger.info('Storing result for %s', check)
|
||||||
results.append(check(host))
|
results.append(check(host))
|
||||||
|
|
||||||
yield
|
yield
|
||||||
|
|
||||||
for (check, assert_func), expected in zip(CHECKS, results):
|
for (check, assert_func), expected in zip(CHECKS, results):
|
||||||
log.info('Checking result for %s', check)
|
logger.info('Checking result for %s', check)
|
||||||
got = check(host)
|
got = check(host)
|
||||||
assert_func(expected, got)
|
assert_func(expected, got)
|
||||||
|
|
||||||
@@ -130,7 +130,7 @@ def backup(host):
|
|||||||
'INFO: Backed up to ')
|
'INFO: Backed up to ')
|
||||||
if line.startswith(prefix):
|
if line.startswith(prefix):
|
||||||
backup_path = line[len(prefix):].strip()
|
backup_path = line[len(prefix):].strip()
|
||||||
log.info('Backup path for %s is %s', host, backup_path)
|
logger.info('Backup path for %s is %s', host, backup_path)
|
||||||
return backup_path
|
return backup_path
|
||||||
else:
|
else:
|
||||||
raise AssertionError('Backup directory not found in output')
|
raise AssertionError('Backup directory not found in output')
|
||||||
@@ -158,7 +158,7 @@ class TestBackupAndRestore(IntegrationTest):
|
|||||||
with restore_checker(self.master):
|
with restore_checker(self.master):
|
||||||
backup_path = backup(self.master)
|
backup_path = backup(self.master)
|
||||||
|
|
||||||
log.info('Backup path for %s is %s', self.master, backup_path)
|
logger.info('Backup path for %s is %s', self.master, backup_path)
|
||||||
|
|
||||||
self.master.run_command(['ipa-server-install',
|
self.master.run_command(['ipa-server-install',
|
||||||
'--uninstall',
|
'--uninstall',
|
||||||
@@ -181,7 +181,7 @@ class TestBackupAndRestore(IntegrationTest):
|
|||||||
with restore_checker(self.master):
|
with restore_checker(self.master):
|
||||||
backup_path = backup(self.master)
|
backup_path = backup(self.master)
|
||||||
|
|
||||||
log.info('Backup path for %s is %s', self.master, backup_path)
|
logger.info('Backup path for %s is %s', self.master, backup_path)
|
||||||
|
|
||||||
self.master.run_command(['ipa-server-install',
|
self.master.run_command(['ipa-server-install',
|
||||||
'--uninstall',
|
'--uninstall',
|
||||||
|
|||||||