pylint: Fix unused-variable

Fixed newly exposed unused variables.

Fixes: https://pagure.io/freeipa/issue/9117
Signed-off-by: Stanislav Levin <slev@altlinux.org>
Reviewed-By: Rob Crittenden <rcritten@redhat.com>
Author:    Stanislav Levin
Date:      2022-02-21 11:21:20 +03:00
Committer: Rob Crittenden
Parent:    a1f0f2743d
Commit:    ba95a377b0

13 changed files with 24 additions and 20 deletions
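The change is mechanical throughout: wherever pylint's unused-variable check (W0612) flags an "except ... as e:" whose bound exception object is never referenced, the binding is dropped; where the object is actually used (for example e.reason in the class API(ReadOnly) hunk below), the binding stays. A minimal sketch of the pattern, with hypothetical names and not code from this commit:

import logging

logger = logging.getLogger(__name__)

def parse_config(path):
    # Hypothetical helper that always fails, just to exercise both handlers.
    raise ValueError("cannot parse %s" % path)

def load_config(path, default):
    try:
        return parse_config(path)
    except ValueError:
        # Before the fix this read "except ValueError as e:"; pylint flags e
        # as unused-variable because nothing in the handler references it.
        # Dropping the binding silences the warning without changing behaviour.
        logger.debug("falling back to defaults for %s", path)
        return default

def describe_failure(path):
    try:
        return parse_config(path)
    except ValueError as e:
        # The binding stays when the exception object is actually used,
        # as with e.reason in the class API(ReadOnly) hunk below.
        return "unusable config %s: %s" % (path, e)

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    print(load_config("/tmp/example.conf", {}))
    print(describe_failure("/tmp/example.conf"))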

@@ -202,7 +202,7 @@ def main():
         if not (user['uid'][0] in group['member_user'] and
                 group['cn'][0] in user['memberof_group']):
             raise errors.RequirementError(name='admins group membership')
-    except errors.RequirementError as e:
+    except errors.RequirementError:
         raise ScriptError(
             "Must have administrative privileges to setup AD trusts on server"
         )

@@ -3336,7 +3336,7 @@ def uninstall(options):
                 ipa_domain = domain.get_option('ipa_domain')
             except SSSDConfig.NoOptionError:
                 pass
-    except Exception as e:
+    except Exception:
         # We were unable to read existing SSSD config. This might mean few
         # things:
         # - sssd wasn't installed

@@ -661,7 +661,7 @@ class API(ReadOnly):
             except errors.SkipPluginModule as e:
                 logger.debug("skipping plugin module %s: %s", name, e.reason)
                 continue
-            except Exception as e:
+            except Exception:
                 tb = self.env.startup_traceback
                 if tb:  # pylint: disable=using-constant-test
                     logger.exception("could not load plugin module %s", name)

@@ -742,7 +742,7 @@ class KerbTransport(SSLTransport):
                 self.close()
                 logger.debug("HTTP server has closed connection (%s)", host)
                 raise
-            except BaseException as e:
+            except BaseException:
                 # Unexpected exception may leave connections in a bad state.
                 self.close()
                 logger.debug("HTTP connection destroyed (%s)",
@@ -837,7 +837,7 @@ class KerbTransport(SSLTransport):
                      cookie_string, principal)
         try:
             update_persistent_client_session_data(principal, cookie_string)
-        except Exception as e:
+        except Exception:
             # Not fatal, we just can't use the session cookie we were sent.
             pass
@@ -977,7 +977,7 @@ class RPCClient(Connectible):
                              principal, e)
                 try:
                     delete_persistent_client_session_data(principal)
-                except Exception as e:
+                except Exception:
                     pass
                 return original_url
         except Cookie.URLMismatch as e:

@@ -277,7 +277,7 @@ class DomainValidator:
                 result[t_partner] = (fname_norm,
                                      security.dom_sid(trusted_sid))
             return result
-        except errors.NotFound as exc:
+        except errors.NotFound:
             return []

     def set_trusted_domains(self):
@@ -816,7 +816,7 @@ class DomainValidator:
         try:
             answers = query_srv(gc_name)
-        except DNSException as e:
+        except DNSException:
             answers = []

         for answer in answers:

@@ -642,7 +642,7 @@ class Restore(admintool.AdminTool):
             template_dir = paths.VAR_LOG_DIRSRV_INSTANCE_TEMPLATE % instance
             try:
                 os.makedirs(template_dir)
-            except OSError as e:
+            except OSError:
                 pass

             constants.DS_USER.chown(template_dir)

@@ -645,7 +645,7 @@ class LDAPUpdate:
         while True:
             try:
                 entry = self.conn.get_entry(dn, attrlist)
-            except errors.NotFound as e:
+            except errors.NotFound:
                 logger.error("Task not found: %s", dn)
                 return
             except errors.DatabaseError as e:

@@ -1366,7 +1366,7 @@ class ReplicationManager:
         try:
             self.conn.add_entry(entry)
-        except Exception as e:
+        except Exception:
             logger.info("Failed to create public entry for winsync replica")

         # For winsync, unhashed passwords needs to be in replication changelog

@@ -132,7 +132,7 @@ def read_cache(dm_password):
                                fname,
                                dm_password,
                                top_dir)
-    except Exception as e:
+    except Exception:
         shutil.rmtree(top_dir)
         raise Exception("Decryption of answer cache in %s failed, please "
                         "check your password." % paths.ROOT_IPA_CACHE)

@@ -1311,7 +1311,7 @@ def ntpd_cleanup(fqdn, fstore):
     try:
         instance.disable()
         instance.stop()
-    except Exception as e:
+    except Exception:
         logger.debug("Service ntpd was not disabled or stopped")

     for ntpd_file in [paths.NTP_CONF, paths.NTP_STEP_TICKERS,

@@ -377,7 +377,7 @@ def _aci_to_kw(ldap, a, test=False, pkey_only=False):
             entry = ldap.make_entry(dn)
         try:
             entry = ldap.get_entry(groupdn, ['cn'])
-        except errors.NotFound as e:
+        except errors.NotFound:
             # FIXME, use real name here
             if test:
                 dn = DN(('cn', 'test'), api.env.container_permission,

@@ -216,7 +216,7 @@ def _pre_migrate_user(ldap, pkey, dn, entry_attrs, failed, config, ctx, **kwargs
             logger.warning('GID number %s of migrated user %s should '
                            'match 1 group, but it matched %d groups',
                            entry_attrs['gidnumber'][0], pkey, e.found)
-        except errors.LimitsExceeded as e:
+        except errors.LimitsExceeded:
             logger.warning('Search limit exceeded searching for GID %s',
                            entry_attrs['gidnumber'][0])

@@ -1095,7 +1095,7 @@ class login_password(Backend, KerberosSession):
                 armor_path,
                 pkinit_anchors=[paths.KDC_CERT, paths.KDC_CA_BUNDLE_PEM],
             )
-        except RuntimeError as e:
+        except RuntimeError:
             logger.error("Failed to obtain armor cache")
             # We try to continue w/o armor, 2FA will be impacted
             armor_path = None
@@ -1159,8 +1159,10 @@ class change_password(Backend, HTTP_Status):
         try:
             query_dict = parse_qs(query_string)
-        except Exception as e:
-            return self.bad_request(environ, start_response, "cannot parse query data")
+        except Exception:
+            return self.bad_request(
+                environ, start_response, "cannot parse query data"
+            )

         data = {}
         for field in ('user', 'old_password', 'new_password', 'otp'):
@@ -1264,8 +1266,10 @@ class sync_token(Backend, HTTP_Status):
         # Parse the query string to a dictionary.
         try:
             query_dict = parse_qs(query_string)
-        except Exception as e:
-            return self.bad_request(environ, start_response, "cannot parse query data")
+        except Exception:
+            return self.bad_request(
+                environ, start_response, "cannot parse query data"
+            )

         data = {}
         for field in ('user', 'password', 'first_code', 'second_code', 'token'):
             value = query_dict.get(field, None)