Mirror of https://salsa.debian.org/freeipa-team/freeipa.git (synced 2025-02-25 18:55:28 -06:00)
pylint: Fix consider-using-dict-items
Pylint 2.9 introduced a new check:

> New checker consider-using-dict-items. Emitted when iterating over
> dictionary keys and then indexing the same dictionary with the key
> within loop body.

Fixes: https://pagure.io/freeipa/issue/9117

Signed-off-by: Stanislav Levin <slev@altlinux.org>
Reviewed-By: Rob Crittenden <rcritten@redhat.com>
Commit: 851f6d48ac
Parent: f9d0fc8a8c
Committed by: Rob Crittenden
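For context, this is roughly the pattern the new pylint check flags and the rewrite it suggests. A minimal, hypothetical Python sketch — the dictionary and names below are illustrative only, not taken from the FreeIPA sources:

commands = {"add": ("usage: add HOST",), "del": ("usage: del HOST",)}

# Flagged by consider-using-dict-items: the loop iterates over the keys
# and then indexes the same dictionary inside the loop body.
for cmd in commands:
    print(cmd, commands[cmd])

# Preferred form: iterate over key/value pairs directly.
for cmd, args_info in commands.items():
    print(cmd, args_info)

The hunks below apply this rewrite throughout the tree.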
@@ -71,18 +71,21 @@ def parse_options():

     if len(args):
         n = len(args) - 1
-        for cmd in commands:
+        for cmd, args_info in commands.items():
             if cmd == args[0]:
-                v = commands[cmd]
                 err = None
-                if n < v[0]:
-                    err = v[3]
-                elif n > v[1]:
+                if n < args_info[0]:
+                    err = args_info[3]
+                elif n > args_info[1]:
                     err = "too many arguments"
                 else:
                     valid_syntax = True
                 if err:
-                    parser.error("Invalid syntax: %s\nUsage: %s [options] %s" % (err, cmd, v[2]))
+                    parser.error(
+                        "Invalid syntax: %s\nUsage: %s [options] %s" % (
+                            err, cmd, args_info[2]
+                        )
+                    )

     if not valid_syntax:
         cmdstr = " | ".join(commands.keys())
@@ -116,18 +116,21 @@ def parse_options():

     if len(args):
         n = len(args) - 1
-        for cmd in commands:
+        for cmd, args_info in commands.items():
             if cmd == args[0]:
-                v = commands[cmd]
                 err = None
-                if n < v[0]:
-                    err = v[3]
-                elif n > v[1]:
+                if n < args_info[0]:
+                    err = args_info[3]
+                elif n > args_info[1]:
                     err = "too many arguments"
                 else:
                     valid_syntax = True
                 if err:
-                    parser.error("Invalid syntax: %s\nUsage: %s [options] %s" % (err, cmd, v[2]))
+                    parser.error(
+                        "Invalid syntax: %s\nUsage: %s [options] %s" % (
+                            err, cmd, args_info[2]
+                        )
+                    )

     if not valid_syntax:
         cmdstr = " | ".join(commands.keys())
@@ -1549,8 +1549,8 @@ def verify_dns_update(fqdn, ips):
                        ', '.join(missing_reverse))
     if wrong_reverse:
         logger.warning('Incorrect reverse record(s):')
-        for ip in wrong_reverse:
-            for target in wrong_reverse[ip]:
+        for ip, targets in wrong_reverse.items():
+            for target in targets:
                 logger.warning('%s is pointing to %s instead of %s',
                                ip, target, fqdn_name)

@@ -173,8 +173,8 @@ def retrieve_domain_information(api):
         return []

     l_domain = dict()
-    for key in trust_keymap:
-        l_domain[key] = result.get(trust_keymap[key], [None])[0]
+    for key, val in trust_keymap.items():
+        l_domain[key] = result.get(val, [None])[0]

     # Pull down ID range and other details of our domain
     #
@@ -218,8 +218,8 @@ class automountlocation_import(Command):
         # Now iterate over the map files and add the keys. To handle
         # continuation lines I'll make a pass through it to skip comments
         # etc and also to combine lines.
-        for m in maps:
-            map = self.__read_mapfile(maps[m])
+        for m, filename in maps.items():
+            map = self.__read_mapfile(filename)
             lines = []
             cont = ''
             for x in map:
@@ -227,7 +227,7 @@ class automountlocation_import(Command):
                     continue
                 x = x.rstrip()
                 if x.startswith('+'):
-                    result['skipped'].append([m, maps[m]])
+                    result['skipped'].append([m, filename])
                     continue
                 if len(x) == 0:
                     continue
@@ -890,13 +890,13 @@ class help(frontend.Local):
             commands = self._topics[topic][2]
         else:
             commands = []
-            for t in self._topics:
-                if type(self._topics[t][2]) is not dict:
+            for v in self._topics.values():
+                if not isinstance(v[2], dict):
                     continue
-                if topic not in self._topics[t][2]:
+                if topic not in v[2]:
                     continue
-                mcl = self._topics[t][2][topic][1]
-                commands = self._topics[t][2][topic][2]
+                mcl = v[2][topic][1]
+                commands = v[2][topic][2]
                 break

         doc, _topic = self._get_topic(topic)
@@ -376,9 +376,9 @@ class StateFile:
         p = SafeConfigParser(interpolation=None)
         p.optionxform = str

-        for module in self.modules:
+        for module, vals in self.modules.items():
             p.add_section(module)
-            for (key, value) in self.modules[module].items():
+            for (key, value) in vals.items():
                 p.set(module, key, str(value))

         with open(self._path, "w") as f:
@@ -180,8 +180,8 @@ class Key(MutableMapping):
         """remove default values from LDAP entry"""
         default_attrs = get_default_attrs(self.entry['objectclass'])
         empty = object()
-        for attr in default_attrs:
-            if self.get(attr, empty) == default_attrs[attr]:
+        for attr, attr_val in default_attrs.items():
+            if self.get(attr, empty) == attr_val:
                 del self[attr]

     def _update_key(self):
@@ -299,8 +299,8 @@ class LdapKeyDB(AbstractHSM):
             # add default values not present in LDAP
             key = key_type(o, self.ldap, self)
             default_attrs = get_default_attrs(key.entry['objectclass'])
-            for attr in default_attrs:
-                key.setdefault(attr, default_attrs[attr])
+            for attr, attr_val in default_attrs.items():
+                key.setdefault(attr, attr_val)

             if 'ipk11id' not in key:
                 raise ValueError(
@@ -500,7 +500,7 @@ class DogtagInstance(service.Service):
     def configure_renewal(self):
         """ Configure certmonger to renew system certs """

-        for nickname in self.tracking_reqs:
+        for nickname, profile in self.tracking_reqs.items():
             token_name = self.get_token_name(nickname)
             pin = self.__get_pin(token_name)
             try:
@@ -512,7 +512,7 @@ class DogtagInstance(service.Service):
                     pin=pin,
                     pre_command='stop_pkicad',
                     post_command='renew_ca_cert "%s"' % nickname,
-                    profile=self.tracking_reqs[nickname],
+                    profile=profile,
                 )
             except RuntimeError as e:
                 logger.error(
@@ -733,7 +733,7 @@ class LDAPObject(Object):
         container_dns = {}
         new_attrs = {}

-        for attr in self.attribute_members:
+        for attr, members in self.attribute_members.items():
             try:
                 value = entry_attrs.raw[attr]
             except KeyError:
@@ -742,7 +742,7 @@ class LDAPObject(Object):

             for member in value:
                 memberdn = DN(member.decode('utf-8'))
-                for ldap_obj_name in self.attribute_members[attr]:
+                for ldap_obj_name in members:
                     ldap_obj = self.api.Object[ldap_obj_name]
                     try:
                         container_dn = container_dns[ldap_obj_name]
@@ -3132,11 +3132,11 @@ class dnsrecord(LDAPObject):
         addr = keys[-1]

         zone_len = 0
-        for valid_zone in REVERSE_DNS_ZONES:
+        for valid_zone, zone_num_components in REVERSE_DNS_ZONES.items():
             if zone.is_subdomain(valid_zone):
                 zone = zone.relativize(valid_zone)
                 zone_name = valid_zone
-                zone_len = REVERSE_DNS_ZONES[valid_zone]
+                zone_len = zone_num_components

         if not zone_len:
             # PTR records in zones other than in-addr.arpa and ip6.arpa are
@@ -3608,7 +3608,7 @@ class dnsrecord_add(LDAPCreate):
         assert isinstance(dn, DN)
         precallback_attrs = []
         processed_attrs = []
-        for option in options:
+        for option, option_val in options.items():
             try:
                 param = self.params[option]
             except KeyError:
@@ -3636,7 +3636,7 @@ class dnsrecord_add(LDAPCreate):

             if get_extra_rrtype(param.name):
                 # do not run precallback for unset flags
-                if isinstance(param, Flag) and not options[option]:
+                if isinstance(param, Flag) and not option_val:
                     continue
                 # extra option is passed, run per-type pre_callback for given RR type
                 precallback_attrs.append(rrparam.name)
@@ -3785,9 +3785,9 @@ class dnsrecord_mod(LDAPUpdate):
             raise self.obj.handle_not_found(*keys)

         if updated_attrs:
-            for attr in updated_attrs:
+            for attr, attr_vals in updated_attrs.items():
                 param = self.params[attr]
-                old_dnsvalue, new_parts = updated_attrs[attr]
+                old_dnsvalue, new_parts = attr_vals

                 if old_dnsvalue not in old_entry.get(attr, []):
                     attr_name = unicode(param.label or param.name)
@@ -1813,9 +1813,9 @@ class ra(rabase.rabase, RestClient):
             booloptions[battr] = True

         # Add the boolean options to our XML document
-        for opt in booloptions:
+        for opt, value in booloptions.items():
             e = etree.SubElement(page, opt)
-            e.text = str(booloptions[opt]).lower()
+            e.text = str(value).lower()

         payload = etree.tostring(doc, pretty_print=False,
                                  xml_declaration=True, encoding='UTF-8')
@@ -104,12 +104,11 @@ def _create_topology_graphs(api_instance):

     topology_graphs = {}

-    for suffix_name in suffix_to_masters:
+    for suffix_name, masters in suffix_to_masters.items():
         segments = api_instance.Command.topologysegment_find(
             suffix_name, sizelimit=0).get('result')

-        topology_graphs[suffix_name] = create_topology_graph(
-            suffix_to_masters[suffix_name], segments)
+        topology_graphs[suffix_name] = create_topology_graph(masters, segments)

     return topology_graphs
@@ -165,8 +164,7 @@ class TopologyConnectivity:

     def check_current_state(self):
         err_msg = ""
-        for suffix in self.errors:
-            errors = self.errors[suffix]
+        for suffix, errors in self.errors.items():
             if errors:
                 err_msg = "\n".join([
                     err_msg,
@@ -182,8 +180,7 @@ class TopologyConnectivity:
         err_msg = ""
         errors_after_removal = self.errors_after_master_removal(master_cn)

-        for suffix in errors_after_removal:
-            errors = errors_after_removal[suffix]
+        for suffix, errors in errors_after_removal.items():
             if errors:
                 err_msg = "\n".join([
                     err_msg,
@@ -294,8 +294,8 @@ class IntegrationLogs:
     def init_method_logs(self):
         """Initilize method logs with the class ones"""
         self._method_logs = {}
-        for k in self._class_logs:
-            self._method_logs[k] = list(self._class_logs[k])
+        for host, logs in self._class_logs.items():
+            self._method_logs[host] = list(logs)

     def collect_class_log(self, host, filename):
         """Add class scope log
@@ -499,13 +499,13 @@ class TestEPN(IntegrationTest):
             ),
         )

-        for key in users:
+        for user_info in users.values():
             tasks.user_add(
                 self.master,
-                users[key]["uid"],
+                user_info["uid"],
                 extra_args=[
                     "--password-expiration",
-                    users[key]["krbpasswordexpiration"],
+                    user_info["krbpasswordexpiration"],
                 ],
                 password=None,
             )
@@ -1719,8 +1719,8 @@ def modify_permissions():

         # Restore the previous state
         host = state.pop('host')
-        for path in state:
-            (owner, group, mode) = state[path].split(':')
+        for path, path_state in state.items():
+            (owner, group, mode) = path_state.split(":", maxsplit=2)
             host.run_command(["chown", "%s:%s" % (owner, group), path])
             host.run_command(["chmod", mode, path])
@@ -73,11 +73,11 @@ class TestNFS(IntegrationTest):
             "euripides": "s"
         }
         temp_pass = 'temppass'
-        for user in users:
+        for user, last in users.items():
             self.master.run_command([
                 "ipa", "user-add",
-                "%s" % user, "--first", "%s" % user,
-                "--last", "%s" % users[user],
+                user, "--first", user,
+                "--last", last,
                 '--password'], stdin_text="%s\n%s\n" % (temp_pass, temp_pass)
             )
         self.master.run_command(["kdestroy", "-A"])
@@ -111,12 +111,12 @@ class TestNFS(IntegrationTest):
             "stdnfs": "*(ro)",
             "home": "*(sec=krb5p,rw)"
         }
-        for export in exports:
+        for export, options in exports.items():
             exportpath = os.sep.join(('', basedir, export))
             exportfile = os.sep.join((
                 '', 'etc', 'exports.d', "%s.exports" % export
             ))
-            exportline = " ".join((exportpath, exports[export]))
+            exportline = " ".join((exportpath, options))
             nfssrv.run_command(["mkdir", "-p", exportpath])
             nfssrv.run_command(["chmod", "770", exportpath])
             nfssrv.put_file_contents(exportfile, exportline)
@@ -783,8 +783,8 @@ class TestSubCAkeyReplication(IntegrationTest):
         # give replication some time
         time.sleep(15)

-        for name in subcas:
-            self.check_subca(replica, name, subcas[name])
+        for name, cert_nick in subcas.items():
+            self.check_subca(replica, name, cert_nick)
             self.del_subca(replica, name)

@@ -83,12 +83,11 @@ def check_removal_disconnects_topology(
         )
     }

-    for suffix in err_messages_by_suffix:
+    for suffix, err_str in err_messages_by_suffix.items():
         if suffix in affected_suffixes:
-            tasks.assert_error(
-                result, err_messages_by_suffix[suffix], returncode=1)
+            tasks.assert_error(result, err_str, returncode=1)
         else:
-            assert err_messages_by_suffix[suffix] not in result.stderr_text
+            assert err_str not in result.stderr_text


 class ServerDelBase(IntegrationTest):
@@ -196,24 +196,26 @@ class UserTracker(CertmapdataMixin, KerberosAliasMixin, Tracker):
             ipantsecurityidentifier=[fuzzy_user_or_group_sid],
         )

-        for key in self.kwargs:
-            if key == u'krbprincipalname':
+        for key, value in self.kwargs.items():
+            if key == "krbprincipalname":
                 try:
-                    self.attrs[key] = [u'%s@%s' % (
-                        (self.kwargs[key].split('@'))[0].lower(),
-                        (self.kwargs[key].split('@'))[1]
-                    )]
+                    princ_splitted = value.split("@", maxsplit=1)
+                    self.attrs[key] = [
+                        "{}@{}".format(
+                            princ_splitted[0].lower(),
+                            princ_splitted[1],
+                        )
+                    ]
                 except IndexError:
                     # we can provide just principal part
-                    self.attrs[key] = [u'%s@%s' % (
-                        (self.kwargs[key].lower(),
-                        self.api.env.realm)
-                    )]
+                    self.attrs[key] = [
+                        "{}@{}".format(value.lower(), self.api.env.realm)
+                    ]
             else:
-                if type(self.kwargs[key]) is not list:
-                    self.attrs[key] = [self.kwargs[key]]
+                if not isinstance(value, list):
+                    self.attrs[key] = [value]
                 else:
-                    self.attrs[key] = self.kwargs[key]
+                    self.attrs[key] = value

         self.exists = True
