2014-10-16 08:43:29 -05:00
|
|
|
#
|
|
|
|
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
|
|
|
|
#
|
|
|
|
|
2018-04-05 02:21:16 -05:00
|
|
|
from __future__ import print_function, absolute_import
|
2015-08-12 06:44:11 -05:00
|
|
|
|
2018-03-19 05:46:12 -05:00
|
|
|
import errno
|
2020-06-03 04:13:36 -05:00
|
|
|
import logging
|
2014-10-16 08:43:29 -05:00
|
|
|
import os
|
2020-06-03 04:13:36 -05:00
|
|
|
import re
|
2014-10-16 08:43:29 -05:00
|
|
|
import shutil
|
|
|
|
import stat
|
|
|
|
|
|
|
|
import ldap
|
|
|
|
|
2016-11-22 10:55:10 -06:00
|
|
|
from ipaserver import p11helper as _ipap11helper
|
2015-07-31 03:15:01 -05:00
|
|
|
from ipapython.dnsutil import DNSName
|
|
|
|
from ipaserver.install import service
|
|
|
|
from ipaserver.install import installutils
|
2014-10-16 08:43:29 -05:00
|
|
|
from ipapython.dn import DN
|
2018-05-23 03:37:58 -05:00
|
|
|
from ipapython import directivesetter
|
2016-11-03 10:38:06 -05:00
|
|
|
from ipapython import ipautil
|
2016-03-18 05:22:33 -05:00
|
|
|
from ipaplatform.constants import constants
|
2014-10-16 08:43:29 -05:00
|
|
|
from ipaplatform.paths import paths
|
|
|
|
from ipalib import errors, api
|
2017-03-29 11:53:11 -05:00
|
|
|
from ipalib.constants import SOFTHSM_DNSSEC_TOKEN_LABEL
|
2014-10-16 08:43:29 -05:00
|
|
|
from ipaserver.install.bindinstance import dns_container_exists
|
|
|
|
|
2017-05-24 08:42:23 -05:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-10-16 08:43:29 -05:00
|
|
|
# Label template for a replica's wrapping key in SoftHSM/LDAP; filled with
# the replica's absolute, canonicalized FQDN (see remove_replica_public_keys).
replica_keylabel_template = u"dnssec-replica:%s"
|
|
|
|
|
|
|
|
|
2016-11-11 05:45:11 -06:00
|
|
|
def dnssec_container_exists(suffix):
    """Return True when the DNSSEC container (cn=sec,cn=dns) exists.

    :param suffix: base DN of the IPA directory tree (must be a DN)
    """
    assert isinstance(suffix, DN)
    container_dn = DN(('cn', 'sec'), ('cn', 'dns'), suffix)
    return api.Backend.ldap2.entry_exists(container_dn)
|
2014-10-16 08:43:29 -05:00
|
|
|
|
2016-06-08 11:22:57 -05:00
|
|
|
|
|
|
|
def remove_replica_public_keys(hostname):
    """Delete the wrapping public keys of replica *hostname* from LDAP."""
    DNSKeySyncInstance().remove_replica_public_keys(hostname)
|
|
|
|
|
|
|
|
|
2014-10-16 08:43:29 -05:00
|
|
|
class DNSKeySyncInstance(service.Service):
    """Installer/uninstaller for the ipa-dnskeysyncd service.

    Sets up the SoftHSM token, sysconfig files, Kerberos principal and
    LDAP objects needed by the DNS key synchronization daemon.
    """

    def __init__(self, fstore=None, logger=logger):
        """Initialize the service wrapper.

        :param fstore: optional file store used to back up modified
            system files (forwarded to service.Service)
        :param logger: accepted for backward compatibility; defaults to
            the module-level logger and is not used directly here
        """
        super(DNSKeySyncInstance, self).__init__(
            "ipa-dnskeysyncd",
            service_desc="DNS key synchronization service",
            fstore=fstore,
            service_prefix=u'ipa-dnskeysyncd',
            keytab=paths.IPA_DNSKEYSYNCD_KEYTAB
        )
        # extra LDAP configuration written for this server's service entry
        self.extra_config = [u'dnssecVersion 1', ]  # DNSSEC enabled

    # 'suffix' is stored as a DN in the private attribute '_suffix'
    suffix = ipautil.dn_attribute_property('_suffix')
|
|
|
|
|
2014-11-11 06:00:18 -06:00
|
|
|
def set_dyndb_ldap_workdir_permissions(self):
|
|
|
|
"""
|
|
|
|
Setting up correct permissions to allow write/read access for daemons
|
|
|
|
"""
|
2020-12-14 10:44:38 -06:00
|
|
|
directories = [
|
|
|
|
paths.BIND_LDAP_DNS_IPA_WORKDIR,
|
|
|
|
paths.BIND_LDAP_DNS_ZONE_WORKDIR,
|
|
|
|
]
|
|
|
|
for directory in directories:
|
|
|
|
try:
|
|
|
|
os.mkdir(directory, 0o770)
|
|
|
|
except FileExistsError:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
os.chmod(directory, 0o770)
|
|
|
|
# dnssec daemons require to have access into the directory
|
2020-12-22 01:20:52 -06:00
|
|
|
constants.NAMED_USER.chown(directory, gid=constants.NAMED_GROUP.gid)
|
2014-11-11 06:00:18 -06:00
|
|
|
|
2014-10-16 08:43:29 -05:00
|
|
|
def remove_replica_public_keys(self, replica_fqdn):
|
|
|
|
ldap = api.Backend.ldap2
|
|
|
|
dn_base = DN(('cn', 'keys'), ('cn', 'sec'), ('cn', 'dns'), api.env.basedn)
|
|
|
|
keylabel = replica_keylabel_template % DNSName(replica_fqdn).\
|
|
|
|
make_absolute().canonicalize().ToASCII()
|
|
|
|
# get old keys from LDAP
|
|
|
|
search_kw = {
|
|
|
|
'objectclass': u"ipaPublicKeyObject",
|
|
|
|
'ipk11Label': keylabel,
|
|
|
|
'ipk11Wrap': True,
|
|
|
|
}
|
|
|
|
filter = ldap.make_filter(search_kw, rules=ldap.MATCH_ALL)
|
2016-10-04 09:54:44 -05:00
|
|
|
entries, _truncated = ldap.find_entries(filter=filter, base_dn=dn_base)
|
2014-10-16 08:43:29 -05:00
|
|
|
for entry in entries:
|
|
|
|
ldap.delete_entry(entry)
|
|
|
|
|
|
|
|
    def start_dnskeysyncd(self):
        """Restart ipa-dnskeysyncd, printing progress to stdout."""
        print("Restarting ipa-dnskeysyncd")
        # __start swallows failures and only logs them (best-effort restart)
        self.__start()
|
|
|
|
|
|
|
|
def create_instance(self, fqdn, realm_name):
|
|
|
|
self.fqdn = fqdn
|
|
|
|
self.realm = realm_name
|
|
|
|
self.suffix = ipautil.realm_to_suffix(self.realm)
|
|
|
|
try:
|
|
|
|
self.stop()
|
2016-03-11 12:51:07 -06:00
|
|
|
except Exception:
|
2014-10-16 08:43:29 -05:00
|
|
|
pass
|
|
|
|
|
|
|
|
# checking status step must be first
|
|
|
|
self.step("checking status", self.__check_dnssec_status)
|
2014-11-11 06:00:18 -06:00
|
|
|
self.step("setting up bind-dyndb-ldap working directory",
|
|
|
|
self.set_dyndb_ldap_workdir_permissions)
|
2014-10-16 08:43:29 -05:00
|
|
|
self.step("setting up kerberos principal", self.__setup_principal)
|
|
|
|
self.step("setting up SoftHSM", self.__setup_softhsm)
|
|
|
|
self.step("adding DNSSEC containers", self.__setup_dnssec_containers)
|
|
|
|
self.step("creating replica keys", self.__setup_replica_keys)
|
|
|
|
self.step("configuring ipa-dnskeysyncd to start on boot", self.__enable)
|
|
|
|
# we need restart named after setting up this service
|
|
|
|
self.start_creation()
|
|
|
|
|
2020-08-11 10:39:24 -05:00
|
|
|
    def __check_dnssec_status(self):
        """Abort installation unless the DNS container already exists.

        :raises RuntimeError: when cn=dns is missing from the suffix
        """
        if not dns_container_exists(self.suffix):
            raise RuntimeError("DNS container does not exist")

        # ready to be installed, storing a state is required to run uninstall
        self.backup_state("configured", True)
|
|
|
|
|
2014-10-16 08:43:29 -05:00
|
|
|
def __setup_dnssec_containers(self):
|
|
|
|
"""
|
|
|
|
Setup LDAP containers for DNSSEC
|
|
|
|
"""
|
2016-11-11 05:45:11 -06:00
|
|
|
if dnssec_container_exists(self.suffix):
|
2014-10-16 08:43:29 -05:00
|
|
|
|
2017-05-24 08:42:23 -05:00
|
|
|
logger.info("DNSSEC container exists (step skipped)")
|
2014-10-16 08:43:29 -05:00
|
|
|
return
|
|
|
|
|
|
|
|
self._ldap_mod("dnssec.ldif", {'SUFFIX': self.suffix, })
|
|
|
|
|
2020-06-03 04:13:36 -05:00
|
|
|
def _are_named_options_configured(self, options):
|
|
|
|
"""Check whether the sysconfig of named is patched
|
|
|
|
|
|
|
|
Additional command line options for named are passed
|
|
|
|
via OPTIONS env variable. Since custom options can be
|
|
|
|
supplied by a vendor, at least, the base parsing of such
|
|
|
|
is required.
|
|
|
|
Current named command line options:
|
|
|
|
NS_MAIN_ARGS "46A:c:C:d:D:E:fFgi:lL:M:m:n:N:p:P:sS:t:T:U:u:vVx:X:"
|
|
|
|
If there are several same options the last passed wins.
|
|
|
|
"""
|
|
|
|
if options:
|
|
|
|
pattern = r"[ ]*-[a-zA-Z46]*E[ ]*(.*?)(?: |$)"
|
|
|
|
engines = re.findall(pattern, options)
|
|
|
|
if engines and engines[-1] == constants.NAMED_OPENSSL_ENGINE:
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
2019-09-30 08:47:08 -05:00
|
|
|
    def setup_named_openssl_conf(self):
        """Generate the OpenSSL config BIND uses to reach SoftHSM.

        No-op unless the platform defines NAMED_OPENSSL_ENGINE.  Renders
        a template (crypto-policy aware when available) and writes it to
        DNSSEC_OPENSSL_CONF, readable by root and the named group only.
        """
        if constants.NAMED_OPENSSL_ENGINE is not None:
            logger.debug("Setup OpenSSL config for BIND")
            # setup OpenSSL config for BIND,
            # this one is needed because FreeIPA installation
            # disables p11-kit-proxy PKCS11 module
            conf_file_dict = {
                'OPENSSL_ENGINE': constants.NAMED_OPENSSL_ENGINE,
                'SOFTHSM_MODULE': paths.LIBSOFTHSM2_SO,
                'CRYPTO_POLICY_FILE': paths.CRYPTO_POLICY_OPENSSLCNF_FILE,
            }
            # platforms without a system-wide crypto policy get the plain
            # template; the policy-aware template includes the policy file
            if paths.CRYPTO_POLICY_OPENSSLCNF_FILE is None:
                opensslcnf_tmpl = "bind.openssl.cnf.template"
            else:
                opensslcnf_tmpl = "bind.openssl.cryptopolicy.cnf.template"

            named_openssl_txt = ipautil.template_file(
                os.path.join(paths.USR_SHARE_IPA_DIR, opensslcnf_tmpl),
                conf_file_dict
            )
            with open(paths.DNSSEC_OPENSSL_CONF, 'w') as f:
                # restrict to root + named group before writing content
                os.fchmod(f.fileno(), 0o640)
                os.fchown(f.fileno(), 0, gid=constants.NAMED_GROUP.gid)
                f.write(named_openssl_txt)
|
|
|
|
|
|
|
|
    def setup_named_sysconfig(self):
        """Point named's sysconfig at the IPA SoftHSM/OpenSSL setup.

        Backs up the sysconfig file, sets SOFTHSM2_CONF, and — when an
        OpenSSL engine is configured — sets OPENSSL_CONF and appends
        ``-E <engine>`` to named's OPTIONS if not already present.
        """
        logger.debug("Setup BIND sysconfig")
        sysconfig = paths.SYSCONFIG_NAMED
        self.fstore.backup_file(sysconfig)

        directivesetter.set_directive(
            sysconfig,
            'SOFTHSM2_CONF', paths.DNSSEC_SOFTHSM2_CONF,
            quotes=False, separator='=')

        # NOTE(review): the OPTIONS/-E handling below is assumed to apply
        # only when an engine is configured — confirm against upstream.
        if constants.NAMED_OPENSSL_ENGINE is not None:
            directivesetter.set_directive(
                sysconfig,
                'OPENSSL_CONF', paths.DNSSEC_OPENSSL_CONF,
                quotes=False, separator='=')

            options = directivesetter.get_directive(
                paths.SYSCONFIG_NAMED,
                constants.NAMED_OPTIONS_VAR,
                separator="="
            ) or ''
            if not self._are_named_options_configured(options):
                # append -E <engine>; named honours the last occurrence
                engine_cmd = "-E {}".format(constants.NAMED_OPENSSL_ENGINE)
                new_options = ' '.join([options, engine_cmd])
                directivesetter.set_directive(
                    sysconfig,
                    constants.NAMED_OPTIONS_VAR, new_options,
                    quotes=True, separator='=')
|
2019-09-30 08:47:08 -05:00
|
|
|
|
|
|
|
    def setup_ipa_dnskeysyncd_sysconfig(self):
        """Point ipa-dnskeysyncd's sysconfig at the IPA SoftHSM config.

        Sets SOFTHSM2_CONF, and OPENSSL_CONF too when the platform
        configures an OpenSSL engine for named.
        """
        logger.debug("Setup ipa-dnskeysyncd sysconfig")
        sysconfig = paths.SYSCONFIG_IPA_DNSKEYSYNCD
        directivesetter.set_directive(
            sysconfig,
            'SOFTHSM2_CONF', paths.DNSSEC_SOFTHSM2_CONF,
            quotes=False, separator='=')

        if constants.NAMED_OPENSSL_ENGINE is not None:
            directivesetter.set_directive(
                sysconfig,
                'OPENSSL_CONF', paths.DNSSEC_OPENSSL_CONF,
                quotes=False, separator='=')
|
|
|
|
|
2014-10-16 08:43:29 -05:00
|
|
|
    def __setup_softhsm(self):
        """Prepare the SoftHSM v2 token store used for DNSSEC keys.

        Creates the dnssec directory and softhsm2.conf, wires named and
        ipa-dnskeysyncd to them, and — unless a token store with both
        PIN files already exists — (re)creates the token directory,
        generates fresh user/SO PINs and initializes a SoftHSM token.
        """
        # remember this before any directory manipulation below
        token_dir_exists = os.path.exists(paths.DNSSEC_TOKENS_DIR)

        # create dnssec directory
        if not os.path.exists(paths.IPA_DNSSEC_DIR):
            logger.debug("Creating %s directory", paths.IPA_DNSSEC_DIR)
            os.mkdir(paths.IPA_DNSSEC_DIR)
            os.chmod(paths.IPA_DNSSEC_DIR, 0o770)
            # chown ods:named
            constants.ODS_USER.chown(paths.IPA_DNSSEC_DIR,
                                     gid=constants.NAMED_GROUP.gid)

        # setup softhsm2 config file
        softhsm_conf_txt = ("# SoftHSM v2 configuration file \n"
                            "# File generated by IPA instalation\n"
                            "directories.tokendir = %(tokens_dir)s\n"
                            "objectstore.backend = file") % {
                                'tokens_dir': paths.DNSSEC_TOKENS_DIR
                            }
        logger.debug("Creating new softhsm config file")
        with open(paths.DNSSEC_SOFTHSM2_CONF, 'w') as f:
            os.fchmod(f.fileno(), 0o644)
            f.write(softhsm_conf_txt)

        # setting up named and ipa-dnskeysyncd to use our softhsm2 and
        # openssl configs
        self.setup_named_openssl_conf()
        self.setup_named_sysconfig()
        self.setup_ipa_dnskeysyncd_sysconfig()

        if (token_dir_exists and os.path.exists(paths.DNSSEC_SOFTHSM_PIN) and
                os.path.exists(paths.DNSSEC_SOFTHSM_PIN_SO)):
            # there is an initialized softhsm already; keep it
            return

        # remove old tokens (PIN files are missing, so they are unusable)
        if token_dir_exists:
            logger.debug('Removing old tokens directory %s',
                         paths.DNSSEC_TOKENS_DIR)
            shutil.rmtree(paths.DNSSEC_TOKENS_DIR)

        # create tokens subdirectory
        logger.debug('Creating tokens %s directory', paths.DNSSEC_TOKENS_DIR)
        # setgid bit is required by daemon
        os.mkdir(paths.DNSSEC_TOKENS_DIR)
        os.chmod(paths.DNSSEC_TOKENS_DIR, 0o770 | stat.S_ISGID)
        # chown to ods:named
        constants.ODS_USER.chown(paths.DNSSEC_TOKENS_DIR,
                                 gid=constants.NAMED_GROUP.gid)

        # generate PINs for softhsm
        pin_length = 30  # Bind allows max 32 bytes including ending '\0'
        pin = ipautil.ipa_generate_password(
            entropy_bits=0, special=None, min_len=pin_length)
        pin_so = ipautil.ipa_generate_password(
            entropy_bits=0, special=None, min_len=pin_length)

        logger.debug("Saving user PIN to %s", paths.DNSSEC_SOFTHSM_PIN)
        with open(paths.DNSSEC_SOFTHSM_PIN, 'w') as f:
            # chown to ods:named
            constants.ODS_USER.chown(f.fileno(), gid=constants.NAMED_GROUP.gid)
            os.fchmod(f.fileno(), 0o660)
            f.write(pin)

        logger.debug("Saving SO PIN to %s", paths.DNSSEC_SOFTHSM_PIN_SO)
        with open(paths.DNSSEC_SOFTHSM_PIN_SO, 'w') as f:
            # owner must be root
            os.fchmod(f.fileno(), 0o400)
            f.write(pin_so)

        # initialize SoftHSM

        command = [
            paths.SOFTHSM2_UTIL,
            '--init-token',
            '--free',  # use random free slot
            '--label', SOFTHSM_DNSSEC_TOKEN_LABEL,
            '--pin', pin,
            '--so-pin', pin_so,
        ]
        logger.debug("Initializing tokens")
        # softhsm2-util reads SOFTHSM2_CONF from the environment
        os.environ["SOFTHSM2_CONF"] = paths.DNSSEC_SOFTHSM2_CONF
        # keep both PINs out of the command log
        ipautil.run(command, nolog=(pin, pin_so,))
|
|
|
|
|
|
|
|
    def __setup_replica_keys(self):
        """Generate this replica's wrapping key pair and publish it.

        Creates a fresh key pair in SoftHSM under a unique CKA_ID,
        stores the public half in LDAP, then clears CKA_WRAP on any
        older keys with the same label (both in SoftHSM and in LDAP)
        so only the new key is used for wrapping.  Finally normalizes
        ownership/permissions of the token files.
        """
        keylabel = replica_keylabel_template % DNSName(self.fqdn).\
            make_absolute().canonicalize().ToASCII()

        ldap = api.Backend.ldap2
        dn_base = DN(('cn', 'keys'), ('cn', 'sec'), ('cn', 'dns'), api.env.basedn)

        with open(paths.DNSSEC_SOFTHSM_PIN, "r") as f:
            pin = f.read()

        os.environ["SOFTHSM2_CONF"] = paths.DNSSEC_SOFTHSM2_CONF
        p11 = _ipap11helper.P11_Helper(
            SOFTHSM_DNSSEC_TOKEN_LABEL, pin, paths.LIBSOFTHSM2_SO)

        try:
            # generate replica keypair
            logger.debug("Creating replica's key pair")
            key_id = None
            while True:
                # pick a random ID and retry until neither a public nor a
                # private key with this ID exists in softHSM
                key_id = _ipap11helper.gen_key_id()
                replica_pubkey_dn = DN(('ipk11UniqueId', 'autogenerate'), dn_base)

                pub_keys = p11.find_keys(_ipap11helper.KEY_CLASS_PUBLIC_KEY,
                                         label=keylabel,
                                         id=key_id)
                if pub_keys:
                    # key with id exists
                    continue

                priv_keys = p11.find_keys(_ipap11helper.KEY_CLASS_PRIVATE_KEY,
                                          label=keylabel,
                                          id=key_id)
                if not priv_keys:
                    break  # we found unique id

            # new key can wrap; private half stays sensitive/non-extractable
            public_key_handle, _privkey_handle = p11.generate_replica_key_pair(
                keylabel, key_id,
                pub_cka_verify=False,
                pub_cka_verify_recover=False,
                pub_cka_wrap=True,
                priv_cka_unwrap=True,
                priv_cka_sensitive=True,
                priv_cka_extractable=False)

            # export public key
            public_key_blob = p11.export_public_key(public_key_handle)

            # save key to LDAP
            replica_pubkey_objectclass = [
                'ipk11Object', 'ipk11PublicKey', 'ipaPublicKeyObject', 'top'
            ]
            kw = {
                'objectclass': replica_pubkey_objectclass,
                'ipk11UniqueId': [u'autogenerate'],
                'ipk11Label': [keylabel],
                'ipaPublicKey': [public_key_blob],
                'ipk11Id': [key_id],
                'ipk11Wrap': [True],
                'ipk11Verify': [False],
                'ipk11VerifyRecover': [False],
            }
            logger.debug("Storing replica public key to LDAP, %s",
                         replica_pubkey_dn)

            entry = ldap.make_entry(replica_pubkey_dn, **kw)
            ldap.add_entry(entry)
            logger.debug("Replica public key stored")

            logger.debug("Setting CKA_WRAP=False for old replica keys")
            # first create new keys, we don't want disable keys before, we
            # have new keys in softhsm and LDAP

            # get replica pub keys with CKA_WRAP=True
            replica_pub_keys = p11.find_keys(_ipap11helper.KEY_CLASS_PUBLIC_KEY,
                                             label=keylabel,
                                             cka_wrap=True)
            # old keys in softHSM
            for handle in replica_pub_keys:
                # don't disable wrapping for new key
                # compare IDs not handle
                if key_id != p11.get_attribute(handle, _ipap11helper.CKA_ID):
                    p11.set_attribute(handle, _ipap11helper.CKA_WRAP, False)

            # get old keys from LDAP
            search_kw = {
                'objectclass': u"ipaPublicKeyObject",
                'ipk11Label': keylabel,
                'ipk11Wrap': True,
            }
            filter = ldap.make_filter(search_kw, rules=ldap.MATCH_ALL)
            entries, _truncated = ldap.find_entries(filter=filter,
                                                    base_dn=dn_base)
            for entry in entries:
                # don't disable wrapping for new key
                if entry.single_value['ipk11Id'] != key_id:
                    entry['ipk11Wrap'] = [False]
                    ldap.update_entry(entry)

        finally:
            # always release the PKCS#11 session
            p11.finalize()

        # change tokens mod/owner
        logger.debug("Changing ownership of token files")
        for (root, dirs, files) in os.walk(paths.DNSSEC_TOKENS_DIR):
            for directory in dirs:
                dir_path = os.path.join(root, directory)
                os.chmod(dir_path, 0o770 | stat.S_ISGID)
                # chown to ods:named
                constants.ODS_USER.chown(dir_path,
                                         gid=constants.NAMED_GROUP.gid)
            for filename in files:
                file_path = os.path.join(root, filename)
                os.chmod(file_path, 0o660 | stat.S_ISGID)
                # chown to ods:named
                constants.ODS_USER.chown(file_path,
                                         gid=constants.NAMED_GROUP.gid)
|
2014-10-16 08:43:29 -05:00
|
|
|
|
|
|
|
    def __enable(self):
        """Register the DNSKeySync service entry in LDAP for this host."""
        try:
            self.ldap_configure('DNSKeySync', self.fqdn, None,
                                self.suffix, self.extra_config)
        except errors.DuplicateEntry:
            # already registered (e.g. reinstall); not fatal
            logger.error("DNSKeySync service already exists")
|
2014-10-16 08:43:29 -05:00
|
|
|
|
|
|
|
    def __setup_principal(self):
        """Create the service principal, its keytab and LDAP privileges.

        Recreates the keytab, adds the principal to the 'DNS Servers'
        privilege and lifts LDAP size/time limits on its entry as
        required by bind-dyndb-ldap persistent search.

        :raises: re-raises LDAP failures after logging them
        """
        # start from a clean keytab
        ipautil.remove_keytab(self.keytab)
        installutils.kadmin_addprinc(self.principal)

        # Store the keytab on disk
        installutils.create_keytab(self.keytab, self.principal)
        p = self.move_service(self.principal)
        if p is None:
            # the service has already been moved, perhaps we're doing a DNS reinstall
            dnssynckey_principal_dn = DN(
                ('krbprincipalname', self.principal),
                ('cn', 'services'), ('cn', 'accounts'), self.suffix)
        else:
            dnssynckey_principal_dn = p

        # Make sure access is strictly reserved to the named user
        os.chown(self.keytab, 0, constants.ODS_GROUP.gid)
        os.chmod(self.keytab, 0o440)

        dns_group = DN(('cn', 'DNS Servers'), ('cn', 'privileges'),
                       ('cn', 'pbac'), self.suffix)
        mod = [(ldap.MOD_ADD, 'member', dnssynckey_principal_dn)]

        try:
            api.Backend.ldap2.modify_s(dns_group, mod)
        except ldap.TYPE_OR_VALUE_EXISTS:
            # already a member; fine
            pass
        except Exception as e:
            logger.critical("Could not modify principal's %s entry: %s",
                            dnssynckey_principal_dn, str(e))
            raise

        # bind-dyndb-ldap persistent search feature requires both size and time
        # limit-free connection

        mod = [(ldap.MOD_REPLACE, 'nsTimeLimit', '-1'),
               (ldap.MOD_REPLACE, 'nsSizeLimit', '-1'),
               (ldap.MOD_REPLACE, 'nsIdleTimeout', '-1'),
               (ldap.MOD_REPLACE, 'nsLookThroughLimit', '-1')]
        try:
            api.Backend.ldap2.modify_s(dnssynckey_principal_dn, mod)
        except Exception as e:
            logger.critical("Could not set principal's %s LDAP limits: %s",
                            dnssynckey_principal_dn, str(e))
            raise
|
|
|
|
|
|
|
|
    def __start(self):
        """Restart ipa-dnskeysyncd; failures are reported but not raised."""
        try:
            self.restart()
        except Exception as e:
            print("Failed to start ipa-dnskeysyncd")
            logger.debug("Failed to start ipa-dnskeysyncd: %s", e)
|
2014-10-16 08:43:29 -05:00
|
|
|
|
|
|
|
|
|
|
|
    def uninstall(self):
        """Unconfigure ipa-dnskeysyncd and remove its SoftHSM material.

        Restores named's sysconfig, deletes the user PIN, softhsm2.conf
        and the token directory, and removes the keytab.  The SO PIN
        file is intentionally preserved.
        """
        if self.is_configured():
            self.print_msg("Unconfiguring %s" % self.service_name)

        # Just eat states
        self.restore_state("running")
        self.restore_state("enabled")
        self.restore_state("configured")

        # stop and disable service (IPA service, we do not need it anymore)
        self.stop()
        self.disable()

        for f in [paths.SYSCONFIG_NAMED]:
            try:
                self.fstore.restore_file(f)
            except ValueError as error:
                # no backup recorded for this file; nothing to restore
                logger.debug('%s', error)

        # remove softhsm pin, to make sure new installation will generate
        # new token database
        # do not delete *so pin*, user can need it to get token data
        ipautil.remove_file(paths.DNSSEC_SOFTHSM_PIN)
        ipautil.remove_file(paths.DNSSEC_SOFTHSM2_CONF)

        try:
            shutil.rmtree(paths.DNSSEC_TOKENS_DIR)
        except OSError as e:
            # a missing directory is fine; anything else is only logged
            if e.errno != errno.ENOENT:
                logger.exception(
                    "Failed to remove %s", paths.DNSSEC_TOKENS_DIR
                )

        ipautil.remove_keytab(self.keytab)
|