2008-05-07 08:33:00 -05:00
|
|
|
#!/usr/bin/python
|
|
|
|
#
|
2009-09-15 16:40:34 -05:00
|
|
|
# Authors:
|
|
|
|
# Rob Crittenden <rcritten@redhat.com>
|
|
|
|
#
|
|
|
|
# Copyright (C) 2009 Red Hat
|
|
|
|
# see file 'COPYING' for use and warranty information
|
|
|
|
#
|
2010-12-09 06:59:11 -06:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
2009-09-15 16:40:34 -05:00
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2010-12-09 06:59:11 -06:00
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2009-09-15 16:40:34 -05:00
|
|
|
|
|
|
|
"""
|
|
|
|
Upgrade configuration files to a newer template.
|
|
|
|
"""
|
2008-05-07 08:33:00 -05:00
|
|
|
|
|
|
|
import sys
|
2013-01-30 08:51:08 -06:00
|
|
|
import re
|
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import pwd
|
|
|
|
import fileinput
|
|
|
|
|
|
|
|
from ipalib import api
|
|
|
|
import ipalib.util
|
|
|
|
import ipalib.errors
|
|
|
|
from ipapython import ipautil, sysrestore, version, services
|
|
|
|
from ipapython.config import IPAOptionParser
|
|
|
|
from ipapython.ipa_log_manager import *
|
|
|
|
from ipapython import certmonger
|
|
|
|
from ipapython import dogtag
|
|
|
|
from ipapython.dn import DN
|
|
|
|
from ipaserver.install import installutils
|
|
|
|
from ipaserver.install import dsinstance
|
|
|
|
from ipaserver.install import httpinstance
|
|
|
|
from ipaserver.install import memcacheinstance
|
|
|
|
from ipaserver.install import bindinstance
|
|
|
|
from ipaserver.install import service
|
|
|
|
from ipaserver.install import cainstance
|
|
|
|
from ipaserver.install import certs
|
|
|
|
from ipaserver.install import sysupgrade
|
|
|
|
|
2008-05-07 08:33:00 -05:00
|
|
|
|
2012-05-31 10:02:44 -05:00
|
|
|
def parse_options():
    """Parse command line arguments.

    Returns a (safe_options, options) pair, where safe_options is the
    password-scrubbed representation suitable for logging.
    """
    parser = IPAOptionParser(version=version.VERSION)
    parser.add_option("-d", "--debug", dest="debug", action="store_true",
                      default=False, help="print debugging information")
    parser.add_option("-q", "--quiet", dest="quiet",
                      action="store_true",
                      default=False, help="Output only errors")

    parsed, _args = parser.parse_args()

    return parser.get_safe_opts(parsed), parsed
|
|
|
|
|
2012-02-13 08:16:26 -06:00
|
|
|
class KpasswdInstance(service.SimpleServiceInstance):
    """Minimal service wrapper for the legacy ipa_kpasswd service.

    Exists only so the generic SimpleServiceInstance machinery can be used
    to clean sysrestore state for ipa_kpasswd (see uninstall_ipa_kpasswd).
    """
    def __init__(self):
        service.SimpleServiceInstance.__init__(self, "ipa_kpasswd")
|
|
|
|
|
|
|
|
def uninstall_ipa_kpasswd():
    """
    We can't use the full service uninstaller because that will attempt
    to stop and disable the service which by now doesn't exist. We just
    want to clean up sysrestore.state to remove all references to
    ipa_kpasswd.
    """
    ipa_kpasswd = KpasswdInstance()

    # restore_state() pops the saved value out of sysrestore.state; the
    # 'running' value itself is not used, the call only clears the entry.
    running = ipa_kpasswd.restore_state("running")
    # NOTE(review): 'not restore_state(...)' always produces a bool, so the
    # 'enabled is not None' guard below can never see None.  The net effect
    # is that remove() runs only when the saved state was "enabled" == True.
    # Looks suspicious -- confirm against sysrestore semantics.
    enabled = not ipa_kpasswd.restore_state("enabled")

    if enabled is not None and not enabled:
        ipa_kpasswd.remove()
|
|
|
|
|
2008-05-07 08:33:00 -05:00
|
|
|
def backup_file(filename, ext):
    """Make a backup of filename using ext as the extension. Do not overwrite
    previous backups.

    :param filename: absolute path of the file to back up
    :param ext: extension appended (repeatedly, if needed) to find a free
        backup name: filename.bak, filename.bak.<ext>, filename.bak.<ext>.<ext>, ...
    :raises ValueError: if filename is not an absolute path

    A missing source file is silently ignored (nothing to back up).
    """
    if not os.path.isabs(filename):
        raise ValueError("Absolute path required")

    backupfile = filename + ".bak"

    # Never clobber an existing backup; keep extending the name instead.
    while os.path.exists(backupfile):
        backupfile = backupfile + "." + str(ext)

    try:
        shutil.copy2(filename, backupfile)
    except IOError as e:
        if e.errno == 2:  # ENOENT: no such file, nothing to back up
            pass
        else:
            # bare raise preserves the original traceback
            raise
|
2008-05-07 08:33:00 -05:00
|
|
|
|
|
|
|
def update_conf(sub_dict, filename, template_filename):
    """Re-render filename from template_filename with sub_dict substitutions.

    :param sub_dict: substitution dict passed to ipautil.template_file()
    :param filename: destination configuration file (overwritten)
    :param template_filename: template to render
    """
    template = ipautil.template_file(template_filename, sub_dict)
    # 'with' guarantees the handle is closed even if write() raises;
    # the original leaked the descriptor on a failed write.
    with open(filename, "w") as fd:
        fd.write(template)
|
|
|
|
|
|
|
|
def find_hostname():
    """Find the hostname currently configured in ipa-rewrite.conf.

    Returns None when the file does not exist.
    :raises RuntimeError: when the file exists but no https://<fqdn>/ URL
        can be extracted from it.
    """
    filename = "/etc/httpd/conf.d/ipa-rewrite.conf"

    if not ipautil.file_exists(filename):
        return None

    # Raw string so \s, \. etc. reach the regex engine unmangled
    # (non-raw backslash escapes are deprecated string syntax).
    pattern = r"^[\s#]*.*https:\/\/([A-Za-z0-9\.\-]*)\/.*"
    p = re.compile(pattern)
    for line in fileinput.input(filename):
        match = p.search(line)  # search once per line instead of twice
        if match:
            fileinput.close()
            return match.group(1)
    fileinput.close()

    raise RuntimeError("Unable to determine the fully qualified hostname from %s" % filename)
|
2008-05-07 08:33:00 -05:00
|
|
|
|
2012-06-20 13:09:55 -05:00
|
|
|
def find_autoredirect(fqdn):
    """
    When upgrading ipa-rewrite.conf we need to see if the automatic redirect
    was disabled during install time (or afterward). So sift through the
    configuration file and see if we can determine the status.

    :param fqdn: fully qualified hostname expected in the RewriteRule
    Returns True if autoredirect is enabled, False otherwise
    """
    filename = '/etc/httpd/conf.d/ipa-rewrite.conf'
    if os.path.exists(filename):
        # Raw string: the backslashes are regex escapes, not string escapes.
        pattern = r"^RewriteRule \^/\$ https://%s/ipa/ui \[L,NC,R=301\]" % fqdn
        p = re.compile(pattern)
        for line in fileinput.input(filename):
            if p.search(line):
                fileinput.close()
                return True
        fileinput.close()
        # File exists but the rule is absent: redirect was disabled.
        return False
    # No config file at all: the install default is redirect enabled.
    return True
|
|
|
|
|
2008-05-07 08:33:00 -05:00
|
|
|
def find_version(filename):
    """Find the version of a configuration file.

    If no VERSION entry exists in the file, returns 0.
    If the file does not exist, returns -1.
    Otherwise returns the version number as a *string* (callers int() it).
    """
    if not os.path.exists(filename):
        return -1

    # Raw string so \s reaches the regex engine unmangled
    # (non-raw backslash escapes are deprecated string syntax).
    p = re.compile(r"^[\s#]*VERSION\s+([0-9]+)\s+.*")
    for line in fileinput.input(filename):
        match = p.search(line)  # search once per line instead of twice
        if match:
            fileinput.close()
            return match.group(1)
    fileinput.close()

    # no VERSION found
    return 0
|
|
|
|
|
2011-09-09 16:07:09 -05:00
|
|
|
def upgrade(sub_dict, filename, template, add=False):
    """
    Get the version from the current and template files and update the
    installed configuration file if there is a new template.

    If add is True then create a new configuration file.

    :param sub_dict: substitutions applied when rendering the template
    :param filename: installed configuration file to upgrade
    :param template: template file carrying the new VERSION
    Exits the process when either file is unexpectedly missing.
    """
    old = int(find_version(filename))
    new = int(find_version(template))

    if old < 0 and not add:
        root_logger.error("%s not found." % filename)
        sys.exit(1)

    if new < 0:
        root_logger.error("%s not found." % template)
        # A missing template is a packaging error; previously this only
        # logged and fell through doing nothing.  Bail out like the
        # missing-filename case above.
        sys.exit(1)

    if old == 0:
        # The original file does not have a VERSION entry. This means it's now
        # managed by IPA, but previously was not.
        root_logger.warning("%s is now managed by IPA. It will be "
            "overwritten. A backup of the original will be made.", filename)

    if old < new or (add and old == 0):
        backup_file(filename, new)
        update_conf(sub_dict, filename, template)
        root_logger.info("Upgraded %s to version %d", filename, new)
|
2008-05-07 08:33:00 -05:00
|
|
|
|
2009-09-15 16:40:34 -05:00
|
|
|
def check_certs():
    """Check ca.crt is in the right place, and try to fix if not"""
    root_logger.info('[Verifying that root certificate is published]')

    if os.path.exists("/usr/share/ipa/html/ca.crt"):
        root_logger.debug('Certificate file exists')
        return

    ca_file = "/etc/httpd/alias/cacert.asc"
    if not os.path.exists(ca_file):
        root_logger.error("Missing Certification Authority file.")
        root_logger.error("You should place a copy of the CA certificate in /usr/share/ipa/html/ca.crt")
        return

    old_umask = os.umask(0o22)  # make sure its readable by httpd
    try:
        shutil.copyfile(ca_file, "/usr/share/ipa/html/ca.crt")
    finally:
        os.umask(old_umask)
|
2008-12-01 14:06:20 -06:00
|
|
|
|
2012-10-10 05:37:24 -05:00
|
|
|
def upgrade_pki(ca, fstore):
    """
    Update/add the dogtag proxy configuration. The IPA side of this is
    handled in ipa-pki-proxy.conf.

    This requires enabling SSL renegotiation.
    """
    configured_constants = dogtag.configured_constants()
    root_logger.info('[Verifying that CA proxy configuration is correct]')
    if not ca.is_configured():
        root_logger.info('CA is not configured')
        return

    http = httpinstance.HTTPInstance(fstore)
    http.enable_mod_nss_renegotiate()

    # The proxy is only (re)configured when no securePort is set yet and
    # the dogtag setup helper is available.
    secure_port = installutils.get_directive(
        configured_constants.CS_CFG_PATH, 'proxy.securePort', '=')
    if not secure_port and os.path.exists('/usr/bin/pki-setup-proxy'):
        ipautil.run(['/usr/bin/pki-setup-proxy',
                     '-pki_instance_root=/var/lib',
                     '-pki_instance_name=pki-ca',
                     '-subsystem_type=ca'])
        root_logger.debug('Proxy configuration updated')
    else:
        root_logger.debug('Proxy configuration up-to-date')
|
2011-10-06 19:37:18 -05:00
|
|
|
|
2012-02-13 08:16:26 -06:00
|
|
|
def update_dbmodules(realm, filename="/etc/krb5.conf"):
    """Rewrite the [dbmodules] section of krb5.conf to use the ipadb backend.

    Comments out any pre-existing realm stanza inside [dbmodules] and
    appends a new stanza pointing db_library at ipadb.so.  No-op when the
    file already references ipadb.so.

    :param realm: kerberos realm name whose stanza is replaced
    :param filename: krb5 configuration file to edit in place
    """
    newfile = []
    found_dbrealm = False
    found_realm = False
    prefix = ''

    root_logger.info('[Verifying that KDC configuration is using ipa-kdb backend]')
    # NOTE(review): 'st' is never used afterwards -- presumably intended for
    # restoring ownership/permissions; confirm or drop.
    st = os.stat(filename)
    fd = open(filename)

    lines = fd.readlines()
    fd.close()

    # Already converted: the exact ipadb.so line is present.
    if ' db_library = ipadb.so\n' in lines:
        root_logger.debug('dbmodules already updated in %s', filename)
        return

    for line in lines:
        if line.startswith('[dbmodules]'):
            found_dbrealm = True
        # Inside [dbmodules], start commenting out the old realm stanza.
        if found_dbrealm and line.find(realm) > -1:
            found_realm = True
            prefix = '#'
        # Closing brace of the old stanza: comment it and stop prefixing.
        if found_dbrealm and line.find('}') > -1 and found_realm:
            found_realm = False
            newfile.append('#%s' % line)
            prefix = ''
            continue

        newfile.append('%s%s' % (prefix, line))

    # Append updated dbmodules information
    newfile.append(' %s = {\n' % realm)
    newfile.append(' db_library = ipadb.so\n')
    newfile.append(' }\n')

    # Write out new file
    fd = open(filename, 'w')
    fd.write("".join(newfile))
    fd.close()
    root_logger.debug('%s updated', filename)
|
2012-02-13 08:16:26 -06:00
|
|
|
|
2012-04-03 03:47:40 -05:00
|
|
|
def cleanup_kdc(fstore):
    """
    Clean up old KDC files if they exist. We need to remove the actual
    file and any references in the uninstall configuration.

    :param fstore: sysrestore file store used to untrack removed files
    """
    root_logger.info('[Checking for deprecated KDC configuration files]')
    # 'basename' instead of 'file': don't shadow the Python 2 builtin.
    for basename in ['kpasswd.keytab', 'ldappwd']:
        filename = '/var/kerberos/krb5kdc/%s' % basename
        installutils.remove_file(filename)
        if fstore.has_file(filename):
            fstore.untrack_file(filename)
            root_logger.debug('Uninstalling %s', filename)
|
2012-02-22 15:40:29 -06:00
|
|
|
|
2012-10-08 06:54:47 -05:00
|
|
|
|
|
|
|
def setup_firefox_extension(fstore):
    """Set up the Firefox configuration extension, if it's not set up yet
    """
    root_logger.info('[Setting up Firefox extension]')
    http = httpinstance.HTTPInstance(fstore)
    # realm/domain come straight from the API environment
    http.setup_firefox_extension(api.env.realm, api.env.domain)
|
|
|
|
|
|
|
|
|
2012-11-19 09:32:28 -06:00
|
|
|
def upgrade_ipa_profile(ca, domain, fqdn):
    """
    Update the IPA Profile provided by dogtag

    Returns True if restart is needed, False otherwise.
    """
    root_logger.info('[Verifying that CA service certificate profile is updated]')
    if not ca.is_configured():
        root_logger.info('CA is not configured')
        return False

    ski_changed = ca.enable_subject_key_identifier()
    if ski_changed:
        root_logger.debug('Subject Key Identifier updated.')
    else:
        root_logger.debug('Subject Key Identifier already set.')

    audit_changed = ca.set_audit_renewal()
    uri_changed = ca.set_crl_ocsp_extensions(domain, fqdn)

    # Restart is needed when any of the three profile edits took effect.
    return bool(audit_changed or ski_changed or uri_changed)
|
|
|
|
|
2012-04-03 03:47:40 -05:00
|
|
|
|
2012-06-28 09:46:48 -05:00
|
|
|
def named_enable_psearch():
    """
    From IPA 3.0, persistent search is a preferred mechanism for new DNS zone
    detection and is also needed for other features (DNSSEC, SOA serial
    updates). Enable psearch and make sure connections attribute is right.
    This step is done just once for a case when user switched the persistent
    search back to disabled.

    When some change in named.conf is done, this functions returns True
    """
    changed = False

    root_logger.info('[Enabling persistent search in DNS]')

    if not bindinstance.named_conf_exists():
        # DNS service may not be configured
        root_logger.info('DNS is not configured')
        return

    try:
        psearch = bindinstance.named_conf_get_directive('psearch')
    except IOError, e:
        root_logger.debug('Cannot retrieve psearch option from %s: %s',
                bindinstance.NAMED_CONF, e)
        return
    else:
        # normalize for the comparisons below; None means "not set"
        psearch = None if psearch is None else psearch.lower()

    # One-shot enablement: only force psearch on the first upgrade pass.
    if not sysupgrade.get_upgrade_state('named.conf', 'psearch_enabled'):
        if psearch != "yes":
            try:
                bindinstance.named_conf_set_directive('zone_refresh', 0)
                bindinstance.named_conf_set_directive('psearch', 'yes')
            except IOError, e:
                root_logger.error('Cannot enable psearch in %s: %s',
                        bindinstance.NAMED_CONF, e)
            else:
                changed = True
                psearch = "yes"
        sysupgrade.set_upgrade_state('named.conf', 'psearch_enabled', True)
        root_logger.debug('Persistent search enabled')

    # make sure number of connections is right
    minimum_connections = 2
    if psearch == 'yes':
        # serial_autoincrement increased the minimal number of connections to 4
        minimum_connections = 4
    try:
        connections = bindinstance.named_conf_get_directive('connections')
    except IOError, e:
        root_logger.debug('Cannot retrieve connections option from %s: %s',
                bindinstance.NAMED_CONF, e)
        return
    try:
        if connections is not None:
            connections = int(connections)
    except ValueError:
        # this should not happend, but there is some bad value in
        # "connections" option, bail out
        pass
    else:
        if connections is not None and connections < minimum_connections:
            try:
                bindinstance.named_conf_set_directive('connections',
                                                      minimum_connections)
                root_logger.debug('Connections set to %d', minimum_connections)
            except IOError, e:
                root_logger.error('Cannot update connections in %s: %s',
                        bindinstance.NAMED_CONF, e)
            else:
                changed = True

    if not changed:
        root_logger.debug('No changes made')
    return changed
|
|
|
|
|
2012-06-28 09:46:48 -05:00
|
|
|
def named_enable_serial_autoincrement():
    """
    Serial autoincrement is a requirement for zone transfers or DNSSEC. It
    should be enabled both for new installs and upgraded servers.

    When some change in named.conf is done, this functions returns True
    """
    changed = False

    root_logger.info('[Enabling serial autoincrement in DNS]')

    if not bindinstance.named_conf_exists():
        # DNS service may not be configured
        root_logger.info('DNS is not configured')
        return changed

    try:
        psearch = bindinstance.named_conf_get_directive('psearch')
        serial_autoincrement = bindinstance.named_conf_get_directive(
                'serial_autoincrement')
    except IOError as e:
        root_logger.debug('Cannot retrieve psearch option from %s: %s',
                bindinstance.NAMED_CONF, e)
        return changed
    else:
        # normalize for the comparisons below; None means "not set"
        psearch = None if psearch is None else psearch.lower()
        serial_autoincrement = None if serial_autoincrement is None \
                else serial_autoincrement.lower()

    # enable SOA serial autoincrement (one-shot, tracked in upgrade state)
    if not sysupgrade.get_upgrade_state('named.conf', 'autoincrement_enabled'):
        if psearch != "yes":  # psearch is required
            root_logger.error('Persistent search is disabled, '
                    'serial autoincrement cannot be enabled')
        else:
            if serial_autoincrement != 'yes':
                try:
                    bindinstance.named_conf_set_directive(
                            'serial_autoincrement', 'yes')
                except IOError as e:
                    root_logger.error('Cannot enable serial_autoincrement in %s: %s',
                            bindinstance.NAMED_CONF, e)
                    return changed
                else:
                    root_logger.debug('Serial autoincrement enabled')
                    changed = True
            else:
                # fixed typo: 'alredy' -> 'already'
                root_logger.debug('Serial autoincrement is already enabled')
            sysupgrade.set_upgrade_state('named.conf', 'autoincrement_enabled', True)
    else:
        root_logger.debug('Skip serial autoincrement check')

    return changed
|
|
|
|
|
2013-03-14 04:30:32 -05:00
|
|
|
def named_update_gssapi_configuration():
    """
    Update GSSAPI configuration in named.conf to a recent API.
    tkey-gssapi-credential and tkey-domain is replaced with tkey-gssapi-keytab.
    Details can be found in https://fedorahosted.org/freeipa/ticket/3429.

    When some change in named.conf is done, this functions returns True
    """

    root_logger.info('[Updating GSSAPI configuration in DNS]')

    if not bindinstance.named_conf_exists():
        # DNS service may not be configured
        root_logger.info('DNS is not configured')
        return False

    # One-shot update: skip once recorded in the upgrade state.
    if sysupgrade.get_upgrade_state('named.conf', 'gssapi_updated'):
        root_logger.debug('Skip GSSAPI configuration check')
        return False

    try:
        gssapi_keytab = bindinstance.named_conf_get_directive('tkey-gssapi-keytab',
                bindinstance.NAMED_SECTION_OPTIONS)
    except IOError, e:
        root_logger.error('Cannot retrieve tkey-gssapi-keytab option from %s: %s',
                bindinstance.NAMED_CONF, e)
        return False
    else:
        # New-style directive already present: just record the state.
        if gssapi_keytab:
            root_logger.debug('GSSAPI configuration already updated')
            sysupgrade.set_upgrade_state('named.conf', 'gssapi_updated', True)
            return False

    try:
        tkey_credential = bindinstance.named_conf_get_directive('tkey-gssapi-credential',
                bindinstance.NAMED_SECTION_OPTIONS)
        tkey_domain = bindinstance.named_conf_get_directive('tkey-domain',
                bindinstance.NAMED_SECTION_OPTIONS)
    except IOError, e:
        root_logger.error('Cannot retrieve tkey-gssapi-credential option from %s: %s',
                bindinstance.NAMED_CONF, e)
        return False

    # Both old-style directives must exist to perform the replacement.
    if not tkey_credential or not tkey_domain:
        root_logger.error('Either tkey-gssapi-credential or tkey-domain is missing in %s. '
                'Skip update.', bindinstance.NAMED_CONF)
        return False

    try:
        # Setting a directive to None removes it from named.conf.
        bindinstance.named_conf_set_directive('tkey-gssapi-credential', None,
                bindinstance.NAMED_SECTION_OPTIONS)
        bindinstance.named_conf_set_directive('tkey-domain', None,
                bindinstance.NAMED_SECTION_OPTIONS)
        bindinstance.named_conf_set_directive('tkey-gssapi-keytab', '/etc/named.keytab',
                bindinstance.NAMED_SECTION_OPTIONS)
    except IOError, e:
        root_logger.error('Cannot update GSSAPI configuration in %s: %s',
                bindinstance.NAMED_CONF, e)
        return False
    else:
        root_logger.debug('GSSAPI configuration updated')

    sysupgrade.set_upgrade_state('named.conf', 'gssapi_updated', True)
    return True
|
|
|
|
|
2013-03-20 09:39:59 -05:00
|
|
|
def named_update_pid_file():
    """
    Make sure that named reads the pid file from the right file
    """
    root_logger.info('[Updating pid-file configuration in DNS]')

    if not bindinstance.named_conf_exists():
        # DNS service may not be configured
        root_logger.info('DNS is not configured')
        return False

    if sysupgrade.get_upgrade_state('named.conf', 'pid-file_updated'):
        root_logger.debug('Skip pid-file configuration check')
        return False

    try:
        current_pid_file = bindinstance.named_conf_get_directive(
                'pid-file', bindinstance.NAMED_SECTION_OPTIONS)
    except IOError as e:
        root_logger.error('Cannot retrieve pid-file option from %s: %s',
                bindinstance.NAMED_CONF, e)
        return False
    if current_pid_file:
        # Already pointing somewhere explicit; just record the state.
        root_logger.debug('pid-file configuration already updated')
        sysupgrade.set_upgrade_state('named.conf', 'pid-file_updated', True)
        return False

    try:
        bindinstance.named_conf_set_directive(
                'pid-file', '/run/named/named.pid',
                bindinstance.NAMED_SECTION_OPTIONS)
    except IOError as e:
        root_logger.error('Cannot update pid-file configuration in %s: %s',
                bindinstance.NAMED_CONF, e)
        return False
    root_logger.debug('pid-file configuration updated')

    sysupgrade.set_upgrade_state('named.conf', 'pid-file_updated', True)
    return True
|
|
|
|
|
2013-03-14 04:30:32 -05:00
|
|
|
|
2012-10-08 08:58:48 -05:00
|
|
|
def enable_certificate_renewal(ca):
    """Start tracking the CA subsystem certificates for renewal.

    If the CA subsystem certificates are not being tracked for renewal
    then tell certmonger to start tracking them.

    Returns True when the CA needs to be restarted.
    """
    root_logger.info('[Enable certificate renewal]')
    if not ca.is_configured():
        root_logger.info('CA is not configured')
        return False

    # Using the nickname find the certmonger request_id; when a request
    # already exists, renewal has been configured before.
    criteria = (
        ('cert_storage_location', '/etc/httpd/alias', certmonger.NPATH),
        ('cert_nickname', 'ipaCert', None),
    )
    if certmonger.get_request_id(criteria) is not None:
        root_logger.debug('Certificate renewal already configured')
        return False

    if sysupgrade.get_upgrade_state('dogtag', 'renewal_configured'):
        return False

    # The renewal setup differs between the renewal master and clones.
    if ca.is_master():
        ca.configure_renewal()
    else:
        ca.configure_certmonger_renewal()
        ca.configure_clone_renewal()
        ca.configure_agent_renewal()
        ca.track_servercert()
    sysupgrade.set_upgrade_state('dogtag', 'renewal_configured', True)
    root_logger.debug('CA subsystem certificate renewal enabled')
    return True
|
|
|
|
|
2014-12-02 12:18:36 -06:00
|
|
|
def certificate_renewal_stop_ca(ca):
    """
    Validate the certmonger configuration on certificates that already
    have renewal configured.

    As of certmonger 0.65 it now does locking from the point where it
    generates the CSR to the end of the post-command. This is to ensure
    that only one certmonger renewal, and hopefully, one process at a
    time holds the NSS database open in read/write.

    :param ca: the CA instance to reconfigure
    :returns: True when the renewal configuration was changed and the CA
        needs to be restarted, False otherwise.
    """
    root_logger.info('[Certificate renewal should stop the CA]')
    if not ca.is_configured():
        root_logger.info('CA is not configured')
        return False

    # Fetch the dogtag constants once; they are needed both for the NSS
    # database location and for stop_tracking_certificates() below.
    dogtag_constants = dogtag.configured_constants()
    nss_dir = dogtag_constants.ALIAS_DIR

    # Using the nickname find the certmonger request id.
    # (Local renamed from `id`, which shadowed the builtin.)
    criteria = (
        ('cert_storage_location', nss_dir, certmonger.NPATH),
        ('cert_nickname', 'auditSigningCert cert-pki-ca', None),
    )
    request_id = certmonger.get_request_id(criteria)
    if request_id is None:
        root_logger.error('Unable to find certmonger request ID for auditSigning Cert')
        return False

    if sysupgrade.get_upgrade_state('dogtag', 'stop_ca_during_renewal'):
        return False

    # State not set, lets see if we are already configured
    pre_command = certmonger.get_request_value(request_id, 'pre_certsave_command')
    if pre_command is not None and pre_command.strip().endswith('stop_pkicad'):
        root_logger.info('Already configured to stop CA')
        return False

    # Ok, now we need to stop tracking, then we can start tracking them
    # again with new configuration:
    cainstance.stop_tracking_certificates(dogtag_constants)
    if ca.is_master():
        ca.configure_renewal()
    else:
        ca.configure_certmonger_renewal()
        ca.configure_clone_renewal()
        ca.configure_agent_renewal()
        ca.track_servercert()
    sysupgrade.set_upgrade_state('dogtag', 'stop_ca_during_renewal', True)
    root_logger.debug('CA subsystem certificate renewal configured to stop the CA')
    return True
|
|
|
|
|
2012-10-08 08:58:48 -05:00
|
|
|
def copy_crl_file(old_path, new_path=None):
    """
    Copy CRL to new location, update permissions and SELinux context.

    Symlinks are recreated so they point into the new publish directory;
    regular files are copied with metadata and re-owned to the PKI user.
    The SELinux context of the result is restored in both cases.

    :param old_path: existing CRL file or symlink to copy
    :param new_path: destination path; when None, the same basename is
        placed in dogtag's CRL_PUBLISH_PATH
    """
    # Look up the publish path once instead of calling
    # dogtag.configured_constants() in each branch.
    crl_publish_path = dogtag.configured_constants().CRL_PUBLISH_PATH
    if new_path is None:
        new_path = os.path.join(crl_publish_path, os.path.basename(old_path))
    root_logger.debug('copy_crl_file: %s -> %s', old_path, new_path)

    if os.path.islink(old_path):
        # update symlink to the most recent CRL file
        filename = os.path.basename(os.readlink(old_path))
        realpath = os.path.join(crl_publish_path, filename)
        root_logger.debug('copy_crl_file: Create symlink %s -> %s',
                          new_path, realpath)
        os.symlink(realpath, new_path)
    else:
        shutil.copy2(old_path, new_path)
        pent = pwd.getpwnam(cainstance.PKI_USER)
        os.chown(new_path, pent.pw_uid, pent.pw_gid)

    services.restore_context(new_path)
|
|
|
|
|
|
|
|
def migrate_crl_publish_dir(ca):
    """Move the CRL publish dir into the IPA controlled tree.

    Move CRL publish dir from /var/lib/pki-ca/publish to IPA controlled
    tree: /var/lib/ipa/pki-ca/publish, and point the CA configuration at
    the new location.

    Returns True when the CA configuration was changed and pki-ca needs
    a restart, False otherwise.
    """
    root_logger.info('[Migrate CRL publish directory]')
    if sysupgrade.get_upgrade_state('dogtag', 'moved_crl_publish_dir'):
        root_logger.info('CRL tree already moved')
        return False

    if not ca.is_configured():
        root_logger.info('CA is not configured')
        return False

    caconfig = dogtag.configured_constants()

    try:
        old_publish_dir = installutils.get_directive(
            caconfig.CS_CFG_PATH,
            'ca.publish.publisher.instance.FileBaseCRLPublisher.directory',
            separator='=')
    except OSError as e:
        root_logger.error('Cannot read CA configuration file "%s": %s',
                          caconfig.CS_CFG_PATH, e)
        return False

    if old_publish_dir == caconfig.CRL_PUBLISH_PATH:
        # publish dir is already updated; only record the state
        root_logger.info('Publish directory already set to new location')
        sysupgrade.set_upgrade_state('dogtag', 'moved_crl_publish_dir', True)
        return False

    # Prepare target publish dir (permissions, SELinux context)
    publishdir = ca.prepare_crl_publish_dir()

    # Copy all CRLs to new directory; individual failures are logged
    # but do not abort the migration.
    root_logger.info('Copy all CRLs to new publish directory')
    try:
        found_crl_files = cainstance.get_crl_files(old_publish_dir)
    except OSError as e:
        root_logger.error('Cannot move CRL files to new directory: %s', e)
    else:
        # Sort symlinks to the end of the list so that the actual CRL
        # files are copied first and the links have a target to point at.
        for crl_file in sorted(found_crl_files, key=os.path.islink):
            try:
                copy_crl_file(crl_file)
            except Exception as e:
                root_logger.error('Cannot move CRL file to new directory: %s', e)

    try:
        installutils.set_directive(
            caconfig.CS_CFG_PATH,
            'ca.publish.publisher.instance.FileBaseCRLPublisher.directory',
            publishdir, quotes=False, separator='=')
    except OSError as e:
        root_logger.error('Cannot update CA configuration file "%s": %s',
                          caconfig.CS_CFG_PATH, e)
        return False

    sysupgrade.set_upgrade_state('dogtag', 'moved_crl_publish_dir', True)
    root_logger.info('CRL publish directory has been migrated, '
                     'request pki-ca restart')
    return True
|
2012-07-11 14:51:01 -05:00
|
|
|
|
2012-11-19 09:32:28 -06:00
|
|
|
def add_server_cname_records():
    """Add the IPA CA CNAME record for this server when it is missing.

    Connects to LDAP and, when IPA DNS is enabled, lets bindinstance add
    the CA CNAME. The sysupgrade state 'dns'/'ipa_ca_cname' marks the
    step as completed so it is only attempted once.
    """
    root_logger.info('[Add missing server CNAME records]')

    if sysupgrade.get_upgrade_state('dns', 'ipa_ca_cname'):
        root_logger.info('IPA CA CNAME already processed')
        return

    try:
        api.Backend.ldap2.connect(autobind=True)
    except ipalib.errors.PublicError as e:
        root_logger.error("Cannot connect to LDAP to add DNS records: %s", e)
    else:
        if not api.Command['dns_is_enabled']()['result']:
            root_logger.info('DNS is not configured')
            sysupgrade.set_upgrade_state('dns', 'ipa_ca_cname', True)
            return

        # DNS is enabled, so let bindinstance find out if CA is enabled
        # and let it add the CNAME in that case
        bind = bindinstance.BindInstance()
        bind.add_ipa_ca_cname(api.env.host, api.env.domain, ca_configured=None)
        sysupgrade.set_upgrade_state('dns', 'ipa_ca_cname', True)
    finally:
        # Runs on both the success path and the early return above.
        if api.Backend.ldap2.isconnected():
            api.Backend.ldap2.disconnect()
|
|
|
|
|
2008-05-07 08:33:00 -05:00
|
|
|
def main():
    """
    Get some basics about the system. If getting those basics fail then
    this is likely because the machine isn't currently an IPA server so
    exit gracefully.
    """

    # Upgrading touches system config files and services: require root.
    if not os.geteuid()==0:
        sys.exit("\nYou must be root to run this script.\n")

    # Nothing to upgrade when IPA has never been configured here.
    if not installutils.is_ipa_configured():
        sys.exit(0)

    # safe_options is the sanitized form used only for logging.
    safe_options, options = parse_options()

    verbose = not options.quiet
    if options.debug:
        console_format = '%(levelname)s: %(message)s'
    else:
        console_format = '%(message)s'

    # Append to the upgrade log so earlier runs remain visible.
    standard_logging_setup('/var/log/ipaupgrade.log', debug=options.debug,
        verbose=verbose, console_format=console_format, filemode='a')
    root_logger.debug('%s was invoked with options: %s' % (sys.argv[0], safe_options))

    fstore = sysrestore.FileStore('/var/lib/ipa/sysrestore')

    api.bootstrap(context='restart', in_server=True)
    api.finalize()

    fqdn = find_hostname()
    if fqdn is None:
        # ipa-rewrite.conf doesn't exist, nothing to do
        sys.exit(0)

    # Ok, we are an IPA server, do the additional tests

    check_certs()

    # Build the substitution dict used to regenerate the Apache and
    # certmap configuration from the shipped templates.
    auto_redirect = find_autoredirect(fqdn)
    configured_constants = dogtag.configured_constants()
    sub_dict = dict(
        REALM=api.env.realm,
        FQDN=fqdn,
        AUTOREDIR='' if auto_redirect else '#',
        CRL_PUBLISH_PATH=configured_constants.CRL_PUBLISH_PATH,
        DOGTAG_PORT=configured_constants.AJP_PORT,
        CLONE='#'
    )

    ca = cainstance.CAInstance(api.env.realm, certs.NSS_DIR)

    # migrate CRL publish dir before the location in ipa.conf is updated
    ca_restart = migrate_crl_publish_dir(ca)

    if ca.is_configured():
        # CRL updates enabled means this host generates the master CRL,
        # so leave the CLONE marker commented out.
        crl = installutils.get_directive(configured_constants.CS_CFG_PATH,
                'ca.crl.MasterCRL.enableCRLUpdates',
                '=')
        sub_dict['CLONE']='#' if crl.lower() == 'true' else ''

    certmap_dir = dsinstance.config_dirname(
        dsinstance.realm_to_serverid(api.env.realm))

    # Regenerate configuration files from their current templates.
    upgrade(sub_dict, "/etc/httpd/conf.d/ipa.conf", ipautil.SHARE_DIR + "ipa.conf")
    upgrade(sub_dict, "/etc/httpd/conf.d/ipa-rewrite.conf", ipautil.SHARE_DIR + "ipa-rewrite.conf")
    upgrade(sub_dict, "/etc/httpd/conf.d/ipa-pki-proxy.conf", ipautil.SHARE_DIR + "ipa-pki-proxy.conf", add=True)
    upgrade(sub_dict, os.path.join(certmap_dir, "certmap.conf"),
            os.path.join(ipautil.SHARE_DIR, "certmap.conf.template"))
    upgrade_pki(ca, fstore)
    update_dbmodules(api.env.realm)
    uninstall_ipa_kpasswd()

    http = httpinstance.HTTPInstance(fstore)
    http.remove_httpd_ccache()
    http.configure_selinux_for_httpd()

    memcache = memcacheinstance.MemcacheInstance()
    memcache.ldapi = True
    memcache.realm = api.env.realm
    try:
        if not memcache.is_configured():
            # 389-ds needs to be running to create the memcache instance
            # because we record the new service in cn=masters.
            ds = dsinstance.DsInstance()
            ds.start()
            memcache.create_instance('MEMCACHE', fqdn, None, ipautil.realm_to_suffix(api.env.realm))
    except ipalib.errors.DuplicateEntry:
        # The service entry already exists; treat as already configured.
        pass

    cleanup_kdc(fstore)
    setup_firefox_extension(fstore)
    add_server_cname_records()
    # Run every named.conf upgrade step and restart named once if any
    # of them changed the configuration.
    changed_psearch = named_enable_psearch()
    changed_autoincrement = named_enable_serial_autoincrement()
    changed_gssapi_conf = named_update_gssapi_configuration()
    changed_pid_file_conf = named_update_pid_file()
    if (changed_psearch or changed_autoincrement or changed_gssapi_conf
        or changed_pid_file_conf):
        # configuration has changed, restart the name server
        root_logger.info('Changes to named.conf have been made, restart named')
        bind = bindinstance.BindInstance(fstore)
        try:
            bind.restart()
        except ipautil.CalledProcessError, e:
            root_logger.error("Failed to restart %s: %s", bind.service_name, e)
    # any() must not short-circuit here: every upgrade step has to run,
    # so all calls are evaluated inside the list first.
    ca_restart = any([
        ca_restart,
        enable_certificate_renewal(ca),
        upgrade_ipa_profile(ca, api.env.domain, fqdn),
        certificate_renewal_stop_ca(ca),
    ])

    if ca_restart:
        root_logger.info('pki-ca configuration changed, restart pki-ca')
        try:
            ca.restart(dogtag.configured_constants().PKI_INSTANCE_NAME)
        except ipautil.CalledProcessError, e:
            root_logger.error("Failed to restart %s: %s", ca.service_name, e)
|
2012-03-07 16:46:33 -06:00
|
|
|
|
2012-05-31 07:34:09 -05:00
|
|
|
if __name__ == '__main__':
    # Entry point: delegate to installutils.run_script, which invokes
    # main() under the 'ipa-upgradeconfig' operation name.
    installutils.run_script(main, operation_name='ipa-upgradeconfig')
|