2012-05-29 13:20:38 -05:00
|
|
|
#
|
|
|
|
# Authors:
|
|
|
|
# Rob Crittenden <rcritten@redhat.com>
|
|
|
|
#
|
2019-06-26 11:03:17 -05:00
|
|
|
# Copyright (C) 2012, 2019 Red Hat
|
2012-05-29 13:20:38 -05:00
|
|
|
# see file 'COPYING' for use and warranty information
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
#
|
|
|
|
# Configure the automount client for ldap.
|
|
|
|
|
2015-08-12 06:44:11 -05:00
|
|
|
from __future__ import print_function
|
|
|
|
|
2017-05-24 09:35:07 -05:00
|
|
|
import logging
|
2012-05-29 13:20:38 -05:00
|
|
|
import sys
|
|
|
|
import os
|
2019-04-08 00:44:02 -05:00
|
|
|
import shutil
|
2012-05-29 13:20:38 -05:00
|
|
|
import time
|
|
|
|
import tempfile
|
2015-07-20 09:04:07 -05:00
|
|
|
import gssapi
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2016-11-16 04:11:13 -06:00
|
|
|
try:
|
|
|
|
from xml.etree import cElementTree as etree
|
|
|
|
except ImportError:
|
|
|
|
from xml.etree import ElementTree as etree
|
2012-05-29 13:20:38 -05:00
|
|
|
import SSSDConfig
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2016-08-24 06:37:30 -05:00
|
|
|
# pylint: disable=import-error
|
2015-09-14 05:52:29 -05:00
|
|
|
from six.moves.urllib.parse import urlsplit
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2016-08-24 06:37:30 -05:00
|
|
|
# pylint: enable=import-error
|
2016-11-23 03:04:43 -06:00
|
|
|
from optparse import OptionParser # pylint: disable=deprecated-module
|
2019-08-16 13:10:15 -05:00
|
|
|
from ipapython import ipachangeconf
|
|
|
|
from ipaclient.install import ipadiscovery
|
2019-06-26 11:03:17 -05:00
|
|
|
from ipaclient.install.client import (
|
|
|
|
CLIENT_NOT_CONFIGURED,
|
|
|
|
CLIENT_ALREADY_CONFIGURED,
|
|
|
|
)
|
2012-05-29 13:20:38 -05:00
|
|
|
from ipalib import api, errors
|
2016-11-23 08:04:40 -06:00
|
|
|
from ipalib.install import sysrestore
|
2016-11-23 10:40:47 -06:00
|
|
|
from ipalib.install.kinit import kinit_keytab
|
2017-08-04 16:25:12 -05:00
|
|
|
from ipalib.util import check_client_configuration
|
2012-05-29 13:20:38 -05:00
|
|
|
from ipapython import ipautil
|
2017-05-24 09:35:07 -05:00
|
|
|
from ipapython.ipa_log_manager import standard_logging_setup
|
Use DN objects instead of strings
* Convert every string specifying a DN into a DN object
* Every place a dn was manipulated in some fashion it was replaced by
the use of DN operators
* Add new DNParam parameter type for parameters which are DN's
* DN objects are used 100% of the time throughout the entire data
pipeline whenever something is logically a dn.
* Many classes now enforce DN usage for their attributes which are
dn's. This is implmented via ipautil.dn_attribute_property(). The
only permitted types for a class attribute specified to be a DN are
either None or a DN object.
* Require that every place a dn is used it must be a DN object.
This translates into lot of::
assert isinstance(dn, DN)
sprinkled through out the code. Maintaining these asserts is
valuable to preserve DN type enforcement. The asserts can be
disabled in production.
The goal of 100% DN usage 100% of the time has been realized, these
asserts are meant to preserve that.
The asserts also proved valuable in detecting functions which did
not obey their function signatures, such as the baseldap pre and
post callbacks.
* Moved ipalib.dn to ipapython.dn because DN class is shared with all
components, not just the server which uses ipalib.
* All API's now accept DN's natively, no need to convert to str (or
unicode).
* Removed ipalib.encoder and encode/decode decorators. Type conversion
is now explicitly performed in each IPASimpleLDAPObject method which
emulates a ldap.SimpleLDAPObject method.
* Entity & Entry classes now utilize DN's
* Removed __getattr__ in Entity & Entity clases. There were two
problems with it. It presented synthetic Python object attributes
based on the current LDAP data it contained. There is no way to
validate synthetic attributes using code checkers, you can't search
the code to find LDAP attribute accesses (because synthetic
attriutes look like Python attributes instead of LDAP data) and
error handling is circumscribed. Secondly __getattr__ was hiding
Python internal methods which broke class semantics.
* Replace use of methods inherited from ldap.SimpleLDAPObject via
IPAdmin class with IPAdmin methods. Directly using inherited methods
was causing us to bypass IPA logic. Mostly this meant replacing the
use of search_s() with getEntry() or getList(). Similarly direct
access of the LDAP data in classes using IPAdmin were replaced with
calls to getValue() or getValues().
* Objects returned by ldap2.find_entries() are now compatible with
either the python-ldap access methodology or the Entity/Entry access
methodology.
* All ldap operations now funnel through the common
IPASimpleLDAPObject giving us a single location where we interface
to python-ldap and perform conversions.
* The above 4 modifications means we've greatly reduced the
proliferation of multiple inconsistent ways to perform LDAP
operations. We are well on the way to having a single API in IPA for
doing LDAP (a long range goal).
* All certificate subject bases are now DN's
* DN objects were enhanced thusly:
- find, rfind, index, rindex, replace and insert methods were added
- AVA, RDN and DN classes were refactored in immutable and mutable
variants, the mutable variants are EditableAVA, EditableRDN and
EditableDN. By default we use the immutable variants preserving
important semantics. To edit a DN cast it to an EditableDN and
cast it back to DN when done editing. These issues are fully
described in other documentation.
- first_key_match was removed
- DN equalty comparison permits comparison to a basestring
* Fixed ldapupdate to work with DN's. This work included:
- Enhance test_updates.py to do more checking after applying
update. Add test for update_from_dict(). Convert code to use
unittest classes.
- Consolidated duplicate code.
- Moved code which should have been in the class into the class.
- Fix the handling of the 'deleteentry' update action. It's no longer
necessary to supply fake attributes to make it work. Detect case
where subsequent update applies a change to entry previously marked
for deletetion. General clean-up and simplification of the
'deleteentry' logic.
- Rewrote a couple of functions to be clearer and more Pythonic.
- Added documentation on the data structure being used.
- Simplfy the use of update_from_dict()
* Removed all usage of get_schema() which was being called prior to
accessing the .schema attribute of an object. If a class is using
internal lazy loading as an optimization it's not right to require
users of the interface to be aware of internal
optimization's. schema is now a property and when the schema
property is accessed it calls a private internal method to perform
the lazy loading.
* Added SchemaCache class to cache the schema's from individual
servers. This was done because of the observation we talk to
different LDAP servers, each of which may have it's own
schema. Previously we globally cached the schema from the first
server we connected to and returned that schema in all contexts. The
cache includes controls to invalidate it thus forcing a schema
refresh.
* Schema caching is now senstive to the run time context. During
install and upgrade the schema can change leading to errors due to
out-of-date cached schema. The schema cache is refreshed in these
contexts.
* We are aware of the LDAP syntax of all LDAP attributes. Every
attribute returned from an LDAP operation is passed through a
central table look-up based on it's LDAP syntax. The table key is
the LDAP syntax it's value is a Python callable that returns a
Python object matching the LDAP syntax. There are a handful of LDAP
attributes whose syntax is historically incorrect
(e.g. DistguishedNames that are defined as DirectoryStrings). The
table driven conversion mechanism is augmented with a table of
hard coded exceptions.
Currently only the following conversions occur via the table:
- dn's are converted to DN objects
- binary objects are converted to Python str objects (IPA
convention).
- everything else is converted to unicode using UTF-8 decoding (IPA
convention).
However, now that the table driven conversion mechanism is in place
it would be trivial to do things such as converting attributes
which have LDAP integer syntax into a Python integer, etc.
* Expected values in the unit tests which are a DN no longer need to
use lambda expressions to promote the returned value to a DN for
equality comparison. The return value is automatically promoted to
a DN. The lambda expressions have been removed making the code much
simpler and easier to read.
* Add class level logging to a number of classes which did not support
logging, less need for use of root_logger.
* Remove ipaserver/conn.py, it was unused.
* Consolidated duplicate code wherever it was found.
* Fixed many places that used string concatenation to form a new
string rather than string formatting operators. This is necessary
because string formatting converts it's arguments to a string prior
to building the result string. You can't concatenate a string and a
non-string.
* Simplify logic in rename_managed plugin. Use DN operators to edit
dn's.
* The live version of ipa-ldap-updater did not generate a log file.
The offline version did, now both do.
https://fedorahosted.org/freeipa/ticket/1670
https://fedorahosted.org/freeipa/ticket/1671
https://fedorahosted.org/freeipa/ticket/1672
https://fedorahosted.org/freeipa/ticket/1673
https://fedorahosted.org/freeipa/ticket/1674
https://fedorahosted.org/freeipa/ticket/1392
https://fedorahosted.org/freeipa/ticket/2872
2012-05-13 06:36:35 -05:00
|
|
|
from ipapython.dn import DN
|
2015-10-06 08:35:24 -05:00
|
|
|
from ipaplatform.constants import constants
|
2014-05-29 03:51:08 -05:00
|
|
|
from ipaplatform.tasks import tasks
|
2014-05-29 03:37:18 -05:00
|
|
|
from ipaplatform import services
|
2014-06-17 04:45:43 -05:00
|
|
|
from ipaplatform.paths import paths
|
2017-08-04 16:25:12 -05:00
|
|
|
from ipapython.admintool import ScriptError
|
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
|
2017-05-24 09:35:07 -05:00
|
|
|
logger = logging.getLogger(os.path.basename(__file__))
|
|
|
|
|
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
def parse_options():
    """Parse command-line arguments for the automount installer.

    Returns the (options, args) pair produced by OptionParser.
    """
    parser = OptionParser(usage="%prog [options]\n")
    parser.add_option("--server", dest="server", help="FQDN of IPA server")
    parser.add_option(
        "--location",
        dest="location",
        default="default",
        help="Automount location",
    )
    parser.add_option(
        "-S",
        "--no-sssd",
        dest="sssd",
        action="store_false",
        default=True,
        help="Do not configure the client to use SSSD for automount",
    )
    parser.add_option(
        "--idmap-domain",
        dest="idmapdomain",
        default=None,
        help="nfs domain for idmap.conf",
    )
    parser.add_option(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="enable debugging",
    )
    parser.add_option(
        "-U",
        "--unattended",
        dest="unattended",
        action="store_true",
        default=False,
        help="unattended installation never prompts the user",
    )
    parser.add_option(
        "--uninstall",
        dest="uninstall",
        action="store_true",
        default=False,
        help="Unconfigure automount",
    )

    return parser.parse_args()
|
|
|
|
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
def wait_for_sssd():
    """
    It takes a bit for sssd to get going, lets loop until it is
    serving data.

    This function returns nothing.
    """
    # Retry a getent lookup of the admin principal for up to ~10 tries,
    # sleeping a second between attempts.
    found = False
    time.sleep(1)
    for _attempt in range(10):
        try:
            ipautil.run([paths.GETENT, "passwd", "admin@%s" % api.env.realm])
        except Exception:
            time.sleep(1)
        else:
            found = True
            break

    # This should never happen but if it does, may as well warn the user
    if not found:
        err_msg = (
            "Unable to find 'admin' user with "
            "'getent passwd admin@%s'!" % api.env.realm
        )
        logger.debug('%s', err_msg)
        print(err_msg)
        print(
            "This may mean that sssd didn't re-start properly after "
            "the configuration changes."
        )
|
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
|
|
|
|
def configure_xml(fstore):
    """Write GSSAPI authentication settings into autofs' LDAP auth file.

    Backs up the existing file via *fstore*, then rewrites it (creating
    a fresh document when none exists) so autofs authenticates to LDAP
    with the host's Kerberos principal and no TLS.
    """
    authconf = paths.AUTOFS_LDAP_AUTH_CONF
    fstore.backup_file(authconf)

    try:
        tree = etree.parse(authconf)
    except IOError as e:
        # Missing/unreadable file: start from an empty template.
        logger.debug('Unable to open file %s', e)
        logger.debug('Creating new from template')
        tree = etree.ElementTree(
            element=etree.Element('autofs_ldap_sasl_conf')
        )

    root = tree.getroot()
    if root.tag != 'autofs_ldap_sasl_conf':
        raise RuntimeError('Invalid XML root in file %s' % authconf)

    # Kerberos/GSSAPI only; TLS is deliberately not used here.
    settings = (
        ('usetls', 'no'),
        ('tlsrequired', 'no'),
        ('authrequired', 'yes'),
        ('authtype', 'GSSAPI'),
        ('clientprinc', 'host/%s@%s' % (api.env.host, api.env.realm)),
    )
    for attr, value in settings:
        root.set(attr, value)

    try:
        tree.write(authconf, xml_declaration=True, encoding='UTF-8')
    except IOError as e:
        print("Unable to write %s: %s" % (authconf, e))
    else:
        print("Configured %s" % authconf)
|
2012-05-29 13:20:38 -05:00
|
|
|
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
def configure_autofs_sssd(fstore, statestore, autodiscover, options):
    """Configure automount through SSSD.

    Activates the autofs responder in sssd.conf, attaches the autofs
    provider to the IPA domain, records the chosen automount location,
    then restarts sssd and waits until it serves data again.

    :param fstore: FileStore (unused here, kept for a uniform signature)
    :param statestore: StateFile used to record that SSSD was configured
    :param autodiscover: whether the server was autodiscovered (unused)
    :param options: parsed options; options.location is the automount
        location to store in sssd.conf

    Exits the process on unrecoverable configuration errors or when an
    automount location is already configured.
    """
    try:
        sssdconfig = SSSDConfig.SSSDConfig()
        sssdconfig.import_config()
        domains = sssdconfig.list_active_domains()
    except Exception as e:
        sys.exit(e)

    try:
        sssdconfig.new_service('autofs')
    except SSSDConfig.ServiceAlreadyExists:
        pass
    except SSSDConfig.ServiceNotRecognizedError:
        logger.error("Unable to activate the Autofs service in SSSD config.")
        logger.info(
            "Please make sure you have SSSD built with autofs support "
            "installed."
        )
        logger.info(
            "Configure autofs support manually in /etc/sssd/sssd.conf."
        )
        sys.exit("Cannot create the autofs service in sssd.conf")

    sssdconfig.activate_service('autofs')

    # Find the IPA domain and attach the autofs provider to it.  Only a
    # domain with id_provider == 'ipa' counts: previously `domain` was
    # bound to every inspected domain, so with only non-IPA domains the
    # last one was silently saved instead of reporting an error.
    domain = None
    for name in domains:
        candidate = sssdconfig.get_domain(name)
        try:
            provider = candidate.get_option('id_provider')
        except SSSDConfig.NoOptionError:
            continue
        if provider == "ipa":
            domain = candidate
            domain.add_provider('ipa', 'autofs')
            try:
                domain.get_option('ipa_automount_location')
                print('An automount location is already configured')
                sys.exit(CLIENT_ALREADY_CONFIGURED)
            except SSSDConfig.NoOptionError:
                domain.set_option('ipa_automount_location', options.location)
                break

    if domain is None:
        sys.exit('SSSD is not configured.')

    sssdconfig.save_domain(domain)
    sssdconfig.write(paths.SSSD_CONF)
    statestore.backup_state('autofs', 'sssd', True)

    sssd = services.service('sssd', api)
    sssd.restart()
    print("Restarting sssd, waiting for it to become available.")
    wait_for_sssd()
|
|
|
|
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
def configure_autofs(fstore, statestore, autodiscover, server, options):
    """
    fstore: the FileStore to back up files in
    options.server: the IPA server to use
    options.location: the Automount location to use
    """
    # With autodiscovery, use an LDAP URI with only a base DN so autofs
    # finds servers via DNS; otherwise point at the fixed server.
    if autodiscover:
        ldap_uri = "ldap:///%s" % api.env.basedn
    else:
        ldap_uri = "ldap://%s" % server

    search_base = str(
        DN(
            ('cn', options.location),
            api.env.container_automount,
            api.env.basedn,
        )
    )
    # Map the IPA automount schema onto autofs' LDAP lookup settings.
    replacevars = {
        'MAP_OBJECT_CLASS': 'automountMap',
        'ENTRY_OBJECT_CLASS': 'automount',
        'MAP_ATTRIBUTE': 'automountMapName',
        'ENTRY_ATTRIBUTE': 'automountKey',
        'VALUE_ATTRIBUTE': 'automountInformation',
        'SEARCH_BASE': search_base,
        'LDAP_URI': ldap_uri,
    }

    ipautil.backup_config_and_replace_variables(
        fstore, paths.SYSCONFIG_AUTOFS, replacevars=replacevars
    )
    tasks.restore_context(paths.SYSCONFIG_AUTOFS)
    # Record that the non-SSSD (direct LDAP) mechanism is in use.
    statestore.backup_state('autofs', 'sssd', False)

    print("Configured %s" % paths.SYSCONFIG_AUTOFS)
|
2012-05-29 13:20:38 -05:00
|
|
|
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
def configure_autofs_common(fstore, statestore, options):
    """Restart and enable the autofs service, recording its prior state.

    The enabled/running flags are saved in *statestore* so uninstall can
    put the service back exactly as it was.
    """
    autofs = services.knownservices.autofs
    statestore.backup_state('autofs', 'enabled', autofs.is_enabled())
    statestore.backup_state('autofs', 'running', autofs.is_running())

    try:
        autofs.restart()
    except Exception as e:
        logger.error("%s failed to restart: %s", autofs.service_name, e)
    else:
        print("Started %s" % autofs.service_name)

    try:
        autofs.enable()
    except Exception as e:
        # Non-fatal: report on console and in the log, then continue.
        print(
            "Failed to configure automatic startup of the %s daemon"
            % (autofs.service_name)
        )
        logger.error(
            "Failed to enable automatic startup of the %s daemon: %s",
            autofs.service_name,
            str(e),
        )
|
|
|
|
|
2012-05-29 13:20:38 -05:00
|
|
|
|
|
|
|
def uninstall(fstore, statestore):
    """Roll back everything the automount installer changed.

    Restores backed-up config files, returns the autofs service to its
    recorded enabled/running state, strips the automount settings from
    sssd.conf when SSSD was used, and restarts the NFS client services.

    :param fstore: FileStore holding backups of the modified files
    :param statestore: StateFile holding the recorded service states
    :returns: 0 on success, 1 if the NFS client services failed to
        restart, or CLIENT_NOT_CONFIGURED when nothing was configured.
    """
    # Files the installer may have modified; only those that were
    # actually backed up get restored below.
    RESTORE_FILES = [
        paths.SYSCONFIG_AUTOFS,
        paths.AUTOFS_LDAP_AUTH_CONF,
        paths.SYSCONFIG_NFS,
        paths.IDMAPD_CONF,
    ]
    STATES = ['autofs', 'rpcidmapd', 'rpcgssd']

    # The non-SSSD setup enabled LDAP automount system-wide; undo that
    # first when the recorded state says SSSD was not used.
    if not statestore.get_state('autofs', 'sssd'):
        tasks.disable_ldap_automount(statestore)

    # No backups and no recorded state means this system was never
    # configured by us.
    if not any(fstore.has_file(f) for f in RESTORE_FILES) or not any(
        statestore.has_state(s) for s in STATES
    ):
        print("IPA automount is not configured on this system")
        return CLIENT_NOT_CONFIGURED

    print("Restoring configuration")

    for filepath in RESTORE_FILES:
        if fstore.has_file(filepath):
            fstore.restore_file(filepath)
    if statestore.has_state('autofs'):
        enabled = statestore.restore_state('autofs', 'enabled')
        running = statestore.restore_state('autofs', 'running')
        sssd = statestore.restore_state('autofs', 'sssd')
        autofs = services.knownservices.autofs
        # Put autofs back into its pre-installation state.
        if not enabled:
            autofs.disable()
        if not running:
            autofs.stop()
        if sssd:
            # Remove the automount settings from sssd.conf again.
            try:
                sssdconfig = SSSDConfig.SSSDConfig()
                sssdconfig.import_config()
                sssdconfig.deactivate_service('autofs')
                domains = sssdconfig.list_active_domains()
                for name in domains:
                    domain = sssdconfig.get_domain(name)
                    try:
                        provider = domain.get_option('id_provider')
                    except SSSDConfig.NoOptionError:
                        continue
                    if provider == "ipa":
                        domain.remove_option('ipa_automount_location')
                        domain.remove_provider('autofs')
                        break
                sssdconfig.save_domain(domain)
                sssdconfig.write(paths.SSSD_CONF)
                sssd = services.service('sssd', api)
                sssd.restart()
                wait_for_sssd()
            except Exception as e:
                # Best effort: report the failure but keep uninstalling.
                print('Unable to restore SSSD configuration: %s' % str(e))
                logger.debug(
                    'Unable to restore SSSD configuration: %s', str(e)
                )

    # rpcidmapd and rpcgssd are static units now
    if statestore.has_state('rpcidmapd'):
        statestore.delete_state('rpcidmapd', 'enabled')
        statestore.delete_state('rpcidmapd', 'running')
    if statestore.has_state('rpcgssd'):
        statestore.delete_state('rpcgssd', 'enabled')
        statestore.delete_state('rpcgssd', 'running')

    nfsutils = services.knownservices['nfs-utils']
    try:
        nfsutils.restart()
    except Exception as e:
        logger.error("Failed to restart nfs client services (%s)", str(e))
        return 1
    return 0
|
|
|
|
|
2019-06-26 11:03:17 -05:00
|
|
|
|
2019-05-03 03:49:28 -05:00
|
|
|
def configure_nfs(fstore, statestore, options):
    """
    Configure secure NFS

    Enables the secure-NFS sysconfig flag where the platform still uses
    one, writes the NFSv4 idmap domain into idmapd.conf, and restarts
    the NFS client services.

    :param fstore: FileStore used to back up the files being changed
    :param statestore: StateFile (unused here; kept for a uniform
        configure_* signature)
    :param options: parsed options; options.idmapdomain selects the
        idmapd Domain value ('DNS' means rely on auto-detection)
    """
    # Newer Fedora releases ship /etc/nfs.conf instead of /etc/sysconfig/nfs
    # and do not require changes there. On these, SECURE_NFS_VAR == None
    if constants.SECURE_NFS_VAR:
        replacevars = {constants.SECURE_NFS_VAR: 'yes'}
        ipautil.backup_config_and_replace_variables(
            fstore, paths.SYSCONFIG_NFS, replacevars=replacevars
        )
        tasks.restore_context(paths.SYSCONFIG_NFS)
        print("Configured %s" % paths.SYSCONFIG_NFS)

    # Prepare the changes
    # We need to use IPAChangeConf as simple regexp substitution
    # does not cut it here
    conf = ipachangeconf.IPAChangeConf("IPA automount installer")
    conf.case_insensitive_sections = False
    conf.setOptionAssignment(" = ")
    conf.setSectionNameDelimiters(("[", "]"))

    if options.idmapdomain is None:
        # Set NFSv4 domain to the IPA domain
        changes = [conf.setOption('Domain', api.env.domain)]
    elif options.idmapdomain == 'DNS':
        # Rely on idmapd auto-detection (DNS)
        changes = [conf.rmOption('Domain')]
    else:
        # Set NFSv4 domain to what was provided
        changes = [conf.setOption('Domain', options.idmapdomain)]

    # Every branch above binds `changes`, so apply unconditionally
    # (the previous 'if changes is not None' guard was dead code).
    section_with_changes = [conf.setSection('General', changes)]
    # Backup the file and apply the changes
    fstore.backup_file(paths.IDMAPD_CONF)
    conf.changeConf(paths.IDMAPD_CONF, section_with_changes)
    tasks.restore_context(paths.IDMAPD_CONF)
    print("Configured %s" % paths.IDMAPD_CONF)

    rpcgssd = services.knownservices.rpcgssd
    try:
        rpcgssd.restart()
    except Exception as e:
        logger.error("Failed to restart rpc-gssd (%s)", str(e))
    nfsutils = services.knownservices['nfs-utils']
    try:
        nfsutils.restart()
    except Exception as e:
        logger.error("Failed to restart nfs client services (%s)", str(e))
|
2012-05-29 13:20:38 -05:00
|
|
|
|
2019-06-26 11:03:17 -05:00
|
|
|
|
|
|
|
def configure_automount():
    """Drive the whole automount client configuration.

    Parses options, discovers or validates the IPA server, verifies the
    requested automount location exists on the server (authenticating
    with the host keytab), then configures automount either via SSSD or
    via direct autofs LDAP plus secure NFS, rolling back on failure.

    :returns: 0 on success, 1 when installation failed and was rolled
        back; may also terminate via sys.exit() on fatal errors.
    """
    # Refuse to run on a machine that is not an enrolled IPA client.
    try:
        check_client_configuration()
    except ScriptError as e:
        print(e.msg)
        sys.exit(e.rval)

    fstore = sysrestore.FileStore(paths.IPA_CLIENT_SYSRESTORE)
    statestore = sysrestore.StateFile(paths.IPA_CLIENT_SYSRESTORE)

    options, _args = parse_options()

    standard_logging_setup(
        paths.IPACLIENT_INSTALL_LOG,
        verbose=False,
        debug=options.debug,
        filemode='a',
        console_format='%(message)s',
    )

    cfg = dict(
        context='cli_installer',
        confdir=paths.ETC_IPA,
        in_server=False,
        debug=options.debug,
        verbose=0,
    )

    # Bootstrap API early so that env object is available
    api.bootstrap(**cfg)

    if options.uninstall:
        return uninstall(fstore, statestore)

    ca_cert_path = None
    if os.path.exists(paths.IPA_CA_CRT):
        ca_cert_path = paths.IPA_CA_CRT

    # Recorded autofs state means a previous run already configured us.
    if statestore.has_state('autofs'):
        print('An automount location is already configured')
        sys.exit(CLIENT_ALREADY_CONFIGURED)

    autodiscover = False
    ds = ipadiscovery.IPADiscovery()
    if not options.server:
        # No server given: try DNS autodiscovery, falling back to the
        # server named in the client's xmlrpc_uri.
        print("Searching for IPA server...")
        ret = ds.search(ca_cert_path=ca_cert_path)
        logger.debug('Executing DNS discovery')
        if ret == ipadiscovery.NO_LDAP_SERVER:
            logger.debug('Autodiscovery did not find LDAP server')
            s = urlsplit(api.env.xmlrpc_uri)
            server = [s.netloc]
            logger.debug('Setting server to %s', s.netloc)
        else:
            autodiscover = True
            if not ds.servers:
                sys.exit(
                    'Autodiscovery was successful but didn\'t return a server'
                )
            logger.debug(
                'Autodiscovery success, possible servers %s',
                ','.join(ds.servers),
            )
            server = ds.servers[0]
    else:
        # A fixed server was supplied: sanity-check over LDAP that it
        # really is an IPA server.
        server = options.server
        logger.debug("Verifying that %s is an IPA server", server)
        ldapret = ds.ipacheckldap(server, api.env.realm, ca_cert_path)
        if ldapret[0] == ipadiscovery.NO_ACCESS_TO_LDAP:
            print("Anonymous access to the LDAP server is disabled.")
            print("Proceeding without strict verification.")
            print(
                "Note: This is not an error if anonymous access has been "
                "explicitly restricted."
            )
        elif ldapret[0] == ipadiscovery.NO_TLS_LDAP:
            logger.warning("Unencrypted access to LDAP is not supported.")
        elif ldapret[0] != 0:
            sys.exit('Unable to confirm that %s is an IPA server' % server)

    if not autodiscover:
        print("IPA server: %s" % server)
        logger.debug('Using fixed server %s', server)
    else:
        print("IPA server: DNS discovery")
        logger.debug('Configuring to use DNS discovery')

    print("Location: %s" % options.location)
    logger.debug('Using automount location %s', options.location)

    # Authenticate as the host with a throwaway credential cache and
    # verify the requested location exists on the server.
    ccache_dir = tempfile.mkdtemp()
    ccache_name = os.path.join(ccache_dir, 'ccache')
    try:
        try:
            host_princ = str('host/%s@%s' % (api.env.host, api.env.realm))
            kinit_keytab(host_princ, paths.KRB5_KEYTAB, ccache_name)
            os.environ['KRB5CCNAME'] = ccache_name
        except gssapi.exceptions.GSSError as e:
            sys.exit("Failed to obtain host TGT: %s" % e)

        # Finalize API when TGT obtained using host keytab exists
        api.finalize()

        # Now we have a TGT, connect to IPA
        try:
            api.Backend.rpcclient.connect()
        except errors.KerberosError as e:
            sys.exit('Cannot connect to the server due to ' + str(e))
        try:
            # Use the RPC directly so older servers are supported
            api.Backend.rpcclient.forward(
                'automountlocation_show',
                ipautil.fsdecode(options.location),
                version=u'2.0',
            )
        except errors.VersionError as e:
            sys.exit('This client is incompatible: ' + str(e))
        except errors.NotFound:
            sys.exit(
                "Automount location '%s' does not exist" % options.location
            )
        except errors.PublicError as e:
            sys.exit(
                "Cannot connect to the server due to generic error: %s"
                % str(e)
            )
    finally:
        # Always discard the temporary credential cache.
        shutil.rmtree(ccache_dir)

    if not options.unattended and not ipautil.user_input(
        "Continue to configure the system with these values?", False
    ):
        sys.exit("Installation aborted")

    # Apply the configuration; any failure triggers a full rollback.
    try:
        if not options.sssd:
            tasks.enable_ldap_automount(statestore)
        configure_nfs(fstore, statestore, options)
        if options.sssd:
            configure_autofs_sssd(fstore, statestore, autodiscover, options)
        else:
            configure_xml(fstore)
            configure_autofs(
                fstore, statestore, autodiscover, server, options
            )
        configure_autofs_common(fstore, statestore, options)
    except Exception as e:
        logger.debug('Raised exception %s', e)
        print("Installation failed. Rolling back changes.")
        uninstall(fstore, statestore)
        return 1

    return 0
|
|
|
|
|
2019-06-26 11:03:17 -05:00
|
|
|
|
|
|
|
def main():
    """Entry point: require root, run the installer, map errors to exits."""
    try:
        if os.geteuid() != 0:
            sys.exit("\nMust be run as root\n")
        configure_automount()
    except (SystemExit, RuntimeError) as e:
        # Propagate the message/code unchanged as the process exit.
        sys.exit(e)
    except (KeyboardInterrupt, EOFError):
        sys.exit(1)
|