2008-10-02 20:09:13 -05:00
|
|
|
# Authors:
|
|
|
|
# Jason Gerard DeRose <jderose@redhat.com>
|
|
|
|
#
|
|
|
|
# Copyright (C) 2008 Red Hat
|
|
|
|
# see file 'COPYING' for use and warranty information
|
|
|
|
#
|
2010-12-09 06:59:11 -06:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
2008-10-02 20:09:13 -05:00
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2010-12-09 06:59:11 -06:00
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2008-10-02 20:09:13 -05:00
|
|
|
|
|
|
|
"""
|
|
|
|
Various utility functions.
|
|
|
|
"""
|
2008-10-27 01:23:43 -05:00
|
|
|
|
2016-09-12 07:38:12 -05:00
|
|
|
from __future__ import (
|
|
|
|
absolute_import,
|
|
|
|
print_function,
|
|
|
|
)
|
2016-06-01 07:55:13 -05:00
|
|
|
|
2017-05-23 11:35:57 -05:00
|
|
|
import logging
|
2008-10-27 01:23:43 -05:00
|
|
|
import os
|
2009-02-18 16:12:27 -06:00
|
|
|
import socket
|
2010-10-08 12:15:03 -05:00
|
|
|
import re
|
2012-09-05 07:35:44 -05:00
|
|
|
import decimal
|
2014-10-16 09:27:00 -05:00
|
|
|
import dns
|
2014-11-27 07:16:23 -06:00
|
|
|
import encodings
|
2016-09-12 07:38:12 -05:00
|
|
|
import sys
|
2016-12-20 03:05:36 -06:00
|
|
|
import ssl
|
2017-10-30 10:09:14 -05:00
|
|
|
import termios
|
|
|
|
import fcntl
|
2018-12-05 07:54:29 -06:00
|
|
|
import shutil
|
2017-10-30 10:09:14 -05:00
|
|
|
import struct
|
|
|
|
import subprocess
|
2015-08-10 11:29:33 -05:00
|
|
|
|
|
|
|
import netaddr
|
2020-08-28 08:31:10 -05:00
|
|
|
from dns import rdatatype
|
2012-05-11 07:38:09 -05:00
|
|
|
from dns.exception import DNSException
|
2015-04-24 06:37:07 -05:00
|
|
|
from dns.resolver import NXDOMAIN
|
2014-05-14 05:52:26 -05:00
|
|
|
from netaddr.core import AddrFormatError
|
2015-08-10 11:29:33 -05:00
|
|
|
import six
|
2010-08-10 15:40:00 -05:00
|
|
|
|
2016-12-20 03:05:36 -06:00
|
|
|
try:
|
|
|
|
from httplib import HTTPSConnection
|
|
|
|
except ImportError:
|
|
|
|
# Python 3
|
|
|
|
from http.client import HTTPSConnection
|
|
|
|
|
2015-04-22 08:29:21 -05:00
|
|
|
from ipalib import errors, messages
|
2016-12-20 03:05:36 -06:00
|
|
|
from ipalib.constants import (
|
|
|
|
DOMAIN_LEVEL_0,
|
2019-11-22 03:42:11 -06:00
|
|
|
TLS_VERSIONS, TLS_VERSION_MINIMAL, TLS_VERSION_MAXIMAL,
|
|
|
|
TLS_VERSION_DEFAULT_MIN, TLS_VERSION_DEFAULT_MAX,
|
2016-12-20 03:05:36 -06:00
|
|
|
)
|
2020-07-09 09:38:42 -05:00
|
|
|
from ipalib.facts import is_ipa_client_configured
|
2011-06-08 09:54:41 -05:00
|
|
|
from ipalib.text import _
|
2018-02-09 04:50:32 -06:00
|
|
|
from ipaplatform.constants import constants
|
2017-10-11 05:09:30 -05:00
|
|
|
from ipaplatform.paths import paths
|
2021-02-15 23:03:59 -06:00
|
|
|
from ipapython import ipautil
|
2012-09-03 08:33:30 -05:00
|
|
|
from ipapython.ssh import SSHPublicKey
|
Use DN objects instead of strings
* Convert every string specifying a DN into a DN object
* Every place a dn was manipulated in some fashion it was replaced by
the use of DN operators
* Add new DNParam parameter type for parameters which are DN's
* DN objects are used 100% of the time throughout the entire data
pipeline whenever something is logically a dn.
* Many classes now enforce DN usage for their attributes which are
dn's. This is implmented via ipautil.dn_attribute_property(). The
only permitted types for a class attribute specified to be a DN are
either None or a DN object.
* Require that every place a dn is used it must be a DN object.
This translates into lot of::
assert isinstance(dn, DN)
sprinkled through out the code. Maintaining these asserts is
valuable to preserve DN type enforcement. The asserts can be
disabled in production.
The goal of 100% DN usage 100% of the time has been realized, these
asserts are meant to preserve that.
The asserts also proved valuable in detecting functions which did
not obey their function signatures, such as the baseldap pre and
post callbacks.
* Moved ipalib.dn to ipapython.dn because DN class is shared with all
components, not just the server which uses ipalib.
* All API's now accept DN's natively, no need to convert to str (or
unicode).
* Removed ipalib.encoder and encode/decode decorators. Type conversion
is now explicitly performed in each IPASimpleLDAPObject method which
emulates a ldap.SimpleLDAPObject method.
* Entity & Entry classes now utilize DN's
* Removed __getattr__ in Entity & Entity clases. There were two
problems with it. It presented synthetic Python object attributes
based on the current LDAP data it contained. There is no way to
validate synthetic attributes using code checkers, you can't search
the code to find LDAP attribute accesses (because synthetic
attriutes look like Python attributes instead of LDAP data) and
error handling is circumscribed. Secondly __getattr__ was hiding
Python internal methods which broke class semantics.
* Replace use of methods inherited from ldap.SimpleLDAPObject via
IPAdmin class with IPAdmin methods. Directly using inherited methods
was causing us to bypass IPA logic. Mostly this meant replacing the
use of search_s() with getEntry() or getList(). Similarly direct
access of the LDAP data in classes using IPAdmin were replaced with
calls to getValue() or getValues().
* Objects returned by ldap2.find_entries() are now compatible with
either the python-ldap access methodology or the Entity/Entry access
methodology.
* All ldap operations now funnel through the common
IPASimpleLDAPObject giving us a single location where we interface
to python-ldap and perform conversions.
* The above 4 modifications means we've greatly reduced the
proliferation of multiple inconsistent ways to perform LDAP
operations. We are well on the way to having a single API in IPA for
doing LDAP (a long range goal).
* All certificate subject bases are now DN's
* DN objects were enhanced thusly:
- find, rfind, index, rindex, replace and insert methods were added
- AVA, RDN and DN classes were refactored in immutable and mutable
variants, the mutable variants are EditableAVA, EditableRDN and
EditableDN. By default we use the immutable variants preserving
important semantics. To edit a DN cast it to an EditableDN and
cast it back to DN when done editing. These issues are fully
described in other documentation.
- first_key_match was removed
- DN equalty comparison permits comparison to a basestring
* Fixed ldapupdate to work with DN's. This work included:
- Enhance test_updates.py to do more checking after applying
update. Add test for update_from_dict(). Convert code to use
unittest classes.
- Consolidated duplicate code.
- Moved code which should have been in the class into the class.
- Fix the handling of the 'deleteentry' update action. It's no longer
necessary to supply fake attributes to make it work. Detect case
where subsequent update applies a change to entry previously marked
for deletetion. General clean-up and simplification of the
'deleteentry' logic.
- Rewrote a couple of functions to be clearer and more Pythonic.
- Added documentation on the data structure being used.
- Simplfy the use of update_from_dict()
* Removed all usage of get_schema() which was being called prior to
accessing the .schema attribute of an object. If a class is using
internal lazy loading as an optimization it's not right to require
users of the interface to be aware of internal
optimization's. schema is now a property and when the schema
property is accessed it calls a private internal method to perform
the lazy loading.
* Added SchemaCache class to cache the schema's from individual
servers. This was done because of the observation we talk to
different LDAP servers, each of which may have it's own
schema. Previously we globally cached the schema from the first
server we connected to and returned that schema in all contexts. The
cache includes controls to invalidate it thus forcing a schema
refresh.
* Schema caching is now senstive to the run time context. During
install and upgrade the schema can change leading to errors due to
out-of-date cached schema. The schema cache is refreshed in these
contexts.
* We are aware of the LDAP syntax of all LDAP attributes. Every
attribute returned from an LDAP operation is passed through a
central table look-up based on it's LDAP syntax. The table key is
the LDAP syntax it's value is a Python callable that returns a
Python object matching the LDAP syntax. There are a handful of LDAP
attributes whose syntax is historically incorrect
(e.g. DistguishedNames that are defined as DirectoryStrings). The
table driven conversion mechanism is augmented with a table of
hard coded exceptions.
Currently only the following conversions occur via the table:
- dn's are converted to DN objects
- binary objects are converted to Python str objects (IPA
convention).
- everything else is converted to unicode using UTF-8 decoding (IPA
convention).
However, now that the table driven conversion mechanism is in place
it would be trivial to do things such as converting attributes
which have LDAP integer syntax into a Python integer, etc.
* Expected values in the unit tests which are a DN no longer need to
use lambda expressions to promote the returned value to a DN for
equality comparison. The return value is automatically promoted to
a DN. The lambda expressions have been removed making the code much
simpler and easier to read.
* Add class level logging to a number of classes which did not support
logging, less need for use of root_logger.
* Remove ipaserver/conn.py, it was unused.
* Consolidated duplicate code wherever it was found.
* Fixed many places that used string concatenation to form a new
string rather than string formatting operators. This is necessary
because string formatting converts it's arguments to a string prior
to building the result string. You can't concatenate a string and a
non-string.
* Simplify logic in rename_managed plugin. Use DN operators to edit
dn's.
* The live version of ipa-ldap-updater did not generate a log file.
The offline version did, now both do.
https://fedorahosted.org/freeipa/ticket/1670
https://fedorahosted.org/freeipa/ticket/1671
https://fedorahosted.org/freeipa/ticket/1672
https://fedorahosted.org/freeipa/ticket/1673
https://fedorahosted.org/freeipa/ticket/1674
https://fedorahosted.org/freeipa/ticket/1392
https://fedorahosted.org/freeipa/ticket/2872
2012-05-13 06:36:35 -05:00
|
|
|
from ipapython.dn import DN, RDN
|
2020-08-28 08:31:10 -05:00
|
|
|
from ipapython.dnsutil import (
|
|
|
|
DNSName,
|
|
|
|
DNSResolver,
|
|
|
|
resolve,
|
|
|
|
resolve_ip_addresses,
|
|
|
|
)
|
2017-08-04 16:25:12 -05:00
|
|
|
from ipapython.admintool import ScriptError
|
2023-03-24 01:07:04 -05:00
|
|
|
from ipapython.kerberos import Principal
|
2016-12-20 03:05:36 -06:00
|
|
|
|
2018-02-14 09:59:50 -06:00
|
|
|
# reprlib (repr variants with output-length limits) only exists on
# Python >= 3.2; this module still retains Python 2 compatibility.
if sys.version_info >= (3, 2):
    import reprlib
else:
    reprlib = None


# Python 3 has no separate ``unicode`` type; alias it to ``str`` so the
# rest of this module can refer to ``unicode`` uniformly on both majors.
if six.PY3:
    unicode = str


# Well-known client-side paths used by helpers in this module.
_IPA_CLIENT_SYSRESTORE = "/var/lib/ipa-client/sysrestore"
_IPA_DEFAULT_CONF = "/etc/ipa/default.conf"


logger = logging.getLogger(__name__)
|
|
|
|
|
2008-10-02 20:09:13 -05:00
|
|
|
|
2010-08-09 15:45:26 -05:00
|
|
|
def json_serialize(obj):
    """Recursively convert ``obj`` into JSON-serializable primitives.

    Lists/tuples and dicts are converted element by element; plain
    scalars pass through unchanged.  ``Decimal`` and ``DN`` values are
    stringified.  Objects exposing a callable ``__json__()`` are
    serialized via its return value; anything else degrades to an
    empty string instead of raising.
    """
    if isinstance(obj, (list, tuple)):
        return [json_serialize(item) for item in obj]
    if isinstance(obj, dict):
        return {key: json_serialize(val) for key, val in obj.items()}
    if isinstance(obj, (int, bool, float, unicode, type(None))):
        return obj
    if isinstance(obj, str):
        # Python 2 byte string; on Python 3 str is already handled above
        return obj.decode('utf-8')
    if isinstance(obj, (decimal.Decimal, DN)):
        return str(obj)
    custom = getattr(obj, '__json__', None)
    if not callable(custom):
        # raise TypeError('%r is not JSON serializable')
        return ''
    return json_serialize(custom())
|
2010-08-09 15:45:26 -05:00
|
|
|
|
2016-05-17 10:20:25 -05:00
|
|
|
|
|
|
|
def verify_host_resolvable(fqdn):
    """Verify that ``fqdn`` resolves to at least one IP address.

    :raises errors.DNSNotARecordError: when the name yields no addresses
    :raises errors.DNSResolverError: when the DNS lookup itself fails
    """
    try:
        addresses = resolve_ip_addresses(fqdn)
        if not addresses:
            raise errors.DNSNotARecordError(hostname=fqdn)
    except dns.exception.DNSException as ex:
        # re-raise low-level resolver failures as an IPA PublicError
        raise errors.DNSResolverError(exception=ex)
|
2010-10-08 12:15:03 -05:00
|
|
|
|
2013-02-12 09:50:00 -06:00
|
|
|
|
|
|
|
def has_soa_or_ns_record(domain):
    """
    Checks to see if given domain has SOA or NS record.

    Returns True or False.

    The NS lookup is skipped when an SOA record is found: the result of
    the second query could not change the outcome, so performing it
    would only add an unnecessary round trip to the DNS server.
    """
    for rdtype in (rdatatype.SOA, rdatatype.NS):
        try:
            resolve(domain, rdtype)
        except DNSException:
            # this record type is absent (or lookup failed); try the next
            continue
        return True
    return False
|
|
|
|
|
|
|
|
|
2012-11-15 04:21:16 -06:00
|
|
|
def normalize_name(name):
    """Split a principal-like name into lower-cased components.

    ``user@domain`` yields ``{'name': ..., 'domain': ...}``;
    ``FLATNAME\\user`` yields ``{'name': ..., 'flatname': ...}``;
    anything else yields just ``{'name': ...}``.
    """
    result = {}
    at_parts = name.split('@')
    if len(at_parts) == 2:
        user, domain = at_parts
        result['domain'] = unicode(domain).lower()
        result['name'] = unicode(user).lower()
        return result
    bs_parts = name.split('\\')
    if len(bs_parts) == 2:
        flat, user = bs_parts
        result['flatname'] = unicode(flat).lower()
        result['name'] = unicode(user).lower()
        return result
    result['name'] = unicode(name).lower()
    return result
|
|
|
|
|
2010-10-08 12:15:03 -05:00
|
|
|
def isvalid_base64(data):
    """
    Validate the incoming data as valid base64 data or not. This is only
    used in the ipalib.Parameters module which expects ``data`` to be unicode.

    The character set must only include of a-z, A-Z, 0-9, + or / and
    be padded with = to be a length divisible by 4 (so only 0-2 =s are
    allowed). Its length must be divisible by 4. Whitespace is
    not significant so it is removed.

    This doesn't guarantee we have a base64-encoded value, just that it
    fits the base64 requirements.
    """
    # whitespace never affects base64 validity; drop it up front
    compact = ''.join(data.split())

    if len(compact) % 4 != 0:
        return False
    return re.match(r'^[a-zA-Z0-9\+\/]+\={0,2}$', compact) is not None
|
2010-11-23 16:47:29 -06:00
|
|
|
|
2017-09-25 02:18:41 -05:00
|
|
|
|
|
|
|
def strip_csr_header(csr):
    """
    Remove the header and footer (and surrounding material) from a CSR.

    Both the legacy ``BEGIN NEW CERTIFICATE REQUEST`` and the standard
    ``BEGIN CERTIFICATE REQUEST`` markers are recognized.  When no
    marker is present, ``csr`` is returned unchanged.
    """
    # header length includes the newline that terminates the marker line
    header_len = 40
    start = csr.find(b"-----BEGIN NEW CERTIFICATE REQUEST-----")
    if start == -1:
        header_len = 36
        start = csr.find(b"-----BEGIN CERTIFICATE REQUEST-----")
    if start >= 0:
        end = csr.find(b"-----END")
        csr = csr[start + header_len:end]

    return csr
|
|
|
|
|
|
|
|
|
2010-11-23 16:47:29 -06:00
|
|
|
def validate_ipaddr(ipaddr):
    """
    Check to see if the given IP address is a valid IPv4 or IPv6 address.

    Returns True or False
    """
    for family in (socket.AF_INET, socket.AF_INET6):
        try:
            socket.inet_pton(family, ipaddr)
        except socket.error:
            continue
        return True
    return False
|
2011-06-08 09:54:41 -05:00
|
|
|
|
|
|
|
def check_writable_file(filename):
    """
    Determine if the file is writable. If the file doesn't exist then
    open the file to test writability.

    :raises errors.FileError: when ``filename`` is None, unwritable,
        or cannot be created
    """
    if filename is None:
        raise errors.FileError(reason=_('Filename is empty'))
    try:
        if not os.path.isfile(filename):
            # probe writability by creating the file
            with open(filename, 'w'):
                pass
        elif not os.access(filename, os.W_OK):
            raise errors.FileError(
                reason=_('Permission denied: %(file)s') % dict(file=filename))
    except (IOError, OSError) as e:
        raise errors.FileError(reason=str(e))
|
2011-10-24 11:35:48 -05:00
|
|
|
|
2011-11-23 09:03:51 -06:00
|
|
|
def normalize_zonemgr(zonemgr):
    """Normalize a zone-manager e-mail address into DNS SOA RNAME form.

    ``user@example.com`` becomes ``user.example.com`` with any dots in
    the local part escaped as ``\\.``.  Empty or non-string values are
    returned untouched.
    """
    if not zonemgr or not isinstance(zonemgr, str):
        return zonemgr
    if '@' not in zonemgr:
        return zonemgr

    # local-part needs to be normalized
    local, _at, domain = zonemgr.partition('@')
    escaped = local.replace('.', '\\.')
    return u'.'.join((escaped, domain))
|
2011-10-24 11:35:48 -05:00
|
|
|
|
2012-09-05 02:56:27 -05:00
|
|
|
def normalize_zone(zone):
    """Return ``zone`` with a guaranteed trailing dot (absolute name)."""
    # note: intentionally indexes zone[-1], so an empty string is an error
    return zone + '.' if zone[-1] != '.' else zone
|
|
|
|
|
2014-01-31 08:42:31 -06:00
|
|
|
|
2016-12-20 03:05:36 -06:00
|
|
|
def get_proper_tls_version_span(tls_version_min, tls_version_max):
    """
    This function checks whether the given TLS versions are known in
    IPA and that these versions fulfill the requirements for minimal
    TLS version (see
    `ipalib.constants: TLS_VERSIONS, TLS_VERSION_MINIMAL`).

    :param tls_version_min:
        the lower value in the TLS min-max span, raised to the lowest
        allowed value if too low
    :param tls_version_max:
        the higher value in the TLS min-max span, raised to tls_version_min
        if lower than TLS_VERSION_MINIMAL
    :returns: list of TLS version names in the span, or None when both
        bounds are None (meaning: use the system default range)
    :raises: ValueError
    """
    if tls_version_min is None and tls_version_max is None:
        # no defaults, use system's default TLS version range
        return None
    if tls_version_min is None:
        tls_version_min = TLS_VERSION_MINIMAL
    if tls_version_max is None:
        tls_version_max = TLS_VERSION_MAXIMAL
    min_allowed_idx = TLS_VERSIONS.index(TLS_VERSION_MINIMAL)

    try:
        min_version_idx = TLS_VERSIONS.index(tls_version_min)
    except ValueError:
        raise ValueError("tls_version_min ('{val}') is not a known "
                         "TLS version.".format(val=tls_version_min))

    try:
        max_version_idx = TLS_VERSIONS.index(tls_version_max)
    except ValueError:
        raise ValueError("tls_version_max ('{val}') is not a known "
                         "TLS version.".format(val=tls_version_max))

    if min_version_idx > max_version_idx:
        raise ValueError("tls_version_min is higher than "
                         "tls_version_max.")

    if min_version_idx < min_allowed_idx:
        min_version_idx = min_allowed_idx
        # fixed log message: missing space after the comma
        logger.warning("tls_version_min set too low ('%s'), using '%s' "
                       "instead", tls_version_min,
                       TLS_VERSIONS[min_version_idx])

    if max_version_idx < min_allowed_idx:
        max_version_idx = min_version_idx
        logger.warning("tls_version_max set too low ('%s'), using '%s' "
                       "instead", tls_version_max,
                       TLS_VERSIONS[max_version_idx])
    return TLS_VERSIONS[min_version_idx:max_version_idx+1]
|
|
|
|
|
|
|
|
|
|
|
|
def create_https_connection(
    host, port=HTTPSConnection.default_port,
    cafile=None,
    client_certfile=None, client_keyfile=None,
    keyfile_passwd=None,
    tls_version_min=TLS_VERSION_DEFAULT_MIN,
    tls_version_max=TLS_VERSION_DEFAULT_MAX,
    **kwargs
):
    """
    Create a customized HTTPSConnection object.

    :param host: The host to connect to
    :param port: The port to connect to, defaults to
        HTTPSConnection.default_port
    :param cafile: A PEM-format file containing the trusted
        CA certificates
    :param client_certfile:
        A PEM-format client certificate file that will be used to
        identify the user to the server.
    :param client_keyfile:
        A file with the client private key. If this argument is not
        supplied, the key will be sought in client_certfile.
    :param keyfile_passwd:
        A path to the file which stores the password that is used to
        encrypt client_keyfile. Leave default value if the keyfile
        is not encrypted.
    :param tls_version_min:
        lowest TLS version name to allow (see get_proper_tls_version_span)
    :param tls_version_max:
        highest TLS version name to allow
    :param kwargs: passed through to the HTTPSConnection constructor
    :returns: An established HTTPS connection to host:port
    :raises RuntimeError: when cafile is missing or unreadable
    """
    # map each TLS version name to the ssl option that DISABLES it;
    # tls1.3 maps to 0 on Python builds lacking OP_NO_TLSv1_3 (no-op)
    tls_cutoff_map = {
        "ssl2": ssl.OP_NO_SSLv2,
        "ssl3": ssl.OP_NO_SSLv3,
        "tls1.0": ssl.OP_NO_TLSv1,
        "tls1.1": ssl.OP_NO_TLSv1_1,
        "tls1.2": ssl.OP_NO_TLSv1_2,
        "tls1.3": getattr(ssl, "OP_NO_TLSv1_3", 0),
    }

    if cafile is None:
        raise RuntimeError("cafile argument is required to perform server "
                           "certificate verification")

    if not os.path.isfile(cafile) or not os.access(cafile, os.R_OK):
        raise RuntimeError("cafile \'{file}\' doesn't exist or is unreadable".
                           format(file=cafile))

    # official Python documentation states that the best option to get
    # TLSv1 and later is to setup SSLContext with PROTOCOL_SSLv23
    # and then negate the insecure SSLv2 and SSLv3. However, with Python 3.10
    # PROTOCOL_SSLv23 is deprecated as well as PROTOCOL_TLS. We should use
    # PROTOCOL_TLS_CLIENT since Python 3.6
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    # harden the context: no compression (CRIME), fresh DH/ECDH keys
    ctx.options |= (
        ssl.OP_ALL | ssl.OP_NO_COMPRESSION | ssl.OP_SINGLE_DH_USE |
        ssl.OP_SINGLE_ECDH_USE
    )

    if constants.TLS_HIGH_CIPHERS is not None:
        # configure ciphers, uses system crypto policies on RH platforms.
        ctx.set_ciphers(constants.TLS_HIGH_CIPHERS)

    # remove the slice of negating protocol options according to options
    tls_span = get_proper_tls_version_span(tls_version_min, tls_version_max)

    # set up the correct TLS version flags for the SSL context
    if tls_span is not None:
        for version in TLS_VERSIONS:
            if version in tls_span:
                # make sure the required TLS versions are available if Python
                # decides to modify the default TLS flags
                ctx.options &= ~tls_cutoff_map[version]
            else:
                # disable all TLS versions not in tls_span
                ctx.options |= tls_cutoff_map[version]

    # Enable TLS 1.3 post-handshake auth
    if getattr(ctx, "post_handshake_auth", None) is not None:
        ctx.post_handshake_auth = True

    # always verify the server certificate and hostname against cafile
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.check_hostname = True
    ctx.load_verify_locations(cafile)

    if client_certfile is not None:
        # optional client-certificate auth; key password is read from a file
        if keyfile_passwd is not None:
            with open(keyfile_passwd) as pwd_f:
                passwd = pwd_f.read()
        else:
            passwd = None
        ctx.load_cert_chain(client_certfile, client_keyfile, passwd)

    return HTTPSConnection(host, port, context=ctx, **kwargs)
|
|
|
|
|
|
|
|
|
2014-01-31 08:42:31 -06:00
|
|
|
def validate_dns_label(dns_label, allow_underscore=False, allow_slash=False):
    """
    Validate a single DNS label (RFC 1035 syntax, case-insensitive).

    :param dns_label: the label to check
    :param allow_underscore: also permit '_' anywhere in the label
    :param allow_slash: also permit '/' in the middle of the label
    :raises ValueError: when the label is empty, longer than 63
        characters, or contains forbidden characters
    """
    base_chars = 'a-z0-9'
    extra_chars = ''
    middle_chars = ''

    if allow_underscore:
        extra_chars += '_'
    if allow_slash:
        middle_chars += '/'

    middle_chars = middle_chars + '-'  # has to be always the last in the regex [....-]

    label_regex = r'''^[%(base)s%(extra)s] # must begin with an alphanumeric
                                           # character, or underscore if
                                           # allow_underscore is True
    ([%(base)s%(extra)s%(middle)s]* # can contain all allowed character
                                    # classes in the middle
    [%(base)s%(extra)s])*$ # must end with alphanumeric
                           # character or underscore if
                           # allow_underscore is True
    ''' % dict(base=base_chars, extra=extra_chars, middle=middle_chars)
    regex = re.compile(label_regex, re.IGNORECASE | re.VERBOSE)

    if not dns_label:
        raise ValueError(_('empty DNS label'))

    if len(dns_label) > 63:
        # fixed error-message typo: "longer that" -> "longer than"
        raise ValueError(_('DNS label cannot be longer than 63 characters'))

    if not regex.match(dns_label):
        chars = ', '.join("'%s'" % c for c in extra_chars + middle_chars)
        chars2 = ', '.join("'%s'" % c for c in middle_chars)
        raise ValueError(_("only letters, numbers, %(chars)s are allowed. "
                           "DNS label may not start or end with %(chars2)s")
                         % dict(chars=chars, chars2=chars2))
|
2014-01-31 08:42:31 -06:00
|
|
|
|
|
|
|
|
2017-10-24 04:33:33 -05:00
|
|
|
def validate_domain_name(
    domain_name, allow_underscore=False,
    allow_slash=False, entity='domain'
):
    """Validate a dotted DNS domain name by checking every label.

    A single trailing dot (absolute name) is tolerated and stripped.
    Single-label names are rejected.

    :raises ValueError: when the name has fewer than two labels or any
        label fails validate_dns_label()
    """
    if domain_name.endswith('.'):
        domain_name = domain_name[:-1]

    labels = domain_name.split(".")

    if len(labels) < 2:
        raise ValueError(_(
            'single label {}s are not supported'.format(entity)))

    # apply DNS name validator to every name part
    for part in labels:
        validate_dns_label(part, allow_underscore, allow_slash)
|
2012-02-28 02:05:01 -06:00
|
|
|
|
|
|
|
|
2011-10-24 11:35:48 -05:00
|
|
|
def validate_zonemgr(zonemgr):
    """Ensure a zone-manager DNSName contains no '@' characters.

    :raises ValueError: when any label contains '@'
    """
    assert isinstance(zonemgr, DNSName)
    for label in zonemgr.labels:
        if b'@' in label:
            raise ValueError(_('too many \'@\' characters'))
|
2014-09-16 05:23:21 -05:00
|
|
|
|
2012-04-30 06:51:03 -05:00
|
|
|
|
2014-05-16 05:21:04 -05:00
|
|
|
def validate_zonemgr_str(zonemgr):
    """Normalize and validate a zone manager given as a string.

    :raises ValueError: when the normalized value is not a valid
        IDNA domain or contains '@'
    """
    normalized = normalize_zonemgr(zonemgr)
    validate_idna_domain(normalized)
    return validate_zonemgr(DNSName(normalized))
|
2012-04-30 06:51:03 -05:00
|
|
|
|
2019-05-01 09:15:37 -05:00
|
|
|
|
|
|
|
def validate_hostname(hostname, check_fqdn=True, allow_underscore=False,
                      allow_slash=False, maxlen=255):
    """ See RFC 952, 1123

    Length limit of 64 imposed by MAXHOSTNAMELEN on Linux.

    DNS and other operating systems has a max length of 255. Default to
    the theoretical max unless explicitly told to limit. The cases
    where a limit would be set might include:
    * *-install --hostname
    * ipa host-add

    The *-install commands by definition are executed on Linux hosts so
    the maximum length needs to be limited.

    :param hostname Checked value
    :param check_fqdn Check if hostname is fully qualified
    :param allow_underscore Permit '_' in labels
    :param allow_slash Permit '/' inside labels
    :param maxlen Maximum accepted total length
    :raises ValueError: when the hostname is too long, has empty
        labels, is not fully qualified (when required), or contains
        invalid characters
    """
    if len(hostname) > maxlen:
        # fixed error-message typo: "longer that" -> "longer than"
        raise ValueError(_('cannot be longer than {} characters'.format(
            maxlen)))

    # tolerate a trailing dot (absolute DNS name)
    if hostname.endswith('.'):
        hostname = hostname[:-1]

    if '..' in hostname:
        raise ValueError(_('hostname contains empty label (consecutive dots)'))

    if '.' not in hostname:
        if check_fqdn:
            raise ValueError(_('not fully qualified'))
        validate_dns_label(hostname, allow_underscore, allow_slash)
    else:
        validate_domain_name(hostname, allow_underscore, allow_slash)
|
2011-11-01 07:58:05 -05:00
|
|
|
|
2012-09-03 08:33:30 -05:00
|
|
|
def normalize_sshpubkey(value):
    """Return ``value`` re-encoded in canonical OpenSSH public-key form."""
    pubkey = SSHPublicKey(value)
    return pubkey.openssh()
|
|
|
|
|
2017-12-15 10:00:04 -06:00
|
|
|
|
2012-09-03 08:33:30 -05:00
|
|
|
def validate_sshpubkey(ugettext, value):
    """Parameter validator: return an error message when ``value`` is
    not a parseable SSH public key, otherwise None."""
    try:
        SSHPublicKey(value)
    except (ValueError, UnicodeDecodeError):
        return _('invalid SSH public key')
    return None
|
|
|
|
|
2011-12-07 01:50:31 -06:00
|
|
|
|
2012-09-03 08:33:30 -05:00
|
|
|
def validate_sshpubkey_no_options(ugettext, value):
    """Parameter validator: like validate_sshpubkey(), but additionally
    reject keys that carry options."""
    try:
        pubkey = SSHPublicKey(value)
    except (ValueError, UnicodeDecodeError):
        return _('invalid SSH public key')

    if pubkey.has_options():
        return _('options are not allowed')
    return None
|
2012-09-03 08:33:30 -05:00
|
|
|
|
2016-03-14 11:42:56 -05:00
|
|
|
|
|
|
|
def convert_sshpubkey_post(entry_attrs):
    """Normalize 'ipasshpubkey' values and derive 'sshpubkeyfp'.

    Each parseable key is re-encoded in OpenSSH form; unparseable values
    are silently dropped.  Fingerprints are rendered as
    "<sha256-hex>[ comment] (<keytype>)" and stored under 'sshpubkeyfp'.
    """
    raw_keys = entry_attrs.get('ipasshpubkey')
    if not raw_keys:
        return

    converted = []
    fingerprints = []
    for raw in raw_keys:
        try:
            key = SSHPublicKey(raw)
        except (ValueError, UnicodeDecodeError):
            # skip malformed values instead of failing the whole entry
            continue

        fp = key.fingerprint_hex_sha256()
        comment = key.comment()
        if comment:
            fp = u'%s %s' % (fp, comment)
        fp = u'%s (%s)' % (fp, key.keytype())

        converted.append(key.openssh())
        fingerprints.append(fp)

    if 'ipasshpubkey' in entry_attrs:
        entry_attrs['ipasshpubkey'] = converted or None
    if fingerprints:
        entry_attrs['sshpubkeyfp'] = fingerprints
|
|
|
|
|
2016-03-14 11:42:56 -05:00
|
|
|
|
|
|
|
def add_sshpubkey_to_attrs_pre(context, attrs_list):
    """Ensure 'ipasshpubkey' is requested so the fingerprint can be computed.

    If the attribute was not already requested (explicitly or via '*'),
    append it and flag the context so remove_sshpubkey_from_output_post()
    can strip it from the output again.
    """
    if 'ipasshpubkey' not in attrs_list and '*' not in attrs_list:
        context.ipasshpubkey_added = True
        attrs_list.append('ipasshpubkey')
|
|
|
|
|
|
|
|
|
|
|
|
def remove_sshpubkey_from_output_post(context, entry_attrs):
    """Strip 'ipasshpubkey' from the output entry if it was only added
    by add_sshpubkey_to_attrs_pre(), and clear the context flag."""
    added = getattr(context, 'ipasshpubkey_added', False)
    if not added:
        return
    entry_attrs.pop('ipasshpubkey', None)
    delattr(context, 'ipasshpubkey_added')
|
|
|
|
|
|
|
|
|
|
|
|
def remove_sshpubkey_from_output_list_post(context, entries):
    """Strip 'ipasshpubkey' from every output entry if it was only added
    by add_sshpubkey_to_attrs_pre(), and clear the context flag."""
    if not getattr(context, 'ipasshpubkey_added', False):
        return
    for entry in entries:
        entry.pop('ipasshpubkey', None)
    delattr(context, 'ipasshpubkey_added')
|
|
|
|
|
|
|
|
|
add session manager and cache krb auth
This patch adds a session manager and support for caching
authentication in the session. Major elements of the patch are:
* Add a session manager to support cookie based sessions which
stores session data in a memcached entry.
* Add ipalib/krb_utils.py which contains functions to parse ccache
names, format principals, format KRB timestamps, and a KRB_CCache
class which reads ccache entry and allows one to extract information
such as the principal, credentials, credential timestamps, etc.
* Move krb constants defined in ipalib/rpc.py to ipa_krb_utils.py so
that all kerberos items are co-located.
* Modify javascript in ipa.js so that the IPA.command() RPC call
checks for authentication needed error response and if it receives
it sends a GET request to /ipa/login URL to refresh credentials.
* Add session_auth_duration config item to constants.py, used to
configure how long a session remains valid.
* Add parse_time_duration utility to ipalib/util.py. Used to parse the
session_auth_duration config item.
* Update the default.conf.5 man page to document session_auth_duration
config item (also added documentation for log_manager config items
which had been inadvertantly omitted from a previous commit).
* Add SessionError object to ipalib/errors.py
* Move Kerberos protection in Apache config from /ipa to /ipa/xml and
/ipa/login
* Add SessionCCache class to session.py to manage temporary Kerberos
ccache file in effect for the duration of an RPC command.
* Adds a krblogin plugin used to implement the /ipa/login
handler. login handler sets the session expiration time, currently
60 minutes or the expiration of the TGT, whichever is shorter. It
also copies the ccache provied by mod_auth_kerb into the session
data. The json handler will later extract and validate the ccache
belonging to the session.
* Refactored the WSGI handlers so that json and xlmrpc could have
independent behavior, this also moves where create and destroy
context occurs, now done in the individual handler rather than the
parent class.
* The json handler now looks up the session data, validates the ccache
bound to the session, if it's expired replies with authenicated
needed error.
* Add documentation to session.py. Fully documents the entire process,
got questions, read the doc.
* Add exclusions to make-lint as needed.
2012-02-06 12:29:56 -06:00
|
|
|
# regexp matching signed floating point number (group 1) followed by
# optional whitespace followed by time unit, e.g. day, hour (group 7)
time_duration_re = re.compile(
    r'([-+]?((\d+)|(\d+\.\d+)|(\.\d+)|(\d+\.)))\s*([a-z]+)', re.IGNORECASE)

# number of seconds in a time unit (all case-insensitive spellings)
time_duration_units = {}
for _unit_names, _unit_seconds in (
        (('year', 'years', 'y'), 365 * 24 * 60 * 60),
        (('month', 'months'), 30 * 24 * 60 * 60),
        (('week', 'weeks', 'w'), 7 * 24 * 60 * 60),
        (('day', 'days', 'd'), 24 * 60 * 60),
        (('hour', 'hours', 'h'), 60 * 60),
        (('minute', 'minutes', 'min'), 60),
        (('second', 'seconds', 'sec', 's'), 1)):
    for _unit_name in _unit_names:
        time_duration_units[_unit_name] = _unit_seconds


def parse_time_duration(value):
    '''
    Given a time duration string, parse it and return the total number
    of seconds represented as a floating point value. Negative values
    are permitted.

    The string should be composed of one or more numbers followed by a
    time unit. Whitespace and punctuation is optional. The numbers may
    be optionally signed. The time units are case insensitive except
    for the single character 'M' or 'm' which means month and minute
    respectively.

    Recognized time units are:

        * year, years, y
        * month, months, M
        * week, weeks, w
        * day, days, d
        * hour, hours, h
        * minute, minutes, min, m
        * second, seconds, sec, s

    Examples:
        "1h"                  # 1 hour
        "2 HOURS, 30 Minutes" # 2.5 hours
        "1week -1 day"        # 6 days
        ".5day"               # 12 hours
        "2M"                  # 2 months
        "1h:15m"              # 1.25 hours
        "1h, -15min"          # 45 minutes
        "30 seconds"          # .5 minute

    Note: Despite the appearance you can perform arithmetic the
    parsing is much simpler, the parser searches for signed values and
    adds the signed value to a running total. Only + and - are permitted
    and must appear prior to a digit.

    :parameters:
        value : string
            A time duration string in the specified format
    :returns:
        total number of seconds as float (may be negative)
    '''
    total = 0.0
    matched_any = False

    for match in time_duration_re.finditer(value):
        matched_any = True
        magnitude = match.group(1)
        unit = match.group(7)

        # Only 'M' (month) and 'm' (minute) are case sensitive;
        # everything else is looked up lowercased.
        if unit == 'M':
            seconds_per_unit = 30 * 24 * 60 * 60
        elif unit == 'm':
            seconds_per_unit = 60
        else:
            seconds_per_unit = time_duration_units.get(unit.lower())
            if seconds_per_unit is None:
                raise ValueError('unknown time duration unit "%s"' % unit)

        total += float(magnitude) * seconds_per_unit

    if not matched_any:
        raise ValueError('no time duration found in "%s"' % value)

    return total
|
2012-02-24 09:23:52 -06:00
|
|
|
|
2012-06-04 10:53:34 -05:00
|
|
|
def get_dns_forward_zone_update_policy(realm, rrtypes=('A', 'AAAA', 'SSHFP')):
    """
    Generate update policy for a forward DNS zone (idnsUpdatePolicy
    attribute). Bind uses this policy to grant/reject access for client
    machines trying to dynamically update their records.

    :param realm: A realm of the of the client
    :param rrtypes: A list of resource records types that client shall be
                    allowed to update
    """
    grants = [
        "grant %(realm)s krb5-self * %(rrtype)s"
        % dict(realm=realm, rrtype=rrtype)
        for rrtype in rrtypes
    ]
    # BIND expects each grant statement to be ';'-terminated
    return "; ".join(grants) + ";"
|
2012-04-16 01:33:26 -05:00
|
|
|
|
2012-06-04 10:53:34 -05:00
|
|
|
def get_dns_reverse_zone_update_policy(realm, reverse_zone, rrtypes=('PTR',)):
    """
    Generate update policy for a reverse DNS zone (idnsUpdatePolicy
    attribute). Bind uses this policy to grant/reject access for client
    machines trying to dynamically update their records.

    :param realm: A realm of the of the client
    :param reverse_zone: Name of the actual zone. All clients with IPs in this
                         sub-domain will be allowed to perform changes
    :param rrtypes: A list of resource records types that client shall be
                    allowed to update
    """
    grants = [
        "grant %(realm)s krb5-subdomain %(zone)s %(rrtype)s"
        % dict(realm=realm, zone=reverse_zone, rrtype=rrtype)
        for rrtype in rrtypes
    ]
    # BIND expects each grant statement to be ';'-terminated
    return "; ".join(grants) + ";"
|
|
|
|
|
2012-09-25 03:36:01 -05:00
|
|
|
# dictionary of valid reverse zone -> number of address components
REVERSE_DNS_ZONES = {
    DNSName.ip4_rev_zone: 4,
    DNSName.ip6_rev_zone: 32,
}


def zone_is_reverse(zone_name):
    """Return True if *zone_name* is a reverse (in-addr.arpa/ip6.arpa) zone."""
    return DNSName(zone_name).is_reverse()
|
2012-09-25 03:36:01 -05:00
|
|
|
|
2012-09-05 02:56:27 -05:00
|
|
|
def get_reverse_zone_default(ip_address):
    """Return the default reverse zone name for *ip_address*.

    Uses a /24 prefix for IPv4 and a /64 prefix for IPv6.
    """
    ip = netaddr.IPAddress(str(ip_address))
    labels = ip.reverse_dns.split('.')

    # drop the host-specific labels, keeping only the network part
    if ip.version == 4:
        labels = labels[1:]    # /24 for IPv4
    elif ip.version == 6:
        labels = labels[16:]   # /64 for IPv6

    return normalize_zone('.'.join(labels))
|
|
|
|
|
2017-12-15 10:00:04 -06:00
|
|
|
|
2012-04-16 01:33:26 -05:00
|
|
|
def validate_rdn_param(ugettext, value):
    """Plugin validator: check that *value* parses as an RDN.

    Returns the parser's error message on failure, None on success.
    """
    try:
        RDN(value)
    except Exception as e:
        return str(e)
    return None
|
|
|
|
|
2014-05-14 05:52:26 -05:00
|
|
|
|
|
|
|
def validate_hostmask(ugettext, hostmask):
    """Plugin validator: check that *hostmask* is a valid CIDR network.

    Returns an error message string on failure, None on success.
    """
    try:
        netaddr.IPNetwork(hostmask)
    except (ValueError, AddrFormatError):
        return _('invalid hostmask')
    return None
|
2014-10-16 09:27:00 -05:00
|
|
|
|
|
|
|
|
2015-04-22 08:29:21 -05:00
|
|
|
class ForwarderValidationError(Exception):
    """Base class for DNS forwarder validation failures.

    Subclasses set ``format``; keyword arguments fill in the template
    via messages.process_message_arguments(), which sets ``self.msg``.
    """
    format = None

    def __init__(self, format=None, message=None, **kw):
        messages.process_message_arguments(self, format, message, **kw)
        super(ForwarderValidationError, self).__init__(self.msg)
|
|
|
|
|
|
|
|
|
|
|
|
class UnresolvableRecordError(ForwarderValidationError):
    """Raised when a query sent to the forwarder cannot be resolved."""
    format = _("query '%(owner)s %(rtype)s': %(error)s")
|
|
|
|
|
|
|
|
|
|
|
|
class EDNS0UnsupportedError(ForwarderValidationError):
    """Raised when the forwarder does not answer EDNS0 queries."""
    format = _("query '%(owner)s %(rtype)s' with EDNS0: %(error)s")
|
|
|
|
|
|
|
|
|
|
|
|
class DNSSECSignatureMissingError(ForwarderValidationError):
    """Raised when an answer lacks the expected RRSIG records."""
    format = _("answer to query '%(owner)s %(rtype)s' is missing DNSSEC "
               "signatures (no RRSIG data)")
|
|
|
|
|
|
|
|
|
2015-04-24 06:37:07 -05:00
|
|
|
class DNSSECValidationError(ForwarderValidationError):
    """Raised when a record fails DNSSEC validation on a server."""
    format = _("record '%(owner)s %(rtype)s' "
               "failed DNSSEC validation on server %(ip)s")
|
2015-04-24 06:37:07 -05:00
|
|
|
|
|
|
|
|
2017-05-24 08:42:23 -05:00
|
|
|
def _log_response(e):
    """
    If exception contains response from server, log this response to debug log

    :param e: DNSException
    """
    assert isinstance(e, DNSException)
    kwargs = getattr(e, 'kwargs', {})
    response = kwargs.get('response')
    if response:
        logger.debug("DNSException: %s; server response: %s", e, response)
|
2015-04-22 08:29:21 -05:00
|
|
|
|
|
|
|
|
|
|
|
def _resolve_record(owner, rtype, nameserver_ip=None, edns0=False,
                    dnssec=False, flag_cd=False, timeout=10):
    """Resolve *owner*/*rtype*, optionally against a specific nameserver.

    :param nameserver_ip: if None, default resolvers will be used
    :param edns0: enables EDNS0
    :param dnssec: enables EDNS0 with the DO flag
    :param flag_cd: requires dnssec=True, adds flag CD
    :raise DNSException: if error occurs
    """
    assert isinstance(nameserver_ip, str) or nameserver_ip is None
    assert isinstance(rtype, str)

    resolver = DNSResolver()
    if nameserver_ip:
        resolver.nameservers = [nameserver_ip]
    resolver.lifetime = timeout

    # Recursion Desired: prevents answers ending up in the authority
    # section instead of the answer section
    resolver.set_flags(dns.flags.RD)

    if dnssec:
        resolver.use_edns(0, dns.flags.DO, 4096)
        flags = dns.flags.RD
        if flag_cd:
            flags |= dns.flags.CD
        resolver.set_flags(flags)
    elif edns0:
        resolver.use_edns(0, 0, 4096)

    return resolver.resolve(owner, rtype)
|
2014-10-16 09:27:00 -05:00
|
|
|
|
|
|
|
|
2017-05-24 08:42:23 -05:00
|
|
|
def _validate_edns0_forwarder(owner, rtype, ip_addr, timeout=10):
    """
    Validate if forwarder supports EDNS0

    :raise UnresolvableRecordError: record cannot be resolved
    :raise EDNS0UnsupportedError: EDNS0 is not supported by forwarder
    """
    # plain query first: proves the record is resolvable at all
    try:
        _resolve_record(owner, rtype, nameserver_ip=ip_addr, timeout=timeout)
    except DNSException as e:
        _log_response(e)
        raise UnresolvableRecordError(owner=owner, rtype=rtype, ip=ip_addr,
                                      error=e)

    # repeat with EDNS0: a failure now isolates EDNS0 support
    try:
        _resolve_record(owner, rtype, nameserver_ip=ip_addr, edns0=True,
                        timeout=timeout)
    except DNSException as e:
        _log_response(e)
        raise EDNS0UnsupportedError(owner=owner, rtype=rtype, ip=ip_addr,
                                    error=e)
|
|
|
|
|
|
|
|
|
2017-05-24 08:42:23 -05:00
|
|
|
def validate_dnssec_global_forwarder(ip_addr, timeout=10):
    """Test DNS forwarder properties against the root zone.

    Global forwarders should be able to return the signed root zone.

    :raise UnresolvableRecordError: record cannot be resolved
    :raise EDNS0UnsupportedError: EDNS0 is not supported by forwarder
    :raise DNSSECSignatureMissingError: did not receive RRSIG for root zone
    """
    ip_addr = str(ip_addr)
    owner = "."
    rtype = "SOA"
    _validate_edns0_forwarder(owner, rtype, ip_addr, timeout=timeout)

    # DNS root has to be signed
    try:
        ans = _resolve_record(owner, rtype, nameserver_ip=ip_addr,
                              dnssec=True, timeout=timeout)
    except DNSException as e:
        _log_response(e)
        raise DNSSECSignatureMissingError(owner=owner, rtype=rtype,
                                          ip=ip_addr)

    # the answer must actually contain an RRSIG covering the root SOA
    try:
        ans.response.find_rrset(
            ans.response.answer, dns.name.root, dns.rdataclass.IN,
            dns.rdatatype.RRSIG, dns.rdatatype.SOA
        )
    except KeyError:
        raise DNSSECSignatureMissingError(owner=owner, rtype=rtype,
                                          ip=ip_addr)
|
2014-10-16 09:27:00 -05:00
|
|
|
|
2014-11-27 07:16:23 -06:00
|
|
|
|
2017-05-23 11:35:57 -05:00
|
|
|
def validate_dnssec_zone_forwarder_step1(ip_addr, fwzone, timeout=10):
    """
    Only forwarders in forward zones can be validated in this way

    :raise UnresolvableRecordError: record cannot be resolved
    :raise EDNS0UnsupportedError: EDNS0 is not supported by forwarder
    """
    _validate_edns0_forwarder(fwzone, "SOA", ip_addr, timeout=timeout)
|
2015-04-24 06:37:07 -05:00
|
|
|
|
|
|
|
|
2017-05-23 11:35:57 -05:00
|
|
|
def validate_dnssec_zone_forwarder_step2(ipa_ip_addr, fwzone, timeout=10):
    """
    This step must be executed after forwarders are added into LDAP, and only
    when we are sure the forwarders work.
    Query will be send to IPA DNS server, to verify if reply passed,
    or DNSSEC validation failed.
    Only forwarders in forward zones can be validated in this way

    :raise UnresolvableRecordError: record cannot be resolved
    :raise DNSSECValidationError: response from forwarder is not DNSSEC valid
    """
    rtype = "SOA"

    # query with CD (checking disabled): the raw forwarder answer
    try:
        ans_cd = _resolve_record(fwzone, rtype, nameserver_ip=ipa_ip_addr,
                                 edns0=True, dnssec=True, flag_cd=True,
                                 timeout=timeout)
    except NXDOMAIN as e:
        # sometimes CD flag is ignored and NXDomain is returned
        _log_response(e)
        raise DNSSECValidationError(owner=fwzone, rtype=rtype, ip=ipa_ip_addr)
    except DNSException as e:
        _log_response(e)
        raise UnresolvableRecordError(owner=fwzone, rtype=rtype,
                                      ip=ipa_ip_addr, error=e)

    # query with DO only: the validated answer
    try:
        ans_do = _resolve_record(fwzone, rtype, nameserver_ip=ipa_ip_addr,
                                 edns0=True, dnssec=True, timeout=timeout)
    except DNSException as e:
        _log_response(e)
        raise DNSSECValidationError(owner=fwzone, rtype=rtype, ip=ipa_ip_addr)

    # records received with and without CD flag are not equivalent:
    # this might be caused by an DNSSEC validation failure in cases where
    # existing zone id being 'shadowed' by another zone on forwarder
    if (ans_do.canonical_name != ans_cd.canonical_name
            or ans_do.rrset != ans_cd.rrset):
        raise DNSSECValidationError(owner=fwzone, rtype=rtype, ip=ipa_ip_addr)
|
|
|
|
|
2014-11-27 07:16:23 -06:00
|
|
|
|
|
|
|
def validate_idna_domain(value):
    """
    Validate if value is valid IDNA domain.

    If domain is not valid, raises ValueError
    :param value:
    :return:
    """
    error = None

    try:
        DNSName(value)
    except dns.name.BadEscape:
        error = _('invalid escape code in domain name')
    except dns.name.EmptyLabel:
        error = _('empty DNS label')
    except dns.name.NameTooLong:
        error = _('domain name cannot be longer than 255 characters')
    except dns.name.LabelTooLong:
        error = _('DNS label cannot be longer than 63 characters')
    except dns.exception.SyntaxError:
        error = _('invalid domain name')
    else:
        # Compare whether the IDN-normalized and original domain match.
        # There is an N:1 mapping between unicode and IDNA names; users
        # should use normalized names to avoid mistakes.
        # The character class covers '.' plus the IDNA dot equivalents.
        labels = re.split(u'[.\uff0e\u3002\uff61]', value, flags=re.UNICODE)
        try:
            for label in labels:
                label.encode("ascii")
        except UnicodeError:
            # IDNA
            normalized = [encodings.idna.nameprep(x) for x in labels]
            if normalized != labels:
                error = _("domain name '%(domain)s' should be normalized to"
                          ": %(normalized)s") % {
                    'domain': value,
                    'normalized': '.'.join(normalized)}

    if error:
        raise ValueError(error)
|
2015-06-17 06:33:24 -05:00
|
|
|
|
|
|
|
|
2015-09-03 05:13:32 -05:00
|
|
|
def detect_dns_zone_realm_type(api, domain):
    """
    Detects the type of the realm that the given DNS zone belongs to.
    Note: This method is heuristic. Possible values:
    - 'current': For IPA domains belonging in the current realm.
    - 'foreign': For domains belonging in a foreign kerberos realm.
    - 'unknown': For domains whose allegiance could not be detected.
    """
    domain_suffix = DNSName(domain)

    # First, try to detect a _kerberos TXT record in the domain;
    # this would indicate that the domain belongs to an IPA realm.
    kerberos_record_name = DNSName('_kerberos') + domain_suffix

    try:
        result = resolve(kerberos_record_name, rdatatype.TXT)
        answer = result.response.answer

        # IPA domain will have only one _kerberos TXT record
        if (len(answer) == 1 and
                len(answer[0]) == 1 and
                answer[0].rdtype == rdatatype.TXT):
            record = answer[0][0]
            # If the record contains our current realm, it is 'current'
            if record.to_text() == '"{0}"'.format(api.env.realm):
                return 'current'
            return 'foreign'
    except DNSException:
        pass

    # Try to detect an AD-specific record in the zone; its presence
    # indicates that the domain belongs to a foreign (AD) realm.
    ad_specific_record_name = DNSName('_ldap._tcp.gc._msdcs') + domain_suffix

    try:
        # The presence of this record is enough, return foreign in such case
        resolve(ad_specific_record_name, rdatatype.SRV)
    except DNSException:
        # If we could not detect type with certainty, return unknown
        return 'unknown'
    return 'foreign'
|
2015-09-03 05:13:32 -05:00
|
|
|
|
2015-10-30 07:06:21 -05:00
|
|
|
|
|
|
|
def has_managed_topology(api):
    """Return True if the deployment's domain level enables managed topology."""
    result = api.Command['domainlevel_get']()
    domainlevel = result.get('result', DOMAIN_LEVEL_0)
    return domainlevel > DOMAIN_LEVEL_0
|
2016-06-14 06:02:30 -05:00
|
|
|
|
|
|
|
|
2021-02-15 23:03:59 -06:00
|
|
|
def print_replication_status(entry, verbose):
    """Pretty print nsds5replicalastinitstatus, nsds5replicalastinitend,
    nsds5replicalastupdatestatus, nsds5replicalastupdateend for a
    replication agreement.
    """
    if not verbose:
        return

    sv = entry.single_value
    initstatus = sv.get('nsds5replicalastinitstatus')
    if initstatus is not None:
        print(" last init status: %s" % initstatus)
        print(" last init ended: %s" % str(
            ipautil.parse_generalized_time(
                sv['nsds5replicalastinitend'])))
    updatestatus = sv.get('nsds5replicalastupdatestatus')
    if updatestatus is not None:
        print(" last update status: %s" % updatestatus)
        print(" last update ended: %s" % str(
            ipautil.parse_generalized_time(
                sv['nsds5replicalastupdateend'])))
|
|
|
|
|
|
|
|
|
2018-09-26 04:59:50 -05:00
|
|
|
class classproperty:
|
2016-06-14 06:02:30 -05:00
|
|
|
__slots__ = ('__doc__', 'fget')
|
|
|
|
|
|
|
|
def __init__(self, fget=None, doc=None):
|
Add __signature__ to plugins
Auto-generate inspect.Signature from plugin arguments and options. The
signature is used by (amongst others) pydoc / help.
```
$ ipa console
>>> help(api.Command.group_add)
Help on group_add in module ipaserver.plugins.group object:
class group_add(ipaserver.plugins.baseldap.LDAPCreate)
| group_add(cn: str, *, description: str = None, gidnumber: int = None, setattr: List[str] = None, addattr: List[str] = None, nonposix: bool, external: bool, all: bool, raw: bool, version: str = None, no_members: bool) -> Dict[str, Any]
```
Fixes: https://pagure.io/freeipa/issue/8388
Signed-off-by: Christian Heimes <cheimes@redhat.com>
Reviewed-By: Alexander Bokovoy <abokovoy@redhat.com>
2020-06-26 10:07:50 -05:00
|
|
|
assert isinstance(fget, classmethod)
|
2016-06-14 06:02:30 -05:00
|
|
|
if doc is None and fget is not None:
|
|
|
|
doc = fget.__doc__
|
|
|
|
|
|
|
|
self.fget = fget
|
|
|
|
self.__doc__ = doc
|
|
|
|
|
|
|
|
def __get__(self, obj, obj_type):
|
|
|
|
if self.fget is not None:
|
|
|
|
return self.fget.__get__(obj, obj_type)()
|
|
|
|
raise AttributeError("unreadable attribute")
|
|
|
|
|
|
|
|
def __set__(self, obj, value):
|
|
|
|
raise AttributeError("can't set attribute")
|
|
|
|
|
|
|
|
def __delete__(self, obj):
|
|
|
|
raise AttributeError("can't delete attribute")
|
|
|
|
|
|
|
|
def getter(self, fget):
|
|
|
|
self.fget = fget
|
|
|
|
return self
|
2016-06-13 13:02:21 -05:00
|
|
|
|
|
|
|
|
Add __signature__ to plugins
Auto-generate inspect.Signature from plugin arguments and options. The
signature is used by (amongst others) pydoc / help.
```
$ ipa console
>>> help(api.Command.group_add)
Help on group_add in module ipaserver.plugins.group object:
class group_add(ipaserver.plugins.baseldap.LDAPCreate)
| group_add(cn: str, *, description: str = None, gidnumber: int = None, setattr: List[str] = None, addattr: List[str] = None, nonposix: bool, external: bool, all: bool, raw: bool, version: str = None, no_members: bool) -> Dict[str, Any]
```
Fixes: https://pagure.io/freeipa/issue/8388
Signed-off-by: Christian Heimes <cheimes@redhat.com>
Reviewed-By: Alexander Bokovoy <abokovoy@redhat.com>
2020-06-26 10:07:50 -05:00
|
|
|
class classobjectproperty(classproperty):
    """classproperty variant that also passes the object to the getter.

    obj is None for class-level access and the instance ('self') for
    instance-level access.
    """
    __slots__ = ('__doc__',)  # pylint: disable=redefined-slots-in-subclass

    def __get__(self, obj, obj_type):
        if self.fget is None:
            raise AttributeError("unreadable attribute")
        return self.fget.__get__(obj, obj_type)(obj)
|
|
|
|
|
|
|
|
|
2016-06-13 13:02:21 -05:00
|
|
|
def normalize_hostname(hostname):
    """Use common fqdn form without the trailing dot"""
    # strip at most one trailing dot, then lowercase
    stripped = hostname[:-1] if hostname.endswith(u'.') else hostname
    return stripped.lower()
|
|
|
|
|
|
|
|
|
2019-05-01 09:15:37 -05:00
|
|
|
def hostname_validator(ugettext, value, maxlen=255):
    """Validator used by plugins to ensure hostname compliance.

    In Linux the maximum hostname length is 64. In DNS and
    other operating systems (Solaris) it is 255. If not explicitly
    checking a Linux hostname (e.g. the server) use the DNS
    default.

    Returns an error message string on failure, None on success.
    """
    try:
        validate_hostname(value, maxlen=maxlen)
    except ValueError as e:
        return _('invalid domain-name: %s') % unicode(e)
    return None
|
|
|
|
|
|
|
|
|
|
|
|
def ipaddr_validator(ugettext, ipaddr, ip_version=None):
    """Validator: check IP address syntax and, optionally, its version.

    :param ip_version: when given, the address must be of this IP version
    Returns an error message string on failure, None on success.
    """
    try:
        ip = netaddr.IPAddress(str(ipaddr), flags=netaddr.INET_PTON)
    except (netaddr.AddrFormatError, ValueError):
        return _('invalid IP address format')

    if ip_version is not None and ip.version != ip_version:
        return _(
            'invalid IP address version (is %(value)d, must be '
            '%(required_value)d)!') % dict(
            value=ip.version,
            required_value=ip_version
        )
    return None
|
|
|
|
|
|
|
|
|
|
|
|
def validate_bind_forwarder(ugettext, forwarder):
    """Validate a BIND forwarder specification: "IP" or "IP port N".

    Returns an error message string on failure, None on success.
    """
    ip_address, sep, port = forwarder.partition(u' port ')

    ip_address_validation = ipaddr_validator(ugettext, ip_address)
    if ip_address_validation is not None:
        return ip_address_validation

    if sep:
        try:
            port = int(port)
            if port < 0 or port > 65535:
                raise ValueError()
        except ValueError:
            # Translate the template first, then interpolate: passing the
            # already-formatted string to _() would look up a string that
            # can never exist in the translation catalog.
            return _('%(port)s is not a valid port') % dict(port=port)

    return None
|
2015-09-08 10:43:30 -05:00
|
|
|
|
|
|
|
|
|
|
|
def set_krbcanonicalname(entry_attrs):
    """Default krbcanonicalname to krbprincipalname.

    Applies only to entries with the krbprincipalaux objectclass that
    have a krbprincipalname but no krbcanonicalname yet.
    """
    objectclasses = {oc.lower() for oc in entry_attrs['objectclass']}
    if 'krbprincipalaux' not in objectclasses:
        return

    if ('krbprincipalname' in entry_attrs
            and 'krbcanonicalname' not in entry_attrs):
        entry_attrs['krbcanonicalname'] = entry_attrs['krbprincipalname']
|
2016-06-23 13:06:42 -05:00
|
|
|
|
|
|
|
|
|
|
|
def ensure_last_krbprincipalname(ldap, entry_attrs, *keys):
    """
    ensure that the LDAP entry has at least one value of krbprincipalname
    and that this value is equal to krbcanonicalname

    :param ldap: LDAP connection object
    :param entry_attrs: LDAP entry made prior to update
    :param options: command options
    """
    entry = ldap.get_entry(
        entry_attrs.dn, ['krbcanonicalname', 'krbprincipalname'])

    krbcanonicalname = entry.single_value.get('krbcanonicalname', None)

    # keys[-1] holds the principal alias values being removed; removing
    # the canonical name itself would leave the entry inconsistent
    if krbcanonicalname in keys[-1]:
        raise errors.ValidationError(
            name='krbprincipalname',
            error=_('at least one value equal to the canonical '
                    'principal name must be present')
        )
|
|
|
|
|
|
|
|
|
|
|
|
def ensure_krbcanonicalname_set(ldap, entry_attrs):
    """
    Make sure krbcanonicalname ends up set on the entry being modified.

    When the stored entry already has krbcanonicalname nothing is changed;
    otherwise the canonical name is derived from krbprincipalname (via
    set_krbcanonicalname) and merged into entry_attrs.

    :param ldap: LDAP connection object
    :param entry_attrs: LDAP modifications to be applied
    """
    old_entry = ldap.get_entry(
        entry_attrs.dn,
        ['krbcanonicalname', 'krbprincipalname', 'objectclass'])

    if old_entry.single_value.get('krbcanonicalname', None) is not None:
        # canonical name already present, nothing to do
        return

    set_krbcanonicalname(old_entry)

    # merge back only krbcanonicalname, not the attributes we fetched
    for attr in ('krbprincipalname', 'objectclass'):
        old_entry.pop(attr, None)

    entry_attrs.update(old_entry)
|
|
|
|
|
|
|
|
|
2019-01-08 01:54:07 -06:00
|
|
|
def check_client_configuration(env=None):
    """
    Check if IPA client is configured on the system.

    This is a convenience wrapper that also supports using
    a custom configuration via IPA_CONFDIR.

    Raises a ScriptError exception if the client is not
    configured.

    Hardcode return code to avoid recursive imports
    """
    CLIENT_NOT_CONFIGURED = 2

    if env is not None and env.confdir != paths.ETC_IPA:
        # custom IPA conf dir, check for custom conf_default
        if not os.path.isfile(env.conf_default):
            raise ScriptError(
                f'IPA client is not configured on this system (confdir '
                f'{env.confdir} is missing {env.conf_default})',
                CLIENT_NOT_CONFIGURED
            )
        return True

    if not is_ipa_client_configured():
        raise ScriptError(
            'IPA client is not configured on this system',
            CLIENT_NOT_CONFIGURED
        )
    return True
|
2017-08-04 16:25:12 -05:00
|
|
|
|
|
|
|
|
2023-03-24 01:07:04 -05:00
|
|
|
def _collect_trust_namespaces(api_instance, add_local=False):
|
2016-06-23 13:06:42 -05:00
|
|
|
"""
|
2023-03-24 01:07:04 -05:00
|
|
|
Return UPNs and realm names of trusted forests.
|
2016-06-23 13:06:42 -05:00
|
|
|
|
|
|
|
:param api_instance: API instance
|
2023-03-24 01:07:04 -05:00
|
|
|
:param add_local: bool flag
|
2016-06-23 13:06:42 -05:00
|
|
|
|
2023-03-24 01:07:04 -05:00
|
|
|
:return: set of namespace names as strings.
|
|
|
|
If add_local is True, add own realm namesapce
|
2016-06-23 13:06:42 -05:00
|
|
|
"""
|
|
|
|
trust_objects = api_instance.Command.trust_find(u'', sizelimit=0)['result']
|
|
|
|
|
|
|
|
trust_suffix_namespace = set()
|
|
|
|
|
|
|
|
for obj in trust_objects:
|
2016-07-20 08:46:22 -05:00
|
|
|
nt_suffixes = obj.get('ipantadditionalsuffixes', [])
|
2016-06-23 13:06:42 -05:00
|
|
|
|
|
|
|
trust_suffix_namespace.update(
|
2016-07-20 08:46:22 -05:00
|
|
|
set(upn.lower() for upn in nt_suffixes))
|
|
|
|
|
|
|
|
if 'ipantflatname' in obj:
|
|
|
|
trust_suffix_namespace.add(obj['ipantflatname'][0].lower())
|
|
|
|
|
|
|
|
trust_suffix_namespace.add(obj['cn'][0].lower())
|
2016-06-23 13:06:42 -05:00
|
|
|
|
2023-03-24 01:07:04 -05:00
|
|
|
if add_local:
|
|
|
|
trust_suffix_namespace.add(api_instance.env.realm.lower())
|
|
|
|
|
|
|
|
return trust_suffix_namespace
|
|
|
|
|
|
|
|
|
|
|
|
def check_principal_realm_in_trust_namespace(api_instance, *suffixes,
                                             attr_name='krbprincipalname'):
    """
    Check that principal name's suffix does not overlap with UPNs and realm
    names of trusted forests.

    :param api_instance: API instance
    :param suffixes: principal suffixes; the last argument holds the
        principal names to check

    :raises: ValidationError if the suffix coincides with realm name, UPN
        suffix or netbios name of trusted domains
    """
    namespaces = _collect_trust_namespaces(api_instance, add_local=False)

    for name in suffixes[-1]:
        principal = Principal(name, realm=api_instance.env.realm)
        realm = principal.realm
        upn = principal.upn_suffix if principal.is_enterprise else None

        if realm in namespaces or upn in namespaces:
            raise errors.ValidationError(
                name=attr_name,
                error=_('realm or UPN suffix overlaps with trusted domain '
                        'namespace'))
|
2016-09-12 07:38:12 -05:00
|
|
|
|
|
|
|
|
2023-03-24 01:07:04 -05:00
|
|
|
def check_principal_realm_supported(api_instance, *suffixes,
                                    attr_name='krbprincipalname'):
    """
    Check that principal name's suffix matches UPNs, realm names of trusted
    forests, or the IPA realm itself.

    :param api_instance: API instance
    :param suffixes: principal suffixes; the last argument holds the
        principal names to check

    :raises: ValidationError if the suffix does not match with realm name,
        UPN suffix or netbios name of trusted domains or IPA domain
    """
    namespaces = _collect_trust_namespaces(api_instance, add_local=True)

    for name in suffixes[-1]:
        principal = Principal(name, realm=api_instance.env.realm)
        upn = principal.upn_suffix if principal.is_enterprise else None

        realm_unknown = principal.realm.lower() not in namespaces
        upn_unknown = (upn is not None
                       and upn.lower() not in namespaces)
        if realm_unknown or upn_unknown:
            raise errors.ValidationError(
                name=attr_name,
                error=_('realm or UPN suffix outside of supported realm '
                        'domains or trusted domains namespace'))
|
|
|
|
|
|
|
|
|
2017-05-31 08:50:05 -05:00
|
|
|
def no_matching_interface_for_ip_address_warning(addr_list):
    """
    Log (and print to stderr) a warning for every address in *addr_list*
    that matches no local network interface.

    :param addr_list: iterable of address objects providing
        get_matching_interface()
    """
    unmatched = (ip for ip in addr_list if not ip.get_matching_interface())
    for ip in unmatched:
        logger.warning(
            "No network interface matches the IP address %s", ip)
        # fixme: once when loggers will be fixed, we can remove this
        # print
        print(
            "WARNING: No network interface matches the IP address "
            "{}".format(ip),
            file=sys.stderr
        )
|
2017-10-30 10:09:14 -05:00
|
|
|
|
|
|
|
|
|
|
|
def get_terminal_height(fd=1):
    """
    Get current terminal height

    Args:
        fd (int): file descriptor. Default: 1 (stdout)

    Returns:
        int: Terminal height
    """
    try:
        return struct.unpack(
            'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, b'1234'))[0]
    except (IOError, OSError, struct.error):
        # fall back to the LINES environment variable; convert to int so
        # the documented return type holds (previously the raw env string
        # was returned when LINES was set)
        try:
            return int(os.environ.get("LINES", 25))
        except ValueError:
            # LINES set to a non-numeric value
            return 25
|
|
|
|
|
|
|
|
|
2018-12-05 07:54:29 -06:00
|
|
|
def get_pager():
    """ Get path to a pager

    The pager name is taken from the PAGER environment variable,
    defaulting to 'less'.

    :return: path to the file if it exists otherwise None
    :rtype: str or None
    """
    return shutil.which(os.environ.get('PAGER', 'less'))
|
|
|
|
|
|
|
|
|
|
|
|
def open_in_pager(data, pager):
    """
    Open text data in pager

    Args:
        data (bytes): data to view in pager
        pager (str): path to the pager

    Returns:
        None
    """
    proc = subprocess.Popen([pager], stdin=subprocess.PIPE)

    try:
        proc.stdin.write(data)
        proc.communicate()
    except IOError:
        # the pager may exit before consuming all of its input
        # (e.g. broken pipe); that is not an error for the caller
        pass
|
2018-02-14 09:59:50 -06:00
|
|
|
|
|
|
|
|
|
|
|
if reprlib is not None:
    class APIRepr(reprlib.Repr):
        """repr() variant used for API docs: prints str values with a
        u'' prefix and built-in types in Python 2 style "<type '...'>"
        form, with all of reprlib's size limits lifted."""

        builtin_types = {
            bool, int, float,
            str, bytes,
            dict, tuple, list, set, frozenset,
            type(None),
        }

        def __init__(self):
            super(APIRepr, self).__init__()
            # lift every numeric size limit reprlib.Repr sets by default
            for attr, limit in self.__dict__.items():
                if isinstance(limit, int):
                    setattr(self, attr, sys.maxsize)

        def repr_str(self, x, level):
            """Output with u'' prefix"""
            return 'u' + repr(x)

        def repr_type(self, x, level):
            # render str as the Python 2 'unicode' type
            if x is str:
                return "<type 'unicode'>"
            if x in self.builtin_types:
                return "<type '{}'>".format(x.__name__)
            return repr(x)

    apirepr = APIRepr().repr
else:
    apirepr = repr
|