2008-10-02 20:09:13 -05:00
# Authors:
# Jason Gerard DeRose <jderose@redhat.com>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
2010-12-09 06:59:11 -06:00
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
2008-10-02 20:09:13 -05:00
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
2010-12-09 06:59:11 -06:00
# along with this program. If not, see <http://www.gnu.org/licenses/>.
2008-10-02 20:09:13 -05:00
"""
Various utility functions .
"""
2008-10-27 01:23:43 -05:00
import os
import imp
2008-10-31 21:25:33 -05:00
import time
2009-02-18 16:12:27 -06:00
import socket
2010-10-08 12:15:03 -05:00
import re
2012-09-05 07:35:44 -05:00
import decimal
2012-09-05 02:56:27 -05:00
import netaddr
2010-08-10 15:40:00 -05:00
from types import NoneType
2011-11-01 07:58:05 -05:00
from weakref import WeakKeyDictionary
2012-05-11 07:38:09 -05:00
from dns import resolver , rdatatype
from dns . exception import DNSException
2010-08-10 15:40:00 -05:00
2009-04-23 07:51:59 -05:00
from ipalib import errors
2011-06-08 09:54:41 -05:00
from ipalib . text import _
2012-09-03 08:33:30 -05:00
from ipapython . ssh import SSHPublicKey
Use DN objects instead of strings
* Convert every string specifying a DN into a DN object
* Every place a dn was manipulated in some fashion it was replaced by
the use of DN operators
* Add new DNParam parameter type for parameters which are DN's
* DN objects are used 100% of the time throughout the entire data
pipeline whenever something is logically a dn.
* Many classes now enforce DN usage for their attributes which are
dn's. This is implemented via ipautil.dn_attribute_property(). The
only permitted types for a class attribute specified to be a DN are
either None or a DN object.
* Require that every place a dn is used it must be a DN object.
This translates into lot of::
assert isinstance(dn, DN)
sprinkled through out the code. Maintaining these asserts is
valuable to preserve DN type enforcement. The asserts can be
disabled in production.
The goal of 100% DN usage 100% of the time has been realized, these
asserts are meant to preserve that.
The asserts also proved valuable in detecting functions which did
not obey their function signatures, such as the baseldap pre and
post callbacks.
* Moved ipalib.dn to ipapython.dn because DN class is shared with all
components, not just the server which uses ipalib.
* All API's now accept DN's natively, no need to convert to str (or
unicode).
* Removed ipalib.encoder and encode/decode decorators. Type conversion
is now explicitly performed in each IPASimpleLDAPObject method which
emulates a ldap.SimpleLDAPObject method.
* Entity & Entry classes now utilize DN's
* Removed __getattr__ in Entity & Entity clases. There were two
problems with it. It presented synthetic Python object attributes
based on the current LDAP data it contained. There is no way to
validate synthetic attributes using code checkers, you can't search
the code to find LDAP attribute accesses (because synthetic
attributes look like Python attributes instead of LDAP data) and
error handling is circumscribed. Secondly __getattr__ was hiding
Python internal methods which broke class semantics.
* Replace use of methods inherited from ldap.SimpleLDAPObject via
IPAdmin class with IPAdmin methods. Directly using inherited methods
was causing us to bypass IPA logic. Mostly this meant replacing the
use of search_s() with getEntry() or getList(). Similarly direct
access of the LDAP data in classes using IPAdmin were replaced with
calls to getValue() or getValues().
* Objects returned by ldap2.find_entries() are now compatible with
either the python-ldap access methodology or the Entity/Entry access
methodology.
* All ldap operations now funnel through the common
IPASimpleLDAPObject giving us a single location where we interface
to python-ldap and perform conversions.
* The above 4 modifications means we've greatly reduced the
proliferation of multiple inconsistent ways to perform LDAP
operations. We are well on the way to having a single API in IPA for
doing LDAP (a long range goal).
* All certificate subject bases are now DN's
* DN objects were enhanced thusly:
- find, rfind, index, rindex, replace and insert methods were added
- AVA, RDN and DN classes were refactored in immutable and mutable
variants, the mutable variants are EditableAVA, EditableRDN and
EditableDN. By default we use the immutable variants preserving
important semantics. To edit a DN cast it to an EditableDN and
cast it back to DN when done editing. These issues are fully
described in other documentation.
- first_key_match was removed
- DN equalty comparison permits comparison to a basestring
* Fixed ldapupdate to work with DN's. This work included:
- Enhance test_updates.py to do more checking after applying
update. Add test for update_from_dict(). Convert code to use
unittest classes.
- Consolidated duplicate code.
- Moved code which should have been in the class into the class.
- Fix the handling of the 'deleteentry' update action. It's no longer
necessary to supply fake attributes to make it work. Detect case
where subsequent update applies a change to entry previously marked
for deletetion. General clean-up and simplification of the
'deleteentry' logic.
- Rewrote a couple of functions to be clearer and more Pythonic.
- Added documentation on the data structure being used.
- Simplfy the use of update_from_dict()
* Removed all usage of get_schema() which was being called prior to
accessing the .schema attribute of an object. If a class is using
internal lazy loading as an optimization it's not right to require
users of the interface to be aware of internal
optimization's. schema is now a property and when the schema
property is accessed it calls a private internal method to perform
the lazy loading.
* Added SchemaCache class to cache the schema's from individual
servers. This was done because of the observation we talk to
different LDAP servers, each of which may have it's own
schema. Previously we globally cached the schema from the first
server we connected to and returned that schema in all contexts. The
cache includes controls to invalidate it thus forcing a schema
refresh.
* Schema caching is now sensitive to the run time context. During
install and upgrade the schema can change leading to errors due to
out-of-date cached schema. The schema cache is refreshed in these
contexts.
* We are aware of the LDAP syntax of all LDAP attributes. Every
attribute returned from an LDAP operation is passed through a
central table look-up based on it's LDAP syntax. The table key is
the LDAP syntax it's value is a Python callable that returns a
Python object matching the LDAP syntax. There are a handful of LDAP
attributes whose syntax is historically incorrect
(e.g. DistguishedNames that are defined as DirectoryStrings). The
table driven conversion mechanism is augmented with a table of
hard coded exceptions.
Currently only the following conversions occur via the table:
- dn's are converted to DN objects
- binary objects are converted to Python str objects (IPA
convention).
- everything else is converted to unicode using UTF-8 decoding (IPA
convention).
However, now that the table driven conversion mechanism is in place
it would be trivial to do things such as converting attributes
which have LDAP integer syntax into a Python integer, etc.
* Expected values in the unit tests which are a DN no longer need to
use lambda expressions to promote the returned value to a DN for
equality comparison. The return value is automatically promoted to
a DN. The lambda expressions have been removed making the code much
simpler and easier to read.
* Add class level logging to a number of classes which did not support
logging, less need for use of root_logger.
* Remove ipaserver/conn.py, it was unused.
* Consolidated duplicate code wherever it was found.
* Fixed many places that used string concatenation to form a new
string rather than string formatting operators. This is necessary
because string formatting converts it's arguments to a string prior
to building the result string. You can't concatenate a string and a
non-string.
* Simplify logic in rename_managed plugin. Use DN operators to edit
dn's.
* The live version of ipa-ldap-updater did not generate a log file.
The offline version did, now both do.
https://fedorahosted.org/freeipa/ticket/1670
https://fedorahosted.org/freeipa/ticket/1671
https://fedorahosted.org/freeipa/ticket/1672
https://fedorahosted.org/freeipa/ticket/1673
https://fedorahosted.org/freeipa/ticket/1674
https://fedorahosted.org/freeipa/ticket/1392
https://fedorahosted.org/freeipa/ticket/2872
2012-05-13 06:36:35 -05:00
from ipapython . dn import DN , RDN
2008-10-28 02:39:02 -05:00
2008-10-02 20:09:13 -05:00
2010-08-09 15:45:26 -05:00
def json_serialize(obj):
    """
    Recursively convert ``obj`` into a structure of JSON-friendly values.

    Sequences and mappings are converted element by element, native JSON
    scalars pass straight through, byte strings are decoded as UTF-8,
    Decimal and DN values are stringified, and any other object is
    serialized through its ``__json__()`` hook when one is available.
    Objects with no usable hook become an empty string rather than
    raising.
    """
    if isinstance(obj, (list, tuple)):
        return [json_serialize(member) for member in obj]
    if isinstance(obj, dict):
        return dict((key, json_serialize(val))
                    for (key, val) in obj.iteritems())
    # Scalar types JSON understands natively pass straight through.
    if isinstance(obj, (bool, float, int, long, unicode, NoneType)):
        return obj
    if isinstance(obj, str):
        return obj.decode('utf-8')
    if isinstance(obj, (decimal.Decimal, DN)):
        return str(obj)
    json_hook = getattr(obj, '__json__', None)
    if not callable(json_hook):
        # raise TypeError('%r is not JSON serializable')
        return ''
    return json_serialize(json_hook())
2010-08-09 15:45:26 -05:00
2008-10-22 16:54:04 -05:00
def get_current_principal():
    """
    Return the Kerberos principal name from the current credential cache.

    Uses the python-kerberos GSSAPI bindings to inquire about the
    credentials currently held and returns the client principal name as
    a unicode string.

    :raises RuntimeError: if the python-kerberos module is not installed
    :raises errors.CCacheError: if no valid credentials are available
    """
    try:
        # Imported lazily so this module can be loaded without
        # python-kerberos; an import failure becomes RuntimeError below.
        import kerberos
        # "notempty" appears to be an arbitrary non-empty service name,
        # used only to obtain a context for the credential inquiry —
        # TODO confirm against python-kerberos docs.
        rc, vc = kerberos.authGSSClientInit("notempty")
        rc = kerberos.authGSSClientInquireCred(vc)
        username = kerberos.authGSSClientUserName(vc)
        kerberos.authGSSClientClean(vc)
        return unicode(username)
    except ImportError:
        raise RuntimeError('python-kerberos is not available.')
    except kerberos.GSSError, e:
        #TODO: do a kinit?
        raise errors.CCacheError()
2008-10-27 01:23:43 -05:00
# FIXME: This function has no unit test
def find_modules_in_dir(src_dir):
    """
    Yield ``(module_name, path)`` pairs for Python modules in ``src_dir``.

    ``src_dir`` must be an absolute path to an existing directory that is
    not a symlink; otherwise nothing is yielded.  Only regular ``*.py``
    files are reported and ``__init__`` is skipped.
    """
    usable = (os.path.abspath(src_dir) == src_dir
              and os.path.isdir(src_dir)
              and not os.path.islink(src_dir))
    if not usable:
        return
    suffix = '.py'
    for entry in sorted(os.listdir(src_dir)):
        if not entry.endswith(suffix):
            continue
        path = os.path.join(src_dir, entry)
        # skip directories that happen to end in .py
        if not os.path.isfile(path):
            continue
        module = entry[:-len(suffix)]
        if module != '__init__':
            yield (module, path)
2008-10-27 01:23:43 -05:00
2010-07-22 13:16:22 -05:00
def validate_host_dns(log, fqdn):
    """
    See if the hostname has a DNS A record.

    :param log: logger used for debug output
    :param fqdn: fully qualified host name to look up
    :raises errors.DNSNotARecordError: if the A record lookup fails
    """
    try:
        answers = resolver.query(fqdn, rdatatype.A)
        log.debug(
            'IPA: found %d records for %s: %s' % (len(answers), fqdn,
                ' '.join(str(answer) for answer in answers))
        )
    except DNSException, e:
        # Any resolver failure (NXDOMAIN, no answer, timeout, ...) is
        # reported to the caller as a single "no A record" error.
        log.debug(
            'IPA: DNS A record lookup failed for %s' % fqdn
        )
        raise errors.DNSNotARecordError()
2010-10-08 12:15:03 -05:00
2013-02-12 09:50:00 -06:00
def has_soa_or_ns_record(domain):
    """
    Return True if ``domain`` has an SOA or an NS record, False otherwise.

    Both lookups are always performed; a resolver failure for a record
    type simply counts as "record not found".
    """
    def found(rdtype):
        try:
            resolver.query(domain, rdtype)
        except DNSException:
            return False
        return True

    soa_found = found(rdatatype.SOA)
    ns_found = found(rdatatype.NS)
    return soa_found or ns_found
2012-11-15 04:21:16 -06:00
def normalize_name(name):
    """
    Split a user name of the form ``user@domain`` or ``FLATNAME\\user``
    into its lower-cased components.

    Returns a dict always carrying a ``name`` key and, when the matching
    separator occurs exactly once, either a ``domain`` key (``@`` form)
    or a ``flatname`` key (backslash form).  A name with neither
    separator — or with several of them — is returned whole under
    ``name``.
    """
    result = {}
    upn_parts = name.split('@')
    nt_parts = name.split('\\')
    if len(upn_parts) == 2:
        # Kerberos-style principal: user@domain
        result['domain'] = unicode(upn_parts[1]).lower()
        result['name'] = unicode(upn_parts[0]).lower()
    elif len(nt_parts) == 2:
        # NT-style name: FLATNAME\user
        result['flatname'] = unicode(nt_parts[0]).lower()
        result['name'] = unicode(nt_parts[1]).lower()
    else:
        result['name'] = unicode(name).lower()
    return result
2010-10-08 12:15:03 -05:00
def isvalid_base64(data):
    """
    Validate the incoming data as valid base64 data or not.

    The character set must only include of a-z, A-Z, 0-9, + or / and
    be padded with = to be a length divisible by 4 (so only 0-2 =s are
    allowed). Its length must be divisible by 4. White space is
    not significant so it is removed.

    This doesn't guarantee we have a base64-encoded value, just that it
    fits the base64 requirements.
    """
    stripped = ''.join(data.split())
    if len(stripped) % 4 != 0:
        return False
    return re.match('^[a-zA-Z0-9\+\/]+\={0,2}$', stripped) is not None
2010-11-23 16:47:29 -06:00
def validate_ipaddr(ipaddr):
    """
    Check to see if the given IP address is a valid IPv4 or IPv6 address.

    Returns True or False
    """
    # IPv4 is probed first, matching the historical order of checks.
    for family in (socket.AF_INET, socket.AF_INET6):
        try:
            socket.inet_pton(family, ipaddr)
        except socket.error:
            continue
        return True
    return False
2011-06-08 09:54:41 -05:00
def check_writable_file(filename):
    """
    Determine if the file is writable. If the file doesn't exist then
    open the file to test writability.

    :param filename: path to test; must not be None
    :raises errors.FileError: when the name is empty, the file exists
        but is not writable, or the create-probe fails
    """
    if filename is None:
        raise errors.FileError(reason=_('Filename is empty'))
    try:
        if os.path.exists(filename):
            if not os.access(filename, os.W_OK):
                raise errors.FileError(reason=_('Permission denied: %(file)s') % dict(file=filename))
        else:
            # NOTE: probing writability of a non-existent target creates
            # (and leaves behind) an empty file.
            fp = open(filename, 'w')
            fp.close()
    except (IOError, OSError), e:
        raise errors.FileError(reason=str(e))
2011-10-24 11:35:48 -05:00
2011-11-23 09:03:51 -06:00
def normalize_zonemgr(zonemgr):
    """
    Normalize a zone manager (SOA RNAME) value to DNS form.

    A ``user@domain`` address has its ``@`` replaced with a dot, with
    any dots inside the local part escaped as ``\\.``.  The result is
    always terminated by a trailing dot.  Empty or None values are
    returned untouched.
    """
    if not zonemgr:
        # do not normalize empty or None value
        return zonemgr
    if '@' in zonemgr:
        # local-part needs to be normalized
        local_part, _at, domain = zonemgr.partition('@')
        escaped = local_part.replace('.', '\\.')
        zonemgr = u'%s.%s' % (escaped, domain)
    if zonemgr.endswith('.'):
        return zonemgr
    return zonemgr + u'.'
2011-10-24 11:35:48 -05:00
2012-09-05 02:56:27 -05:00
def normalize_zone(zone):
    """
    Return ``zone`` with a trailing dot appended unless it already ends
    with one.
    """
    # indexing zone[-1] (not endswith) means an empty zone raises
    # IndexError here
    suffix = '' if zone[-1] == '.' else '.'
    return zone + suffix
2014-01-31 08:42:31 -06:00
def validate_dns_label(dns_label, allow_underscore=False, allow_slash=False):
    """
    Validate a single DNS label, raising ValueError with a translated
    message on failure.

    :param dns_label: the label to check
    :param allow_underscore: additionally permit '_' anywhere in the label
    :param allow_slash: additionally permit '/' inside the label
    :raises ValueError: on an empty label, a label over 63 characters,
        or a label containing (or starting/ending with) disallowed
        characters
    """
    if not dns_label:
        raise ValueError(_('empty DNS label'))

    if len(dns_label) > 63:
        raise ValueError(_('DNS label cannot be longer that 63 characters'))

    base_chars = 'a-z0-9'
    extra_chars = '_' if allow_underscore else ''
    # '-' has to be always the last in the regex [....-]
    middle_chars = ('/' if allow_slash else '') + '-'
    char_sets = dict(base=base_chars, extra=extra_chars, middle=middle_chars)
    label_re = re.compile(
        r'^[%(base)s%(extra)s]([%(base)s%(extra)s%(middle)s]?[%(base)s%(extra)s])*$'
        % char_sets, re.IGNORECASE)

    if label_re.match(dns_label):
        return
    allowed = ', '.join("'%s'" % c for c in extra_chars + middle_chars)
    edge = ', '.join("'%s'" % c for c in middle_chars)
    raise ValueError(_("only letters, numbers, %(chars)s are allowed. "
                       "DNS label may not start or end with %(chars2)s")
                     % dict(chars=allowed, chars2=edge))
def validate_domain_name(domain_name, allow_underscore=False, allow_slash=False):
    """
    Validate a DNS domain name by validating each of its labels.

    A single trailing dot (absolute-name form) is tolerated.

    :param domain_name: the domain name to check
    :param allow_underscore: passed through to validate_dns_label()
    :param allow_slash: passed through to validate_dns_label()
    :raises ValueError: if any label is invalid
    """
    if domain_name.endswith('.'):
        domain_name = domain_name[:-1]

    # Apply the DNS label validator to every name part with an explicit
    # loop: map() was being run purely for its side effect (raising on a
    # bad label), which only works while map() is eager (Python 2) and
    # would silently validate nothing under Python 3.
    for label in domain_name.split("."):
        validate_dns_label(label, allow_underscore, allow_slash)
2012-02-28 02:05:01 -06:00
2011-10-24 11:35:48 -05:00
def validate_zonemgr(zonemgr):
    """ See RFC 1033, 1035

    Validate a DNS zone manager (SOA RNAME) value, given either as a
    mail address (``user@domain``) or in DNS form (``user.domain``,
    where dots inside the local part are escaped as ``\\.``).

    :raises ValueError: on any formatting problem (translated message)
    """
    regex_local_part = re.compile(r'^[a-z0-9]([a-z0-9-_]?[a-z0-9])*$',
                                  re.IGNORECASE)
    local_part_errmsg = _('mail account may only include letters, numbers, -, _ and a dot. There may not be consecutive -, _ and . characters. Its parts may not start or end with - or _')
    local_part_sep = '.'
    local_part = None
    domain = None

    if len(zonemgr) > 255:
        raise ValueError(_('cannot be longer that 255 characters'))

    # a trailing dot (absolute-name form) is tolerated
    if zonemgr.endswith('.'):
        zonemgr = zonemgr[:-1]

    if zonemgr.count('@') == 1:
        # mail-address form: split on the single '@'
        local_part, dot, domain = zonemgr.partition('@')
    elif zonemgr.count('@') > 1:
        raise ValueError(_('too many \'@\' characters'))
    else:
        # DNS form: the local-part/domain boundary is the first
        # unescaped dot after the last escaped ("fake") separator '\.'
        last_fake_sep = zonemgr.rfind('\\.')
        if last_fake_sep != -1:  # there is a 'fake' local-part/domain separator
            local_part_sep = '\\.'
            # +2 skips over the two characters of the escaped separator
            sep = zonemgr.find('.', last_fake_sep + 2)
            if sep != -1:
                local_part = zonemgr[:sep]
                domain = zonemgr[sep+1:]
        else:
            local_part, dot, domain = zonemgr.partition('.')

    if not domain:
        raise ValueError(_('missing address domain'))

    validate_domain_name(domain)

    if not local_part:
        raise ValueError(_('missing mail account'))

    # every dot-separated (or '\.'-separated) piece of the local part
    # must individually satisfy the local-part pattern
    if not all(regex_local_part.match(part) for part in
               local_part.split(local_part_sep)):
        raise ValueError(local_part_errmsg)
2014-01-31 08:42:31 -06:00
def validate_hostname(hostname, check_fqdn=True, allow_underscore=False, allow_slash=False):
    """ See RFC 952, 1123

    Validate a host name, optionally requiring it to be fully qualified.

    :param hostname: checked value
    :param check_fqdn: when True, a name without any dot is rejected
    :param allow_underscore: permit '_' in labels
    :param allow_slash: permit '/' inside labels
    :raises ValueError: when the name is too long, contains an empty
        label, is not fully qualified, or has an invalid label
    """
    if len(hostname) > 255:
        raise ValueError(_('cannot be longer that 255 characters'))

    # a single trailing dot (absolute-name form) is acceptable
    hostname = hostname[:-1] if hostname.endswith('.') else hostname

    if '..' in hostname:
        raise ValueError(_('hostname contains empty label (consecutive dots)'))

    if '.' in hostname:
        validate_domain_name(hostname, allow_underscore, allow_slash)
    elif check_fqdn:
        raise ValueError(_('not fully qualified'))
    else:
        validate_dns_label(hostname, allow_underscore, allow_slash)
2011-11-01 07:58:05 -05:00
2012-09-03 08:33:30 -05:00
def normalize_sshpubkey(value):
    """Return ``value`` re-encoded in canonical OpenSSH public key form."""
    pubkey = SSHPublicKey(value)
    return pubkey.openssh()
def validate_sshpubkey(ugettext, value):
    """
    Parameter validator: return a translated error message when
    ``value`` is not a parseable SSH public key, None otherwise.
    """
    try:
        SSHPublicKey(value)
    # Catch both exception types properly: the previous
    # ``except ValueError, UnicodeDecodeError`` form bound the raised
    # exception to the name UnicodeDecodeError instead of catching it
    # (it only behaved because UnicodeDecodeError subclasses ValueError).
    except (ValueError, UnicodeDecodeError):
        return _('invalid SSH public key')
2012-09-03 08:33:30 -05:00
def validate_sshpubkey_no_options(ugettext, value):
    """
    Parameter validator: like validate_sshpubkey(), but additionally
    reject keys that carry OpenSSH options.
    """
    try:
        pubkey = SSHPublicKey(value)
    # Proper tuple form: ``except ValueError, UnicodeDecodeError`` bound
    # the exception to the name UnicodeDecodeError rather than catching it.
    except (ValueError, UnicodeDecodeError):
        return _('invalid SSH public key')

    if pubkey.has_options():
        return _('options are not allowed')
def convert_sshpubkey_post(ldap, dn, entry_attrs):
    """
    Post-callback helper: normalize the 'ipasshpubkey' values of an
    entry and derive the 'sshpubkeyfp' fingerprint list.

    Keys are taken from ``entry_attrs`` when present, otherwise fetched
    from LDAP via ``ldap.get_entry``.  Unparseable keys are skipped.
    Mutates ``entry_attrs`` in place.
    """
    if 'ipasshpubkey' in entry_attrs:
        pubkeys = entry_attrs['ipasshpubkey']
    else:
        old_entry_attrs = ldap.get_entry(dn, ['ipasshpubkey'])
        pubkeys = old_entry_attrs.get('ipasshpubkey')
    if not pubkeys:
        return

    newpubkeys = []
    fingerprints = []
    for pubkey in pubkeys:
        try:
            pubkey = SSHPublicKey(pubkey)
        # Proper tuple form: the previous ``except ValueError,
        # UnicodeDecodeError`` bound the exception to the name
        # UnicodeDecodeError instead of catching that type.
        except (ValueError, UnicodeDecodeError):
            # skip malformed keys rather than failing the whole entry
            continue

        fp = pubkey.fingerprint_hex_md5()
        comment = pubkey.comment()
        if comment:
            fp = u'%s %s' % (fp, comment)
        fp = u'%s (%s)' % (fp, pubkey.keytype())

        newpubkeys.append(pubkey.openssh())
        fingerprints.append(fp)

    if 'ipasshpubkey' in entry_attrs:
        entry_attrs['ipasshpubkey'] = newpubkeys or None
    if fingerprints:
        entry_attrs['sshpubkeyfp'] = fingerprints
2011-11-01 07:58:05 -05:00
class cachedproperty(object):
    """
    A property-like attribute that caches the return value of a method call.

    When the attribute is first read, the method is called and its return
    value is saved and returned. On subsequent reads, the saved value is
    returned. The attribute is read-only: assignment and deletion raise
    AttributeError.

    Typical usage:

    class C(object):
        @cachedproperty
        def attr(self):
            return 'value'
    """

    __slots__ = ('getter', 'store')

    def __init__(self, getter):
        self.getter = getter
        # Weak keys so cached values do not keep their instances alive.
        self.store = WeakKeyDictionary()

    def __get__(self, obj, cls):
        # class-level access yields None rather than the descriptor
        if obj is None:
            return None
        try:
            return self.store[obj]
        except KeyError:
            value = self.getter(obj)
            self.store[obj] = value
            return value

    def __set__(self, obj, value):
        raise AttributeError("can't set attribute")

    def __delete__(self, obj):
        raise AttributeError("can't delete attribute")
add session manager and cache krb auth
This patch adds a session manager and support for caching
authentication in the session. Major elements of the patch are:
* Add a session manager to support cookie based sessions which
stores session data in a memcached entry.
* Add ipalib/krb_utils.py which contains functions to parse ccache
names, format principals, format KRB timestamps, and a KRB_CCache
class which reads ccache entry and allows one to extract information
such as the principal, credentials, credential timestamps, etc.
* Move krb constants defined in ipalib/rpc.py to ipa_krb_utils.py so
that all kerberos items are co-located.
* Modify javascript in ipa.js so that the IPA.command() RPC call
checks for authentication needed error response and if it receives
it sends a GET request to /ipa/login URL to refresh credentials.
* Add session_auth_duration config item to constants.py, used to
configure how long a session remains valid.
* Add parse_time_duration utility to ipalib/util.py. Used to parse the
session_auth_duration config item.
* Update the default.conf.5 man page to document session_auth_duration
config item (also added documentation for log_manager config items
which had been inadvertantly omitted from a previous commit).
* Add SessionError object to ipalib/errors.py
* Move Kerberos protection in Apache config from /ipa to /ipa/xml and
/ipa/login
* Add SessionCCache class to session.py to manage temporary Kerberos
ccache file in effect for the duration of an RPC command.
* Adds a krblogin plugin used to implement the /ipa/login
handler. login handler sets the session expiration time, currently
60 minutes or the expiration of the TGT, whichever is shorter. It
also copies the ccache provided by mod_auth_kerb into the session
data. The json handler will later extract and validate the ccache
belonging to the session.
* Refactored the WSGI handlers so that json and xlmrpc could have
independent behavior, this also moves where create and destroy
context occurs, now done in the individual handler rather than the
parent class.
* The json handler now looks up the session data, validates the ccache
bound to the session, if it's expired replies with authenicated
needed error.
* Add documentation to session.py. Fully documents the entire process,
got questions, read the doc.
* Add exclusions to make-lint as needed.
2012-02-06 12:29:56 -06:00
# regexp matching signed floating point number (group 1) followed by
# optional whitespace followed by time unit, e.g. day, hour (group 7)
time_duration_re = re.compile(r'([-+]?((\d+)|(\d+\.\d+)|(\.\d+)|(\d+\.)))\s*([a-z]+)', re.IGNORECASE)

# number of seconds in a time unit
time_duration_units = {
    'year':    365*24*60*60,
    'years':   365*24*60*60,
    'y':       365*24*60*60,
    'month':   30*24*60*60,
    'months':  30*24*60*60,
    'week':    7*24*60*60,
    'weeks':   7*24*60*60,
    'w':       7*24*60*60,
    'day':     24*60*60,
    'days':    24*60*60,
    'd':       24*60*60,
    'hour':    60*60,
    'hours':   60*60,
    'h':       60*60,
    'minute':  60,
    'minutes': 60,
    'min':     60,
    'second':  1,
    'seconds': 1,
    'sec':     1,
    's':       1,
}

def parse_time_duration(value):
    '''
    Given a time duration string, parse it and return the total number
    of seconds represented as a floating point value. Negative values
    are permitted.

    The string is one or more optionally signed numbers, each followed
    by a time unit; whitespace and punctuation between them are
    ignored. Units are case insensitive except for the single
    characters 'M' and 'm', which mean month and minute respectively.

    Recognized time units are:

        * year, years, y
        * month, months, M
        * week, weeks, w
        * day, days, d
        * hour, hours, h
        * minute, minutes, min, m
        * second, seconds, sec, s

    Examples:
        "1h"                  # 1 hour
        "2 HOURS, 30 Minutes" # 2.5 hours
        "1week -1 day"        # 6 days
        ".5day"               # 12 hours
        "2M"                  # 2 months
        "1h, -15min"          # 45 minutes

    Note: Despite the appearance you can perform arithmetic, the
    parsing is much simpler: each signed value found is added to a
    running total. Only + and - are permitted and must appear prior to
    a digit.

    :parameters:
      value : string
        A time duration string in the specified format
    :returns:
      total number of seconds as float (may be negative)
    '''
    total = 0.0
    found_any = False
    for token in time_duration_re.finditer(value):
        found_any = True
        magnitude_text = token.group(1)
        unit = token.group(7)

        # Only 'M' (month) and 'm' (minute) are case sensitive; every
        # other unit is looked up lower-cased in the unit table.
        if unit == 'M':
            seconds_per_unit = 30*24*60*60
        elif unit == 'm':
            seconds_per_unit = 60
        else:
            unit = unit.lower()
            try:
                seconds_per_unit = time_duration_units[unit]
            except KeyError:
                raise ValueError('unknown time duration unit "%s"' % unit)
        total += float(magnitude_text) * seconds_per_unit

    if not found_any:
        raise ValueError('no time duration found in "%s"' % value)

    return total
2012-02-24 09:23:52 -06:00
2012-06-04 10:53:34 -05:00
def get_dns_forward_zone_update_policy(realm, rrtypes=('A', 'AAAA', 'SSHFP')):
    """
    Generate update policy for a forward DNS zone (idnsUpdatePolicy
    attribute). Bind uses this policy to grant/reject access for client
    machines trying to dynamically update their records.

    :param realm: The Kerberos realm of the client
    :param rrtypes: Resource record types the client shall be allowed
                    to update
    """
    policy_element = "grant %(realm)s krb5-self * %(rrtype)s"
    grants = []
    for rrtype in rrtypes:
        grants.append(policy_element % dict(realm=realm, rrtype=rrtype))
    # each grant is terminated by ';', including the last
    return "; ".join(grants) + ";"
2012-04-16 01:33:26 -05:00
2012-06-04 10:53:34 -05:00
def get_dns_reverse_zone_update_policy(realm, reverse_zone, rrtypes=('PTR',)):
    """
    Generate update policy for a reverse DNS zone (idnsUpdatePolicy
    attribute). Bind uses this policy to grant/reject access for client
    machines trying to dynamically update their records.

    :param realm: The Kerberos realm of the client
    :param reverse_zone: Name of the actual zone. All clients with IPs
                         in this sub-domain will be allowed to perform
                         changes
    :param rrtypes: Resource record types the client shall be allowed
                    to update
    """
    policy_element = "grant %(realm)s krb5-subdomain %(zone)s %(rrtype)s"
    grants = []
    for rrtype in rrtypes:
        grants.append(policy_element
                      % dict(realm=realm, zone=reverse_zone, rrtype=rrtype))
    # each grant is terminated by ';', including the last
    return "; ".join(grants) + ";"
2012-09-25 03:36:01 -05:00
# dictionary of valid reverse zone -> number of address components
REVERSE_DNS_ZONES = {
    '.in-addr.arpa.': 4,
    '.ip6.arpa.': 32,
}

def zone_is_reverse(zone_name):
    """
    Return True when ``zone_name`` (normalized first) lies under one of
    the known reverse-DNS domains, False otherwise.
    """
    zone_name = normalize_zone(zone_name)
    return any(zone_name.endswith(suffix) for suffix in REVERSE_DNS_ZONES)
2012-09-05 02:56:27 -05:00
def get_reverse_zone_default(ip_address):
    """
    Return the default reverse zone for ``ip_address`` as a normalized
    (dot-terminated) zone name: /24 granularity for IPv4, /64 for IPv6.
    """
    ip = netaddr.IPAddress(str(ip_address))
    labels = ip.reverse_dns.split('.')
    # drop the host-specific leading labels: one octet for IPv4 (/24),
    # sixteen nibbles for IPv6 (/64)
    drop = {4: 1, 6: 16}.get(ip.version, 0)
    return normalize_zone('.'.join(labels[drop:]))
2012-04-16 01:33:26 -05:00
def validate_rdn_param(ugettext, value):
    """
    Parameter validator: return an error string when ``value`` cannot be
    parsed as an RDN, None on success.
    """
    try:
        # only parsing matters; the constructed RDN is discarded
        RDN(value)
    # ``as e`` form: the old ``except Exception, e`` spelling is a
    # SyntaxError on Python 3 (and the bound RDN was never used).
    except Exception as e:
        return str(e)
    return None