2008-11-24 13:51:03 -06:00
|
|
|
# Authors:
|
|
|
|
# Jason Gerard DeRose <jderose@redhat.com>
|
2009-01-22 15:00:37 -06:00
|
|
|
# Rob Crittenden <rcritten@redhat.com>
|
2008-11-24 13:51:03 -06:00
|
|
|
#
|
|
|
|
# Copyright (C) 2008 Red Hat
|
|
|
|
# see file 'COPYING' for use and warranty information
|
|
|
|
#
|
2010-12-09 06:59:11 -06:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
2008-11-24 13:51:03 -06:00
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2010-12-09 06:59:11 -06:00
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2008-11-24 13:51:03 -06:00
|
|
|
|
|
|
|
"""
|
2009-01-16 02:56:39 -06:00
|
|
|
RPC client and shared RPC client/server functionality.
|
2009-01-16 02:23:55 -06:00
|
|
|
|
|
|
|
This module adds some additional functionality on top of the ``xmlrpclib``
|
|
|
|
module in the Python standard library. For documentation on the
|
|
|
|
``xmlrpclib`` module, see:
|
|
|
|
|
|
|
|
http://docs.python.org/library/xmlrpclib.html
|
2009-01-16 02:56:39 -06:00
|
|
|
|
|
|
|
Also see the `ipaserver.rpcserver` module.
|
2008-11-24 13:51:03 -06:00
|
|
|
"""
|
|
|
|
|
|
|
|
from types import NoneType
|
2012-01-17 04:19:00 -06:00
|
|
|
from decimal import Decimal
|
2010-08-31 15:59:27 -05:00
|
|
|
import sys
|
2014-01-09 04:14:56 -06:00
|
|
|
import datetime
|
2009-02-19 16:20:37 -06:00
|
|
|
import os
|
2011-02-15 13:10:38 -06:00
|
|
|
import locale
|
2012-12-19 03:25:24 -06:00
|
|
|
import base64
|
|
|
|
import urllib
|
|
|
|
import json
|
|
|
|
import socket
|
|
|
|
from urllib2 import urlparse
|
|
|
|
|
2014-01-09 04:14:56 -06:00
|
|
|
from xmlrpclib import (Binary, Fault, DateTime, dumps, loads, ServerProxy,
|
|
|
|
Transport, ProtocolError, MININT, MAXINT)
|
2009-01-22 15:00:37 -06:00
|
|
|
import kerberos
|
2012-05-11 07:38:09 -05:00
|
|
|
from dns import resolver, rdatatype
|
|
|
|
from dns.exception import DNSException
|
2012-12-19 03:25:24 -06:00
|
|
|
from nss.error import NSPRError
|
2012-05-11 07:38:09 -05:00
|
|
|
|
2009-01-23 19:02:32 -06:00
|
|
|
from ipalib.backend import Connectible
|
2014-01-09 04:14:56 -06:00
|
|
|
from ipalib.constants import LDAP_GENERALIZED_TIME_FORMAT
|
2012-12-19 03:25:24 -06:00
|
|
|
from ipalib.errors import (public_errors, UnknownError, NetworkError,
|
|
|
|
KerberosError, XMLRPCMarshallError, JSONError, ConversionError)
|
2014-01-09 04:14:56 -06:00
|
|
|
from ipalib import errors, capabilities
|
2012-03-03 18:50:21 -06:00
|
|
|
from ipalib.request import context, Connection
|
2012-07-16 09:40:12 -05:00
|
|
|
from ipalib.util import get_current_principal
|
2012-12-04 17:20:17 -06:00
|
|
|
from ipapython.ipa_log_manager import root_logger
|
2012-05-11 07:38:09 -05:00
|
|
|
from ipapython import ipautil
|
2012-06-06 21:54:16 -05:00
|
|
|
from ipapython import kernel_keyring
|
2014-05-29 07:47:17 -05:00
|
|
|
from ipaplatform.paths import paths
|
2012-12-04 17:20:17 -06:00
|
|
|
from ipapython.cookie import Cookie
|
2014-04-11 07:45:43 -05:00
|
|
|
from ipapython.dnsutil import DNSName
|
2012-07-04 07:52:47 -05:00
|
|
|
from ipalib.text import _
|
2014-09-16 19:11:35 -05:00
|
|
|
import ipapython.nsslib
|
2010-08-31 15:59:27 -05:00
|
|
|
from ipapython.nsslib import NSSHTTPS, NSSConnection
|
add session manager and cache krb auth
This patch adds a session manager and support for caching
authentication in the session. Major elements of the patch are:
* Add a session manager to support cookie based sessions which
stores session data in a memcached entry.
* Add ipalib/krb_utils.py which contains functions to parse ccache
names, format principals, format KRB timestamps, and a KRB_CCache
class which reads ccache entry and allows one to extract information
such as the principal, credentials, credential timestamps, etc.
* Move krb constants defined in ipalib/rpc.py to ipa_krb_utils.py so
that all kerberos items are co-located.
* Modify javascript in ipa.js so that the IPA.command() RPC call
checks for authentication needed error response and if it receives
it sends a GET request to /ipa/login URL to refresh credentials.
* Add session_auth_duration config item to constants.py, used to
configure how long a session remains valid.
* Add parse_time_duration utility to ipalib/util.py. Used to parse the
session_auth_duration config item.
* Update the default.conf.5 man page to document session_auth_duration
config item (also added documentation for log_manager config items
which had been inadvertantly omitted from a previous commit).
* Add SessionError object to ipalib/errors.py
* Move Kerberos protection in Apache config from /ipa to /ipa/xml and
/ipa/login
* Add SessionCCache class to session.py to manage temporary Kerberos
ccache file in effect for the duration of an RPC command.
* Adds a krblogin plugin used to implement the /ipa/login
handler. login handler sets the session expiration time, currently
60 minutes or the expiration of the TGT, whichever is shorter. It
also copies the ccache provied by mod_auth_kerb into the session
data. The json handler will later extract and validate the ccache
belonging to the session.
* Refactored the WSGI handlers so that json and xlmrpc could have
independent behavior, this also moves where create and destroy
context occurs, now done in the individual handler rather than the
parent class.
* The json handler now looks up the session data, validates the ccache
bound to the session, if it's expired replies with authenicated
needed error.
* Add documentation to session.py. Fully documents the entire process,
got questions, read the doc.
* Add exclusions to make-lint as needed.
2012-02-06 12:29:56 -06:00
|
|
|
from ipalib.krb_utils import KRB5KDC_ERR_S_PRINCIPAL_UNKNOWN, KRB5KRB_AP_ERR_TKT_EXPIRED, \
|
|
|
|
KRB5_FCC_PERM, KRB5_FCC_NOFILE, KRB5_CC_FORMAT, KRB5_REALM_CANT_RESOLVE
|
Use DN objects instead of strings
* Convert every string specifying a DN into a DN object
* Every place a dn was manipulated in some fashion it was replaced by
the use of DN operators
* Add new DNParam parameter type for parameters which are DN's
* DN objects are used 100% of the time throughout the entire data
pipeline whenever something is logically a dn.
* Many classes now enforce DN usage for their attributes which are
dn's. This is implmented via ipautil.dn_attribute_property(). The
only permitted types for a class attribute specified to be a DN are
either None or a DN object.
* Require that every place a dn is used it must be a DN object.
This translates into lot of::
assert isinstance(dn, DN)
sprinkled through out the code. Maintaining these asserts is
valuable to preserve DN type enforcement. The asserts can be
disabled in production.
The goal of 100% DN usage 100% of the time has been realized, these
asserts are meant to preserve that.
The asserts also proved valuable in detecting functions which did
not obey their function signatures, such as the baseldap pre and
post callbacks.
* Moved ipalib.dn to ipapython.dn because DN class is shared with all
components, not just the server which uses ipalib.
* All API's now accept DN's natively, no need to convert to str (or
unicode).
* Removed ipalib.encoder and encode/decode decorators. Type conversion
is now explicitly performed in each IPASimpleLDAPObject method which
emulates a ldap.SimpleLDAPObject method.
* Entity & Entry classes now utilize DN's
* Removed __getattr__ in Entity & Entity clases. There were two
problems with it. It presented synthetic Python object attributes
based on the current LDAP data it contained. There is no way to
validate synthetic attributes using code checkers, you can't search
the code to find LDAP attribute accesses (because synthetic
attriutes look like Python attributes instead of LDAP data) and
error handling is circumscribed. Secondly __getattr__ was hiding
Python internal methods which broke class semantics.
* Replace use of methods inherited from ldap.SimpleLDAPObject via
IPAdmin class with IPAdmin methods. Directly using inherited methods
was causing us to bypass IPA logic. Mostly this meant replacing the
use of search_s() with getEntry() or getList(). Similarly direct
access of the LDAP data in classes using IPAdmin were replaced with
calls to getValue() or getValues().
* Objects returned by ldap2.find_entries() are now compatible with
either the python-ldap access methodology or the Entity/Entry access
methodology.
* All ldap operations now funnel through the common
IPASimpleLDAPObject giving us a single location where we interface
to python-ldap and perform conversions.
* The above 4 modifications means we've greatly reduced the
proliferation of multiple inconsistent ways to perform LDAP
operations. We are well on the way to having a single API in IPA for
doing LDAP (a long range goal).
* All certificate subject bases are now DN's
* DN objects were enhanced thusly:
- find, rfind, index, rindex, replace and insert methods were added
- AVA, RDN and DN classes were refactored in immutable and mutable
variants, the mutable variants are EditableAVA, EditableRDN and
EditableDN. By default we use the immutable variants preserving
important semantics. To edit a DN cast it to an EditableDN and
cast it back to DN when done editing. These issues are fully
described in other documentation.
- first_key_match was removed
- DN equalty comparison permits comparison to a basestring
* Fixed ldapupdate to work with DN's. This work included:
- Enhance test_updates.py to do more checking after applying
update. Add test for update_from_dict(). Convert code to use
unittest classes.
- Consolidated duplicate code.
- Moved code which should have been in the class into the class.
- Fix the handling of the 'deleteentry' update action. It's no longer
necessary to supply fake attributes to make it work. Detect case
where subsequent update applies a change to entry previously marked
for deletetion. General clean-up and simplification of the
'deleteentry' logic.
- Rewrote a couple of functions to be clearer and more Pythonic.
- Added documentation on the data structure being used.
- Simplfy the use of update_from_dict()
* Removed all usage of get_schema() which was being called prior to
accessing the .schema attribute of an object. If a class is using
internal lazy loading as an optimization it's not right to require
users of the interface to be aware of internal
optimization's. schema is now a property and when the schema
property is accessed it calls a private internal method to perform
the lazy loading.
* Added SchemaCache class to cache the schema's from individual
servers. This was done because of the observation we talk to
different LDAP servers, each of which may have it's own
schema. Previously we globally cached the schema from the first
server we connected to and returned that schema in all contexts. The
cache includes controls to invalidate it thus forcing a schema
refresh.
* Schema caching is now senstive to the run time context. During
install and upgrade the schema can change leading to errors due to
out-of-date cached schema. The schema cache is refreshed in these
contexts.
* We are aware of the LDAP syntax of all LDAP attributes. Every
attribute returned from an LDAP operation is passed through a
central table look-up based on it's LDAP syntax. The table key is
the LDAP syntax it's value is a Python callable that returns a
Python object matching the LDAP syntax. There are a handful of LDAP
attributes whose syntax is historically incorrect
(e.g. DistguishedNames that are defined as DirectoryStrings). The
table driven conversion mechanism is augmented with a table of
hard coded exceptions.
Currently only the following conversions occur via the table:
- dn's are converted to DN objects
- binary objects are converted to Python str objects (IPA
convention).
- everything else is converted to unicode using UTF-8 decoding (IPA
convention).
However, now that the table driven conversion mechanism is in place
it would be trivial to do things such as converting attributes
which have LDAP integer syntax into a Python integer, etc.
* Expected values in the unit tests which are a DN no longer need to
use lambda expressions to promote the returned value to a DN for
equality comparison. The return value is automatically promoted to
a DN. The lambda expressions have been removed making the code much
simpler and easier to read.
* Add class level logging to a number of classes which did not support
logging, less need for use of root_logger.
* Remove ipaserver/conn.py, it was unused.
* Consolidated duplicate code wherever it was found.
* Fixed many places that used string concatenation to form a new
string rather than string formatting operators. This is necessary
because string formatting converts it's arguments to a string prior
to building the result string. You can't concatenate a string and a
non-string.
* Simplify logic in rename_managed plugin. Use DN operators to edit
dn's.
* The live version of ipa-ldap-updater did not generate a log file.
The offline version did, now both do.
https://fedorahosted.org/freeipa/ticket/1670
https://fedorahosted.org/freeipa/ticket/1671
https://fedorahosted.org/freeipa/ticket/1672
https://fedorahosted.org/freeipa/ticket/1673
https://fedorahosted.org/freeipa/ticket/1674
https://fedorahosted.org/freeipa/ticket/1392
https://fedorahosted.org/freeipa/ticket/2872
2012-05-13 06:36:35 -05:00
|
|
|
from ipapython.dn import DN
|
2014-03-28 03:51:10 -05:00
|
|
|
from ipalib.capabilities import VERSION_WITHOUT_CAPABILITIES
|
2014-10-30 10:52:14 -05:00
|
|
|
from ipalib import api
|
2008-11-24 13:51:03 -06:00
|
|
|
|
2012-12-04 17:20:17 -06:00
|
|
|
# Name of the HTTP cookie that carries the IPA session id.
COOKIE_NAME = 'ipa_session'
# Template for the kernel keyring key name under which a principal's
# session cookie is persisted; the remaining '%s' is filled in with the
# principal name (see client_session_keyring_keyname()).
KEYRING_COOKIE_NAME = '%s_cookie:%%s' % COOKIE_NAME

# Map errno -> public error class so a numeric code received over the
# wire can be re-raised as the corresponding IPA error.
errors_by_code = dict((e.errno, e) for e in public_errors)
|
|
|
|
|
2012-12-04 17:20:17 -06:00
|
|
|
|
|
|
|
def client_session_keyring_keyname(principal):
    '''
    Build the kernel keyring key name under which the client session
    data for ``principal`` is stored.
    '''
    # The template carries one '%s' slot for the principal name.
    return KEYRING_COOKIE_NAME % (principal,)
|
|
|
|
|
|
|
|
def update_persistent_client_session_data(principal, data):
|
|
|
|
'''
|
|
|
|
Given a principal create or update the session data for that
|
|
|
|
principal in the persistent secure storage.
|
|
|
|
|
|
|
|
Raises ValueError if unable to perform the action for any reason.
|
|
|
|
'''
|
|
|
|
|
|
|
|
try:
|
|
|
|
keyname = client_session_keyring_keyname(principal)
|
|
|
|
except Exception, e:
|
|
|
|
raise ValueError(str(e))
|
|
|
|
|
|
|
|
# kernel_keyring only raises ValueError (why??)
|
|
|
|
kernel_keyring.update_key(keyname, data)
|
|
|
|
|
|
|
|
def read_persistent_client_session_data(principal):
|
|
|
|
'''
|
|
|
|
Given a principal return the stored session data for that
|
|
|
|
principal from the persistent secure storage.
|
|
|
|
|
|
|
|
Raises ValueError if unable to perform the action for any reason.
|
|
|
|
'''
|
|
|
|
|
|
|
|
try:
|
|
|
|
keyname = client_session_keyring_keyname(principal)
|
|
|
|
except Exception, e:
|
|
|
|
raise ValueError(str(e))
|
|
|
|
|
|
|
|
# kernel_keyring only raises ValueError (why??)
|
|
|
|
return kernel_keyring.read_key(keyname)
|
|
|
|
|
|
|
|
def delete_persistent_client_session_data(principal):
|
|
|
|
'''
|
|
|
|
Given a principal remove the session data for that
|
|
|
|
principal from the persistent secure storage.
|
|
|
|
|
|
|
|
Raises ValueError if unable to perform the action for any reason.
|
|
|
|
'''
|
|
|
|
|
|
|
|
try:
|
|
|
|
keyname = client_session_keyring_keyname(principal)
|
|
|
|
except Exception, e:
|
|
|
|
raise ValueError(str(e))
|
|
|
|
|
|
|
|
# kernel_keyring only raises ValueError (why??)
|
|
|
|
kernel_keyring.del_key(keyname)
|
2012-07-16 09:40:12 -05:00
|
|
|
|
2014-03-28 03:51:10 -05:00
|
|
|
def xml_wrap(value, version):
|
2008-11-24 13:51:03 -06:00
|
|
|
"""
|
|
|
|
Wrap all ``str`` in ``xmlrpclib.Binary``.
|
|
|
|
|
|
|
|
Because ``xmlrpclib.dumps()`` will itself convert all ``unicode`` instances
|
|
|
|
into UTF-8 encoded ``str`` instances, we don't do it here.
|
|
|
|
|
2009-01-16 02:23:55 -06:00
|
|
|
So in total, when encoding data for an XML-RPC packet, the following
|
2008-11-24 13:51:03 -06:00
|
|
|
transformations occur:
|
|
|
|
|
|
|
|
* All ``str`` instances are treated as binary data and are wrapped in
|
|
|
|
an ``xmlrpclib.Binary()`` instance.
|
|
|
|
|
|
|
|
* Only ``unicode`` instances are treated as character data. They get
|
|
|
|
converted to UTF-8 encoded ``str`` instances (although as mentioned,
|
|
|
|
not by this function).
|
|
|
|
|
2009-01-16 02:23:55 -06:00
|
|
|
Also see `xml_unwrap()`.
|
|
|
|
|
|
|
|
:param value: The simple scalar or simple compound value to wrap.
|
2008-11-24 13:51:03 -06:00
|
|
|
"""
|
|
|
|
if type(value) in (list, tuple):
|
2014-03-28 03:51:10 -05:00
|
|
|
return tuple(xml_wrap(v, version) for v in value)
|
Use DN objects instead of strings
* Convert every string specifying a DN into a DN object
* Every place a dn was manipulated in some fashion it was replaced by
the use of DN operators
* Add new DNParam parameter type for parameters which are DN's
* DN objects are used 100% of the time throughout the entire data
pipeline whenever something is logically a dn.
* Many classes now enforce DN usage for their attributes which are
dn's. This is implmented via ipautil.dn_attribute_property(). The
only permitted types for a class attribute specified to be a DN are
either None or a DN object.
* Require that every place a dn is used it must be a DN object.
This translates into lot of::
assert isinstance(dn, DN)
sprinkled through out the code. Maintaining these asserts is
valuable to preserve DN type enforcement. The asserts can be
disabled in production.
The goal of 100% DN usage 100% of the time has been realized, these
asserts are meant to preserve that.
The asserts also proved valuable in detecting functions which did
not obey their function signatures, such as the baseldap pre and
post callbacks.
* Moved ipalib.dn to ipapython.dn because DN class is shared with all
components, not just the server which uses ipalib.
* All API's now accept DN's natively, no need to convert to str (or
unicode).
* Removed ipalib.encoder and encode/decode decorators. Type conversion
is now explicitly performed in each IPASimpleLDAPObject method which
emulates a ldap.SimpleLDAPObject method.
* Entity & Entry classes now utilize DN's
* Removed __getattr__ in Entity & Entity clases. There were two
problems with it. It presented synthetic Python object attributes
based on the current LDAP data it contained. There is no way to
validate synthetic attributes using code checkers, you can't search
the code to find LDAP attribute accesses (because synthetic
attriutes look like Python attributes instead of LDAP data) and
error handling is circumscribed. Secondly __getattr__ was hiding
Python internal methods which broke class semantics.
* Replace use of methods inherited from ldap.SimpleLDAPObject via
IPAdmin class with IPAdmin methods. Directly using inherited methods
was causing us to bypass IPA logic. Mostly this meant replacing the
use of search_s() with getEntry() or getList(). Similarly direct
access of the LDAP data in classes using IPAdmin were replaced with
calls to getValue() or getValues().
* Objects returned by ldap2.find_entries() are now compatible with
either the python-ldap access methodology or the Entity/Entry access
methodology.
* All ldap operations now funnel through the common
IPASimpleLDAPObject giving us a single location where we interface
to python-ldap and perform conversions.
* The above 4 modifications means we've greatly reduced the
proliferation of multiple inconsistent ways to perform LDAP
operations. We are well on the way to having a single API in IPA for
doing LDAP (a long range goal).
* All certificate subject bases are now DN's
* DN objects were enhanced thusly:
- find, rfind, index, rindex, replace and insert methods were added
- AVA, RDN and DN classes were refactored in immutable and mutable
variants, the mutable variants are EditableAVA, EditableRDN and
EditableDN. By default we use the immutable variants preserving
important semantics. To edit a DN cast it to an EditableDN and
cast it back to DN when done editing. These issues are fully
described in other documentation.
- first_key_match was removed
- DN equalty comparison permits comparison to a basestring
* Fixed ldapupdate to work with DN's. This work included:
- Enhance test_updates.py to do more checking after applying
update. Add test for update_from_dict(). Convert code to use
unittest classes.
- Consolidated duplicate code.
- Moved code which should have been in the class into the class.
- Fix the handling of the 'deleteentry' update action. It's no longer
necessary to supply fake attributes to make it work. Detect case
where subsequent update applies a change to entry previously marked
for deletetion. General clean-up and simplification of the
'deleteentry' logic.
- Rewrote a couple of functions to be clearer and more Pythonic.
- Added documentation on the data structure being used.
- Simplfy the use of update_from_dict()
* Removed all usage of get_schema() which was being called prior to
accessing the .schema attribute of an object. If a class is using
internal lazy loading as an optimization it's not right to require
users of the interface to be aware of internal
optimization's. schema is now a property and when the schema
property is accessed it calls a private internal method to perform
the lazy loading.
* Added SchemaCache class to cache the schema's from individual
servers. This was done because of the observation we talk to
different LDAP servers, each of which may have it's own
schema. Previously we globally cached the schema from the first
server we connected to and returned that schema in all contexts. The
cache includes controls to invalidate it thus forcing a schema
refresh.
* Schema caching is now senstive to the run time context. During
install and upgrade the schema can change leading to errors due to
out-of-date cached schema. The schema cache is refreshed in these
contexts.
* We are aware of the LDAP syntax of all LDAP attributes. Every
attribute returned from an LDAP operation is passed through a
central table look-up based on it's LDAP syntax. The table key is
the LDAP syntax it's value is a Python callable that returns a
Python object matching the LDAP syntax. There are a handful of LDAP
attributes whose syntax is historically incorrect
(e.g. DistguishedNames that are defined as DirectoryStrings). The
table driven conversion mechanism is augmented with a table of
hard coded exceptions.
Currently only the following conversions occur via the table:
- dn's are converted to DN objects
- binary objects are converted to Python str objects (IPA
convention).
- everything else is converted to unicode using UTF-8 decoding (IPA
convention).
However, now that the table driven conversion mechanism is in place
it would be trivial to do things such as converting attributes
which have LDAP integer syntax into a Python integer, etc.
* Expected values in the unit tests which are a DN no longer need to
use lambda expressions to promote the returned value to a DN for
equality comparison. The return value is automatically promoted to
a DN. The lambda expressions have been removed making the code much
simpler and easier to read.
* Add class level logging to a number of classes which did not support
logging, less need for use of root_logger.
* Remove ipaserver/conn.py, it was unused.
* Consolidated duplicate code wherever it was found.
* Fixed many places that used string concatenation to form a new
string rather than string formatting operators. This is necessary
because string formatting converts it's arguments to a string prior
to building the result string. You can't concatenate a string and a
non-string.
* Simplify logic in rename_managed plugin. Use DN operators to edit
dn's.
* The live version of ipa-ldap-updater did not generate a log file.
The offline version did, now both do.
https://fedorahosted.org/freeipa/ticket/1670
https://fedorahosted.org/freeipa/ticket/1671
https://fedorahosted.org/freeipa/ticket/1672
https://fedorahosted.org/freeipa/ticket/1673
https://fedorahosted.org/freeipa/ticket/1674
https://fedorahosted.org/freeipa/ticket/1392
https://fedorahosted.org/freeipa/ticket/2872
2012-05-13 06:36:35 -05:00
|
|
|
if isinstance(value, dict):
|
2008-11-24 13:51:03 -06:00
|
|
|
return dict(
|
2014-03-28 03:51:10 -05:00
|
|
|
(k, xml_wrap(v, version)) for (k, v) in value.iteritems()
|
2008-11-24 13:51:03 -06:00
|
|
|
)
|
|
|
|
if type(value) is str:
|
|
|
|
return Binary(value)
|
2012-01-17 04:19:00 -06:00
|
|
|
if type(value) is Decimal:
|
|
|
|
# transfer Decimal as a string
|
|
|
|
return unicode(value)
|
2012-09-04 08:49:26 -05:00
|
|
|
if isinstance(value, (int, long)) and (value < MININT or value > MAXINT):
|
|
|
|
return unicode(value)
|
Use DN objects instead of strings
* Convert every string specifying a DN into a DN object
* Every place a dn was manipulated in some fashion it was replaced by
the use of DN operators
* Add new DNParam parameter type for parameters which are DN's
* DN objects are used 100% of the time throughout the entire data
pipeline whenever something is logically a dn.
* Many classes now enforce DN usage for their attributes which are
dn's. This is implmented via ipautil.dn_attribute_property(). The
only permitted types for a class attribute specified to be a DN are
either None or a DN object.
* Require that every place a dn is used it must be a DN object.
This translates into lot of::
assert isinstance(dn, DN)
sprinkled through out the code. Maintaining these asserts is
valuable to preserve DN type enforcement. The asserts can be
disabled in production.
The goal of 100% DN usage 100% of the time has been realized, these
asserts are meant to preserve that.
The asserts also proved valuable in detecting functions which did
not obey their function signatures, such as the baseldap pre and
post callbacks.
* Moved ipalib.dn to ipapython.dn because DN class is shared with all
components, not just the server which uses ipalib.
* All API's now accept DN's natively, no need to convert to str (or
unicode).
* Removed ipalib.encoder and encode/decode decorators. Type conversion
is now explicitly performed in each IPASimpleLDAPObject method which
emulates a ldap.SimpleLDAPObject method.
* Entity & Entry classes now utilize DN's
* Removed __getattr__ in Entity & Entity clases. There were two
problems with it. It presented synthetic Python object attributes
based on the current LDAP data it contained. There is no way to
validate synthetic attributes using code checkers, you can't search
the code to find LDAP attribute accesses (because synthetic
attriutes look like Python attributes instead of LDAP data) and
error handling is circumscribed. Secondly __getattr__ was hiding
Python internal methods which broke class semantics.
* Replace use of methods inherited from ldap.SimpleLDAPObject via
IPAdmin class with IPAdmin methods. Directly using inherited methods
was causing us to bypass IPA logic. Mostly this meant replacing the
use of search_s() with getEntry() or getList(). Similarly direct
access of the LDAP data in classes using IPAdmin were replaced with
calls to getValue() or getValues().
* Objects returned by ldap2.find_entries() are now compatible with
either the python-ldap access methodology or the Entity/Entry access
methodology.
* All ldap operations now funnel through the common
IPASimpleLDAPObject giving us a single location where we interface
to python-ldap and perform conversions.
* The above 4 modifications means we've greatly reduced the
proliferation of multiple inconsistent ways to perform LDAP
operations. We are well on the way to having a single API in IPA for
doing LDAP (a long range goal).
* All certificate subject bases are now DN's
* DN objects were enhanced thusly:
- find, rfind, index, rindex, replace and insert methods were added
- AVA, RDN and DN classes were refactored in immutable and mutable
variants, the mutable variants are EditableAVA, EditableRDN and
EditableDN. By default we use the immutable variants preserving
important semantics. To edit a DN cast it to an EditableDN and
cast it back to DN when done editing. These issues are fully
described in other documentation.
- first_key_match was removed
- DN equalty comparison permits comparison to a basestring
* Fixed ldapupdate to work with DN's. This work included:
- Enhance test_updates.py to do more checking after applying
update. Add test for update_from_dict(). Convert code to use
unittest classes.
- Consolidated duplicate code.
- Moved code which should have been in the class into the class.
- Fix the handling of the 'deleteentry' update action. It's no longer
necessary to supply fake attributes to make it work. Detect case
where subsequent update applies a change to entry previously marked
for deletetion. General clean-up and simplification of the
'deleteentry' logic.
- Rewrote a couple of functions to be clearer and more Pythonic.
- Added documentation on the data structure being used.
- Simplfy the use of update_from_dict()
* Removed all usage of get_schema() which was being called prior to
accessing the .schema attribute of an object. If a class is using
internal lazy loading as an optimization it's not right to require
users of the interface to be aware of internal
optimization's. schema is now a property and when the schema
property is accessed it calls a private internal method to perform
the lazy loading.
* Added SchemaCache class to cache the schema's from individual
servers. This was done because of the observation we talk to
different LDAP servers, each of which may have it's own
schema. Previously we globally cached the schema from the first
server we connected to and returned that schema in all contexts. The
cache includes controls to invalidate it thus forcing a schema
refresh.
* Schema caching is now senstive to the run time context. During
install and upgrade the schema can change leading to errors due to
out-of-date cached schema. The schema cache is refreshed in these
contexts.
* We are aware of the LDAP syntax of all LDAP attributes. Every
attribute returned from an LDAP operation is passed through a
central table look-up based on it's LDAP syntax. The table key is
the LDAP syntax it's value is a Python callable that returns a
Python object matching the LDAP syntax. There are a handful of LDAP
attributes whose syntax is historically incorrect
(e.g. DistguishedNames that are defined as DirectoryStrings). The
table driven conversion mechanism is augmented with a table of
hard coded exceptions.
Currently only the following conversions occur via the table:
- dn's are converted to DN objects
- binary objects are converted to Python str objects (IPA
convention).
- everything else is converted to unicode using UTF-8 decoding (IPA
convention).
However, now that the table driven conversion mechanism is in place
it would be trivial to do things such as converting attributes
which have LDAP integer syntax into a Python integer, etc.
* Expected values in the unit tests which are a DN no longer need to
use lambda expressions to promote the returned value to a DN for
equality comparison. The return value is automatically promoted to
a DN. The lambda expressions have been removed making the code much
simpler and easier to read.
* Add class level logging to a number of classes which did not support
logging, less need for use of root_logger.
* Remove ipaserver/conn.py, it was unused.
* Consolidated duplicate code wherever it was found.
* Fixed many places that used string concatenation to form a new
string rather than string formatting operators. This is necessary
because string formatting converts its arguments to a string prior
to building the result string. You can't concatenate a string and a
non-string.
* Simplify logic in rename_managed plugin. Use DN operators to edit
dn's.
* The live version of ipa-ldap-updater did not generate a log file.
The offline version did, now both do.
https://fedorahosted.org/freeipa/ticket/1670
https://fedorahosted.org/freeipa/ticket/1671
https://fedorahosted.org/freeipa/ticket/1672
https://fedorahosted.org/freeipa/ticket/1673
https://fedorahosted.org/freeipa/ticket/1674
https://fedorahosted.org/freeipa/ticket/1392
https://fedorahosted.org/freeipa/ticket/2872
2012-05-13 06:36:35 -05:00
|
|
|
if isinstance(value, DN):
|
|
|
|
return str(value)
|
2014-01-09 04:14:56 -06:00
|
|
|
|
|
|
|
# Encode datetime.datetime objects as xmlrpclib.DateTime objects
|
|
|
|
if isinstance(value, datetime.datetime):
|
|
|
|
if capabilities.client_has_capability(version, 'datetime_values'):
|
|
|
|
return DateTime(value)
|
|
|
|
else:
|
|
|
|
return value.strftime(LDAP_GENERALIZED_TIME_FORMAT)
|
|
|
|
|
2014-04-11 07:45:43 -05:00
|
|
|
if isinstance(value, DNSName):
|
|
|
|
if capabilities.client_has_capability(version, 'dns_name_values'):
|
|
|
|
return {'__dns_name__': unicode(value)}
|
|
|
|
else:
|
|
|
|
return unicode(value)
|
|
|
|
|
2012-09-04 08:49:26 -05:00
|
|
|
assert type(value) in (unicode, int, long, float, bool, NoneType)
|
2008-11-24 13:51:03 -06:00
|
|
|
return value
|
|
|
|
|
|
|
|
|
2009-01-16 00:52:50 -06:00
|
|
|
def xml_unwrap(value, encoding='UTF-8'):
    """
    Unwrap all ``xmlrpc.Binary``, decode all ``str`` into ``unicode``.

    When decoding data from an XML-RPC packet, the following transformations
    occur:

        * The binary payloads of all ``xmlrpclib.Binary`` instances are
          returned as ``str`` instances.

        * All ``str`` instances are treated as UTF-8 encoded Unicode strings.
          They are decoded and the resulting ``unicode`` instance is returned.

    Also see `xml_wrap()`.

    :param value: The value to unwrap.
    :param encoding: The Unicode encoding to use (defaults to ``'UTF-8'``).
    """
    if type(value) in (list, tuple):
        # Sequences are normalized to tuples regardless of the wire type.
        return tuple(xml_unwrap(v, encoding) for v in value)
    if type(value) is dict:
        if '__dns_name__' in value:
            # Marker dict produced by xml_wrap() for DNSName values.
            return DNSName(value['__dns_name__'])
        else:
            return dict(
                (k, xml_unwrap(v, encoding)) for (k, v) in value.iteritems()
            )
    if type(value) is str:
        return value.decode(encoding)
    if isinstance(value, Binary):
        # Binary payloads stay as raw str (IPA convention for binary data).
        assert type(value.data) is str
        return value.data
    if isinstance(value, DateTime):
        # xmlprc DateTime is converted to string of %Y%m%dT%H:%M:%S format
        return datetime.datetime.strptime(str(value), "%Y%m%dT%H:%M:%S")
    assert type(value) in (unicode, int, float, bool, NoneType)
    return value
|
2009-01-16 00:52:50 -06:00
|
|
|
|
|
|
|
|
2014-03-28 03:51:10 -05:00
|
|
|
def xml_dumps(params, version, methodname=None, methodresponse=False,
        encoding='UTF-8'):
    """
    Encode an XML-RPC data packet, transparently wrapping ``params``.

    This function will wrap ``params`` using `xml_wrap()` and will
    then encode the XML-RPC data packet using ``xmlrpclib.dumps()`` (from the
    Python standard library).

    For documentation on the ``xmlrpclib.dumps()`` function, see:

        http://docs.python.org/library/xmlrpclib.html#convenience-functions

    Also see `xml_loads()`.

    :param params: A ``tuple`` or an ``xmlrpclib.Fault`` instance.
    :param version: Client API version, forwarded to `xml_wrap()` so values
        can be encoded according to the client's capabilities.
    :param methodname: The name of the method to call if this is a request.
    :param methodresponse: Set this to ``True`` if this is a response.
    :param encoding: The Unicode encoding to use (defaults to ``'UTF-8'``).
    """
    if type(params) is tuple:
        params = xml_wrap(params, version)
    else:
        # Faults are serialized as-is; only argument tuples get wrapped.
        assert isinstance(params, Fault)
    return dumps(params,
        methodname=methodname,
        methodresponse=methodresponse,
        encoding=encoding,
        allow_none=True,
    )
|
|
|
|
|
2009-01-31 00:46:51 -06:00
|
|
|
|
2014-03-28 03:51:10 -05:00
|
|
|
def json_encode_binary(val, version):
    '''
    JSON cannot encode binary values. We encode binary values in Python str
    objects and text in Python unicode objects. In order to allow a binary
    object to be passed through JSON we base64 encode it thus converting it to
    text which JSON can transport. To assure we recognize the value is a base64
    encoded representation of the original binary value and not confuse it with
    other text we convert the binary value to a dict in this form:

    {'__base64__' : base64_encoding_of_binary_value}

    This modification of the original input value cannot be done "in place" as
    one might first assume (e.g. replacing any binary items in a container
    (e.g. list, tuple, dict) with the base64 dict because the container might be
    an immutable object (i.e. a tuple). Therefore this function returns a copy
    of any container objects it encounters with tuples replaced by lists. This
    is O.K. because the JSON encoding will map both lists and tuples to JSON
    arrays.

    :param val: The value to encode; containers are copied recursively.
    :param version: Client API version, used to decide whether the client
        understands the ``__datetime__`` and ``__dns_name__`` marker dicts.
    '''
    if isinstance(val, dict):
        new_dict = {}
        for k, v in val.items():
            new_dict[k] = json_encode_binary(v, version)
        return new_dict
    elif isinstance(val, (list, tuple)):
        # Tuples become lists; both map to JSON arrays anyway.
        new_list = [json_encode_binary(v, version) for v in val]
        return new_list
    elif isinstance(val, str):
        # Raw str is binary by IPA convention -> base64 marker dict.
        return {'__base64__': base64.b64encode(val)}
    elif isinstance(val, Decimal):
        return {'__base64__': base64.b64encode(str(val))}
    elif isinstance(val, DN):
        return str(val)
    elif isinstance(val, datetime.datetime):
        if capabilities.client_has_capability(version, 'datetime_values'):
            return {'__datetime__': val.strftime(LDAP_GENERALIZED_TIME_FORMAT)}
        else:
            # Older clients only understand a bare generalized-time string.
            return val.strftime(LDAP_GENERALIZED_TIME_FORMAT)
    elif isinstance(val, DNSName):
        if capabilities.client_has_capability(version, 'dns_name_values'):
            return {'__dns_name__': unicode(val)}
        else:
            return unicode(val)
    else:
        return val
|
|
|
|
|
|
|
|
|
|
|
|
def json_decode_binary(val):
    '''
    JSON cannot transport binary data. In order to transport binary data we
    convert binary data to a form like this:

    {'__base64__' : base64_encoding_of_binary_value}

    see json_encode_binary()

    After JSON had decoded the JSON stream back into a Python object we must
    recursively scan the object looking for any dicts which might represent
    binary values and replace the dict containing the base64 encoding of the
    binary value with the decoded binary value. Unlike the encoding problem
    where the input might consist of immutable object, all JSON decoded
    container are mutable so the conversion could be done in place. However we
    don't modify objects in place because of side effects which may be
    dangerous. Thus we elect to spend a few more cycles and avoid the
    possibility of unintended side effects in favor of robustness.

    :param val: The JSON-decoded value to scan; marker dicts
        (``__base64__``, ``__datetime__``, ``__dns_name__``) are replaced
        by the corresponding Python objects and lists become tuples.
    '''
    if isinstance(val, dict):
        if '__base64__' in val:
            return base64.b64decode(val['__base64__'])
        elif '__datetime__' in val:
            return datetime.datetime.strptime(val['__datetime__'],
                                              LDAP_GENERALIZED_TIME_FORMAT)
        elif '__dns_name__' in val:
            return DNSName(val['__dns_name__'])
        else:
            return dict((k, json_decode_binary(v)) for k, v in val.items())
    elif isinstance(val, list):
        # Lists are normalized to tuples, mirroring xml_unwrap().
        return tuple(json_decode_binary(v) for v in val)
    else:
        if isinstance(val, basestring):
            try:
                return val.decode('utf-8')
            except UnicodeDecodeError:
                raise ConversionError(
                    name=val,
                    error='incorrect type'
                )
        else:
            return val
|
|
|
|
|
|
|
|
|
2009-01-22 16:41:54 -06:00
|
|
|
def decode_fault(e, encoding='UTF-8'):
    """
    Return a Fault whose ``faultString`` is decoded into ``unicode``.

    If the fault string is already unicode the fault is returned as-is;
    otherwise a new Fault carrying the decoded string is built.

    :param e: An ``xmlrpclib.Fault`` instance.
    :param encoding: Encoding used to decode the fault string.
    """
    assert isinstance(e, Fault)
    fault_string = e.faultString
    if type(fault_string) is not str:
        return e
    return Fault(e.faultCode, fault_string.decode(encoding))
|
2009-01-16 00:52:50 -06:00
|
|
|
|
2009-01-31 00:46:51 -06:00
|
|
|
|
2009-01-22 16:41:54 -06:00
|
|
|
def xml_loads(data, encoding='UTF-8'):
|
2009-01-16 02:23:55 -06:00
|
|
|
"""
|
2009-01-16 02:56:39 -06:00
|
|
|
Decode the XML-RPC packet in ``data``, transparently unwrapping its params.
|
2009-01-16 02:23:55 -06:00
|
|
|
|
|
|
|
This function will decode the XML-RPC packet in ``data`` using
|
|
|
|
``xmlrpclib.loads()`` (from the Python standard library). If ``data``
|
|
|
|
contains a fault, ``xmlrpclib.loads()`` will itself raise an
|
|
|
|
``xmlrpclib.Fault`` exception.
|
|
|
|
|
|
|
|
Assuming an exception is not raised, this function will then unwrap the
|
|
|
|
params in ``data`` using `xml_unwrap()`. Finally, a
|
|
|
|
``(params, methodname)`` tuple is returned containing the unwrapped params
|
|
|
|
and the name of the method being called. If the packet contains no method
|
|
|
|
name, ``methodname`` will be ``None``.
|
|
|
|
|
|
|
|
For documentation on the ``xmlrpclib.loads()`` function, see:
|
|
|
|
|
|
|
|
http://docs.python.org/library/xmlrpclib.html#convenience-functions
|
|
|
|
|
|
|
|
Also see `xml_dumps()`.
|
|
|
|
|
|
|
|
:param data: The XML-RPC packet to decode.
|
|
|
|
"""
|
2009-01-22 16:41:54 -06:00
|
|
|
try:
|
|
|
|
(params, method) = loads(data)
|
|
|
|
return (xml_unwrap(params), method)
|
|
|
|
except Fault, e:
|
|
|
|
raise decode_fault(e)
|
2009-01-19 22:10:42 -06:00
|
|
|
|
|
|
|
|
2012-12-19 03:25:24 -06:00
|
|
|
class DummyParser(object):
    """Parser stand-in for the JSON protocol: buffers raw data unparsed.

    xmlrpclib's Transport expects a (parser, unmarshaller) pair; for JSON
    we simply accumulate the body and hand it back verbatim on close().
    """

    def __init__(self):
        # Accumulated raw response body.
        self.data = ''

    def feed(self, data):
        # Append the next chunk verbatim; no parsing happens here.
        self.data = self.data + data

    def close(self):
        # Mimic a real parser's close(): return everything fed so far.
        return self.data
|
|
|
|
|
|
|
|
|
|
|
|
class MultiProtocolTransport(Transport):
    """Transport that handles both XML-RPC and JSON"""
    def __init__(self, protocol):
        Transport.__init__(self)
        # 'json' selects the pass-through parser and JSON Content-Type;
        # anything else falls back to stock XML-RPC handling.
        self.protocol = protocol

    def getparser(self):
        # For JSON we don't parse in the transport at all: DummyParser
        # just buffers the body, which is decoded later by the proxy.
        if self.protocol == 'json':
            parser = DummyParser()
            return parser, parser
        else:
            return Transport.getparser(self)

    def send_content(self, connection, request_body):
        # Re-implementation of Transport.send_content with the
        # protocol-appropriate Content-Type header.
        if self.protocol == 'json':
            connection.putheader("Content-Type", "application/json")
        else:
            connection.putheader("Content-Type", "text/xml")

        # gzip compression would be set up here, but we have it turned off
        # (encode_threshold is None)

        connection.putheader("Content-Length", str(len(request_body)))
        connection.endheaders(request_body)
|
|
|
|
|
|
|
|
|
|
|
|
class LanguageAwareTransport(MultiProtocolTransport):
    """Transport sending Accept-Language header"""
    def get_host_info(self, host):
        # Extend the base host info with Accept-Language (derived from the
        # process locale) and a Referer header required by the server.
        host, extra_headers, x509 = MultiProtocolTransport.get_host_info(
            self, host)

        try:
            # e.g. 'en_US.UTF-8' -> 'en_us'
            lang = locale.setlocale(locale.LC_ALL, '').split('.')[0].lower()
        except locale.Error:
            # fallback to default locale
            lang = 'en_us'

        if not isinstance(extra_headers, list):
            extra_headers = []

        extra_headers.append(
            ('Accept-Language', lang.replace('_', '-'))
        )
        extra_headers.append(
            ('Referer', 'https://%s/ipa/xml' % str(host))
        )

        return (host, extra_headers, x509)
|
|
|
|
|
|
|
|
class SSLTransport(LanguageAwareTransport):
    """Handles an HTTPS transaction to an XML-RPC server."""

    def get_connection_dbdir(self):
        """
        If there is a connection open it may have already initialized the
        NSS database. Return the database location used by the connection,
        or None if no open SSL connection is found in the thread context.
        """
        # Scan the per-thread context for Connection objects whose proxy
        # transport is an SSLTransport carrying a 'dbdir' attribute.
        for value in context.__dict__.values():
            if not isinstance(value, Connection):
                continue
            if not isinstance(
                    getattr(value.conn, '_ServerProxy__transport', None),
                    SSLTransport):
                continue
            if hasattr(value.conn._ServerProxy__transport, 'dbdir'):
                return value.conn._ServerProxy__transport.dbdir
        return None

    def make_connection(self, host):
        """
        Create (or reuse, on Python >= 2.7) an NSS-backed HTTPS connection,
        avoiding NSS re-initialization when the database is already in use.
        """
        host, self._extra_headers, x509 = self.get_host_info(host)
        # Python 2.7 changed the internal class used in xmlrpclib from
        # HTTP to HTTPConnection. We need to use the proper subclass

        if sys.version_info >= (2, 7):
            # Reuse the cached connection for the same host if available.
            if self._connection and host == self._connection[0]:
                return self._connection[1]

        dbdir = getattr(context, 'nss_dir', paths.IPA_NSSDB_DIR)
        connection_dbdir = self.get_connection_dbdir()

        if connection_dbdir:
            # If an existing connection is already using the same NSS
            # database there is no need to re-initialize.
            no_init = dbdir == connection_dbdir
        else:
            # If the NSS database is already being used there is no
            # need to re-initialize.
            no_init = dbdir == ipapython.nsslib.current_dbdir

        if sys.version_info < (2, 7):
            conn = NSSHTTPS(host, 443, dbdir=dbdir, no_init=no_init)
        else:
            conn = NSSConnection(host, 443, dbdir=dbdir, no_init=no_init,
                                 tls_version_min=api.env.tls_version_min,
                                 tls_version_max=api.env.tls_version_max)
        self.dbdir=dbdir

        conn.connect()
        if sys.version_info < (2, 7):
            return conn
        else:
            # Cache the connection for reuse on subsequent requests.
            self._connection = host, conn
            return self._connection[1]
|
2009-02-19 16:20:37 -06:00
|
|
|
|
2012-06-06 21:54:16 -05:00
|
|
|
|
2009-02-19 16:20:37 -06:00
|
|
|
class KerbTransport(SSLTransport):
    """
    Handles Kerberos Negotiation authentication to an XML-RPC server.
    """
    # GSSAPI flags requested for the security context.
    flags = kerberos.GSS_C_MUTUAL_FLAG | kerberos.GSS_C_SEQUENCE_FLAG

    def _handle_exception(self, e, service=None):
        # Map a raw GSSError onto the corresponding IPA error class
        # based on the minor Kerberos status code.
        (major, minor) = ipautil.get_gsserror(e)
        if minor[1] == KRB5KDC_ERR_S_PRINCIPAL_UNKNOWN:
            raise errors.ServiceError(service=service)
        elif minor[1] == KRB5_FCC_NOFILE:
            raise errors.NoCCacheError()
        elif minor[1] == KRB5KRB_AP_ERR_TKT_EXPIRED:
            raise errors.TicketExpired()
        elif minor[1] == KRB5_FCC_PERM:
            raise errors.BadCCachePerms()
        elif minor[1] == KRB5_CC_FORMAT:
            raise errors.BadCCacheFormat()
        elif minor[1] == KRB5_REALM_CANT_RESOLVE:
            raise errors.CannotResolveKDC()
        else:
            raise errors.KerberosError(major=major, minor=minor)

    def get_host_info(self, host):
        """
        Two things can happen here. If we have a session we will add
        a cookie for that. If not we will set an Authorization header.
        """
        (host, extra_headers, x509) = SSLTransport.get_host_info(self, host)

        if not isinstance(extra_headers, list):
            extra_headers = []

        session_cookie = getattr(context, 'session_cookie', None)
        if session_cookie:
            extra_headers.append(('Cookie', session_cookie))
            # With a valid session no GSSAPI negotiation is needed.
            return (host, extra_headers, x509)

        # Set the remote host principal
        service = "HTTP@" + host.split(':')[0]

        try:
            (rc, vc) = kerberos.authGSSClientInit(service=service,
                                                  gssflags=self.flags)
        except kerberos.GSSError, e:
            self._handle_exception(e)

        try:
            kerberos.authGSSClientStep(vc, "")
        except kerberos.GSSError, e:
            self._handle_exception(e, service=service)

        # Replace any stale Authorization header with the fresh token.
        for (h, v) in extra_headers:
            if h == 'Authorization':
                extra_headers.remove((h, v))
                break

        extra_headers.append(
            ('Authorization', 'negotiate %s' % kerberos.authGSSClientResponse(vc))
        )

        return (host, extra_headers, x509)

    def single_request(self, host, handler, request_body, verbose=0):
        # Always close the connection afterwards; negotiate auth is
        # per-request, so the connection cannot be reused.
        try:
            return SSLTransport.single_request(self, host, handler, request_body, verbose)
        finally:
            self.close()

    def store_session_cookie(self, cookie_header):
        '''
        Given the contents of a Set-Cookie header scan the header and
        extract each cookie contained within until the session cookie
        is located. Examine the session cookie if the domain and path
        are specified, if not update the cookie with those values from
        the request URL. Then write the session cookie into the key
        store for the principal. If the cookie header is None or the
        session cookie is not present in the header no action is
        taken.

        Context Dependencies:

        The per thread context is expected to contain:
            principal
                The current principal the HTTP request was issued for.
            request_url
                The URL of the HTTP request.

        '''
        if cookie_header is None:
            return

        principal = getattr(context, 'principal', None)
        request_url = getattr(context, 'request_url', None)
        root_logger.debug("received Set-Cookie '%s'", cookie_header)

        # Search for the session cookie
        try:
            session_cookie = Cookie.get_named_cookie_from_string(cookie_header,
                                                                 COOKIE_NAME, request_url)
        except Exception, e:
            root_logger.error("unable to parse cookie header '%s': %s", cookie_header, e)
            return

        if session_cookie is None:
            return

        cookie_string = str(session_cookie)
        root_logger.debug("storing cookie '%s' for principal %s", cookie_string, principal)
        try:
            update_persistent_client_session_data(principal, cookie_string)
        except Exception, e:
            # Not fatal, we just can't use the session cookie we were sent.
            pass

    def parse_response(self, response):
        # Capture any session cookie the server handed back before the
        # normal response parsing.
        self.store_session_cookie(response.getheader('Set-Cookie'))
        return SSLTransport.parse_response(self, response)
|
|
|
|
|
2009-01-22 15:00:37 -06:00
|
|
|
|
2012-02-15 10:06:54 -06:00
|
|
|
class DelegatedKerbTransport(KerbTransport):
    """
    Handles Kerberos Negotiation authentication and TGT delegation to an
    XML-RPC server.
    """
    # Same as KerbTransport.flags with GSS_C_DELEG_FLAG added, which
    # forwards the client's TGT to the server.
    flags = kerberos.GSS_C_DELEG_FLAG | kerberos.GSS_C_MUTUAL_FLAG | \
            kerberos.GSS_C_SEQUENCE_FLAG
|
|
|
|
|
2012-12-19 03:25:24 -06:00
|
|
|
|
|
|
|
class RPCClient(Connectible):
    """
    Forwarding backend plugin for XML-RPC client.

    Also see the `ipaserver.rpcserver.xmlserver` plugin.
    """

    # Values to set on subclasses:
    session_path = None           # server path used once a session is active
    server_proxy_class = ServerProxy  # proxy class to instantiate per URL
    protocol = None               # wire protocol name, e.g. 'xml' or 'json'
    env_rpc_uri_key = None        # env key holding the configured server URI
|
2009-01-19 22:10:42 -06:00
|
|
|
|
2012-12-19 03:25:24 -06:00
|
|
|
def get_url_list(self, rpc_uri):
|
2010-07-26 16:54:38 -05:00
|
|
|
"""
|
|
|
|
Create a list of urls consisting of the available IPA servers.
|
|
|
|
"""
|
|
|
|
# the configured URL defines what we use for the discovered servers
|
2012-12-19 03:25:24 -06:00
|
|
|
(scheme, netloc, path, params, query, fragment
|
|
|
|
) = urlparse.urlparse(rpc_uri)
|
2010-07-26 16:54:38 -05:00
|
|
|
servers = []
|
|
|
|
name = '_ldap._tcp.%s.' % self.env.domain
|
2012-05-11 07:38:09 -05:00
|
|
|
|
|
|
|
try:
|
|
|
|
answers = resolver.query(name, rdatatype.SRV)
|
|
|
|
except DNSException, e:
|
|
|
|
answers = []
|
|
|
|
|
|
|
|
for answer in answers:
|
|
|
|
server = str(answer.target).rstrip(".")
|
|
|
|
servers.append('https://%s%s' % (ipautil.format_netloc(server), path))
|
|
|
|
|
2010-07-26 16:54:38 -05:00
|
|
|
servers = list(set(servers))
|
|
|
|
# the list/set conversion won't preserve order so stick in the
|
|
|
|
# local config file version here.
|
2012-12-19 03:25:24 -06:00
|
|
|
cfg_server = rpc_uri
|
2011-09-19 11:37:27 -05:00
|
|
|
if cfg_server in servers:
|
|
|
|
# make sure the configured master server is there just once and
|
|
|
|
# it is the first one
|
|
|
|
servers.remove(cfg_server)
|
|
|
|
servers.insert(0, cfg_server)
|
|
|
|
else:
|
|
|
|
servers.insert(0, cfg_server)
|
|
|
|
|
2010-07-26 16:54:38 -05:00
|
|
|
return servers
|
|
|
|
|
2012-12-04 17:20:17 -06:00
|
|
|
    def get_session_cookie_from_persistent_storage(self, principal):
        '''
        Retrieves the session cookie for the given principal from the
        persistent secure storage. Returns None if not found or unable
        to retrieve the session cookie for any reason, otherwise
        returns a Cookie object containing the session cookie.

        :param principal: Kerberos principal whose stored session cookie
            should be looked up.
        '''
        # Get the session data, it should contain a cookie string
        # (possibly with more than one cookie).
        try:
            cookie_string = read_persistent_client_session_data(principal)
        except Exception, e:
            # Best effort: missing/unreadable storage means no session.
            return None

        # Search for the session cookie within the cookie string
        try:
            session_cookie = Cookie.get_named_cookie_from_string(cookie_string, COOKIE_NAME)
        except Exception, e:
            # Malformed cookie data is treated the same as no session.
            return None

        return session_cookie
|
|
|
|
|
|
|
|
    def apply_session_cookie(self, url):
        '''
        Attempt to load a session cookie for the current principal
        from the persistent secure storage. If the cookie is
        successfully loaded adjust the input url's to point to the
        session path and insert the session cookie into the per thread
        context for later insertion into the HTTP request. If the
        cookie is not successfully loaded then the original url is
        returned and the per thread context is not modified.

        Context Dependencies:

        The per thread context is expected to contain:
            principal
                The current principal the HTTP request was issued for.

        The per thread context will be updated with:
            session_cookie
                A cookie string to be inserted into the Cookie header
                of the HTTP request.

        '''
        original_url = url
        principal = getattr(context, 'principal', None)

        session_cookie = self.get_session_cookie_from_persistent_storage(principal)
        if session_cookie is None:
            self.log.debug("failed to find session_cookie in persistent storage for principal '%s'",
                           principal)
            return original_url
        else:
            self.debug("found session_cookie in persistent storage for principal '%s', cookie: '%s'",
                       principal, session_cookie)

        # Decide if we should send the cookie to the server
        try:
            session_cookie.http_return_ok(original_url)
        except Cookie.Expired, e:
            self.debug("deleting session data for principal '%s': %s", principal, e)
            try:
                delete_persistent_client_session_data(principal)
            except Exception, e:
                # Best-effort cleanup; a stale stored entry is harmless.
                pass
            return original_url
        except Cookie.URLMismatch, e:
            self.debug("not sending session cookie, URL mismatch: %s", e)
            return original_url
        except Exception, e:
            self.error("not sending session cookie, unknown error: %s", e)
            return original_url

        # O.K. session_cookie is valid to be returned, stash it away where it
        # will get included in a HTTP Cookie header sent to the server.
        self.log.debug("setting session_cookie into context '%s'", session_cookie.http_cookie())
        setattr(context, 'session_cookie', session_cookie.http_cookie())

        # Form the session URL by substituting the session path into the original URL
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(original_url)
        path = self.session_path
        session_url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))

        return session_url
|
|
|
|
|
2014-09-23 05:10:56 -05:00
|
|
|
def create_connection(self, ccache=None, verbose=0, fallback=True,
|
2014-06-12 06:39:39 -05:00
|
|
|
delegate=False, nss_dir=None):
|
2012-06-06 21:54:16 -05:00
|
|
|
try:
|
2012-12-19 03:25:24 -06:00
|
|
|
rpc_uri = self.env[self.env_rpc_uri_key]
|
2012-07-16 09:40:12 -05:00
|
|
|
principal = get_current_principal()
|
|
|
|
setattr(context, 'principal', principal)
|
2012-06-06 21:54:16 -05:00
|
|
|
# We have a session cookie, try using the session URI to see if it
|
|
|
|
# is still valid
|
|
|
|
if not delegate:
|
2012-12-19 03:25:24 -06:00
|
|
|
rpc_uri = self.apply_session_cookie(rpc_uri)
|
2012-06-06 21:54:16 -05:00
|
|
|
except ValueError:
|
|
|
|
# No session key, do full Kerberos auth
|
|
|
|
pass
|
2014-06-12 06:39:39 -05:00
|
|
|
# This might be dangerous. Use at your own risk!
|
|
|
|
if nss_dir:
|
|
|
|
context.nss_dir = nss_dir
|
2012-12-19 03:25:24 -06:00
|
|
|
urls = self.get_url_list(rpc_uri)
|
2010-07-26 16:54:38 -05:00
|
|
|
serverproxy = None
|
2012-12-04 17:20:17 -06:00
|
|
|
for url in urls:
|
2010-07-26 16:54:38 -05:00
|
|
|
kw = dict(allow_none=True, encoding='UTF-8')
|
|
|
|
kw['verbose'] = verbose
|
2012-12-04 17:20:17 -06:00
|
|
|
if url.startswith('https://'):
|
2012-02-15 10:06:54 -06:00
|
|
|
if delegate:
|
2012-12-19 03:25:24 -06:00
|
|
|
transport_class = DelegatedKerbTransport
|
2012-02-15 10:06:54 -06:00
|
|
|
else:
|
2012-12-19 03:25:24 -06:00
|
|
|
transport_class = KerbTransport
|
2011-02-15 13:10:38 -06:00
|
|
|
else:
|
2012-12-19 03:25:24 -06:00
|
|
|
transport_class = LanguageAwareTransport
|
|
|
|
kw['transport'] = transport_class(protocol=self.protocol)
|
2014-02-05 02:24:22 -06:00
|
|
|
self.log.info('trying %s' % url)
|
2012-12-04 17:20:17 -06:00
|
|
|
setattr(context, 'request_url', url)
|
2012-12-19 03:25:24 -06:00
|
|
|
serverproxy = self.server_proxy_class(url, **kw)
|
2012-12-04 17:20:17 -06:00
|
|
|
if len(urls) == 1:
|
2012-06-06 21:54:16 -05:00
|
|
|
# if we have only 1 server and then let the
|
|
|
|
# main requester handle any errors. This also means it
|
|
|
|
# must handle a 401 but we save a ping.
|
2010-07-26 16:54:38 -05:00
|
|
|
return serverproxy
|
|
|
|
try:
|
|
|
|
command = getattr(serverproxy, 'ping')
|
2011-05-17 14:09:39 -05:00
|
|
|
try:
|
2012-12-19 03:25:24 -06:00
|
|
|
response = command([], {})
|
2011-05-17 14:09:39 -05:00
|
|
|
except Fault, e:
|
|
|
|
e = decode_fault(e)
|
2012-12-19 03:25:24 -06:00
|
|
|
if e.faultCode in errors_by_code:
|
|
|
|
error = errors_by_code[e.faultCode]
|
2011-05-17 14:09:39 -05:00
|
|
|
raise error(message=e.faultString)
|
|
|
|
else:
|
|
|
|
raise UnknownError(
|
|
|
|
code=e.faultCode,
|
|
|
|
error=e.faultString,
|
2012-12-04 17:20:17 -06:00
|
|
|
server=url,
|
2011-05-17 14:09:39 -05:00
|
|
|
)
|
2010-07-26 16:54:38 -05:00
|
|
|
# We don't care about the response, just that we got one
|
|
|
|
break
|
|
|
|
except KerberosError, krberr:
|
|
|
|
# kerberos error on one server is likely on all
|
|
|
|
raise errors.KerberosError(major=str(krberr), minor='')
|
2012-06-06 21:54:16 -05:00
|
|
|
except ProtocolError, e:
|
2012-12-04 17:20:17 -06:00
|
|
|
if hasattr(context, 'session_cookie') and e.errcode == 401:
|
2012-06-06 21:54:16 -05:00
|
|
|
# Unauthorized. Remove the session and try again.
|
2012-12-04 17:20:17 -06:00
|
|
|
delattr(context, 'session_cookie')
|
2012-06-06 21:54:16 -05:00
|
|
|
try:
|
2012-12-04 17:20:17 -06:00
|
|
|
delete_persistent_client_session_data(principal)
|
|
|
|
except Exception, e:
|
|
|
|
# This shouldn't happen if we have a session but it isn't fatal.
|
2012-06-06 21:54:16 -05:00
|
|
|
pass
|
|
|
|
return self.create_connection(ccache, verbose, fallback, delegate)
|
|
|
|
if not fallback:
|
|
|
|
raise
|
|
|
|
serverproxy = None
|
2010-07-26 16:54:38 -05:00
|
|
|
except Exception, e:
|
|
|
|
if not fallback:
|
2012-06-06 21:54:16 -05:00
|
|
|
raise
|
2012-12-18 16:14:41 -06:00
|
|
|
else:
|
|
|
|
self.log.info('Connection to %s failed with %s', url, e)
|
2010-07-26 16:54:38 -05:00
|
|
|
serverproxy = None
|
|
|
|
|
|
|
|
if serverproxy is None:
|
2012-07-04 07:52:47 -05:00
|
|
|
raise NetworkError(uri=_('any of the configured servers'),
|
2012-12-04 17:20:17 -06:00
|
|
|
error=', '.join(urls))
|
2010-07-26 16:54:38 -05:00
|
|
|
return serverproxy
|
2009-01-22 15:00:37 -06:00
|
|
|
|
2009-01-23 19:02:32 -06:00
|
|
|
def destroy_connection(self):
    """
    Close the transport of the cached connection, if one exists.

    NOTE(review): the version gate suggests the transport's close() is
    only usable on Python >= 2.7 — confirm; on older interpreters this
    is a no-op.
    """
    if sys.version_info < (2, 7):
        return
    cached = getattr(context, self.id, None)
    if cached is None:
        return
    # cached.conn is a ServerProxy; reach into its private transport
    # attribute to close the underlying connection.
    cached.conn._ServerProxy__transport.close()
|
2009-01-22 15:00:37 -06:00
|
|
|
|
2012-12-19 03:25:24 -06:00
|
|
|
def _call_command(self, command, params):
    """
    Invoke ``command`` with ``params`` expanded as positional arguments.

    This is a hook for subclasses to adapt the parameters to their wire
    format: the XML client wraps/unwraps binary values here, while the
    JSON client performs that conversion in its proxy instead.
    """
    return command(*params)
|
|
|
|
|
2009-01-19 22:10:42 -06:00
|
|
|
def forward(self, name, *args, **kw):
    """
    Forward call to command named ``name`` over XML-RPC.

    This method will encode and forward an XML-RPC request, and will then
    decode and return the corresponding XML-RPC response.

    :param command: The name of the command being forwarded.
    :param args: Positional arguments to pass to remote command.
    :param kw: Keyword arguments to pass to remote command.
    """
    if name not in self.Command:
        raise ValueError(
            '%s.forward(): %r not in api.Command' % (self.name, name)
        )
    # May be None if no request URL was recorded on the thread-local
    # context (getattr default).
    server = getattr(context, 'request_url', None)
    self.log.info("Forwarding '%s' to %s server '%s'",
                  name, self.protocol, server)
    command = getattr(self.conn, name)
    # Wire format: a two-item list of (positional args, keyword args).
    params = [args, kw]
    try:
        return self._call_command(command, params)
    except Fault, e:
        # Server-side errors arrive as Faults; map the fault code back to
        # the matching client-side error class when one is registered.
        e = decode_fault(e)
        self.debug('Caught fault %d from server %s: %s', e.faultCode,
            server, e.faultString)
        if e.faultCode in errors_by_code:
            error = errors_by_code[e.faultCode]
            raise error(message=e.faultString)
        raise UnknownError(
            code=e.faultCode,
            error=e.faultString,
            server=server,
        )
    except NSPRError, e:
        raise NetworkError(uri=server, error=str(e))
    except ProtocolError, e:
        # By catching a 401 here we can detect the case where we have
        # a single IPA server and the session is invalid. Otherwise
        # we always have to do a ping().
        session_cookie = getattr(context, 'session_cookie', None)
        if session_cookie and e.errcode == 401:
            # Unauthorized. Remove the session and try again.
            delattr(context, 'session_cookie')
            try:
                principal = getattr(context, 'principal', None)
                delete_persistent_client_session_data(principal)
            except Exception, e:
                # This shouldn't happen if we have a session but it isn't fatal.
                pass

            # Create a new serverproxy with the non-session URI. If there
            # is an existing connection we need to save the NSS dbdir so
            # we can skip an unnecessary NSS_Initialize() and avoid
            # NSS_Shutdown issues.
            serverproxy = self.create_connection(os.environ.get('KRB5CCNAME'), self.env.verbose, self.env.fallback, self.env.delegate)

            dbdir = None
            current_conn = getattr(context, self.id, None)
            if current_conn is not None:
                dbdir = getattr(current_conn.conn._ServerProxy__transport, 'dbdir', None)
                if dbdir is not None:
                    self.debug('Using dbdir %s' % dbdir)
            setattr(context, self.id, Connection(serverproxy, self.disconnect))
            if dbdir is not None:
                # Carry the saved dbdir over to the freshly created
                # transport so NSS is not re-initialized.
                current_conn = getattr(context, self.id, None)
                current_conn.conn._ServerProxy__transport.dbdir = dbdir
            # Retry the original call once over the new connection.
            return self.forward(name, *args, **kw)
        raise NetworkError(uri=server, error=e.errmsg)
    except socket.error, e:
        raise NetworkError(uri=server, error=str(e))
    except (OverflowError, TypeError), e:
        # Presumably raised while marshalling the request (mapped to a
        # marshalling error) — e.g. a value the wire format cannot carry.
        raise XMLRPCMarshallError(error=str(e))
|
2012-12-19 03:25:24 -06:00
|
|
|
|
|
|
|
|
|
|
|
class xmlclient(RPCClient):
    """RPC client that forwards commands over the XML wire format."""

    session_path = '/ipa/session/xml'
    server_proxy_class = ServerProxy
    protocol = 'xml'
    env_rpc_uri_key = 'xmlrpc_uri'

    def _call_command(self, command, params):
        """Wrap params for the XML wire format, call, and unwrap the result."""
        # params is [args, kw]; the API version travels in the kw dict.
        version = params[1].get('version', VERSION_WITHOUT_CAPABILITIES)
        wrapped = xml_wrap(params, version)
        return xml_unwrap(command(*wrapped))
|
|
|
|
|
|
|
|
|
|
|
|
class JSONServerProxy(object):
|
|
|
|
def __init__(self, uri, transport, encoding, verbose, allow_none):
|
|
|
|
type, uri = urllib.splittype(uri)
|
|
|
|
if type not in ("http", "https"):
|
|
|
|
raise IOError("unsupported XML-RPC protocol")
|
|
|
|
self.__host, self.__handler = urllib.splithost(uri)
|
|
|
|
self.__transport = transport
|
|
|
|
|
|
|
|
assert encoding == 'UTF-8'
|
|
|
|
assert allow_none
|
|
|
|
self.__verbose = verbose
|
|
|
|
|
|
|
|
# FIXME: Some of our code requires ServerProxy internals.
|
|
|
|
# But, xmlrpclib.ServerProxy's _ServerProxy__transport can be accessed
|
|
|
|
# by calling serverproxy('transport')
|
|
|
|
self._ServerProxy__transport = transport
|
|
|
|
|
|
|
|
def __request(self, name, args):
|
|
|
|
payload = {'method': unicode(name), 'params': args, 'id': 0}
|
2014-03-28 03:51:10 -05:00
|
|
|
version = args[1].get('version', VERSION_WITHOUT_CAPABILITIES)
|
2015-01-12 07:18:49 -06:00
|
|
|
payload = json_encode_binary(payload, version)
|
2012-12-19 03:25:24 -06:00
|
|
|
|
2014-09-23 05:10:56 -05:00
|
|
|
if self.__verbose >= 2:
|
|
|
|
root_logger.info('Request: %s',
|
|
|
|
json.dumps(payload, sort_keys=True, indent=4))
|
|
|
|
|
2012-12-19 03:25:24 -06:00
|
|
|
response = self.__transport.request(
|
|
|
|
self.__host,
|
|
|
|
self.__handler,
|
2015-01-12 07:18:49 -06:00
|
|
|
json.dumps(payload),
|
2014-09-23 05:10:56 -05:00
|
|
|
verbose=self.__verbose >= 3,
|
2012-12-19 03:25:24 -06:00
|
|
|
)
|
|
|
|
|
|
|
|
try:
|
|
|
|
response = json_decode_binary(json.loads(response))
|
|
|
|
except ValueError, e:
|
|
|
|
raise JSONError(str(e))
|
|
|
|
|
2014-09-23 05:10:56 -05:00
|
|
|
if self.__verbose >= 2:
|
2015-01-12 07:18:49 -06:00
|
|
|
root_logger.info(
|
|
|
|
'Response: %s',
|
|
|
|
json.dumps(json_encode_binary(response, version),
|
|
|
|
sort_keys=True, indent=4)
|
|
|
|
)
|
2012-12-19 03:25:24 -06:00
|
|
|
error = response.get('error')
|
|
|
|
if error:
|
|
|
|
try:
|
|
|
|
error_class = errors_by_code[error['code']]
|
|
|
|
except KeyError:
|
|
|
|
raise UnknownError(
|
|
|
|
code=error.get('code'),
|
|
|
|
error=error.get('message'),
|
|
|
|
server=self.__host,
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
raise error_class(message=error['message'])
|
|
|
|
|
|
|
|
return response['result']
|
|
|
|
|
|
|
|
def __getattr__(self, name):
|
|
|
|
def _call(*args):
|
|
|
|
return self.__request(name, args)
|
|
|
|
return _call
|
|
|
|
|
|
|
|
|
|
|
|
class jsonclient(RPCClient):
    """RPC client that forwards commands via JSONServerProxy."""

    # URL path used once a session cookie has been obtained.
    session_path = '/ipa/session/json'
    # Proxy class implementing this client's wire format.
    server_proxy_class = JSONServerProxy
    protocol = 'json'
    # Env key naming the URI this client connects to.
    env_rpc_uri_key = 'jsonrpc_uri'
|