# Authors:
#   Pavel Zuna <pzuna@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; version 2 only
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
|
|
|
|
Migration to IPA
|
|
|
|
|
|
|
|
Example: Migrate users and groups from DS to IPA
|
|
|
|
|
|
|
|
ipa migrate-ds ldap://example.com:389
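
Example: Migrate users and groups, overriding the default bind DN and user
container (option names correspond to the options defined below; the values
shown are only illustrative):

 ipa migrate-ds --bind-dn="cn=directory manager" --user-container="ou=people" ldap://example.com:389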
"""

import logging
import re

import ldap as _ldap

from ipalib import api, errors, output
from ipalib import Command, List, Password, Str, Flag, StrEnum
from ipalib.cli import to_cli
if api.env.in_server and api.env.context in ['lite', 'server']:
    try:
        from ipaserver.plugins.ldap2 import ldap2
    except StandardError, e:
        raise e
from ipalib import _
from ipalib.text import Gettext  # FIXME: remove once the other Gettext FIXME is removed


# USER MIGRATION CALLBACKS AND VARS

_krb_err_msg = _('Kerberos principal %s already exists. Use \'ipa user-mod\' to set it manually.')
_grp_err_msg = _('Failed to add user to the default group. Use \'ipa group-add-member\' to add manually.')

_supported_schemas = (u'RFC2307bis', u'RFC2307')
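# The first schema listed above is the default for the 'schema' option below.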


def _pre_migrate_user(ldap, pkey, dn, entry_attrs, failed, config, ctx, **kwargs):
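    """
    Prepare a DS user entry for addition to IPA: resolve the default
    primary group, fill in attributes required by IPA (ipauniqueid,
    homedirectory, gidnumber), drop attributes that IPA autogenerates,
    and assign a Kerberos principal if it is not already taken.
    """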
    attr_blacklist = ['krbprincipalkey']

    # get the default primary group for new users
    if 'def_group_dn' not in ctx:
        def_group = config.get('ipadefaultprimarygroup')
        ctx['def_group_dn'] = api.Object.group.get_dn(def_group)
        try:
            (g_dn, g_attrs) = ldap.get_entry(ctx['def_group_dn'], ['gidnumber'])
        except errors.NotFound:
            error_msg = 'Default group for new users not found.'
            raise errors.NotFound(reason=error_msg)
        ctx['def_group_gid'] = g_attrs['gidnumber'][0]

    # fill in attributes required by IPA
    entry_attrs['ipauniqueid'] = 'autogenerate'
    if 'homedirectory' not in entry_attrs:
        homes_root = config.get('ipahomesrootdir', ('/home', ))[0]
        home_dir = '%s/%s' % (homes_root, pkey)
        home_dir = home_dir.replace('//', '/').rstrip('/')
        entry_attrs['homedirectory'] = home_dir
    entry_attrs.setdefault('gidnumber', ctx['def_group_gid'])

    # do not migrate attributes that are autogenerated during migration
    for attr in entry_attrs.keys():
        if attr in attr_blacklist:
            del entry_attrs[attr]

    # generate a principal name and check that it isn't already taken
    principal = u'%s@%s' % (pkey, api.env.realm)
    try:
        ldap.find_entry_by_attr(
            'krbprincipalname', principal, 'krbprincipalaux', ['']
        )
    except errors.NotFound:
        entry_attrs['krbprincipalname'] = principal
    else:
        failed[pkey] = _krb_err_msg % principal

    return dn


def _post_migrate_user(ldap, pkey, dn, entry_attrs, failed, config, ctx):
    # add user to the default group
    try:
        ldap.add_entry_to_group(dn, ctx['def_group_dn'])
    except errors.ExecutionError, e:
        failed[pkey] = _grp_err_msg


# GROUP MIGRATION CALLBACKS AND VARS

def _pre_migrate_group(ldap, pkey, dn, entry_attrs, failed, config, ctx, **kwargs):
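    """
    Convert group membership attributes from the remote DS schema to IPA:
    RFC2307bis member/uniqueMember DNs are rewritten to point at the IPA
    user and group containers, while RFC2307 memberUid values are turned
    into member DNs under the IPA user container.
    """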
    def convert_members_rfc2307bis(member_attr, search_bases, overwrite=False):
        """
        Convert DNs in member attributes to work in IPA.
        """
        new_members = []
        entry_attrs.setdefault(member_attr, [])
        for m in entry_attrs[member_attr]:
            try:
                # what str2dn returns looks like [[('cn', 'foo', 4)], [('dc', 'example', 1)], [('dc', 'com', 1)]]
                rdn = _ldap.dn.str2dn(m, flags=_ldap.DN_FORMAT_LDAPV3)[0]
                rdnval = rdn[0][1]
            except IndexError:
                api.log.error('Malformed DN %s has no RDN?' % m)
                continue

            if m.lower().endswith(search_bases['user']):
                api.log.info('migrating user %s' % m)
                m = '%s=%s,%s' % (api.Object.user.primary_key.name,
                                  rdnval,
                                  api.env.container_user)
            elif m.lower().endswith(search_bases['group']):
                api.log.info('migrating group %s' % m)
                m = '%s=%s,%s' % (api.Object.group.primary_key.name,
                                  rdnval,
                                  api.env.container_group)
            else:
                api.log.error('entry %s does not belong to any known container' % m)
                continue

            m = ldap.normalize_dn(m)
            new_members.append(m)

        del entry_attrs[member_attr]
        if overwrite:
            entry_attrs['member'] = []
        entry_attrs['member'] += new_members

    def convert_members_rfc2307(member_attr):
        """
        Convert usernames in member attributes to work in IPA.
        """
        new_members = []
        entry_attrs.setdefault(member_attr, [])
        for m in entry_attrs[member_attr]:
            memberdn = '%s=%s,%s' % (api.Object.user.primary_key.name,
                                     m,
                                     api.env.container_user)
            new_members.append(ldap.normalize_dn(memberdn))
        entry_attrs['member'] = new_members

    schema = kwargs.get('schema', None)
    entry_attrs['ipauniqueid'] = 'autogenerate'
    if schema == 'RFC2307bis':
        search_bases = kwargs.get('search_bases', None)
        if not search_bases:
            raise ValueError('Search bases not specified')

        convert_members_rfc2307bis('member', search_bases, overwrite=True)
        convert_members_rfc2307bis('uniquemember', search_bases)
    elif schema == 'RFC2307':
        convert_members_rfc2307('memberuid')
    else:
        raise ValueError('Schema %s not supported' % schema)

    return dn


# DS MIGRATION PLUGIN

def construct_filter(template, oc_list):
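    # e.g. construct_filter('(&(|%s)(uid=*))', [u'person', u'inetOrgPerson'])
    # returns '(&(|(objectclass=person)(objectclass=inetOrgPerson))(uid=*))'
    # ('inetOrgPerson' here is only an illustrative object class)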
    oc_subfilter = ''.join(['(objectclass=%s)' % oc for oc in oc_list])
    return template % oc_subfilter


def validate_ldapuri(ugettext, ldapuri):
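    # accept plain and TLS LDAP URIs with an optional port,
    # e.g. ldap://example.com:389 or ldaps://example.com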
    m = re.match('^ldaps?://[-\w\.]+(:\d+)?$', ldapuri)
    if not m:
        err_msg = _('Invalid LDAP URI.')
        raise errors.ValidationError(name='ldap_uri', error=err_msg)


class migrate_ds(Command):
    """
    Migrate users and groups from DS to IPA.
    """
    migrate_objects = {
        # OBJECT_NAME: {filter_template, oc_option, pre_callback, post_callback}
        #
        # OBJECT_NAME - is the name of an LDAPObject subclass
        # filter_template - template used to build the filter that retrieves
        #                   objects from DS (see construct_filter)
        # oc_option - name of the option whose object classes are substituted
        #             into filter_template
        # pre_callback - is called for each object just after it was
        #                retrieved from DS and before being added to IPA
        # post_callback - is called for each object after it was added to IPA
        #
        # {pre, post}_callback parameters:
        #  ldap - ldap2 instance connected to IPA
        #  pkey - primary key value of the object (uid for users, etc.)
        #  dn - dn of the object as it (will be/is) stored in IPA
        #  entry_attrs - attributes of the object
        #  failed - dict of objects that have failed to migrate so far
        #  config - IPA config entry attributes
        #  ctx - object context, used to pass data between callbacks
        #
        # If the pre_callback return value evaluates to False, migration
        # of the current object is aborted.
        'user': {
            'filter_template' : '(&(|%s)(uid=*))',
            'oc_option' : 'userobjectclass',
            'pre_callback' : _pre_migrate_user,
            'post_callback' : _post_migrate_user
        },
        'group': {
            'filter_template' : '(&(|%s)(cn=*))',
            'oc_option' : 'groupobjectclass',
            'pre_callback' : _pre_migrate_group,
            'post_callback' : None
        },
    }
    migrate_order = ('user', 'group')

    takes_args = (
        Str('ldapuri', validate_ldapuri,
            cli_name='ldap_uri',
            label=_('LDAP URI'),
            doc=_('LDAP URI of DS server to migrate from'),
        ),
        Password('bindpw',
            cli_name='password',
            label=_('Password'),
            doc=_('bind password'),
        ),
    )

    takes_options = (
        Str('binddn?',
            cli_name='bind_dn',
            label=_('Bind DN'),
            default=u'cn=directory manager',
            autofill=True,
        ),
        Str('usercontainer?',
            cli_name='user_container',
            label=_('User container'),
            doc=_('RDN of container for users in DS'),
            default=u'ou=people',
            autofill=True,
        ),
        Str('groupcontainer?',
            cli_name='group_container',
            label=_('Group container'),
            doc=_('RDN of container for groups in DS'),
            default=u'ou=groups',
            autofill=True,
        ),
        List('userobjectclass?',
            cli_name='user_objectclass',
            label=_('User object class'),
            doc=_('Comma-separated list of objectclasses used to search for user entries in DS'),
            default=(u'person',),
            autofill=True,
        ),
        List('groupobjectclass?',
            cli_name='group_objectclass',
            label=_('Group object class'),
            doc=_('Comma-separated list of objectclasses used to search for group entries in DS'),
            default=(u'groupOfUniqueNames', u'groupOfNames'),
            autofill=True,
        ),
        StrEnum('schema?',
            cli_name='schema',
            label=_('LDAP schema'),
            doc=_('The schema used on the LDAP server. Supported values are RFC2307 and RFC2307bis. The default is RFC2307bis.'),
            values=_supported_schemas,
            default=_supported_schemas[0],
            autofill=True,
        ),
        Flag('continue?',
            doc=_('Continuous operation mode. Errors are reported but the process continues.'),
            default=False,
        ),
    )

    has_output = (
        output.Output('result',
            type=dict,
            doc=_('Lists of objects migrated; categorized by type.'),
        ),
        output.Output('failed',
            type=dict,
            doc=_('Lists of objects that could not be migrated; categorized by type.'),
        ),
        output.Output('enabled',
            type=bool,
            doc=_('False if migration mode was disabled.'),
        ),
    )

    exclude_doc = _('comma-separated list of %s to exclude from migration')

    truncated_err_msg = _('''\
search results for objects to be migrated
have been truncated by the server;
migration process might be incomplete\n''')

    migration_disabled_msg = _('''\
Migration mode is disabled. Use \'ipa config-mod\' to enable it.''')

    pwd_migration_msg = _('''\
Passwords have been migrated in pre-hashed format.
IPA is unable to generate Kerberos keys unless provided
with clear text passwords. All migrated users need to
log in at https://your.domain/ipa/migration/ before they
can use their Kerberos accounts.''')
2010-01-12 09:40:09 -06:00
|
|
|
|
|
|
|
def get_options(self):
|
|
|
|
"""
|
|
|
|
Call get_options of the baseclass and add "exclude" options
|
|
|
|
for each type of object being migrated.
|
|
|
|
"""
|
|
|
|
for option in super(migrate_ds, self).get_options():
|
|
|
|
yield option
|
|
|
|
for ldap_obj_name in self.migrate_objects:
|
|
|
|
ldap_obj = self.api.Object[ldap_obj_name]
|
|
|
|
name = 'exclude_%ss' % to_cli(ldap_obj_name)
|
2010-03-05 15:11:21 -06:00
|
|
|
# FIXME: can't substitute strings static Gettext instance
|
|
|
|
doc = Gettext(self.exclude_doc % ldap_obj.object_name_plural)
|
2010-01-12 09:40:09 -06:00
|
|
|
yield List(
|
|
|
|
'%s?' % name, cli_name=name, doc=doc, default=tuple(),
|
|
|
|
autofill=True
|
|
|
|
)

    def normalize_options(self, options):
        """
        Convert all "exclude" option values to lower-case.

        Also, empty List parameters are converted to None, but the migration
        plugin doesn't like that, so convert them back to empty tuples.
        """
        for p in self.params():
            if isinstance(p, List):
                if options[p.name]:
                    options[p.name] = tuple(
                        v.lower() for v in options[p.name]
                    )
                else:
                    options[p.name] = tuple()

    def _get_search_bases(self, options, ds_base_dn, migrate_order):
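        # Build a mapping from object type to its DS search base by joining
        # the configured container RDN with the DS base DN, e.g. (assuming a
        # base DN of dc=example,dc=com and the default containers):
        #   {'user': 'ou=people,dc=example,dc=com',
        #    'group': 'ou=groups,dc=example,dc=com'}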
        search_bases = dict()
        for ldap_obj_name in migrate_order:
            search_bases[ldap_obj_name] = '%s,%s' % (
                options['%scontainer' % to_cli(ldap_obj_name)], ds_base_dn
            )
        return search_bases

    def migrate(self, ldap, config, ds_ldap, ds_base_dn, options):
        """
        Migrate objects from DS to IPA.
        """
        migrated = {} # {'OBJ': ['PKEY1', 'PKEY2', ...], ...}
        failed = {} # {'OBJ': {'PKEY1': 'error message', ...}, ...}
        search_bases = self._get_search_bases(options, ds_base_dn, self.migrate_order)
        for ldap_obj_name in self.migrate_order:
            ldap_obj = self.api.Object[ldap_obj_name]

            search_filter = construct_filter(self.migrate_objects[ldap_obj_name]['filter_template'],
                                             options[to_cli(self.migrate_objects[ldap_obj_name]['oc_option'])])
            exclude = options['exclude_%ss' % to_cli(ldap_obj_name)]
            context = {}

            migrated[ldap_obj_name] = []
            failed[ldap_obj_name] = {}

            # FIXME: with limits set, we get a strange 'Success' exception
            try:
                (entries, truncated) = ds_ldap.find_entries(
                    search_filter, ['*'], search_bases[ldap_obj_name], ds_ldap.SCOPE_ONELEVEL#,
                    #time_limit=0, size_limit=0
                )
            except errors.NotFound:
                if not options.get('continue', False):
                    raise errors.NotFound(reason=_('Container for %(container)s not found' % {'container': ldap_obj_name}))
                else:
                    truncated = False
                    entries = []
            if truncated:
                self.log.error(
                    '%s: %s' % (
                        ldap_obj.object_name_plural, self.truncated_err_msg
                    )
                )

            for (dn, entry_attrs) in entries:
                pkey = entry_attrs[ldap_obj.primary_key.name][0].lower()
                if pkey in exclude:
                    continue

                dn = ldap_obj.get_dn(pkey)
                entry_attrs['objectclass'] = list(
                    set(
                        config.get(
                            ldap_obj.object_class_config, ldap_obj.object_class
                        ) + [o.lower() for o in entry_attrs['objectclass']]
                    )
                )

                callback = self.migrate_objects[ldap_obj_name]['pre_callback']
                if callable(callback):
                    dn = callback(
                        ldap, pkey, dn, entry_attrs, failed[ldap_obj_name],
                        config, context, schema=options['schema'],
                        search_bases=search_bases
                    )
                    if not dn:
                        continue

                try:
                    ldap.add_entry(dn, entry_attrs)
                except errors.ExecutionError, e:
                    failed[ldap_obj_name][pkey] = unicode(e)
                else:
                    migrated[ldap_obj_name].append(pkey)

                callback = self.migrate_objects[ldap_obj_name]['post_callback']
                if callable(callback):
                    callback(
                        ldap, pkey, dn, entry_attrs, failed[ldap_obj_name],
                        config, context
                    )

        return (migrated, failed)

    def execute(self, ldapuri, bindpw, **options):
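        """
        Check that migration mode is enabled in the IPA config, connect to
        the remote DS with the given bind DN and password, determine its
        base DN from the root DSE namingContexts, and run the migration.
        """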
        ldap = self.api.Backend.ldap2
        self.normalize_options(options)

        config = ldap.get_ipa_config()[1]

        # check if migration mode is enabled
        if config.get('ipamigrationenabled', ('FALSE', ))[0] == 'FALSE':
            return dict(result={}, failed={}, enabled=False)

        # connect to DS
        ds_ldap = ldap2(shared_instance=False, ldap_uri=ldapuri, base_dn='')
        ds_ldap.connect(bind_dn=options['binddn'], bind_pw=bindpw)

        # retrieve the DS base DN from the root DSE
        (entries, truncated) = ds_ldap.find_entries(
            '', ['namingcontexts'], '', ds_ldap.SCOPE_BASE
        )
        try:
            ds_base_dn = entries[0][1]['namingcontexts'][0]
        except (IndexError, KeyError), e:
            raise StandardError(str(e))

        # migrate!
        (migrated, failed) = self.migrate(
            ldap, config, ds_ldap, ds_base_dn, options
        )

        return dict(result=migrated, failed=failed, enabled=True)

    def output_for_cli(self, textui, result, ldapuri, bindpw, **options):
        textui.print_name(self.name)
        if not result['enabled']:
            textui.print_plain(self.migration_disabled_msg)
            return 1
        textui.print_plain('Migrated:')
        textui.print_entry1(
            result['result'], attr_order=self.migrate_order,
            one_value_per_line=False
        )
        for ldap_obj_name in self.migrate_order:
            textui.print_plain('Failed %s:' % ldap_obj_name)
            textui.print_entry1(
                result['failed'][ldap_obj_name], attr_order=self.migrate_order,
                one_value_per_line=True,
            )
        textui.print_plain('-' * len(self.name))
        textui.print_plain(unicode(self.pwd_migration_msg))


api.register(migrate_ds)