# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Documentation can be found at http://freeipa.org/page/LdapUpdate

# TODO
# save undo files?

UPDATES_DIR="/usr/share/ipa/updates/"
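
# For reference, an update file consumed by parse_update_file() below is a
# simple line-oriented format. This sketch is illustrative only (the DN and
# values are made up); see http://freeipa.org/page/LdapUpdate for the real
# documentation:
#
#   dn: cn=config,$SUFFIX
#   default: nsslapd-maxbersize: 2097152
#   add: aci: example-value
#   only: nsslapd-sizelimit: 2000
#   replace: description: old value:new value
#
# Each non-comment line is "keyword: attribute: value". Variables such as
# $SUFFIX, $REALM, $FQDN, $TIME and $LIBARCH are substituted from
# self.sub_dict before parsing, and a line beginning with a space continues
# the previous value.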

import sys
from ipaserver.install import installutils
from ipaserver import ipaldap
from ipapython import entity, ipautil
from ipalib import util
from ipalib import errors
import ldap
import ldap.schema  # explicit, for SubSchema in is_schema_updated() below
from ldap.dn import escape_dn_chars
import logging
import krbV
import platform
import time
import random
import os
import pwd
import fnmatch
import csv


class BadSyntax(Exception):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class LDAPUpdate:
    def __init__(self, dm_password, sub_dict=None, live_run=True,
                 online=True, ldapi=False):
        """dm_password = Directory Manager password
           sub_dict = substitution dictionary
           live_run = Apply the changes or just test
           online = do an online LDAP update or use an experimental LDIF updater
           ldapi = bind using ldapi. This assumes autobind is enabled.
        """
        # A mutable default argument ({}) would be shared across instances,
        # and this dictionary is modified below, so default it here instead.
        if sub_dict is None:
            sub_dict = {}
        self.sub_dict = sub_dict
        self.live_run = live_run
        self.dm_password = dm_password
        self.conn = None
        self.modified = False
        self.online = online
        self.ldapi = ldapi
        self.pw_name = pwd.getpwuid(os.geteuid()).pw_name

        krbctx = krbV.default_context()

        fqdn = installutils.get_fqdn()
        if fqdn is None:
            raise RuntimeError("Unable to determine hostname")

        domain = ipautil.get_domain_name()
        libarch = self.__identify_arch()
        suffix = util.realm_to_suffix(krbctx.default_realm)
        self.realm = krbctx.default_realm

        if not self.sub_dict.get("REALM"):
            self.sub_dict["REALM"] = krbctx.default_realm
        if not self.sub_dict.get("FQDN"):
            self.sub_dict["FQDN"] = fqdn
        if not self.sub_dict.get("DOMAIN"):
            self.sub_dict["DOMAIN"] = domain
        if not self.sub_dict.get("SUFFIX"):
            self.sub_dict["SUFFIX"] = suffix
        if not self.sub_dict.get("ESCAPED_SUFFIX"):
            self.sub_dict["ESCAPED_SUFFIX"] = escape_dn_chars(suffix)
        if not self.sub_dict.get("LIBARCH"):
            self.sub_dict["LIBARCH"] = libarch
        if not self.sub_dict.get("TIME"):
            self.sub_dict["TIME"] = int(time.time())

        if online:
            # Try out the password
            if not self.ldapi:
                try:
                    conn = ipaldap.IPAdmin(fqdn)
                    conn.do_simple_bind(bindpw=self.dm_password)
                    conn.unbind()
                except ldap.CONNECT_ERROR:
                    raise RuntimeError("Unable to connect to LDAP server %s" % fqdn)
                except ldap.SERVER_DOWN:
                    raise RuntimeError("Unable to connect to LDAP server %s" % fqdn)
                except ldap.INVALID_CREDENTIALS:
                    raise RuntimeError("The password provided is incorrect for LDAP server %s" % fqdn)
            else:
                conn = ipaldap.IPAdmin(ldapi=True, realm=self.realm)
                conn.do_external_bind(self.pw_name)
        else:
            raise RuntimeError("Offline updates are not supported.")

    # The following 2 functions were taken from the Python
    # documentation at http://docs.python.org/library/csv.html
    def __utf_8_encoder(self, unicode_csv_data):
        for line in unicode_csv_data:
            yield line.encode('utf-8')

    def __unicode_csv_reader(self, unicode_csv_data, quote_char="'", dialect=csv.excel, **kwargs):
        # csv.py doesn't do Unicode; encode temporarily as UTF-8:
        csv_reader = csv.reader(self.__utf_8_encoder(unicode_csv_data),
                                dialect=dialect, delimiter=',',
                                quotechar=quote_char,
                                skipinitialspace=True,
                                **kwargs)
        for row in csv_reader:
            # decode UTF-8 back to Unicode, cell by cell:
            yield [unicode(cell, 'utf-8') for cell in row]

    def __identify_arch(self):
        """On multi-arch systems some libraries may be in /lib64, /usr/lib64,
           etc. Determine if a suffix is needed based on the current
           architecture.
        """
        bits = platform.architecture()[0]

        if bits == "64bit":
            return "64"
        else:
            return ""

    def _template_str(self, s):
        try:
            return ipautil.template_str(s, self.sub_dict)
        except KeyError, e:
            raise BadSyntax("Unknown template keyword %s" % e)

    def __parse_values(self, line):
        """Parse a comma-separated string into separate values and convert
           them into a list. This should handle quoted strings with embedded
           commas.
        """
        if line[0] == "'":
            quote_char = "'"
        else:
            quote_char = '"'
        reader = self.__unicode_csv_reader([line], quote_char)
        value = []
        for row in reader:
            value = value + row
        return value
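
    # Illustrative behavior of __parse_values (assuming the csv settings in
    # __unicode_csv_reader above):
    #   __parse_values(u"one, two, three")    ->  [u'one', u'two', u'three']
    #   __parse_values(u"'a, with comma', b") ->  [u'a, with comma', u'b']
    # The quote character is taken from the first character of the line, so
    # both single- and double-quoted values are handled.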

    def read_file(self, filename):
        if filename == '-':
            fd = sys.stdin
        else:
            fd = open(filename)
        text = fd.readlines()
        if fd != sys.stdin: fd.close()
        return text

    def __entry_to_entity(self, ent):
        """The Entry class is a bare LDAP entry. The Entity class has a lot
           more helper functions that we need, so convert to dict and then
           to Entity.
        """
        entry = dict(ent.data)
        entry['dn'] = ent.dn
        for key, value in entry.iteritems():
            if isinstance(value, list) or isinstance(value, tuple):
                if len(value) == 0:
                    entry[key] = ''
                elif len(value) == 1:
                    entry[key] = value[0]
        return entity.Entity(entry)

    def __combine_updates(self, dn_list, all_updates, update):
        """Combine a new update with the list of total updates.

           Updates are stored in 2 structures:
           dn_list: contains a unique list of DNs in the updates
           all_updates: the actual updates that need to be applied

           We want to apply the updates from the shortest to the longest
           DN so that if new child and parent entries are in different
           updates we can be sure the parent gets written first. This also
           lets us apply any schema first since it is in the very short
           cn=schema.
        """
        dn = update.get('dn')
        dns = ldap.explode_dn(dn.lower())
        l = len(dns)
        if dn_list.get(l):
            if dn not in dn_list[l]:
                dn_list[l].append(dn)
        else:
            dn_list[l] = [dn]

        if not all_updates.get(dn):
            all_updates[dn] = update
            return all_updates

        e = all_updates[dn]
        e['updates'] = e['updates'] + update['updates']
        all_updates[dn] = e

        return all_updates

    def parse_update_file(self, data, all_updates, dn_list):
        """Parse the update file into a dictionary of lists and apply the
           update for each DN in the file."""
        valid_keywords = ["default", "add", "remove", "only", "deleteentry", "replace"]

        update = {}
        d = []
        index = ""
        dn = None
        lcount = 0
        for line in data:
            # Strip out \n and extra white space
            lcount = lcount + 1

            # skip comments and empty lines
            line = line.rstrip()
            if line.startswith('#') or line == '': continue

            if line.lower().startswith('dn:'):
                if dn is not None:
                    all_updates = self.__combine_updates(dn_list, all_updates, update)

                update = {}
                dn = line[3:].strip()
                update['dn'] = self._template_str(dn)
            else:
                if dn is None:
                    raise BadSyntax, "dn is not defined in the update"

                line = self._template_str(line)
                if line.startswith(' '):
                    # A line beginning with a space continues the previous value
                    v = d[len(d) - 1]
                    v = v + line[1:]
                    d[len(d) - 1] = v
                    update[index] = d
                    continue

                line = line.strip()
                values = line.split(':', 2)
                if len(values) != 3:
                    raise BadSyntax, "Bad formatting on line %d: %s" % (lcount, line)

                index = values[0].strip().lower()

                if index not in valid_keywords:
                    raise BadSyntax, "Unknown keyword %s" % index

                attr = values[1].strip()
                value = values[2].strip()

                new_value = ""
                if index == "default":
                    new_value = attr + ":" + value
                else:
                    new_value = index + ":" + attr + ":" + value
                    index = "updates"

                d = update.get(index, [])
                d.append(new_value)
                update[index] = d

        if dn is not None:
            all_updates = self.__combine_updates(dn_list, all_updates, update)

        return (all_updates, dn_list)
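
    # Illustrative result of parsing the sketch file shown near UPDATES_DIR
    # (assuming $SUFFIX expands to dc=example,dc=com):
    #   all_updates = {
    #       'cn=config,dc=example,dc=com': {
    #           'dn': 'cn=config,dc=example,dc=com',
    #           'default': ['nsslapd-maxbersize:2097152'],
    #           'updates': ['add:aci:example-value',
    #                       'only:nsslapd-sizelimit:2000',
    #                       'replace:description:old value:new value'],
    #       }
    #   }
    #   dn_list = {3: ['cn=config,dc=example,dc=com']}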

    def create_index_task(self, attribute):
        """Create a task to update an index for an attribute"""

        # Sleep a bit to ensure previous operations are complete
        time.sleep(5)

        r = random.SystemRandom()

        # Refresh the time to make uniqueness more probable. Add on some
        # randomness for good measure.
        self.sub_dict['TIME'] = int(time.time()) + r.randint(0, 10000)

        cn = self._template_str("indextask_$TIME")
        dn = "cn=%s, cn=index, cn=tasks, cn=config" % cn

        e = ipaldap.Entry(dn)

        e.setValues('objectClass', ['top', 'extensibleObject'])
        e.setValue('cn', cn)
        e.setValue('nsInstance', 'userRoot')
        e.setValues('nsIndexAttribute', attribute)

        logging.info("Creating task to index attribute: %s", attribute)
        logging.debug("Task id: %s", dn)

        if self.live_run:
            self.conn.addEntry(e.dn, e.toTupleList())

        return dn
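
    # The task entry created above corresponds to this LDIF (illustrative,
    # with $TIME already substituted):
    #   dn: cn=indextask_1234567890, cn=index, cn=tasks, cn=config
    #   objectClass: top
    #   objectClass: extensibleObject
    #   cn: indextask_1234567890
    #   nsInstance: userRoot
    #   nsIndexAttribute: <attribute>
    # The directory server picks the entry up and begins indexing;
    # monitor_index_task() below polls nstaskstatus on it until the work
    # is finished.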

    def monitor_index_task(self, dn):
        """Given a task DN, monitor it and wait until it has completed
           (or failed).
        """

        if not self.live_run:
            # If not doing this live there is nothing to monitor
            return

        # Pause for a moment to give the task time to be created
        time.sleep(1)

        attrlist = ['nstaskstatus', 'nstaskexitcode']
        entry = None

        while True:
            try:
                entry = self.conn.getEntry(dn, ldap.SCOPE_BASE, "(objectclass=*)", attrlist)
            except errors.NotFound:
                logging.error("Task not found: %s", dn)
                return
            except errors.DatabaseError, e:
                logging.error("Task lookup failure %s", e)
                return

            status = entry.getValue('nstaskstatus')
            if status is None:
                # task doesn't have a status yet
                time.sleep(1)
                continue

            if status.lower().find("finished") > -1:
                logging.info("Indexing finished")
                break

            logging.debug("Indexing in progress")
            time.sleep(1)

        return

    def __create_default_entry(self, dn, default):
        """Create the default entry from the values provided.

           The return type is entity.Entity
        """
        entry = ipaldap.Entry(dn)

        if not default:
            # This means that the entire entry needs to be created with add
            return self.__entry_to_entity(entry)

        for line in default:
            # We already do syntax-parsing so this is safe
            (k, v) = line.split(':', 1)
            e = entry.getValues(k)
            if e:
                # multi-valued attribute
                e = list(e)
                e.append(v)
            else:
                e = v
            entry.setValues(k, e)

        return self.__entry_to_entity(entry)

    def __get_entry(self, dn):
        """Retrieve an object from LDAP.

           The return type is a list of ipaldap.Entry objects.
        """
        searchfilter = "objectclass=*"
        sattrs = ["*"]
        scope = ldap.SCOPE_BASE

        return self.conn.getList(dn, scope, searchfilter, sattrs)

    def __apply_updates(self, updates, entry):
        """updates is a list of changes to apply
           entry is the thing to apply them to

           Returns the modified entry
        """
        if not updates:
            return entry

        only = {}
        for u in updates:
            # We already do syntax-parsing so this is safe
            (utype, k, values) = u.split(':', 2)

            values = self.__parse_values(values)

            e = entry.getValues(k)
            if not isinstance(e, list):
                if e is None:
                    e = []
                else:
                    e = [e]

            for v in values:
                if utype == 'remove':
                    logging.debug("remove: '%s' from %s, current value %s", v, k, e)
                    try:
                        e.remove(v)
                    except ValueError:
                        logging.warn("remove: '%s' not in %s", v, k)
                    entry.setValues(k, e)
                    logging.debug('remove: updated value %s', e)
                elif utype == 'add':
                    logging.debug("add: '%s' to %s, current value %s", v, k, e)
                    # Remove it, ignoring errors so we can blindly add it later
                    try:
                        e.remove(v)
                    except ValueError:
                        pass
                    e.append(v)
                    logging.debug('add: updated value %s', e)
                    entry.setValues(k, e)
                elif utype == 'only':
                    logging.debug("only: set %s to '%s', current value %s", k, v, e)
                    if only.get(k):
                        e.append(v)
                    else:
                        e = [v]
                        only[k] = True
                    entry.setValues(k, e)
                    logging.debug('only: updated value %s', e)
                elif utype == 'deleteentry':
                    # skip this update type, it is handled in __delete_record()
                    return None
                elif utype == 'replace':
                    # v has the format "old: new"
                    try:
                        (old, new) = v.split(':', 1)
                    except ValueError:
                        raise BadSyntax, "bad syntax in replace, needs to be in the format old: new in %s" % entry.dn
                    try:
                        e.remove(old)
                        e.append(new)
                        logging.debug('replace: updated value %s', e)
                        entry.setValues(k, e)
                    except ValueError:
                        logging.debug('replace: %s not found, skipping', old)

        self.print_entity(entry)

        return entry
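
    # Illustrative semantics of __apply_updates for current values
    # e = ['a', 'b'] on attribute attr:
    #   add:attr:c        ->  ['a', 'b', 'c']  (value re-added if already there)
    #   remove:attr:a     ->  ['b']
    #   only:attr:z       ->  ['z']            (later only: values for the same
    #                                           attribute append to the list)
    #   replace:attr:a:z  ->  ['b', 'z']       (no-op if 'a' is not present)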

    def print_entity(self, e, message=None):
        """The entity object currently lacks a str() method"""
        logging.debug("---------------------------------------------")
        if message:
            logging.debug("%s", message)
        logging.debug("dn: " + e.dn)
        attr = e.attrList()
        for a in attr:
            value = e.getValues(a)
            if isinstance(value, str):
                logging.debug(a + ": " + value)
            else:
                logging.debug(a + ": ")
                for l in value:
                    logging.debug("\t" + l)

    def is_schema_updated(self, s):
        """Compare the schema in 's' with the current schema in the DS to
           see if anything has changed. This should account for syntax
           differences (like added parens that make no difference but are
           detected as a change by generateModList()).

           This doesn't handle re-ordering of attributes. They are still
           detected as changes, so foo $ bar != bar $ foo.

           return True if the schema has changed
           return False if it has not
        """
        s = ldap.schema.SubSchema(s)
        s = s.ldap_entry()

        # Get a fresh copy and convert into a SubSchema
        n = self.__get_entry("cn=schema")[0]
        n = dict(n.data)
        n = ldap.schema.SubSchema(n)
        n = n.ldap_entry()

        if s == n:
            return False
        else:
            return True

    def __update_record(self, update):
        found = False

        new_entry = self.__create_default_entry(update.get('dn'),
                                                update.get('default'))

        try:
            e = self.__get_entry(new_entry.dn)
            if len(e) > 1:
                # we should only ever get back one entry
                raise BadSyntax, "More than 1 entry returned on a dn search!? %s" % new_entry.dn
            entry = self.__entry_to_entity(e[0])
            found = True
            logging.info("Updating existing entry: %s", entry.dn)
        except errors.NotFound:
            # Doesn't exist, start with the default entry
            entry = new_entry
            logging.info("New entry: %s", entry.dn)
        except errors.DatabaseError:
            # Doesn't exist, start with the default entry
            entry = new_entry
            logging.info("New entry, using default value: %s", entry.dn)

        self.print_entity(entry)

        # Bring this entry up to date
        entry = self.__apply_updates(update.get('updates'), entry)
        if entry is None:
            # It might be None if it is just deleting an entry
            return

        self.print_entity(entry, "Final value")

        if not found:
            # New entries get their orig_data set to the entry itself. We want
            # to empty that so that everything appears new when generating the
            # modlist
            # entry.orig_data = {}
            try:
                if self.live_run:
                    self.conn.addEntry(entry.dn, entry.toTupleList())
                self.modified = True
            except Exception, e:
                logging.error("Add failure %s", e)
        else:
            # Update LDAP
            try:
                updated = False
                changes = self.conn.generateModList(entry.origDataDict(), entry.toDict())
                if (entry.dn == "cn=schema"):
                    updated = self.is_schema_updated(entry.toDict())
                else:
                    if len(changes) >= 1:
                        updated = True
                logging.debug("%s" % changes)
                logging.debug("Live %d, updated %d" % (self.live_run, updated))
                if self.live_run and updated:
                    self.conn.updateEntry(entry.dn, entry.origDataDict(), entry.toDict())
                logging.info("Done")
            except errors.EmptyModlist:
                logging.info("Entry already up-to-date")
                updated = False
            except errors.DatabaseError, e:
                logging.error("Update failed: %s", e)
                updated = False

            if ("cn=index" in entry.dn and
                    "cn=userRoot" in entry.dn):
                taskid = self.create_index_task(entry.cn)
                self.monitor_index_task(taskid)

            if updated:
                self.modified = True
        return

    def __delete_record(self, updates):
        """
        Run through all the updates again looking for any that should be
        deleted.

        The updates are processed in reverse order (longest DNs first) so
        that we don't try to delete a parent entry before its children.
        """
        dn = updates['dn']
        updates = updates.get('updates', [])
        for u in updates:
            # We already do syntax-parsing so this is safe
            (utype, k, values) = u.split(':', 2)

            if utype == 'deleteentry':
                try:
                    if self.live_run:
                        self.conn.deleteEntry(dn)
                    self.modified = True
                except errors.NotFound, e:
                    logging.info("Deleting non-existent entry %s", e)
                    self.modified = True
                except errors.DatabaseError, e:
                    logging.error("Delete failed: %s", e)

        return

    def get_all_files(self, root, recursive=False):
        """Get all update files"""
        f = []
        for path, subdirs, files in os.walk(root):
            for name in files:
                if fnmatch.fnmatch(name, "*.update"):
                    f.append(os.path.join(path, name))
            if not recursive:
                break
        f.sort()
        return f

    def update(self, files):
        """Execute the update. files is a list of the update files to use.

           returns True if anything was changed, otherwise False
        """
        try:
            if self.online:
                if self.ldapi:
                    self.conn = ipaldap.IPAdmin(ldapi=True, realm=self.realm)
                    self.conn.do_external_bind(self.pw_name)
                else:
                    self.conn = ipaldap.IPAdmin(self.sub_dict['FQDN'])
                    self.conn.do_simple_bind(bindpw=self.dm_password)
            else:
                raise RuntimeError("Offline updates are not supported.")

            all_updates = {}
            dn_list = {}
            for f in files:
                try:
                    logging.info("Parsing file %s" % f)
                    data = self.read_file(f)
                except Exception, e:
                    print e
                    sys.exit(1)

                (all_updates, dn_list) = self.parse_update_file(data, all_updates, dn_list)

            # For adds and updates we want to apply updates from shortest
            # to greatest length of the DN. For deletes we want the reverse.
            sortedkeys = dn_list.keys()
            sortedkeys.sort()
            for k in sortedkeys:
                for dn in dn_list[k]:
                    self.__update_record(all_updates[dn])

            sortedkeys.reverse()
            for k in sortedkeys:
                for dn in dn_list[k]:
                    self.__delete_record(all_updates[dn])
        finally:
            if self.conn: self.conn.unbind()

        return self.modified
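

if __name__ == '__main__':
    # Minimal dry-run driver sketch. This module is normally driven by the
    # ipa-ldap-updater tool; running it directly like this is illustrative
    # only and assumes an IPA server environment with update files installed
    # under UPDATES_DIR.
    import getpass
    logging.basicConfig(level=logging.DEBUG)
    dm_password = getpass.getpass("Directory Manager password: ")
    # live_run=False parses the files and computes changes without writing
    # anything to the directory server.
    ld = LDAPUpdate(dm_password, live_run=False)
    files = ld.get_all_files(UPDATES_DIR)
    print "Would modify: %s" % ld.update(files)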