Index: modules/univention/s4connector/__init__.py
===================================================================
--- modules/univention/s4connector/__init__.py (Revision 49853)
+++ modules/univention/s4connector/__init__.py (Arbeitskopie)
@@ -44,7 +44,6 @@
from signal import *
term_signal_caught = False
-from univention.s4connector.s4cache import S4Cache
import sqlite3 as lite
univention.admin.modules.update()
@@ -314,7 +313,7 @@
return self.config.has_key(section) and self.config[section].has_key(option)
class attribute:
- def __init__ ( self, ucs_attribute='', ldap_attribute='', con_attribute='', con_other_attribute='', required=0, single_value=False, compare_function=None, mapping=(), reverse_attribute_check=False, sync_mode='sync' ):
+ def __init__ ( self, ucs_attribute='', ldap_attribute='', con_attribute='', con_other_attribute='', required=0, compare_function=None, mapping=(), reverse_attribute_check=False, sync_mode='sync' ):
self.ucs_attribute=ucs_attribute
self.ldap_attribute=ldap_attribute
self.con_attribute=con_attribute
@@ -331,7 +330,6 @@
# Seee https://forge.univention.org/bugzilla/show_bug.cgi?id=25823
self.reverse_attribute_check=reverse_attribute_check
self.sync_mode = sync_mode
- self.single_value=single_value
class property:
def __init__( self, ucs_default_dn='', con_default_dn='', ucs_module='', ucs_module_others=[], sync_mode='', scope='', con_search_filter='', ignore_filter=None, match_filter=None, ignore_subtree=[],
@@ -408,9 +406,6 @@
configdbfile='/etc/univention/%s/s4internal.sqlite' % self.CONFIGBASENAME
self.config = configdb(configdbfile)
- s4cachedbfile='/etc/univention/%s/s4cache.sqlite' % self.CONFIGBASENAME
- self.s4cache = S4Cache(s4cachedbfile)
-
configfile='/etc/univention/%s/s4internal.cfg' % self.CONFIGBASENAME
if os.path.exists(configfile):
ud.debug(ud.LDAP, ud.PROCESS, "Converting %s into a sqlite database" % configfile)
@@ -429,7 +424,7 @@
self.open_ucs()
- for section in ['DN Mapping UCS','DN Mapping CON','UCS rejected', 'UCS deleted']:
+ for section in ['DN Mapping UCS','DN Mapping CON','UCS rejected']:
if not self.config.has_section(section):
self.config.add_section(section)
@@ -706,11 +701,6 @@
if key:
break
- entryUUID = new.get('entryUUID')[0]
- if entryUUID:
- if self.was_entryUUID_deleted(entryUUID):
- ud.debug(ud.LDAP, ud.PROCESS, "__sync_file_from_ucs: Object with entryUUID %s was already deleted. Don't recreate." % entryUUID)
- return True
#ud.debug(ud.LDAP, ud.INFO, "__sync_file_from_ucs: old: %s" % old)
#ud.debug(ud.LDAP, ud.INFO, "__sync_file_from_ucs: new: %s" % new)
if old and new:
@@ -774,8 +764,8 @@
if not self._ignore_object(key,object) or ignore_subtree_match:
ud.debug(ud.LDAP, ud.INFO, "__sync_file_from_ucs: finished mapping")
try:
- if ((old_dn and not self.sync_from_ucs(key, object, premapped_ucs_dn, unicode(old_dn,'utf8'), old, new))
- or (not old_dn and not self.sync_from_ucs(key, object, premapped_ucs_dn, old_dn, old, new))):
+ if ((old_dn and not self.sync_from_ucs(key, object, premapped_ucs_dn, unicode(old_dn,'utf8'), old))
+ or (not old_dn and not self.sync_from_ucs(key, object, premapped_ucs_dn, old_dn, old))):
self._save_rejected_ucs(filename, dn)
return False
else:
@@ -921,6 +911,30 @@
# dummy
pass
+ def _generate_dn_list_from(self, files):
+ '''
+ Save all filenames in a dictionary with dn as key
+ If more than one pickle file was created for one DN we could skip the first one
+ '''
+ if len(files) > 200:
+ # Show an info if it takes some time
+ ud.debug(ud.LDAP, ud.PROCESS, 'Scan all changes from UCS ...')
+ self.dn_list = {}
+ for listener_file in files:
+ filename = os.path.join(self.listener_dir, listener_file)
+ if not filename == "%s/tmp" % self.baseConfig['%s/s4/listener/dir' % self.CONFIGBASENAME]:
+ if not filename in self.rejected_files:
+ try:
+ f=file(filename,'r')
+ except IOError: # file not found so there's nothing to sync
+ continue
+
+ dn,new,old,old_dn=cPickle.load(f)
+ if not self.dn_list.get(dn):
+ self.dn_list[dn]=[filename]
+ else:
+ self.dn_list[dn].append(filename)
+
def poll_ucs(self):
'''
poll changes from UCS: iterates over files exported by directory-listener module
@@ -945,6 +959,9 @@
# the change list is too long and it took too much time
files = files[:MAX_SYNC_IN_ONE_INTERVAL]
+ # Create a dictionary with all DNs
+ self._generate_dn_list_from(files)
+
# We may dropped the parent object, so don't show the traceback in any case
traceback_level = ud.WARN
@@ -956,28 +973,43 @@
try:
f=file(filename,'r')
except IOError: # file not found so there's nothing to sync
+ if self.dn_list.get(dn):
+ self.dn_list[dn].remove(filename)
continue
dn,new,old,old_dn=cPickle.load(f)
- for i in [0, 1]: # do it twice if the LDAP connection was closed
+ if len(self.dn_list.get(dn, [])) < 2 or not old or not new:
+ # If the list contains more than one file, the DN will be synced later
+ # But if the object was added or removed, the synchronization is required
+ for i in [0, 1]: # do it twice if the LDAP connection was closed
+ try:
+ sync_successfull = self.__sync_file_from_ucs(filename, traceback_level=traceback_level)
+ except (ldap.SERVER_DOWN, SystemExit):
+ # once again, ldap idletimeout ...
+ if i == 0:
+ self.open_ucs()
+ continue
+ raise
+ except:
+ self._save_rejected_ucs(filename, dn)
+ # We may have dropped the parent object, so don't show this warning
+ self._debug_traceback(traceback_level, "sync failed, saved as rejected \n\t%s" % filename)
+ if sync_successfull:
+ os.remove(os.path.join(self.listener_dir,listener_file))
+ change_counter += 1
+ break
+ else:
+ os.remove(os.path.join(filename))
+ traceback_level = ud.INFO
try:
- sync_successfull = self.__sync_file_from_ucs(filename, traceback_level=traceback_level)
- except (ldap.SERVER_DOWN, SystemExit):
- # once again, ldap idletimeout ...
- if i == 0:
- self.open_ucs()
- continue
- raise
+ ud.debug(ud.LDAP, ud.PROCESS, 'Drop %s. The DN %s will synced later' % (filename, dn))
except:
- self._save_rejected_ucs(filename, dn)
- # We may dropped the parent object, so don't show this warning
- self._debug_traceback(traceback_level, "sync failed, saved as rejected \n\t%s" % filename)
- if sync_successfull:
- os.remove(os.path.join(self.listener_dir,listener_file))
- change_counter += 1
- break
+ ud.debug(ud.LDAP, ud.PROCESS, 'Drop %s. The object will synced later' % (filename))
+ if self.dn_list.get(dn):
+ self.dn_list[dn].remove(filename)
+
done_counter += 1
print "%s"%done_counter,
sys.stdout.flush()
@@ -1002,7 +1034,6 @@
_d=ud.function('ldap.__set_value')
if not modtype == 'add':
ucs_object.open()
- ud.debug(ud.LDAP, ud.INFO, '__set_values: object: %s' % object)
def set_values(attributes):
if object['attributes'].has_key(attributes.ldap_attribute):
ucs_key = attributes.ucs_attribute
@@ -1058,10 +1089,7 @@
else:
equal = compare[0] == compare[1]
if not equal:
- if isinstance(value, list):
- ucs_object[ucs_key] = list(set(value))
- else:
- ucs_object[ucs_key] = value
+ ucs_object[ucs_key] = value
ud.debug(ud.LDAP, ud.INFO,
"set key in ucs-object: %s" % ucs_key)
else:
@@ -1096,18 +1124,11 @@
else:
ud.debug(ud.LDAP, ud.WARN, '__set_values: The attributes for %s have not been removed as it represents a mandatory attribute' % ucs_key)
+
for attr_key in self.property[property_type].attributes.keys():
if self.property[property_type].attributes[attr_key].sync_mode in ['read', 'sync']:
+ set_values(self.property[property_type].attributes[attr_key])
- con_attribute = self.property[property_type].attributes[attr_key].con_attribute
- con_other_attribute = self.property[property_type].attributes[attr_key].con_other_attribute
-
- if not object.get('changed_attributes') or con_attribute in object.get('changed_attributes') or (con_other_attribute and con_other_attribute in object.get('changed_attributes')):
- ud.debug(ud.LDAP, ud.INFO, '__set_values: Set: %s' % con_attribute)
- set_values(self.property[property_type].attributes[attr_key])
- else:
- ud.debug(ud.LDAP, ud.INFO, '__set_values: Skip: %s' % con_attribute)
-
# post-values
if not self.property[property_type].post_attributes:
return
@@ -1118,21 +1139,13 @@
set_values(self.property[property_type].post_attributes[attr_key].mapping[1](self, property_type, object))
else:
if self.property[property_type].post_attributes[attr_key].sync_mode in ['read', 'sync']:
-
- con_attribute = self.property[property_type].post_attributes[attr_key].con_attribute
- con_other_attribute = self.property[property_type].post_attributes[attr_key].con_other_attribute
-
- if not object.get('changed_attributes') or con_attribute in object.get('changed_attributes') or (con_other_attribute and con_other_attribute in object.get('changed_attributes')):
- ud.debug(ud.LDAP, ud.INFO, '__set_values: Set: %s' % con_attribute)
- if self.property[property_type].post_attributes[attr_key].reverse_attribute_check:
- if object['attributes'].get(self.property[property_type].post_attributes[attr_key].ldap_attribute):
- set_values(self.property[property_type].post_attributes[attr_key])
- else:
- ucs_object[self.property[property_type].post_attributes[attr_key].ucs_attribute] = ''
+ if self.property[property_type].post_attributes[attr_key].reverse_attribute_check:
+ if object['attributes'].get(self.property[property_type].post_attributes[attr_key].ldap_attribute):
+ set_values(self.property[property_type].post_attributes[attr_key])
else:
- set_values(self.property[property_type].post_attributes[attr_key])
+ ucs_object[self.property[property_type].post_attributes[attr_key].ucs_attribute] = ''
else:
- ud.debug(ud.LDAP, ud.INFO, '__set_values: Skip: %s' % con_attribute)
+ set_values(self.property[property_type].post_attributes[attr_key])
def __modify_custom_attributes(self, property_type, object, ucs_object, module, position, modtype = "modify"):
if object.has_key('custom_attributes'):
@@ -1207,33 +1220,6 @@
ucs_object.move(object['dn'])
return True
- def _get_entryUUID(self, dn):
- try:
- result = self.search_ucs(base=dn, scope='base', attr=['entryUUID'], unique=True)
- if result:
- return result[0][1].get('entryUUID')[0]
- else:
- return None
- except univention.admin.uexceptions.noObject:
- return None
-
- def update_deleted_cache_after_removal_in_ucs(self, entryUUID, objectGUID):
- if not entryUUID:
- return
- # use a dummy value
- if not objectGUID:
- objectGUID='objectGUID'
- ud.debug(ud.LDAP, ud.INFO, "update_deleted_cache_after_removal_in_ucs: Save entryUUID %s as deleted to UCS deleted cache. ObjectGUUID: %s" % (entryUUID, objectGUID))
- self._set_config_option('UCS deleted', entryUUID, base64.encodestring(objectGUID))
-
- def was_entryUUID_deleted(self, entryUUID):
- objectGUID = self.config.get('UCS deleted', entryUUID)
- if objectGUID:
- return True
- else:
- return False
-
-
def delete_in_ucs(self, property_type, object, module, position):
_d=ud.function('ldap.delete_in_ucs')
@@ -1241,9 +1227,6 @@
ud.debug(ud.LDAP, ud.PROCESS, "Delete of %s was disabled in mapping" % object['dn'])
return True
- objectGUID = object['attributes'].get('objectGUID')[0]
- entryUUID = self._get_entryUUID(object['dn'])
-
module = self.modules[property_type]
ucs_object = univention.admin.objects.get(module, None, self.lo, dn=object['dn'], position='')
@@ -1250,7 +1233,6 @@
try:
ucs_object.open()
ucs_object.remove()
- self. update_deleted_cache_after_removal_in_ucs(entryUUID, objectGUID)
return True
except Exception, e:
ud.debug(ud.LDAP, ud.INFO,"delete object exception: %s"%e)
@@ -1285,7 +1267,7 @@
else:
raise
- def sync_to_ucs(self, property_type, object, premapped_s4_dn, original_object):
+ def sync_to_ucs(self, property_type, object, premapped_s4_dn):
_d=ud.function('ldap.sync_to_ucs')
# this function gets an object from the s4 class, which should be converted into a ucs modul
@@ -1329,25 +1311,6 @@
pass
try:
- guid = original_object.get('attributes').get('objectGUID')[0]
-
- object['changed_attributes'] = []
- if object['modtype'] == 'modify' and original_object:
- old_s4_object = self.s4cache.get_entry(guid)
- ud.debug(ud.LDAP, ud.INFO, "sync_to_ucs: old_s4_object: %s" % old_s4_object)
- ud.debug(ud.LDAP, ud.INFO, "sync_to_ucs: new_s4_object: %s" % original_object['attributes'])
- if old_s4_object:
- for attr in original_object['attributes']:
- if old_s4_object.get(attr) != original_object['attributes'].get(attr):
- object['changed_attributes'].append(attr)
- for attr in old_s4_object:
- if old_s4_object.get(attr) != original_object['attributes'].get(attr):
- if not attr in object['changed_attributes']:
- object['changed_attributes'].append(attr)
- else:
- object['changed_attributes'] = original_object['attributes'].keys()
- ud.debug(ud.LDAP, ud.INFO, "The following attributes have been changed: %s" % object['changed_attributes'])
-
result = False
if hasattr(self.property[property_type],"ucs_sync_function"):
result = self.property[property_type].ucs_sync_function(self, property_type, object)
@@ -1355,7 +1318,6 @@
if object['modtype'] == 'add':
result = self.add_in_ucs(property_type, object, module, position)
self._check_dn_mapping(object['dn'], premapped_s4_dn)
- self.s4cache.add_entry(guid, original_object.get('attributes'))
if object['modtype'] == 'delete':
if not old_object:
ud.debug(ud.LDAP, ud.WARN,
@@ -1364,17 +1326,14 @@
else:
result = self.delete_in_ucs(property_type, object, module, position)
self._remove_dn_mapping(object['dn'], premapped_s4_dn)
- self.s4cache.remove_entry(guid)
if object['modtype'] == 'move':
result = self.move_in_ucs(property_type, object, module, position)
self._remove_dn_mapping(object['olddn'], '') # we don't know the old s4-dn here anymore, will be checked by remove_dn_mapping
self._check_dn_mapping(object['dn'], premapped_s4_dn)
- # Check S4cache
if object['modtype'] == 'modify':
result = self.modify_in_ucs(property_type, object, module, position)
self._check_dn_mapping(object['dn'], premapped_s4_dn)
- self.s4cache.add_entry(guid, original_object.get('attributes'))
if not result:
ud.debug(ud.LDAP, ud.WARN,
@@ -1413,7 +1372,7 @@
self._debug_traceback(ud.ERROR, "Unknown Exception during sync_to_ucs")
return False
- def sync_from_ucs(self, property_type, object, pre_mapped_ucs_dn, old_dn=None, old_ucs_object = None, new_ucs_object = None):
+ def sync_from_ucs(self, property_type, object, old_dn=None):
# dummy
return False
Index: modules/univention/s4connector/s4/__init__.py
===================================================================
--- modules/univention/s4connector/s4/__init__.py (Revision 49853)
+++ modules/univention/s4connector/s4/__init__.py (Arbeitskopie)
@@ -2040,7 +2040,7 @@
mapped_object = self._object_mapping(property_key,object)
try:
if not self._ignore_object(property_key,mapped_object) and not self._ignore_object(property_key,object):
- sync_successfull = self.sync_to_ucs(property_key, mapped_object, premapped_s4_dn, object)
+ sync_successfull = self.sync_to_ucs(property_key, mapped_object, premapped_s4_dn)
else:
sync_successfull = True
except (ldap.SERVER_DOWN, SystemExit):
@@ -2134,7 +2134,7 @@
try:
mapped_object = self._object_mapping(property_key,object)
if not self._ignore_object(property_key,mapped_object):
- sync_successfull = self.sync_to_ucs(property_key, mapped_object, object['dn'], object)
+ sync_successfull = self.sync_to_ucs(property_key, mapped_object, object['dn'])
else:
sync_successfull = True
except (ldap.SERVER_DOWN, SystemExit):
@@ -2204,11 +2204,8 @@
sys.stdout.flush()
return change_count
- def __has_attribute_value_changed(self, attribute, old_ucs_object, new_ucs_object):
- return not old_ucs_object.get(attribute) == new_ucs_object.get(attribute)
-
- def sync_from_ucs(self, property_type, object, pre_mapped_ucs_dn, old_dn=None, old_ucs_object = None, new_ucs_object = None):
+ def sync_from_ucs(self, property_type, object, pre_mapped_ucs_dn, old_dn=None, old_ucs_object = None):
_d=ud.function('ldap.__sync_from_ucs')
# Diese Methode erhaelt von der UCS Klasse ein Objekt,
# welches hier bearbeitet wird und in das S4 geschrieben wird.
@@ -2273,9 +2270,6 @@
s4_object=self.get_object(object['dn'])
- #
- # ADD
- #
if (object['modtype'] == 'add' and not s4_object) or (object['modtype'] == 'modify' and not s4_object):
ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: add object: %s"%object['dn'])
@@ -2355,123 +2349,85 @@
f(self, property_type, object)
ud.debug(ud.LDAP, ud.INFO, "Call post_con_modify_functions: %s (done)" % f)
- #
- # MODIFY
- #
elif (object['modtype'] == 'modify' and s4_object) or (object['modtype'] == 'add' and s4_object):
ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: modify object: %s"%object['dn'])
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: old_object: %s" % old_ucs_object)
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: new_object: %s" % new_ucs_object)
- object['old_ucs_object'] = old_ucs_object
- object['new_ucs_object'] = new_ucs_object
- attribute_list = set(old_ucs_object.keys()).union(set(new_ucs_object.keys()))
if hasattr(self.property[property_type],"con_sync_function"):
self.property[property_type].con_sync_function(self, property_type, object)
else:
- # Iterate over attributes and post_attributes
- for attribute_type_name, attribute_type in [('attributes', self.property[property_type].attributes),('post_attributes', self.property[property_type].post_attributes)]:
- if hasattr(self.property[property_type], attribute_type_name) and attribute_type != None:
- for attr in attribute_list:
- value = new_ucs_object.get(attr)
- if not self.__has_attribute_value_changed(attr, old_ucs_object, new_ucs_object):
- continue
-
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: The following attribute has been changed: %s" % attr)
-
- for attribute in attribute_type.keys():
- if attribute_type[attribute].ldap_attribute == attr:
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: Found a corresponding mapping defintion: %s" % attribute)
-
- s4_attribute = attribute_type[attribute].con_attribute
- s4_other_attribute = attribute_type[attribute].con_other_attribute
-
- if not attribute_type[attribute].sync_mode in ['write', 'sync']:
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: %s is in not in wroite or sync mode. Skipping" % attribute)
+ attr_list = []
+ if hasattr(self.property[property_type], 'attributes') and self.property[property_type].attributes != None:
+ for attr,value in object['attributes'].items():
+ attr_list.append(attr)
+ for attribute in self.property[property_type].attributes.keys():
+ if self.property[property_type].attributes[attribute].con_attribute == attr or self.property[property_type].attributes[attribute].con_other_attribute == attr:
+ if not s4_object.has_key(attr):
+ if value:
+ modlist.append((ldap.MOD_ADD, attr, value))
+ elif self.property[property_type].attributes[attribute].compare_function:
+ if not self.property[property_type].attributes[attribute].compare_function(value,s4_object[attr]):
+ modlist.append((ldap.MOD_REPLACE, attr, value))
+ elif not univention.s4connector.compare_lowercase(value,s4_object[attr]): # FIXME: use defined compare-function from mapping.py
+ modlist.append((ldap.MOD_REPLACE, attr, value))
+ if hasattr(self.property[property_type], 'post_attributes') and self.property[property_type].post_attributes != None:
+ for attr,value in object['attributes'].items():
+ attr_list.append(attr)
+ for attribute in self.property[property_type].post_attributes.keys():
+ if self.property[property_type].post_attributes[attribute].con_attribute == attr or self.property[property_type].post_attributes[attribute].con_other_attribute == attr:
+ if self.property[property_type].post_attributes[attribute].reverse_attribute_check:
+ if not object['attributes'].get(self.property[property_type].post_attributes[attribute].ldap_attribute):
continue
+ if not s4_object.has_key(attr):
+ if value:
+ modlist.append((ldap.MOD_ADD, attr, value))
+ elif self.property[property_type].post_attributes[attribute].compare_function:
+ if not self.property[property_type].post_attributes[attribute].compare_function(value,s4_object[attr]):
+ modlist.append((ldap.MOD_REPLACE, attr, value))
+ elif not univention.s4connector.compare_lowercase(value,s4_object[attr]): # FIXME: use defined compare-function from mapping.py
+ modlist.append((ldap.MOD_REPLACE, attr, value))
- modify = False
+ attrs_in_current_ucs_object = object['attributes'].keys()
+ attrs_which_should_be_mapped = []
+ attrs_to_remove_from_s4_object = []
- # Get the UCS attributes
- old_values = set(old_ucs_object.get(attr, []))
- new_values = set(new_ucs_object.get(attr, []))
+ if hasattr(self.property[property_type], 'attributes') and self.property[property_type].attributes != None:
+ for ac in self.property[property_type].attributes.keys():
+ if self.property[property_type].attributes[ac].sync_mode in ['write', 'sync']:
+ if not self.property[property_type].attributes[ac].con_attribute in attrs_which_should_be_mapped:
+ attrs_which_should_be_mapped.append(self.property[property_type].attributes[ac].con_attribute)
+ if self.property[property_type].attributes[ac].con_other_attribute:
+ if not self.property[property_type].attributes[ac].con_other_attribute in attrs_which_should_be_mapped:
+ attrs_which_should_be_mapped.append(self.property[property_type].attributes[ac].con_other_attribute)
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: old_values: %s" % old_values)
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: new_values: %s" % new_values)
+ if hasattr(self.property[property_type], 'post_attributes') and self.property[property_type].post_attributes != None:
+ for ac in self.property[property_type].post_attributes.keys():
+ if self.property[property_type].post_attributes[ac].sync_mode in ['write', 'sync']:
+ if not self.property[property_type].post_attributes[ac].con_attribute in attrs_which_should_be_mapped:
+ if self.property[property_type].post_attributes[ac].reverse_attribute_check:
+ if object['attributes'].get(self.property[property_type].post_attributes[ac].ldap_attribute):
+ attrs_which_should_be_mapped.append(self.property[property_type].post_attributes[ac].con_attribute)
+ elif s4_object.get(self.property[property_type].post_attributes[ac].con_attribute):
+ modlist.append((ldap.MOD_DELETE, self.property[property_type].post_attributes[ac].con_attribute, None))
+ else:
+ attrs_which_should_be_mapped.append(self.property[property_type].post_attributes[ac].con_attribute)
+ if self.property[property_type].post_attributes[ac].con_other_attribute:
+ if not self.property[property_type].post_attributes[ac].con_other_attribute in attrs_which_should_be_mapped:
+ attrs_which_should_be_mapped.append(self.property[property_type].post_attributes[ac].con_other_attribute)
- if attribute_type[attribute].compare_function:
- if not attribute_type[attribute].compare_function(list(old_values), list(new_values)):
- modify = True
- elif not univention.s4connector.compare_lowercase(list(old_values), list(new_values)): # FIXME: use defined compare-function from mapping.py
- modify=True
+ modlist_empty_attrs = []
+ for expected_attribute in attrs_which_should_be_mapped:
+ if not object['attributes'].has_key(expected_attribute):
+ attrs_to_remove_from_s4_object.append(expected_attribute)
- if not modify:
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: no modification necessary for %s" % attribute)
+ if modlist:
+ for modified_attrs in modlist:
+ if modified_attrs[1] in attrs_to_remove_from_s4_object and len(modified_attrs[2]) > 0:
+ attrs_to_remove_from_s4_object.remove(modified_attrs[1])
- if modify:
- # So, at this point we have the old and the new UCS object.
- # Thus we can create the diff, but we have to check the current S4 object
+ for yank_empty_attr in attrs_to_remove_from_s4_object:
+ if s4_object.has_key(yank_empty_attr):
+ if value != None:
+ modlist.append((ldap.MOD_DELETE, yank_empty_attr, None))
- if not old_values:
- to_add = new_values
- to_remove = set([])
- elif not new_values:
- to_remove = old_values
- to_add = set([])
- else:
- to_add = new_values - old_values
- to_remove = old_values - new_values
-
- if s4_other_attribute:
- # in this case we need lists because sets are unorded and the order is important
- current_s4_values = set(s4_object.get(s4_attribute, []))
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: The current S4 values: %s" % current_s4_values)
-
- current_s4_other_values = set(s4_object.get(attribute_type[attribute].con_other_attribute, []))
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: The current S4 other values: %s" % current_s4_other_values)
-
- new_s4_values = current_s4_values - to_remove
- if not new_s4_values and to_add:
- for n_value in new_ucs_object.get(attr, []):
- if n_value in to_add:
- to_add = to_add - set([n_value])
- new_s4_values = [n_value]
- break
-
- new_s4_other_values = (current_s4_other_values | to_add) - to_remove
- if current_s4_values != new_s4_values:
- if new_s4_values:
- modlist.append((ldap.MOD_REPLACE, s4_attribute, new_s4_values))
- else:
- modlist.append((ldap.MOD_REPLACE, s4_attribute, []))
-
- if current_s4_other_values != new_s4_other_values:
- modlist.append((ldap.MOD_REPLACE, s4_other_attribute, new_s4_other_values))
- else:
- current_s4_values = set(s4_object.get(s4_attribute, []))
-
- ud.debug(ud.LDAP, ud.INFO, "sync_from_ucs: The current S4 values: %s" % current_s4_values)
-
- if (to_add or to_remove) and attribute_type[attribute].single_value:
- modify=False
- if not current_s4_values or not value:
- modify=True
- elif attribute_type[attribute].compare_function:
- if not attribute_type[attribute].compare_function(list(current_s4_values), list(value)):
- modify=True
- elif not univention.s4connector.compare_lowercase(list(current_s4_values), list(value)):
- modify=True
- if modify:
- modlist.append((ldap.MOD_REPLACE, s4_attribute, value))
- else:
- if to_remove:
- r = current_s4_values & to_remove
- if r:
- modlist.append((ldap.MOD_DELETE, s4_attribute, r))
- if to_add:
- a = to_add - current_s4_values
- if a:
- modlist.append((ldap.MOD_ADD, s4_attribute, a))
-
ud.debug(ud.LDAP, ud.INFO, "to modify: %s" % object['dn'])
if modlist:
ud.debug(ud.LDAP, ud.ALL, "sync_from_ucs: modlist: %s" % modlist)
@@ -2488,9 +2444,6 @@
ud.debug(ud.LDAP, ud.INFO, "Call post_con_modify_functions: %s" % f)
f(self, property_type, object)
ud.debug(ud.LDAP, ud.INFO, "Call post_con_modify_functions: %s (done)" % f)
- #
- # DELETE
- #
elif object['modtype'] == 'delete':
if hasattr(self.property[property_type],"con_sync_function"):
self.property[property_type].con_sync_function(self, property_type, object)
@@ -2513,7 +2466,6 @@
def delete_in_s4(self, object, property_type ):
_d=ud.function('ldap.delete_in_s4')
ud.debug(ud.LDAP, ud.ALL,"delete: %s" % object['dn'])
- ud.debug(ud.LDAP, ud.ALL,"delete_in_s4: %s" % object)
try:
self.lo_s4.lo.delete_s(compatible_modstring(object['dn']))
except ldap.NO_SUCH_OBJECT:
Index: modules/univention/s4connector/s4/password.py
===================================================================
--- modules/univention/s4connector/s4/password.py (Revision 49853)
+++ modules/univention/s4connector/s4/password.py (Arbeitskopie)
@@ -457,24 +457,6 @@
_d=ud.function('ldap.s4.password_sync_ucs_to_s4')
ud.debug(ud.LDAP, ud.INFO, "password_sync_ucs_to_s4 called")
- modify=False
- old_ucs_object = object.get('old_ucs_object', {})
- new_ucs_object = object.get('new_ucs_object', {})
- if old_ucs_object or new_ucs_object:
- for attr in ['sambaLMPassword', 'sambaNTPassword','sambaPwdLastSet','sambaPwdMustChange', 'krb5PrincipalName', 'krb5Key', 'shadowLastChange', 'shadowMax', 'krb5PasswordEnd', 'univentionService']:
- old_values = set(old_ucs_object.get(attr, []))
- new_values = set(new_ucs_object.get(attr, []))
- if old_values != new_values:
- modify=True
- break
- else:
- # add mode
- modify=True
-
- if not modify:
- ud.debug(ud.LDAP, ud.INFO, 'password_sync_ucs_to_s4: the password for %s has not been changed. Skipping password sync.' % (object['dn']))
- return
-
compatible_modstring = univention.s4connector.s4.compatible_modstring
try:
ud.debug(ud.LDAP, ud.INFO, "Object DN=%s" % object['dn'])
@@ -645,11 +627,6 @@
_d=ud.function('ldap.s4.password_sync_s4_to_ucs')
ud.debug(ud.LDAP, ud.INFO, "password_sync_s4_to_ucs called")
- if ucs_object['modtype'] == 'modify':
- if not 'pwdLastSet' in ucs_object.get('changed_attributes', []):
- ud.debug(ud.LDAP, ud.INFO, 'password_sync_s4_to_ucs: the password for %s has not been changed. Skipping password sync.' % (ucs_object['dn']))
- return
-
object=s4connector._object_mapping(key, ucs_object, 'ucs')
res=s4connector.lo_s4.lo.search_s(univention.s4connector.s4.compatible_modstring(object['dn']), ldap.SCOPE_BASE, '(objectClass=*)',['objectSid','pwdLastSet'])
Index: modules/univention/s4connector/s4cache.py
===================================================================
--- modules/univention/s4connector/s4cache.py (Revision 49853)
+++ modules/univention/s4connector/s4cache.py (Arbeitskopie)
@@ -1,405 +0,0 @@
-#!/usr/bin/python2.6
-# -*- coding: utf-8 -*-
-#
-# Univention S4 Connector
-# s4 cache
-#
-# Copyright 2014 Univention GmbH
-#
-# http://www.univention.de/
-#
-# All rights reserved.
-#
-# The source code of this program is made available
-# under the terms of the GNU Affero General Public License version 3
-# (GNU AGPL V3) as published by the Free Software Foundation.
-#
-# Binary versions of this program provided by Univention to you as
-# well as other copyrighted, protected or trademarked materials like
-# Logos, graphics, fonts, specific documentations and configurations,
-# cryptographic keys etc. are subject to a license agreement between
-# you and Univention and not subject to the GNU AGPL V3.
-#
-# In the case you use this program under the terms of the GNU AGPL V3,
-# the program is provided in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public
-# License with the Debian GNU/Linux or Univention distribution in file
-# /usr/share/common-licenses/AGPL-3; if not, see
-# .
-
-import univention.debug2 as ud
-import sqlite3
-import inspect
-import base64
-import binascii
-
-
-def func_name():
- return inspect.currentframe().f_back.f_code.co_name
-
-def _is_base64(val):
- try:
- # It is not sufficient to run base64.decodestring to detect a base64 string.
- # When the ascii decode is not possible, it is not a base4 string.
- val.decode('ascii')
- except UnicodeDecodeError:
- return False
- try:
- # The string must be casted as str otherwise we saw something like this:
- # 11.02.2014 03:53:44,141 LDAP (INFO): _is_base64 returns True for: Í8^Ml%'A²ôâ/! ^RÃ
- # 11.02.2014 03:53:44,142 LDAP (WARNING): S4Cache: sqlite: near "A²ôâ": syntax error. SQL command was: [u"SELECT id FROM GUIDS WHERE guid='\xcd8\rl%'\x97A\xb2\xf4\xe2/! \x12\xc3';"
- base64.decodestring(str(val))
- return True
- except binascii.Error:
- return False
-
-def _decode_base64(val):
- return base64.decodestring(val)
-
-def _encode_base64(val):
- return base64.encodestring(val)
-
-def _encode_guid(guid):
- # guid may be unicode
-
- if _is_base64(guid):
- return guid
-
- if type(guid) == type(u''):
- return guid.encode('ISO-8859-1').encode('base64')
- else:
- return unicode(guid,'latin').encode('ISO-8859-1').encode('base64')
-
-def _decode_guid(guid):
- try:
- return base64.decodestring(guid)
- except binascii.Error:
- return guid
-
-
-class EntryDiff(object):
- def __init__(self, old, new):
- self.old = old
- self.new = new
- if not old:
- old = {}
- if not new:
- new = {}
- self.set_old = set(old.keys())
- self.set_new = set(new.keys())
- self.intersect = self.set_new.intersection(self.set_old)
-
- def added(self):
- return self.set_new - self.intersect
-
- def removed(self):
- return self.set_old - self.intersect
-
- def changed(self):
- return set(o for o in self.intersect if self.old[o] != self.new[o])
-
-
-class S4Cache:
- """
- Local cache for the current Samba 4 state of the s4connector.
- With this cache the connector has the possibility to create
- a diff between the new Samba 4 object and the old one from
- cache.
- """
- def __init__ (self, filename):
- _d = ud.function('S4Cache.%s' % func_name())
- self.filename = filename
- self._dbcon = sqlite3.connect(self.filename)
- self.s4cache = {}
-
- self.__create_tables()
-
- def add_entry(self, guid, entry):
- _d = ud.function('S4Cache.%s' % func_name())
-
- guid = _encode_guid(guid).strip()
-
- if not self._guid_exists(guid):
- self._add_entry(guid, entry)
- else:
- self._update_entry(guid, entry)
- self.s4cache[guid] = entry
-
-
- def diff_entry(self, old_entry, new_entry):
- _d = ud.function('S4Cache.%s' % func_name())
-
- result = {'added': None, 'removed': None, 'changed': None}
-
- diff = EntryDiff(old_entry, new_entry)
-
- result['added'] = diff.added()
- result['removed'] = diff.removed()
- result['changed'] = diff.changed()
-
- return result
-
- def get_entry(self, guid):
- _d = ud.function('S4Cache.%s' % func_name())
-
- entry = {}
-
- guid = _encode_guid(guid)
-
- guid_id = self._get_guid_id(guid)
-
- if not guid_id:
- return None
-
- sql_commands = [
- "SELECT ATTRIBUTES.attribute,data.value from data \
- inner join ATTRIBUTES ON data.attribute_id=attributes.id where guid_id = %s;" % (guid_id)
- ]
-
- rows = self.__execute_sql_commands(sql_commands, fetch_result=True)
-
- if not rows:
- return None
-
- for line in rows:
- if not entry.get(line[0]):
- entry[str(line[0])] = []
- entry[line[0]].append(_decode_base64(line[1]))
-
- return entry
-
- def remove_entry(self, guid):
- _d = ud.function('S4Cache.%s' % func_name())
-
- guid = _encode_guid(guid)
-
- guid_id = self._get_guid_id(guid)
-
- if not guid_id:
- return None
-
- sql_commands = [
- "DELETE FROM data WHERE guid_id = '%(guid_id)s';" % ({'guid_id': guid_id}),
- "DELETE FROM guids WHERE id = '%(guid_id)s';" % ({'guid_id': guid_id})
- ]
-
- self.__execute_sql_commands(sql_commands, fetch_result=False)
-
- def __execute_sql_commands(self, sql_commands, fetch_result=False):
- for i in [1, 2]:
- try:
- cur = self._dbcon.cursor()
- for sql_command in sql_commands:
- if isinstance(sql_command, tuple):
- ud.debug(ud.LDAP, ud.INFO, "S4Cache: Execute SQL command: '%s', '%s'" % (sql_command[0], sql_command[1]))
- cur.execute(sql_command[0], sql_command[1])
- else:
- ud.debug(ud.LDAP, ud.INFO, "S4Cache: Execute SQL command: '%s'" % sql_command)
- cur.execute(sql_command)
- self._dbcon.commit()
- if fetch_result:
- rows = cur.fetchall()
- cur.close()
- if fetch_result:
- ud.debug(ud.LDAP, ud.INFO, "S4Cache: Return SQL result: '%s'" % rows)
- return rows
- return None
- except sqlite3.Error, exp:
- ud.debug(ud.LDAP, ud.WARN, "S4Cache: sqlite: %s. SQL command was: %s" % (exp, sql_commands))
- if self._dbcon:
- self._dbcon.close()
- self._dbcon = sqlite3.connect(self.filename)
-
-
- def __create_tables(self):
- _d = ud.function('S4Cache.%s' % func_name())
-
- sql_commands = [
- "CREATE TABLE IF NOT EXISTS GUIDS (id INTEGER PRIMARY KEY, guid TEXT);",
- "CREATE TABLE IF NOT EXISTS ATTRIBUTES (id INTEGER PRIMARY KEY, attribute TEXT);",
- "CREATE TABLE IF NOT EXISTS DATA (id INTEGER PRIMARY KEY, guid_id INTEGER, attribute_id INTEGER, value TEXT);"
- ]
-
- self.__execute_sql_commands(sql_commands, fetch_result=False)
-
-
- def _guid_exists(self, guid):
- _d = ud.function('S4Cache.%s' % func_name())
-
- return self._get_guid_id(guid.strip()) != None
-
-
- def _get_guid_id(self, guid):
- _d = ud.function('S4Cache.%s' % func_name())
-
- sql_commands = [
- "SELECT id FROM GUIDS WHERE guid='%s';" % (_encode_guid(guid).strip())
- ]
-
- rows = self.__execute_sql_commands(sql_commands, fetch_result=True)
-
- if rows:
- return rows[0][0]
-
- return None
-
-
- def _append_guid(self, guid):
- _d = ud.function('S4Cache.%s' % func_name())
-
- sql_commands = [
- "INSERT INTO GUIDS(guid) VALUES('%s');" % (_encode_guid(guid).strip())
- ]
-
- rows = self.__execute_sql_commands(sql_commands, fetch_result=False)
-
-
- def _get_attr_id(self, attr):
- _d = ud.function('S4Cache.%s' % func_name())
-
- sql_commands = [
- "SELECT id FROM ATTRIBUTES WHERE attribute='%s';" % (attr)
- ]
-
- rows = self.__execute_sql_commands(sql_commands, fetch_result=True)
-
- if rows:
- return rows[0][0]
-
- return None
-
-
- def _attr_exists(self, guid):
- _d = ud.function('S4Cache.%s' % func_name())
-
- return self._get_attr_id(guid) != None
-
- def _create_attr(self, attr):
- _d = ud.function('S4Cache.%s' % func_name())
-
- sql_commands = [
- "INSERT INTO ATTRIBUTES(attribute) VALUES('%s');" % (attr)
- ]
-
- self.__execute_sql_commands(sql_commands, fetch_result=False)
-
-
- def _get_attr_id_and_create_if_not_exists(self, attr):
- _d = ud.function('S4Cache.%s' % func_name())
- if not self._get_attr_id(attr):
- self._create_attr(attr)
-
- return self._get_attr_id(attr)
-
- def _add_entry(self, guid, entry):
- _d = ud.function('S4Cache.%s' % func_name())
-
- guid = guid.strip()
-
- self._append_guid(guid)
- guid_id = self._get_guid_id(guid)
-
- sql_commands = []
- for attr in entry.keys():
- attr_id = self._get_attr_id_and_create_if_not_exists(attr)
- for value in entry[attr]:
- sql_commands.append(
- (
- "INSERT INTO DATA(guid_id,attribute_id,value) VALUES(%s,%s,?);" % (guid_id, attr_id),
- [_encode_base64(value)]
- )
- )
-
- if sql_commands:
- self.__execute_sql_commands(sql_commands, fetch_result=False)
-
- def _update_entry(self, guid, entry):
- _d = ud.function('S4Cache.%s' % func_name())
-
- guid = guid.strip()
- guid_id = self._get_guid_id(guid)
- old_entry = self.get_entry(guid)
- diff = self.diff_entry(old_entry, entry)
-
- sql_commands = []
- for attribute in diff['removed']:
- sql_commands.append(
- "DELETE FROM data WHERE data.id IN (\
- SELECT data.id FROM DATA INNER JOIN ATTRIBUTES ON data.attribute_id=attributes.id \
- where attributes.attribute='%(attribute)s' and guid_id = '%(guid_id)s' \
- );" % ({'guid_id': guid_id, 'attribute': attribute})
- )
- for attribute in diff['added']:
- attr_id = self._get_attr_id_and_create_if_not_exists(attribute)
- for value in entry[attribute]:
- sql_commands.append(
- (
- "INSERT INTO DATA(guid_id,attribute_id,value) VALUES(%s,%s,?);" % (guid_id, attr_id),
- [_encode_base64(value)]
- )
- )
- for attribute in diff['changed']:
- attr_id = self._get_attr_id_and_create_if_not_exists(attribute)
- for value in set(old_entry.get(attribute)) - set(entry.get(attribute)):
- sql_commands.append(
- (
- "DELETE FROM data WHERE data.id IN (\
- SELECT data.id FROM DATA INNER JOIN ATTRIBUTES ON data.attribute_id=attributes.id \
- where attributes.id='%(attr_id)s' and guid_id = '%(guid_id)s' and value = ? \
- );" % ({'guid_id': guid_id, 'attr_id': attr_id}),
- [_encode_base64(value)]
- )
- )
- for value in set(entry.get(attribute)) - set(old_entry.get(attribute)):
- sql_commands.append(
- (
- "INSERT INTO DATA(guid_id,attribute_id,value) VALUES(%s,%s,?);" % (guid_id, attr_id),
- [_encode_base64(value)]
- )
- )
-
- if sql_commands:
- self.__execute_sql_commands(sql_commands, fetch_result=False)
-
-
-
-if __name__ == '__main__':
-
- print 'Starting S4cache test example ',
-
- s4cache = S4Cache('cache.sqlite')
-
- guid = '1234'
-
- entry = {
- 'attr1': ['foobar'],
- 'attr2': [ 'val1', 'val2', 'val3']
- }
-
- s4cache.add_entry(guid, entry)
- entry_old = s4cache.get_entry(guid)
- diff_entry = s4cache.diff_entry(entry_old, entry)
- if diff_entry.get('changed') or diff_entry.get('removed') or diff_entry.get('added'):
- raise Exception('Test 1 failed: %s' % diff_entry)
- print '.',
-
- entry['attr3'] = ['val2']
- entry['attr2'] = ['val1', 'val3']
-
- diff_entry = s4cache.diff_entry(entry_old, entry)
- if diff_entry.get('changed') != set(['attr2']) or diff_entry.get('removed') or diff_entry.get('added') != set(['attr3']):
- raise Exception('Test 2 failed: %s' % diff_entry)
- print '.',
-
- s4cache.add_entry(guid, entry)
- entry_old = s4cache.get_entry(guid)
- diff_entry = s4cache.diff_entry(entry_old, entry)
- if diff_entry.get('changed') or diff_entry.get('removed') or diff_entry.get('added'):
- raise Exception('Test 3 failed: %s' % diff_entry)
- print '.',
-
- print ' done'