source: dassldapsync/dassldapsync.py

Last change on this file was 1272, checked in by joergs, on Apr 21, 2023 at 2:35:27 PM

bugfix

  • Property svn:executable set to *
File size: 23.9 KB
#!/usr/bin/python
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4

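# dassldapsync: synchronize the content of two LDAP servers.
# The source can be an LDIF file, a plain LDAP search, or a syncrepl
# (refreshAndPersist) connection; behaviour is driven by a configuration
# file (default: /etc/dassldapsync.conf).
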
from __future__ import print_function

# core modules
import argparse
try:
    from configparser import ConfigParser
except ImportError:
    from ConfigParser import ConfigParser
import logging
from pprint import pprint
import signal
import subprocess
import sys
import time

# external modules
import datetime
import dateutil.parser
import dateutil.tz
import ldap
import ldap.cidict
from ldap.ldapobject import ReconnectLDAPObject
import ldap.modlist
from ldap.syncrepl import SyncreplConsumer
from ldapurl import LDAPUrl
import ldif


def getArguments():
    configfile = '/etc/dassldapsync.conf'
    parser = argparse.ArgumentParser(description='Synchronize the content of two LDAP servers.')
    parser.add_argument('-d', '--debug', action='store_true', help="enable debug output")
    parser.add_argument('-n', '--dry-run', action='store_true', dest='dryrun', help="dry run")
    parser.add_argument('configfile', nargs='?', default=configfile,
                        help="Configuration file [default: {}]".format(configfile))
    return parser.parse_args()


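# Defaults for the runtime options; most of them are overridden from the
# [source] and [destination] sections of the configuration file in __main__.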
class Options(object):
    def __init__(self):
        self.create = False
        self.delete = False
        self.starttls = False
        self.updateonly = False
        self.filter = None
        self.attrlist = None
        self.exclude = None
        self.renameattr = None
        self.renamecommand = None
        self.pwd_max_days = 0

def readLDIFSource(path):
    logger = logging.getLogger()
    logger.info("reading LDAP objects from file {}".format(path))
    with open(path, 'r') as f:
        parser = ldif.LDIFRecordList(f)
        parser.parse()
        result = parser.all_records
    return result

def readLdapSource(server, binddn, bindpw, basedn, filterstr, attrlist=None, starttls=False):
    logger = logging.getLogger()
    logger.info("reading LDAP objects from server {}".format(server))
    ldapurl = LDAPUrl(hostport="{}:389".format(server))
    con = ldap.initialize(ldapurl.initializeUrl())
    if starttls:
        con.start_tls_s()
    con.simple_bind_s(binddn, bindpw)
    results = con.search_s(basedn, ldap.SCOPE_SUBTREE, filterstr, attrlist)
    return results

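# LdapSync reads a complete set of source entries, adapts their DNs and
# attributes to the destination base DN, and then adds, modifies, and
# (optionally) deletes entries on the destination server to match.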
class LdapSync(object):
    def __init__(self, destserver,
                 destbinddn, destbindpw,
                 srcbasedn, destbasedn, options=Options()):
        self.logger = logging.getLogger()

        self.destserver = destserver
        self.destbasedn = destbasedn
        self.destbinddn = destbinddn
        self.destbindpw = destbindpw
        self.options = options

        self.srcbasedn = srcbasedn

        self.con = None

        self.attrmap = ldap.cidict.cidict({})
        self.classmap = {}

        #self.junk_objectclasses = [ b"sambaidmapentry" ]
        #"sambasid",
        self.junk_objectclasses = []
        self.junk_attrs = ["authzto",
                           "creatorsname", "createtimestamp", "contextcsn",
                           "entrycsn", "entryuuid",
                           "memberof", "modifiersname", "modifytimestamp",
                           "pwdaccountlockedtime", "pwdchangedtime", "pwdfailuretime",
                           "structuralobjectclass"]

        self.reset_result()


    def reset_result(self):
        self.result = {
            'excluded': {'ok': [], 'failed': []},
            'unmodified': {'ok': [], 'failed': []},
            'add': {'ok': [], 'failed': []},
            'update': {'ok': [], 'failed': []},
            'delete': {'ok': [], 'failed': []},
        }


    def _dest_ldap_connect(self):
        if self.con is None:
            self.logger.info("connect to destination LDAP server {}".format(self.destserver))
            ldapurl = LDAPUrl(hostport="{}:389".format(self.destserver))
            self.con = ldap.initialize(ldapurl.initializeUrl())
            if self.options.starttls:
                self.con.start_tls_s()
            self.con.simple_bind_s(self.destbinddn, self.destbindpw)

    def __adapt_dn(self, dn):
        # move LDAP object to dest base
        if self.srcbasedn != self.destbasedn:
            dn_old = dn
            rpath = dn[:-len(self.srcbasedn)]
            dn = rpath + self.destbasedn
            self.logger.debug("moved {} to {}".format(dn_old, dn))
            # print "dn:", dn, "src:", srcbasedn, "rpath:", rpath, "dest:", destbasedn
        return dn

    def __is_dn_included(self, dn):
        if self.options.exclude is None:
            return True
        if dn.lower().endswith(self.options.exclude):
            return False
        return True

    def __adapt_source_ldap_objects(self, searchresult):
        """
        Apply the configured modifications to the source LDAP objects.
        """
        self.logger.debug("modifying LDAP objects retrieved from source LDAP")

        update_objects = []

        for r in searchresult:
            dn = self.__adapt_dn(r[0])
            d = ldap.cidict.cidict(r[1])

            if not self.__is_dn_included(dn):
                self.notify_excluded(dn)
            else:
                objectclasses = d["objectclass"]

                newObjectclasses = []
                for o in objectclasses:
                    if o.lower() in self.classmap:
                        new_oc = self.classmap[o.lower()]
                        if new_oc not in newObjectclasses:
                            newObjectclasses.append(new_oc)
                    else:
                        if o not in newObjectclasses:
                            newObjectclasses.append(o)

                d["objectclass"] = newObjectclasses

                for a in list(d.keys()):
                    attr = a
                    if a.lower() in self.attrmap:
                        attr = self.attrmap[attr].lower()
                    if attr.lower() != a.lower():
                        values = d[a]
                        del d[a]
                        d[attr] = values

                update_objects.append((dn, d))
        return update_objects


    def _get_dest_entry(self, dn, entry):
        """
        In the destination LDAP, the objects should be named
        according to options.renameattr.
        """
        attrlist = self.options.attrlist

        existingDestDn = None
        existingDestEntry = None
        if self.options.renameattr and self.options.renameattr in entry:
            searchresult = self.con.search_s(
                self.destbasedn,
                ldap.SCOPE_SUBTREE,
                '%s=%s' % (self.options.renameattr, entry[self.options.renameattr][0]), attrlist)
            if searchresult is not None and len(searchresult) > 0:
                existingDestDn, existingDestEntry = searchresult[0]
                if existingDestDn.lower() != dn.lower():
                    if not self.options.dryrun:
                        self.con.modrdn_s(existingDestDn, dn)
                    self.notify_renamed(existingDestDn, dn,
                                        existingDestEntry[self.options.renameattr][0],
                                        entry[self.options.renameattr][0],
                                        self.options)
        if existingDestDn is None:
            searchresult = self.con.search_s(dn, ldap.SCOPE_BASE, 'objectclass=*', attrlist)
            existingDestDn, existingDestEntry = searchresult[0]
        return (existingDestDn, existingDestEntry)


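    # The pwd_max_days option implements a lock/unlock hack based on the
    # password policy (ppolicy) attributes: if the source entry's
    # pwdChangedTime is older than pwd_max_days, the destination account is
    # locked by setting pwdAccountLockedTime to 000001010000Z; otherwise an
    # existing lock on the destination is removed again.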
    def __handle_pwdAccountLockedTime(self, dn, entry, now, max_age):
        # hack for syncing accounts locked by password policy
        do_unlock = False
        if self.options.pwd_max_days > 0 and 'pwdChangedTime' in entry:
            # print "pwdChangedTime set for", dn
            pwdChange = entry['pwdChangedTime'][0]
            d = dateutil.parser.parse(pwdChange)
            if (now - d) > max_age:
                entry['pwdAccountLockedTime'] = ['000001010000Z']
                self.logger.info("locking {} {}".format(dn, pwdChange))
            else:
                # pwdAccountLockedTime is an operational attribute,
                # and therefore not part of entry.
                # Do an extra search to retrieve the attribute.
                searchresult = self.con.search_s(
                    dn, ldap.SCOPE_BASE,
                    "objectclass=*", attrlist=['pwdAccountLockedTime'])
                tmp_dn, tmp_entry = searchresult[0]
                if 'pwdAccountLockedTime' in tmp_entry:
                    do_unlock = True
        return do_unlock


    def _syncLdapObject(self, srcDn, srcAttributes):
        tzutc = dateutil.tz.gettz('UTC')
        now = datetime.datetime.now(tzutc)
        max_age = datetime.timedelta(days=self.options.pwd_max_days)

        objectClasses = srcAttributes['objectClass']
        srcAttributes['objectClass'] = [oc for oc in objectClasses if oc.lower() not in self.junk_objectclasses]

        try:
            destDn, destAttributes = self._get_dest_entry(srcDn, srcAttributes)

            # hack for syncing accounts locked by password policy
            do_unlock = self.__handle_pwdAccountLockedTime(srcDn, srcAttributes, now, max_age)

            mod_attrs = ldap.modlist.modifyModlist(destAttributes, srcAttributes)

            # hack for unlocking, see above
            if do_unlock:
                self.logger.info("unlocking {} {}".format(destDn, 'pwdAccountLockedTime'))
                mod_attrs.append((ldap.MOD_DELETE, 'pwdAccountLockedTime', None))

            if self.options.attrlist is not None:
                mod_attrs = [a for a in mod_attrs if a[1].lower() in self.options.attrlist]

            if self.junk_attrs is not None:
                mod_attrs = [a for a in mod_attrs if a[1].lower() not in self.junk_attrs]

            if mod_attrs:
                try:
                    self.logger.debug('mod_attrs: ' + str(mod_attrs))
                    if not self.options.dryrun:
                        self.con.modify_s(srcDn, mod_attrs)
                    self.notify_modified(srcDn)
                except Exception:
                    self.logger.exception('modify failed')
                    self.notify_modified(srcDn, False)
            else:
                self.notify_unchanged(srcDn)

        except ldap.NO_SUCH_OBJECT:
            if self.options.create:
                try:
                    entry = ldap.modlist.addModlist(srcAttributes, self.junk_attrs)
                    if not self.options.dryrun:
                        self.con.add_s(srcDn, entry)
                    self.notify_created(srcDn)
                except (ldap.OBJECT_CLASS_VIOLATION,
                        ldap.NO_SUCH_OBJECT,
                        ldap.CONSTRAINT_VIOLATION) as e:
                    #print(e)
                    self.notify_created(srcDn, False)


    def __syncLdapDestination(self, update_objects):
        self.logger.debug("writing data to destination LDAP")
        for obj in update_objects:
            dn, entry = obj
            self._syncLdapObject(dn, entry)


    def __deleteDestLdapObjects(self, update_objects):
        """
        Remove all LDAP objects in the destination LDAP server
        that did not come from the source LDAP objects
        and are not excluded.
        """

        searchresult = self.con.search_s(self.destbasedn, ldap.SCOPE_SUBTREE, self.options.filter)
        existing = [x[0].lower() for x in searchresult]

        morituri = existing

        if self.destbasedn.lower() in existing:
            morituri.remove(self.destbasedn.lower())

        for obj in update_objects:
            dn, entry = obj
            if dn.lower() in existing:
                morituri.remove(dn.lower())
        for dn in morituri:
            if self.__is_dn_included(dn):
                try:
                    if not self.options.dryrun:
                        self.con.delete_s(dn)
                    self.notify_deleted(dn)
                except Exception:
                    self.notify_deleted(dn, False)


    def sync(self, searchresult):
        """
        Synchronize entries from searchresult to the destination LDAP server.
        """
        if len(searchresult) == 0:
            self.logger.error("empty source, aborting")
            return

        self._dest_ldap_connect()

        update_objects = self.__adapt_source_ldap_objects(searchresult)
        self.__syncLdapDestination(update_objects)
        if self.options.delete:
            self.__deleteDestLdapObjects(update_objects)
        self.con.unbind()

        self.__log_summary(True)


    def __log_summary(self, show_failed=True, show_ok=False):
        result = self.result
        for action in result.keys():
            ok = len(result[action]['ok'])
            failed = len(result[action]['failed'])
            print("{} (ok: {}, failed: {})".format(action, ok, failed))

            if ok > 0 and (show_ok or ok <= 3):
                print("succeeded:")
                print("\n".join(result[action]['ok']))

            if failed > 0 and (show_failed or failed <= 3):
                print("failed:")
                print("\n".join(result[action]['failed']))
            print()

    def get_short_dn(self, dn):
        return dn.lower().replace(',' + self.srcbasedn.lower(), '')

    def notify_unchanged(self, dn):
        #self.logger.debug(u'{} unchanged'.format(self.get_short_dn(dn)))
        self.result['unmodified']['ok'].append(dn)

    def notify_excluded(self, dn):
        #self.logger.debug(u'{} excluded'.format(self.get_short_dn(dn)))
        self.result['excluded']['ok'].append(dn)

    def notify_created(self, dn, ok=True):
        if ok:
            self.logger.debug(u'{} created'.format(self.get_short_dn(dn)))
            self.result['add']['ok'].append(dn)
        else:
            self.logger.warning(u"failed to add {}".format(dn))
            self.result['add']['failed'].append(dn)

    def notify_modified(self, dn, ok=True):
        if ok:
            self.logger.debug(u'{} modified'.format(self.get_short_dn(dn)))
            self.result['update']['ok'].append(dn)
        else:
            self.logger.error(u"failed to modify {}".format(dn))
            self.result['update']['failed'].append(dn)

    def notify_deleted(self, dn, ok=True):
        if ok:
            self.logger.debug(u'{} deleted'.format(self.get_short_dn(dn)))
            self.result['delete']['ok'].append(dn)
        else:
            self.logger.error(u"failed to delete {}".format(dn))
            self.result['delete']['failed'].append(dn)

    def notify_renamed(self, dn, newdn, uid, newuid, options):
        print(u"renamed {} -> {}".format(dn, newdn))
        subprocess.check_call(
            "%s %s %s %s %s" % (options.renamecommand, dn, newdn, uid, newuid),
            shell=True)


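# SyncReplConsumer keeps a persistent syncrepl connection to the source
# server; after the initial refresh phase (cookie set), every changed entry
# is handed to the syncrepl_entry_callback for synchronization.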
class SyncReplConsumer(ReconnectLDAPObject, SyncreplConsumer):
    """
    Syncrepl Consumer interface
    """

    def __init__(self, dest, syncrepl_entry_callback, *args, **kwargs):
        self.logger = logging.getLogger()
        # Initialise the LDAP connection first
        ldap.ldapobject.ReconnectLDAPObject.__init__(self, *args, **kwargs)
        # We need this for later internal use
        self.__presentUUIDs = dict()
        self.cookie = None
        self.dest_ldap = dest
        self.syncrepl_entry_callback = syncrepl_entry_callback

    def syncrepl_get_cookie(self):
        return self.cookie

    def syncrepl_set_cookie(self, cookie):
        self.cookie = cookie

    def syncrepl_entry(self, dn, attributes, uuid):
        # First we determine the type of change we have here
        # (and store away the previous data for later if needed)
        if uuid in self.__presentUUIDs:
            change_type = 'modify'
        else:
            change_type = 'add'
        # Now we store our knowledge of the existence of this entry
        self.__presentUUIDs[uuid] = dn
        # Debugging
        self.logger.debug('{}: {} ({})'.format(dn, change_type, ",".join(attributes.keys())))
        # If we have a cookie, then this is not our first run,
        # so it must be a change
        if self.cookie is not None:
            self.syncrepl_entry_callback(dn, attributes)


    def syncrepl_delete(self, uuids):
        """ syncrepl_delete """
        # Make sure we know about the UUIDs being deleted, just in case...
        uuids = [uuid for uuid in uuids if uuid in self.__presentUUIDs]
        # Delete all the UUID values we know of
        for uuid in uuids:
            self.logger.debug('detected deletion of entry {} ({})'.format(uuid, self.__presentUUIDs[uuid]))
            del self.__presentUUIDs[uuid]

    def syncrepl_present(self, uuids, refreshDeletes=False):
        """ called on initial sync """
        if uuids is not None:
            self.logger.debug('uuids: {}'.format(','.join(uuids)))
        # If we have not been given any UUID values,
        # then we have received all the present controls...
        if uuids is None:
            # We only do things if refreshDeletes is false, as the syncrepl
            # extension will call syncrepl_delete instead when it detects a
            # delete notice
            if not refreshDeletes:
                deletedEntries = [
                    uuid for uuid in self.__presentUUIDs
                ]
                self.syncrepl_delete(deletedEntries)
            # Phase is now completed, reset the list
            self.__presentUUIDs = {}
        else:
            # Note down all the UUIDs we have been sent
            for uuid in uuids:
                self.__presentUUIDs[uuid] = True


    def syncrepl_refreshdone(self):
        self.logger.info('Initial synchronization is now done, persist phase begins')
        #self.logger.debug('UUIDs:\n' + '\n'.join(self.__presentUUIDs))


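# LdapSyncRepl keeps running: it connects to the source server in syncrepl
# refreshAndPersist mode and applies every reported change to the
# destination server until it is stopped (SIGTERM or Ctrl-C).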
class LdapSyncRepl(LdapSync):
    def __init__(self, destsrv,
                 destadmindn, destadminpw,
                 basedn, destbasedn,
                 options=Options(), source_ldap_url_obj=None):
        # Install our signal handlers
        signal.signal(signal.SIGTERM, self.shutdown)
        self.watcher_running = False
        self.source_ldap_url_obj = source_ldap_url_obj
        self.ldap_credentials = False
        self.source_ldap_connection = None
        super(LdapSyncRepl, self).__init__(destsrv,
                                           destadmindn, destadminpw,
                                           basedn, destbasedn, options)


    def sync(self):
        self._dest_ldap_connect()
        self.watcher_running = True
        while self.watcher_running:
            self.logger.info('Connecting to source LDAP server')
            # Prepare the LDAP server connection (triggers the connection as well)
            self.source_ldap_connection = SyncReplConsumer(self.con,
                                                           self.perform_application_sync_callback,
                                                           self.source_ldap_url_obj.initializeUrl())

            if self.source_ldap_url_obj.who and self.source_ldap_url_obj.cred:
                self.ldap_credentials = True
                # Now we log in to the LDAP server
                try:
                    self.source_ldap_connection.simple_bind_s(
                        self.source_ldap_url_obj.who, self.source_ldap_url_obj.cred)
                except ldap.INVALID_CREDENTIALS as e:
                    print('Login to LDAP server failed: ', str(e))
                    sys.exit(1)
                except ldap.SERVER_DOWN:
                    print('LDAP server is down, going to retry.')
                    time.sleep(5)
                    continue

            # Commence the syncing
            self.logger.info('Starting sync process')
            ldap_search = self.source_ldap_connection.syncrepl_search(
                self.source_ldap_url_obj.dn or '',
                self.source_ldap_url_obj.scope or ldap.SCOPE_SUBTREE,
                mode='refreshAndPersist',
                attrlist=self.source_ldap_url_obj.attrs,
                filterstr=self.source_ldap_url_obj.filterstr or '(objectClass=*)'
            )

            try:
                while self.source_ldap_connection.syncrepl_poll(all=1, msgid=ldap_search):
                    print(".", end="")
            except KeyboardInterrupt:
                # User asked to exit
                print("aborted\n")
                self.shutdown(None, None)
            except Exception as e:
                # Handle any exception
                if self.watcher_running:
                    self.logger.exception('Encountered a problem, going to retry.')
                    time.sleep(5)

    def perform_application_sync_callback(self, dn, attributes):
        self.logger.debug('{}: src: {}'.format(dn, str(attributes)))
        try:
            self._syncLdapObject(dn, attributes)
        except ldap.NO_SUCH_OBJECT:
            self.logger.info("SKIPPED: {} object does not exist on target".format(dn))
            return False
        return True

    def shutdown(self, signum, stack):
        self.logger.info('Shutting down!')

        # We are no longer running
        self.watcher_running = False

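# Build an LDAPUrl describing the [source] section; used in syncrepl mode.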
def get_ldap_url_obj(configsection):
    baseurl = 'ldap://{server}:389/{basedn}'.format(server=configsection.get('server'), basedn=configsection.get('basedn'))
    attrs = None
    if configsection.get('attributes') is not None:
        attrs = configsection.get('attributes').split(',')
    return LDAPUrl(
        baseurl,
        dn=configsection.get('baseDn'),
        who=configsection.get('bindDn'),
        cred=configsection.get('basePassword'),
        filterstr=configsection.get('filter'),
        attrs=attrs
    )


if __name__ == "__main__":
    logging.basicConfig(format='%(levelname)s %(module)s.%(funcName)s: %(message)s', level=logging.INFO)
    logger = logging.getLogger()

    args = getArguments()
    if args.debug:
        logger.setLevel(logging.DEBUG)
    conffile = args.configfile

    config = ConfigParser()
    config.read(conffile)

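    # The configuration file provides a [source] and a [destination] section.
    # The keys below are the ones read by this script; the values shown here
    # are placeholders only:
    #
    #   [source]
    #   server = ldap1.example.com        ; or: file = /path/to/export.ldif
    #   bindDn = cn=admin,dc=example,dc=com
    #   bindPassword = secret
    #   baseDn = dc=example,dc=com
    #   filter = (objectClass=*)
    #   attributes = uid,cn,mail          ; optional
    #   starttls = false
    #   mode = syncrepl                   ; optional, enables LdapSyncRepl
    #   pwd_max_days = 0
    #
    #   [destination]
    #   server = ldap2.example.com
    #   bindDn = cn=admin,dc=example,dc=com
    #   bindPassword = secret
    #   baseDn = dc=example,dc=com
    #   create = true
    #   delete = false
    #   starttls = false
    #   excludesubtree = ou=local,dc=example,dc=com   ; optional
    #   detectRename = uid                            ; optional
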
    srcfile = None
    try:
        srcfile = config.get("source", "file")
    except Exception:
        pass

    basedn = config.get("source", "baseDn")
    filterstr = config.get("source", "filter", fallback=None)

    if srcfile is None:
        srv = config.get("source", "server")
        admindn = config.get("source", "bindDn")
        adminpw = config.get("source", "bindPassword")
        starttls = config.getboolean("source", "starttls")

    destsrv = config.get("destination", "server")
    destadmindn = config.get("destination", "bindDn")
    destadminpw = config.get("destination", "bindPassword")
    destbasedn = config.get("destination", "baseDn")
    try:
        rdn = config.get("destination", "rdn")
        logger.warning("setting rdn is currently ignored")
    except Exception:
        pass

    options = Options()
    try:
        options.exclude = config.get("destination", "excludesubtree").lower()
    except Exception:
        pass

    options.dryrun = args.dryrun
    options.create = config.getboolean("destination", "create", fallback=False)
    options.delete = config.getboolean("destination", "delete", fallback=False)
    options.starttls = config.getboolean("destination", "starttls", fallback=False)
    options.renameattr = config.get("destination", "detectRename", fallback=None)
    options.renamecommand = config.get("destination", "detectRename", fallback=None)
    options.pwd_max_days = int(config.get("source", "pwd_max_days", fallback=0))
    options.filter = filterstr

    # Set source.attrlist as a global option.
    # If the source used fewer attributes than the destination,
    # all attributes not retrieved from the source would be deleted from the destination.
    try:
        options.attrlist = config.get("source", "attributes").split(",")
    except Exception:
        options.attrlist = None

    if config.get('source', 'mode', fallback=None) == 'syncrepl':
        ldapsync = LdapSyncRepl(
            destsrv, destadmindn, destadminpw, basedn, destbasedn,
            options,
            source_ldap_url_obj=get_ldap_url_obj(config['source']))
        ldapsync.sync()
    else:
        if srcfile:
            objects = readLDIFSource(srcfile)
        else:
            objects = readLdapSource(srv, admindn, adminpw,
                                     basedn, filterstr, options.attrlist, starttls)

        ldapsync = LdapSync(destsrv, destadmindn, destadminpw, basedn, destbasedn, options)
        ldapsync.sync(objects)