def init_person_entitlements(self):
    """Populate dicts with a person's entitlement information.

    Loads a pickled person-id -> entitlements mapping from the LDAP dump
    directory into ``self.person2entitlements``.
    """
    timer = make_timer(self.logger, 'Processing person entitlements...')
    filename = os.path.join(
        ldapconf(None, 'dump_dir'),
        ldapconf('PERSON', 'entitlements_pickle_file'))
    # The original used the removed py2 builtin ``file()`` and never closed
    # the handle; open in binary mode (required by pickle) and let the
    # context manager close it.
    with open(filename, 'rb') as stream:
        self.person2entitlements = pickle.load(stream)
    timer("...person entitlements done.")
def init_person_entitlements(self):
    """Populate dicts with a person's entitlement information.

    Reads the pickled entitlement map (person-id -> entitlement list)
    from the configured dump directory.
    """
    timer = make_timer(self.logger, 'Processing person entitlements...')
    pickle_path = os.path.join(
        ldapconf(None, 'dump_dir'),
        ldapconf('PERSON', 'entitlements_pickle_file'))
    # Replace the py2-only ``file()`` call (which also leaked the handle)
    # with a context-managed binary open.
    with open(pickle_path, 'rb') as fp:
        self.person2entitlements = pickle.load(fp)
    timer("...person entitlements done.")
def generate_automount(f):
    """Write automount master/map LDIF entries for all populated disks.

    :param f: writable LDIF output stream.
    """
    db = Factory.get('Database')()
    d = Factory.get('Disk')(db)
    h = Factory.get('Host')(db)
    # (Removed unused Constants factory lookup.)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        if disk['host_id'] not in hosts:
            hosts.append(disk['host_id'])
    h_id2name = {}
    # TBD: any point in filtering? does it just consume more resources than
    # listing all hosts?
    for host in h.search(host_id=hosts):
        h_id2name[host['host_id']] = host['name']
    # Map (top dir, sub dir) -> host_id of the first disk seen for that pair.
    paths = {}
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        path = disk['path'].split('/')
        # Membership test on the dict itself instead of .keys().
        if (path[1], path[2]) not in paths:
            paths[(path[1], path[2])] = disk['host_id']
    f.write(container_entry_string('AUTOMOUNT_MASTER'))
    for path in paths:
        entry = {'objectClass': ['top', 'automount']}
        dn = "cn=%s,%s" % ("/%s/%s" % (path[0], path[1]),
                           ldapconf('AUTOMOUNT_MASTER', 'dn', None))
        entry['automountInformation'] = "ldap:ou=auto.%s-%s,%s" % (
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))

        entry = {'objectClass': ['top', 'automountMap']}
        dn = "ou=auto.%s-%s,%s" % (path[1], path[0],
                                   ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))

        entry = {'objectClass': ['top', 'automount']}
        dn = "cn=/,ou=auto.%s-%s,%s" % (path[1], path[0],
                                        ldapconf('AUTOMOUNT', 'dn', None))
        # Ifi disks live in their own DNS domain.
        dns = 'uio.no'
        if path[0] == 'ifi':
            dns = 'ifi.uio.no'
        entry['automountInformation'] = (
            "-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,nosuid,noacl "
            "%s.%s:/%s/%s/&" % (
                h_id2name[paths[path]], dns, path[0], path[1]))
        f.write(entry_string(dn, entry))
def generate_automount(f):
    """Write automount master/map LDIF entries for all populated disks.

    :param f: writable LDIF output stream.
    """
    db = Factory.get('Database')()
    d = Factory.get('Disk')(db)
    h = Factory.get('Host')(db)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        if disk['host_id'] not in hosts:
            hosts.append(disk['host_id'])
    h_id2name = {}
    for host in h.search(host_id=hosts):
        h_id2name[host['host_id']] = host['name']
    paths = {}
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        path = disk['path'].split('/')
        # Test membership on the dict directly; ``in paths.keys()`` built
        # a key view/list per iteration for no benefit.
        if (path[1], path[2]) not in paths:
            paths[(path[1], path[2])] = disk['host_id']
    f.write(container_entry_string('AUTOMOUNT_MASTER'))
    for path in paths:
        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn={},{}".format(
            "/{}/{}".format(path[0], path[1]),
            ldapconf('AUTOMOUNT_MASTER', 'dn', None))
        entry['automountInformation'] = "ldap:ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))

        entry = {}
        entry['objectClass'] = ['top', 'automountMap']
        dn = "ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))

        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn=/,ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        # Ifi disks live in their own DNS domain.
        dns = 'uio.no'
        if path[0] == 'ifi':
            dns = 'ifi.uio.no'
        automount_opts = ("-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,"
                          "nosuid,noacl {}.{}:/{}/{}/&")
        entry['automountInformation'] = automount_opts.format(
            h_id2name[paths[path]], dns, path[0], path[1])
        f.write(entry_string(dn, entry))
def generate_automount(f):
    """Write automount master/map LDIF entries for all populated disks.

    :param f: writable LDIF output stream.
    """
    db = Factory.get("Database")()
    d = Factory.get("Disk")(db)
    h = Factory.get("Host")(db)
    # (Removed unused Constants factory lookup.)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk["count"] <= 0:
            # Skip disks with no users
            continue
        if disk["host_id"] not in hosts:
            hosts.append(disk["host_id"])
    h_id2name = {}
    # TBD: any point in filtering? does it just consume more resources than
    # listing all hosts?
    for host in h.search(host_id=hosts):
        h_id2name[host["host_id"]] = host["name"]
    paths = {}
    for disk in disks:
        if disk["count"] <= 0:
            # Skip disks with no users
            continue
        path = disk["path"].split("/")
        # Dict membership test instead of building .keys() per iteration.
        if (path[1], path[2]) not in paths:
            paths[(path[1], path[2])] = disk["host_id"]
    f.write(container_entry_string("AUTOMOUNT_MASTER"))
    for path in paths:
        entry = {"objectClass": ["top", "automount"]}
        dn = "cn=%s,%s" % ("/%s/%s" % (path[0], path[1]),
                           ldapconf("AUTOMOUNT_MASTER", "dn", None))
        entry["automountInformation"] = "ldap:ou=auto.%s-%s,%s" % (
            path[1], path[0], ldapconf("AUTOMOUNT", "dn", None))
        f.write(entry_string(dn, entry))

        entry = {"objectClass": ["top", "automountMap"]}
        dn = "ou=auto.%s-%s,%s" % (path[1], path[0],
                                   ldapconf("AUTOMOUNT", "dn", None))
        f.write(entry_string(dn, entry))

        entry = {"objectClass": ["top", "automount"]}
        dn = "cn=/,ou=auto.%s-%s,%s" % (path[1], path[0],
                                        ldapconf("AUTOMOUNT", "dn", None))
        # Ifi disks live in their own DNS domain.
        dns = "uio.no"
        if path[0] == "ifi":
            dns = "ifi.uio.no"
        entry["automountInformation"] = (
            "-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,nosuid,noacl "
            "%s.%s:/%s/%s/&" % (
                h_id2name[paths[path]], dns, path[0], path[1]))
        f.write(entry_string(dn, entry))
def init_person_entitlements(self):
    """Populate dicts with a person's entitlement information."""
    timer = make_timer(self.logger, 'Processing person entitlements...')
    filename = os.path.join(ldapconf(None, 'dump_dir'),
                            ldapconf('PERSON', 'entitlements_file'))
    with io.open(filename, encoding='utf-8') as fp:
        raw = json.loads(fp.read())
    # JSON object keys are always strings; map them back to int person-ids.
    self.person2entitlements = dict(
        (int(person_id), values) for person_id, values in raw.items())
    timer("...person entitlements done.")
def yield_users(self):
    """ Yield all users qualified for export to LDAP. """

    def _as_tuple(value):
        # entry values must be sequences; wrap bare scalars.
        if isinstance(value, (list, set, tuple)):
            return value
        return (value, )

    optional_keys = ("cn", "sn", "givenName", "userPassword", "uioMemberOf")
    for user_id, attrs in self.users.items():
        uname = attrs["uname"]
        entry = {
            "dn": (self._uname2dn(uname), ),
            "uid": (uname, ),
            "eduPersonPrincipalName": (uname, ),
            "mail": (attrs["mail"], ),
            "objectClass": ldapconf("USER", "objectClass"),
        }
        for key in optional_keys:
            if key in attrs:
                entry[key] = _as_tuple(attrs[key])
        yield entry
def generate_voip_clients(sink, addr_id2dn, *args):
    """Write sipClient LDIF entries to ``sink``.

    ``addr_id2dn`` maps voip_address entity ids to DNs; clients whose
    address is not cached are skipped with a debug message.
    """
    client = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string('VOIP_CLIENT'))
    softphone = text_type(const.voip_client_type_softphone)
    hardphone = text_type(const.voip_client_type_hardphone)
    for entry in client.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            logger.debug("voip client %s refers to voip_address %s, but the "
                         "latter is not in the cache. Has %s been recently "
                         "created?",
                         repr(entry), voip_address_id, voip_address_id)
            continue
        entry['objectClass'] = ['top', 'sipClient']
        entry['sipVoipAddressDN'] = addr_id2dn[voip_address_id]
        client_type = entry["sipClientType"]
        if client_type == softphone:
            attr = "uid"
            assert attr in entry
        elif client_type == hardphone:
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        client_type, repr(entry))
            continue
        dn = "{}={},{}".format(attr, entry[attr],
                               ldapconf('VOIP_CLIENT', 'dn', None))
        sink.write(entry_string(dn, entry))
def main():
    """Parse arguments and dump mail data to an LDIF file."""
    global verbose, f, db, co, ldap, auth, start
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', "--verbose", action="count", default=0)
    parser.add_argument('-m', "--mail-file")
    parser.add_argument('-s', "--spread",
                        default=ldapconf('MAIL', 'spread', None))
    parser.add_argument('-i', "--ignore-size", dest="max_change",
                        action="store_const", const=100)
    parser.add_argument('-a', "--no-auth-data", dest="auth",
                        action="store_false", default=True)
    args = parser.parse_args()

    verbose = args.verbose
    auth = args.auth
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    start = now()

    load_start = now()
    if verbose:
        logger.debug("Loading the EmailLDAP module...")
    ldap = Factory.get('EmailLDAP')(db)
    if verbose:
        logger.debug(" done in %d sec." % (now() - load_start))

    spread = args.spread
    if spread is not None:
        spread = map_spreads(spread, int)

    f = ldif_outfile('MAIL', args.mail_file, max_change=args.max_change)
    get_data(spread)
    end_ldif_outfile('MAIL', f)
def person_authn_selection(self):
    u""" Returns norEduPersonAuthnMethod_selector with constants.

    Returns the LDAP_PERSON[norEduPersonAuthnMethod_selector] setting
    with all strings replaced with their corresponding constant.
    """
    # Cached on first access.
    if not hasattr(self, '_person_authn_selection'):
        self._person_authn_selection = dict()

        def get_const(name, cls):
            # Resolve a human-readable name to a constant; warn on miss.
            constant = self.const.human2constant(name, cls)
            if not constant:
                self.logger.warn(
                    "LDAP_PERSON[norEduPersonAuthnMethod_selector]: "
                    "Unknown %s %r", cls.__name__, name)
            return constant

        # ``items()`` instead of the py2-only ``iteritems()``; iteration
        # behaviour is identical and this also works on py3.
        for aff, selections in ldapconf(
                'PERSON', 'norEduPersonAuthnMethod_selector', {}).items():
            aff = get_const(aff, self.const.PersonAffiliation)
            if not aff:
                continue
            for system, c_type in selections:
                system = get_const(system, self.const.AuthoritativeSystem)
                c_type = get_const(c_type, self.const.ContactInfo)
                if (not system) or (not c_type):
                    continue
                self._person_authn_selection.setdefault(aff, []).append(
                    (system, c_type))
    return self._person_authn_selection
def generate_voip_clients(sink, addr_id2dn, encoding, *args):
    """Write sipClient LDIF entries (encoded per ``encoding``) to ``sink``."""
    db = Factory.get("Database")()
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string("VOIP_CLIENT"))
    for entry in vc.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            # Address not cached -- most likely created very recently.
            logger.debug(
                "voip client %s refers to voip_address %s, but the "
                "latter is not in the cache. Has %s been recently "
                "created?",
                repr(entry), voip_address_id, voip_address_id,
            )
            continue
        entry["objectClass"] = ["top", "sipClient"]
        entry["sipVoipAddressDN"] = addr_id2dn[voip_address_id]
        kind = entry["sipClientType"]
        if kind == str(const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif kind == str(const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        kind, repr(entry))
            continue
        dn = "%s=%s,%s" % (attr, entry[attr],
                           ldapconf("VOIP_CLIENT", "dn", None))
        sink.write(entry_string(object2encoding(dn, encoding),
                                object2encoding(entry, encoding)))
def person_authn_selection(self):
    u""" Returns norEduPersonAuthnMethod_selector with constants.

    Returns the LDAP_PERSON[norEduPersonAuthnMethod_selector] setting
    with all strings replaced with their corresponding constant.
    """
    if hasattr(self, '_person_authn_selection'):
        # Already resolved once; reuse the cache.
        return self._person_authn_selection

    self._person_authn_selection = dict()

    def get_const(name, cls):
        constant = self.const.human2constant(name, cls)
        if not constant:
            self.logger.warn(
                "LDAP_PERSON[norEduPersonAuthnMethod_selector]: "
                "Unknown %s %r", cls.__name__, name)
        return constant

    selector = ldapconf('PERSON', 'norEduPersonAuthnMethod_selector', {})
    for aff, selections in selector.iteritems():
        aff = get_const(aff, self.const.PersonAffiliation)
        if not aff:
            continue
        for system, c_type in selections:
            system = get_const(system, self.const.AuthoritativeSystem)
            c_type = get_const(c_type, self.const.ContactInfo)
            if (not system) or (not c_type):
                continue
            self._person_authn_selection.setdefault(aff, []).append(
                (system, c_type))
    return self._person_authn_selection
def generate_voip_clients(sink, addr_id2dn, *args):
    """Write sipClient LDIF entries to ``sink``."""
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string('VOIP_CLIENT'))
    for entry in vc.list_voip_attributes(*args):
        address_id = entry.pop("voip_address_id")
        try:
            entry['sipVoipAddressDN'] = addr_id2dn[address_id]
        except KeyError:
            # Address not cached -- most likely created very recently.
            logger.debug(
                "voip client %s refers to voip_address %s, but the "
                "latter is not in the cache. Has %s been recently "
                "created?", repr(entry), address_id, address_id)
            continue
        entry['objectClass'] = ['top', 'sipClient']
        if entry["sipClientType"] == text_type(
                const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif entry["sipClientType"] == text_type(
                const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        entry["sipClientType"], repr(entry))
            continue
        dn = "{}={},{}".format(attr, entry[attr],
                               ldapconf('VOIP_CLIENT', 'dn', None))
        sink.write(entry_string(dn, entry))
def init_person_groups(self):
    """Populate dicts with a person's group information.

    Loads a pickled person-id -> group mapping from the dump directory
    into ``self.person2group``.
    """
    timer = make_timer(self.logger, 'Processing person groups...')
    filename = join_paths(ldapconf(None, 'dump_dir'),
                          "personid2group.pickle")
    # Context-managed binary open instead of the removed py2 builtin
    # ``file()``, which also leaked the handle.
    with open(filename, 'rb') as stream:
        self.person2group = pickle.load(stream)
    timer("...person groups done.")
def init_person_course(self):
    """Populate dicts with a person's course information.

    Loads a pickled owner-id -> urn-list mapping from the dump directory
    into ``self.ownerid2urnlist``.
    """
    timer = make_timer(self.logger, 'Processing person courses...')
    filename = join_paths(ldapconf(None, 'dump_dir'),
                          "ownerid2urnlist.pickle")
    # Context-managed binary open instead of the removed py2 builtin
    # ``file()``, which also leaked the handle.
    with open(filename, 'rb') as stream:
        self.ownerid2urnlist = pickle.load(stream)
    timer("...person courses done.")
def gen_undervisningsaktivitet(cgi, sip, out):
    """Write uioEduSection entries for all undervisningsaktiviteter.

    :param cgi: CerebrumGroupInfo-style lookup helper.
    :param sip: provider of ``undervisningsaktiviteter`` / ``emnekode2info``.
    :param out: writable LDIF stream.
    :return: dict mapping urn -> {entity_id (str): role}.
    """
    timer = make_timer(logger, 'Starting gen_undervisningsaktivitet')
    # uioEduSection - Undervisningsaktivitet (instansiering av gruppe,
    #                 kollokvia, lab, skrivekurs, forelesning)
    # access_FS.py:Undervisning.list_aktiviteter
    #
    # uioEduCourseCode - FS.emne.emnekode
    # uioEduCourseAdministrator - (FS.emne.*_reglement (6 siffer)).
    # uioEduCourseLevel - (FS.emne.studienivakode)
    # uioEduCourseName - (FS.emne.emnenavn_bokmal)
    # uioEduCourseSectionName - (FS.undaktivitet.aktivitetsnavn)
    # uioEduCourseOffering - urn:mace:uit.no:section:<noe>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsaktiviteter:
        try:
            emne = sip.emnekode2info[entry['emnekode']]
        except KeyError:
            logger.warn(
                "Undervisningsaktivitet %s er ikke knyttet til gyldig emne",
                entry['emnekode'])
            continue
        if 'emnenavn_bokmal' not in emne:
            logger.warn("Undervisningsaktivitet %s uten enhet?" % repr(entry))
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.extend((entry['aktivitetkode'], persontype))
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsaktivitet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        # sorted() replaces the py2-only keys()/sort() dance and keeps the
        # urn deterministic regardless of dict ordering.
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uit.no:section:aktivitet-%s' % "_".join(keys)
        out.write(
            entry_string(
                "cn=ua-%i,%s" % (n, top_dn), {
                    'objectClass': ("top", "uioEduSection"),
                    'uioEduCourseCode': (entry['emnekode'], ),
                    'uioEduCourseAdministrator': (emne['sko'], ),
                    'uioEduCourseLevel': (emne['studienivakode'], ),
                    'uioEduCourseName': (emne['emnenavn_bokmal'], ),
                    'uioEduCourseSectionName': (entry['aktivitetsnavn'], ),
                    'uioEduCourseInstitution': (emne['institusjonsnr'], ),
                    'uioEduCourseVersion': (emne['versjonskode'], ),
                    'uioEduCourseSectionCode': (entry['aktivitetkode'], ),
                    'uioEduOfferingTermCode': (entry['terminkode'], ),
                    'uioEduOfferingYear': (entry['arstall'], ),
                    'uioEduOfferingTermNumber': (entry['terminnr'], ),
                    'uioEduCourseOffering': (urn, )
                }))
        n += 1
        ret[urn] = aktivitet_id
    timer('... done gen_undervisningsaktivitet')
    return ret
def write_subnet_ldif():
    """Dump every subnet as an ipNetwork LDIF entry."""
    base_dn = ldapconf('SUBNETS', 'dn')
    start_attr, end_attr, extra_classes = ldapconf('SUBNETS', 'rangeSchema')
    object_classes = ('top', 'ipNetwork') + tuple(extra_classes)
    db = Factory.get('Database')()
    out = ldif_outfile('SUBNETS')
    out.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        cn = "%s/%s" % (row['subnet_ip'], row['subnet_mask'])
        desc = row['description']
        entry = {
            'objectClass': object_classes,
            # Empty tuple when there is no description.
            'description': (iso2utf(desc),) if desc else (),
            'ipNetworkNumber': (row['subnet_ip'],),
            'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']),),
            start_attr: (str(int(row['ip_min'])),),
            end_attr: (str(int(row['ip_max'])),),
        }
        out.write(entry_string("cn=%s,%s" % (cn, base_dn), entry))
    end_ldif_outfile('SUBNETS', out)
def setup_ldif(self):
    """Resolve container DNs and run the per-container setup steps."""
    self.user_dn = ldapconf('USER', 'dn', default=None, module=posixconf)
    self.fgrp_dn = ldapconf('FILEGROUP', 'dn', default=None,
                            module=posixconf)
    self.ngrp_dn = ldapconf('NETGROUP', 'dn', default=None,
                            module=posixconf)
    self.type2groups = (self.netgroups, self.host_netgroups)
    if self.opts.user_spread:
        self.setup_passwd()
    self.setup_filegroup()
    self.setup_netgroup()
def gen_undervisningsaktivitet(cgi, sip, out):
    """Write uioEduSection entries for all undervisningsaktiviteter.

    :param cgi: CerebrumGroupInfo-style lookup helper.
    :param sip: provider of ``undervisningsaktiviteter`` / ``emnekode2info``.
    :param out: writable LDIF stream.
    :return: dict mapping urn -> {entity_id (str): role}.
    """
    # uioEduSection - Undervisningsaktivitet (instansiering av gruppe,
    #                 kollokvia, lab, skrivekurs, forelesning)
    # access_FS.py:Undervisning.list_aktiviteter
    #
    # uioEduCourseCode - FS.emne.emnekode
    # uioEduCourseAdministrator - (FS.emne.*_reglement (6 siffer)).
    # uioEduCourseLevel - (FS.emne.studienivakode)
    # uioEduCourseName - (FS.emne.emnenavn_bokmal)
    # uioEduCourseSectionName - (FS.undaktivitet.aktivitetsnavn)
    # uioEduCourseOffering - urn:mace:uio.no:section:<noe>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsaktiviteter:
        try:
            emne = sip.emnekode2info[entry['emnekode']]
        except KeyError:
            logger.warn(
                "Undervisningsaktivitet %s er ikke knyttet til gyldig emne",
                entry['emnekode'])
            continue
        if 'emnenavn_bokmal' not in emne:
            logger.warn("Undervisningsaktivitet %s uten enhet?" % repr(entry))
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.extend((entry['aktivitetkode'], persontype))
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsaktivitet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        # sorted() replaces the py2-only keys()/sort() dance; dead
        # commented-out filtering code removed.
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uio.no:section:aktivitet-%s' % "_".join(keys)
        out.write(entry_string("cn=ua-%i,%s" % (n, top_dn), {
            'objectClass': ("top", "uioEduSection"),
            'uioEduCourseCode': (iso2utf(entry['emnekode']),),
            'uioEduCourseAdministrator': (iso2utf(emne['sko']),),
            'uioEduCourseLevel': (iso2utf(emne['studienivakode']),),
            'uioEduCourseName': (iso2utf(emne['emnenavn_bokmal']),),
            'uioEduCourseSectionName': (iso2utf(entry['aktivitetsnavn']),),
            'uioEduCourseInstitution': (iso2utf(emne['institusjonsnr']),),
            'uioEduCourseVersion': (iso2utf(emne['versjonskode']),),
            'uioEduCourseSectionCode': (iso2utf(entry['aktivitetkode']),),
            'uioEduOfferingTermCode': (iso2utf(entry['terminkode']),),
            'uioEduOfferingYear': (iso2utf(entry['arstall']),),
            'uioEduOfferingTermNumber': (iso2utf(entry['terminnr']),),
            'uioEduCourseOffering': (iso2utf(urn),)}))
        n += 1
        ret[urn] = aktivitet_id
    return ret
def __init__(self):
    """Cache account/VLAN mappings and configure password exporters."""
    self.user_dn = ldapconf('RADIUS', 'dn', None)
    self.db = Factory.get('Database')()
    self.const = Factory.get('Constants')(self.db)
    self.account = Factory.get('Account')(self.db)
    self.auth = None
    # Iterate spreads in reverse so entries for spreads listed first in
    # the config overwrite (i.e. take precedence over) later ones.
    self.id2vlan_vpn = {}
    radius_cfg = cereconf.LDAP_RADIUS
    for spread_name in reversed(radius_cfg['spreads']):
        vlan_vpn = (radius_cfg['spread2vlan'][spread_name],
                    "OU=%s;" % radius_cfg['spread2vpn'][spread_name])
        spread = self.const.Spread(spread_name)
        for row in self.account.search(spread=spread):
            self.id2vlan_vpn[row['account_id']] = vlan_vpn
    # Configure auth
    auth_attr = ldapconf('RADIUS', 'auth_attr', None)
    self.user_password = AuthExporter.make_exporter(
        self.db, auth_attr['userPassword'])
    self.nt_password = AuthExporter.make_exporter(
        self.db, auth_attr['ntPassword'])
def setup_ldif(self):
    """Resolve container DNs and run the per-container setup steps."""
    # One DN per exported container type, in fixed order.
    containers = ('USER', 'FILEGROUP', 'NETGROUP')
    dns = tuple(ldapconf(name, 'dn', default=None, module=posixconf)
                for name in containers)
    self.user_dn, self.fgrp_dn, self.ngrp_dn = dns
    self.type2groups = (self.netgroups, self.host_netgroups)
    if self.opts.user_spread:
        self.setup_passwd()
    self.setup_filegroup()
    self.setup_netgroup()
def write_subnet_ldif():
    """Dump every subnet as an ipNetwork LDIF entry."""
    base_dn = ldapconf('SUBNETS', 'dn')
    start_attr, end_attr, extra = ldapconf('SUBNETS', 'rangeSchema')
    classes = ('top', 'ipNetwork') + tuple(extra)
    db = Factory.get('Database')()
    out = ldif_outfile('SUBNETS')
    out.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        desc = row['description']
        dn = "cn=%s/%s,%s" % (row['subnet_ip'], row['subnet_mask'], base_dn)
        out.write(
            entry_string(dn, {
                'objectClass': classes,
                # Empty tuple when there is no description.
                'description': (iso2utf(desc), ) if desc else (),
                'ipNetworkNumber': (row['subnet_ip'], ),
                'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']), ),
                start_attr: (str(int(row['ip_min'])), ),
                end_attr: (str(int(row['ip_max'])), ),
            }))
    end_ldif_outfile('SUBNETS', out)
def setup(self, spread, zone):
    """Prepare name caches and collect host netgroups for the spread."""
    self.spread = spread
    self.zone = zone
    self.ngrp_dn = ldapconf('NETGROUP', 'dn', default=None,
                            module=posixconf)
    self._build_entity2name_mapping(self.co.group_namespace)
    self._build_entity2name_mapping(self.co.dns_owner_namespace)
    logger.info('Caching groups with spread=%r', self.spread)
    rows = self.posix_group.search(
        spread=self.spread,
        filter_expired=not self.EMULATE_POSIX_LDIF)
    for row in rows:
        self.host_netgroups[int(row['group_id'])] = row['name']
def generate_voip_addresses(sink, *args):
    """Write voipAddress LDIF entries; return entity_id -> dn mapping."""
    va = VoipAddress(db)
    sink.write(container_entry_string('VOIP_ADDRESS'))
    base_dn = ldapconf('VOIP_ADDRESS', 'dn', None)
    addr_id2dn = {}
    for entry in va.list_voip_attributes(*args):
        entry['objectClass'] = ['top', 'voipAddress']
        dn = "voipOwnerId={},{}".format(entry['voipOwnerId'], base_dn)
        addr_id2dn[entry.pop("entity_id")] = dn
        if not entry.get("cn"):
            # entry_string expects a sequence; normalise to an empty tuple.
            entry["cn"] = ()
        sink.write(entry_string(dn, entry))
    return addr_id2dn
def __init__(self, logger):
    """ Fetches all users and groups with the required spreads to
    qualify for LDAP export.
    """
    self.db = Factory.get("Database")()
    self.const = Factory.get("Constants")(self.db)
    self.logger = logger
    auth_attr = ldapconf('USER', 'auth_attr', {})
    self.user_password = AuthExporter.make_exporter(
        self.db, auth_attr['userPassword'])
    # groups must be populated before users, since the latter relies on
    # the former due to data precaching.
    self.groups = self._load_groups()
    self.users = self._load_users()
def generate_voip_addresses(sink, encoding, *args):
    """Write encoded voipAddress LDIF entries; return entity_id -> dn map."""
    db = Factory.get("Database")()
    va = VoipAddress(db)
    sink.write(container_entry_string("VOIP_ADDRESS"))
    addr_id2dn = {}
    for entry in va.list_voip_attributes(*args):
        entry["objectClass"] = ["top", "voipAddress"]
        dn = "voipOwnerId=%s,%s" % (entry["voipOwnerId"],
                                    ldapconf("VOIP_ADDRESS", "dn", None))
        addr_id2dn[entry.pop("entity_id")] = dn
        entry = object2encoding(entry, encoding)
        if not entry.get("cn"):
            # entry_string expects a sequence; normalise to an empty tuple.
            entry["cn"] = ()
        sink.write(entry_string(object2encoding(dn, encoding), entry))
    return addr_id2dn
def yield_groups(self):
    """Generate group dicts with all LDAP-relevant information."""
    group = Factory.get("Group")(self.db)
    for group_id, info in self.groups.items():
        name = info["name"]
        entry = {
            "dn": (self._gname2dn(name), ),
            "cn": (name, ),
            "objectClass": ldapconf("GROUP", "objectClass"),
            "description": (info["description"], ),
        }
        entry.update(self._get_member_info(group_id, group))
        # Groups without members are not exported.
        if not entry.get("member"):
            continue
        yield entry
def yield_groups(self):
    """Generate group dicts with all LDAP-relevant information."""
    group = Factory.get("Group")(self.db)
    for group_id in self.groups:
        info = self.groups[group_id]
        member_info = self._get_member_info(group_id, group)
        # Skip groups that end up without any members.
        if not member_info.get("member"):
            continue
        name = info["name"]
        entry = {
            "dn": (self._gname2dn(name),),
            "cn": (name,),
            "objectClass": ldapconf("GROUP", "objectClass"),
            "description": (info["description"],),
        }
        entry.update(member_info)
        yield entry
def ldap_connect(self, serv_l=None):
    """Open TLS LDAP connections to each 'server:user' entry in serv_l.

    Successful connections (plus their sync-log file handles) are stored
    in ``self.s_list`` keyed by server name.
    """
    if not serv_l:
        serv_l = cereconf.LDAP['server']
    for server in serv_l:
        try:
            serv, user = [str(y) for y in server.split(':')]
            f_name = cereconf.LDAP[
                'dump_dir'] + '/log/' + serv + '.sync.log'
            try:
                passwd = db._read_password(serv, user)
            except Exception:
                # Narrowed from a bare except; still best-effort.
                logger.warn('No valid password-file for %s!' % serv)
                break
            # Append to an existing log, otherwise create it.  open()
            # replaces the removed py2 builtin ``file()``.
            mode = 'a' if os.path.isfile(f_name) else 'w'
            self.s_list[serv] = [open(f_name, mode)]
            # NOTE(review): the '******' separator looks like a scrubbed
            # bind-DN format string -- verify against the original config.
            user = "******".join((user, ldapconf('ORG', 'dn')))
            con = ldap.open(serv)
            con.protocol_version = ldap.VERSION3
            try:
                if cereconf.TLS_CACERT_FILE is not None:
                    con.OPT_X_TLS_CACERTFILE = cereconf.TLS_CACERT_FILE
            except AttributeError:
                # Setting simply not present in cereconf.
                pass
            try:
                if cereconf.TLS_CACERT_DIR is not None:
                    con.OPT_X_TLS_CACERTDIR = cereconf.TLS_CACERT_DIR
            except AttributeError:
                pass
            l_bind = None
            try:
                con.start_tls_s()
                l_bind = con.simple_bind(user, passwd)
                self.s_list[serv].append(con)
            except Exception:
                logger.warn("Could not open TLS-connection to %s" % serv)
                self.s_list[serv][0].close()
                del self.s_list[serv]
            if l_bind and con:
                logger.info("TLS-connection open to %s" % serv)
        except ldap.LDAPError as e:
            # py3-compatible exception syntax (was ``except X, e``).
            logger.warn(e)
def generate_voip_addresses(sink, encoding, *args):
    """Write encoded voipAddress LDIF entries; return entity_id -> dn map."""
    db = Factory.get("Database")()
    va = VoipAddress(db)
    sink.write(container_entry_string('VOIP_ADDRESS'))
    addr_id2dn = dict()
    for entry in va.list_voip_attributes(*args):
        entity_id = entry.pop("entity_id")
        entry['objectClass'] = ['top', 'voipAddress']
        dn = "voipOwnerId=%s,%s" % (entry['voipOwnerId'],
                                    ldapconf('VOIP_ADDRESS', 'dn', None))
        addr_id2dn[entity_id] = dn
        entry = object2encoding(entry, encoding)
        if not entry.get("cn"):
            # entry_string expects a sequence; normalise to an empty tuple.
            entry["cn"] = ()
        sink.write(entry_string(object2encoding(dn, encoding), entry))
    return addr_id2dn
def gen_undervisningsenhet(cgi, sip, out):
    """Write uioEduOffering entries for all undervisningsenheter.

    :param cgi: CerebrumGroupInfo-style lookup helper.
    :param sip: provider of ``undervisningsenheter`` / ``emnekode2info``.
    :param out: writable LDIF stream.
    :return: dict mapping urn -> {entity_id (str): role}.
    """
    timer = make_timer(logger, 'Starting gen_undervisningsenhet')
    # uioEduOffering - Undervisningsenhet (instansiering av et emne)
    # access_FS.py:Undervisning.list_undervisningenheter
    #
    # uioEduCourseCode, uioEduCourseAdministrator, uioEduCourseLevel,
    # uioEduCourseName - som for Undervisningsaktivitet
    # uioEduCourseOffering - urn:mace:uit.no:offering:<noe>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsenheter:
        emne = sip.emnekode2info.get(entry['emnekode'])
        if not emne:
            # warned earlier
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.append(persontype)
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsenhet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        # sorted() replaces the py2-only keys()/sort() dance and keeps the
        # urn deterministic regardless of dict ordering.
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uit.no:offering:enhet-%s' % "_".join(keys)
        out.write(
            entry_string(
                "cn=ue-%i,%s" % (n, top_dn), {
                    'objectClass': ("top", "uioEduOffering"),
                    'uioEduCourseCode': (entry['emnekode'], ),
                    'uioEduCourseAdministrator': (emne['sko'], ),
                    'uioEduCourseLevel': (emne['studienivakode'], ),
                    'uioEduCourseName': (emne['emnenavn_bokmal'], ),
                    'uioEduCourseInstitution': (emne['institusjonsnr'], ),
                    'uioEduCourseVersion': (emne['versjonskode'], ),
                    'uioEduOfferingTermCode': (entry['terminkode'], ),
                    'uioEduOfferingYear': (entry['arstall'], ),
                    'uioEduOfferingTermNumber': (entry['terminnr'], ),
                    'uioEduCourseOffering': (urn, )
                }))
        n += 1
        ret[urn] = aktivitet_id
    timer('... done gen_undervisningsenhet')
    return ret
def __init__(self):
    """Cache auth data, quarantines and account/VLAN mappings."""
    self.user_dn = ldapconf('USER', 'dn', None)
    self.db = Factory.get('Database')()
    self.const = Factory.get('Constants')(self.db)
    self.account = Factory.get('Account')(self.db)
    self.md4_auth = self.make_auths(self.const.auth_type_md4_nt)
    # Chain crypt(3)-DES and MD5-crypt into a single auth lookup.
    self.auth = None
    for auth_type in (self.const.auth_type_crypt3_des,
                      self.const.auth_type_md5_crypt):
        self.auth = self.make_auths(auth_type, self.auth)
    self.load_quaratines()
    # Iterate spreads in reverse so entries for spreads listed first in
    # the config overwrite (i.e. take precedence over) later ones.
    self.id2vlan_vpn = {}
    user_cfg = cereconf.LDAP_USER
    for spread_name in reversed(user_cfg['spreads']):
        vlan_vpn = (user_cfg['spread2vlan'][spread_name],
                    "OU=%s;" % user_cfg['spread2vpn'][spread_name])
        spread = self.const.Spread(spread_name)
        for row in self.account.search(spread=spread):
            self.id2vlan_vpn[row['account_id']] = vlan_vpn
def main(inargs=None):
    """Generate a group tree LDIF and/or a group-membership pickle dump."""
    parser = argparse.ArgumentParser(
        description="Generate a group tree for LDAP",
    )
    parser.add_argument(
        '--ldiffile',
        # Bug fix: '%(metavar)' was missing the trailing 's'; argparse
        # raises ValueError when rendering --help on an incomplete format.
        help='Write groups to the ldif-file %(metavar)s',
        metavar='file',
    )
    parser.add_argument(
        '--picklefile',
        help='Write group memberships to the pickle-file %(metavar)s',
        metavar='file',
    )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)

    if not any((args.ldiffile, args.picklefile)):
        parser.error('Must use --ldiffile or --picklefile')

    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %r', args)

    ldiffile = args.ldiffile
    picklefile = args.picklefile
    db = Factory.get('Database')()
    dn = ldapconf('GROUP', 'dn')

    logger.info('Generating LDIF...')
    destfile = ldif_outfile('GROUP', ldiffile)
    destfile.write(container_entry_string('GROUP'))
    mbr2grp = dump_ldif(db, dn, destfile)
    end_ldif_outfile('GROUP', destfile)
    logger.info('Wrote LDIF to %r', ldiffile)

    # Bug fix: only dump the pickle when a filename was given; the old
    # code crashed on ``None + '.tmp'`` when run with --ldiffile alone,
    # despite the parser accepting either option.
    if picklefile:
        logger.info('Generating pickle dump...')
        tmpfname = picklefile + '.tmp'
        # Write to a temp file and rename for an atomic replace; the old
        # code also never closed the file handle.
        with open(tmpfname, 'wb') as stream:
            pickle.dump(mbr2grp, stream, pickle.HIGHEST_PROTOCOL)
        os.rename(tmpfname, picklefile)
        logger.info('Wrote pickle file to %r', picklefile)
    logger.info('Done %s', parser.prog)
def gen_undervisningsenhet(cgi, sip, out):
    """Write uioEduOffering entries for all undervisningsenheter.

    :param cgi: CerebrumGroupInfo-style lookup helper.
    :param sip: provider of ``undervisningsenheter`` / ``emnekode2info``.
    :param out: writable LDIF stream.
    :return: dict mapping urn -> {entity_id (str): role}.
    """
    # uioEduOffering - Undervisningsenhet (instansiering av et emne)
    # access_FS.py:Undervisning.list_undervisningenheter
    #
    # uioEduCourseCode, uioEduCourseAdministrator, uioEduCourseLevel,
    # uioEduCourseName - som for Undervisningsaktivitet
    # uioEduCourseOffering - urn:mace:uio.no:offering:<noe>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsenheter:
        emne = sip.emnekode2info.get(entry['emnekode'])
        if not emne:
            # warned earlier
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.append(persontype)
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsenhet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        # sorted() replaces the py2-only keys()/sort() dance; dead
        # commented-out filtering code removed.
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uio.no:offering:enhet-%s' % "_".join(keys)
        out.write(entry_string("cn=ue-%i,%s" % (n, top_dn), {
            'objectClass': ("top", "uioEduOffering"),
            'uioEduCourseCode': (iso2utf(entry['emnekode']),),
            'uioEduCourseAdministrator': (iso2utf(emne['sko']),),
            'uioEduCourseLevel': (iso2utf(emne['studienivakode']),),
            'uioEduCourseName': (iso2utf(emne['emnenavn_bokmal']),),
            'uioEduCourseInstitution': (iso2utf(emne['institusjonsnr']),),
            'uioEduCourseVersion': (iso2utf(emne['versjonskode']),),
            'uioEduOfferingTermCode': (iso2utf(entry['terminkode']),),
            'uioEduOfferingYear': (iso2utf(entry['arstall']),),
            'uioEduOfferingTermNumber': (iso2utf(entry['terminnr']),),
            'uioEduCourseOffering': (iso2utf(urn),)}))
        n += 1
        ret[urn] = aktivitet_id
    return ret
def update_dummy_ou_entry(self, entry):
    """Extend the dummy OU entry with norEduOrgUnit data.

    Changes from superclass: if a root OU id is configured, add object
    class norEduOrgUnit and its attrs cn,
    norEduOrgUnitUniqueIdentifier, norEduOrgUniqueIdentifier.
    """
    if self.root_ou_id is None:
        return
    self.ou.clear()
    self.ou.find(self.root_ou_id)
    ldap_ou_id = self.get_orgUnitUniqueID()
    entry['objectClass'] = ['top', 'organizationalUnit', 'norEduOrgUnit']
    entry['cn'] = (ldapconf('OU', 'dummy_name'),)
    entry[self.FEIDE_attr_ou_id] = (ldap_ou_id,)
    entry.update(self.FEIDE_ou_common_attrs)
    if self.FEIDE_class_obsolete:
        entry['objectClass'].append(self.FEIDE_class_obsolete)
    if self.norEduOrgUniqueID:
        entry['norEduOrgUniqueNumber'] = self.norEduOrgUniqueID
        entry['norEduOrgUnitUniqueNumber'] = (ldap_ou_id,)
def make_person_entry(self, row, person_id):
    """Override to add Feide specific functionality."""
    dn, entry, alias_info = self.__super.make_person_entry(row, person_id)
    if not dn:
        return dn, entry, alias_info

    primary = self.make_eduPersonPrimaryAffiliation(person_id)
    pri_edu_aff, pri_ou, pri_aff = primary
    if pri_edu_aff:
        entry['eduPersonPrimaryAffiliation'] = pri_edu_aff
        entry['eduPersonPrimaryOrgUnitDN'] = (
            self.ou2DN.get(int(pri_ou)) or self.dummy_ou_dn)

    # Entitlements are only exported when a pickle file is configured.
    if (ldapconf('PERSON', 'entitlements_pickle_file')
            and person_id in self.person2entitlements):
        entry['eduPersonEntitlement'] = set(
            self.person2entitlements[person_id])

    entry['objectClass'].append('schacContactLocation')
    entry['schacHomeOrganization'] = self.homeOrg
    return dn, entry, alias_info
def ldap_connect(self, serv_l=None):
    # Open a TLS-protected LDAP connection to each server in serv_l
    # (default: cereconf.LDAP['server']) and record, per server, a sync
    # log file handle plus the bound connection in self.s_list.
    if not serv_l:
        serv_l = cereconf.LDAP['server']
    for server in serv_l:
        try:
            # Each entry is "<host>:<user>"; credentials come from the
            # local password store for that (user, host) pair.
            serv, user = [str(y) for y in server.split(':')]
            f_name = cereconf.LDAP['dump_dir']+'/log/' + serv+'.sync.log'
            try:
                passwd = read_password(user, serv)
            except:
                # NOTE(review): `break` aborts all remaining servers when
                # one password lookup fails — `continue` may be intended.
                logger.warn('No valid password-file for %r!', serv)
                break
            # Append to an existing sync log, otherwise start a new one.
            if os.path.isfile(f_name):
                self.s_list[serv] = [file(f_name, 'a')]
            else:
                self.s_list[serv] = [file(f_name, 'w')]
            user = "******".join((user, ldapconf('ORG', 'dn')))
            con = ldap.open(serv)
            con.protocol_version = ldap.VERSION3
            try:
                if cereconf.TLS_CACERT_FILE is not None:
                    # NOTE(review): assigning OPT_X_TLS_CACERTFILE as an
                    # instance attribute has no effect in python-ldap; the
                    # documented API is ldap.set_option(...) — confirm
                    # before relying on certificate validation here.
                    con.OPT_X_TLS_CACERTFILE = cereconf.TLS_CACERT_FILE
            except:
                pass
            try:
                if cereconf.TLS_CACERT_DIR is not None:
                    # NOTE(review): same concern as OPT_X_TLS_CACERTFILE.
                    con.OPT_X_TLS_CACERTDIR = cereconf.TLS_CACERT_DIR
            except:
                pass
            l_bind = None
            try:
                con.start_tls_s()
                l_bind = con.simple_bind(user, passwd)
                self.s_list[serv].append(con)
            except:
                # TLS setup or bind failed: close and drop the log handle.
                logger.warn("Could not open TLS-connection to %r", serv)
                self.s_list[serv][0].close()
                del self.s_list[serv]
            if l_bind and con:
                logger.info("TLS-connection open to %r", serv)
        except ldap.LDAPError as e:
            logger.warn(e)
def _load_groups(self):
    """Return a dict of group_id -> group row for LDAP-exportable groups.

    Which groups qualify is decided by the spreads configured in
    cereconf.LDAP_GROUP.  See L{_load_users} for a related method.
    """
    group = Factory.get("Group")(self.db)
    spread_names = ldapconf("GROUP", "spreads", ())
    spreads = tuple(self.const.human2constant(name)
                    for name in spread_names)
    self.logger.debug("Collecting groups for LDAP export. Spreads: %s",
                      ", ".join(str(spread) for spread in spreads))
    result = {}
    for row in group.search(spread=spreads):
        result[row["group_id"]] = row
    return result
def _load_groups(self):
    """Map group_id to group info for every LDAP-exportable group.

    Exportability is controlled by the spread names listed in
    cereconf.LDAP_GROUP.  See L{_load_users} for the user counterpart.
    """
    group = Factory.get("Group")(self.db)
    spreads = tuple(
        self.const.human2constant(name)
        for name in ldapconf("GROUP", "spreads", ()))
    self.logger.debug(
        "Collecting groups for LDAP export. Spreads: %s",
        ", ".join(str(spread) for spread in spreads))
    return dict((row["group_id"], row)
                for row in group.search(spread=spreads))
def make_person_entry(self, row, person_id):
    """Extend the superclass person entry with Feide-specific attributes."""
    dn, entry, alias_info = self.__super.make_person_entry(row, person_id)
    if not dn:
        return dn, entry, alias_info
    (pri_edu_aff,
     pri_ou,
     pri_aff) = self.make_eduPersonPrimaryAffiliation(person_id)
    if pri_edu_aff:
        entry['eduPersonPrimaryAffiliation'] = pri_edu_aff
        # Fall back to the dummy OU when the primary OU is not exported.
        entry['eduPersonPrimaryOrgUnitDN'] = (
            self.ou2DN.get(int(pri_ou)) or self.dummy_ou_dn)
    entitlements_configured = ldapconf('PERSON', 'entitlements_file')
    if entitlements_configured and person_id in self.person2entitlements:
        entry['eduPersonEntitlement'] = set(
            self.person2entitlements[person_id])
    entry['objectClass'].append('schacContactLocation')
    entry['schacHomeOrganization'] = self.homeOrg
    return dn, entry, alias_info
def _load_users(self):
    """Cache enough user information for the export to progress.

    Builds a dict mapping account_id to {'uname': ..., 'np_type': ...}
    for every account carrying one of the LDAP-exportable spreads from
    cereconf.LDAP_USER, then augments it with contact, password and
    group-membership information via the _get_*_info helpers.
    """
    # One Account object suffices for all spread searches (the original
    # instantiated a second, identical one that was never needed).
    account = Factory.get("Account")(self.db)
    spreads = tuple(self.const.human2constant(x)
                    for x in ldapconf("USER", "spreads", ()))
    self.logger.debug("Collecting users for LDAP export. "
                      "Spreads: %s",
                      ", ".join(str(x) for x in spreads))
    users = dict()
    for spread in spreads:
        for row in account.search(spread=spread):
            users[row["account_id"]] = {
                "uname": row["name"],
                "np_type": row["np_type"],
            }
    users = self._get_contact_info(users)
    users = self._get_password_info(users)
    users = self._get_membership_info(users)
    return users
def update_dummy_ou_entry(self, entry):
    """Decorate the dummy OU entry with norEduOrgUnit data for the root OU.

    Differs from the superclass: when a root OU is configured, the entry
    gets object class norEduOrgUnit and its attrs cn,
    norEduOrgUnitUniqueIdentifier, plus the common Feide OU attributes
    and org/org-unit unique numbers.  Does nothing when root_ou_id is
    unset.
    """
    if self.root_ou_id is None:
        return
    self.ou.clear()
    self.ou.find(self.root_ou_id)
    ldap_ou_id = self.get_orgUnitUniqueID()
    entry['objectClass'] = ['top', 'organizationalUnit', 'norEduOrgUnit']
    entry['cn'] = (ldapconf('OU', 'dummy_name'),)
    entry['norEduOrgUnitUniqueIdentifier'] = (ldap_ou_id,)
    entry.update(self.FEIDE_ou_common_attrs)
    if self.FEIDE_class_obsolete:
        entry['objectClass'].append(self.FEIDE_class_obsolete)
    if self.norEduOrgUniqueID:
        entry['norEduOrgUniqueNumber'] = self.norEduOrgUniqueID
    entry['norEduOrgUnitUniqueNumber'] = (ldap_ou_id,)
def _load_users(self):
    """Cache enough user information for the export to progress.

    Builds a dict mapping account_id to {'uname': ..., 'np_type': ...}
    for every account carrying one of the LDAP-exportable spreads from
    cereconf.LDAP_USER, then augments it with contact, password and
    group-membership information via the _get_*_info helpers.
    """
    # One Account object suffices for all spread searches (the original
    # instantiated a second, identical one that was never needed).
    account = Factory.get("Account")(self.db)
    spreads = tuple(
        self.const.human2constant(x)
        for x in ldapconf("USER", "spreads", ()))
    self.logger.debug("Collecting users for LDAP export. "
                      "Spreads: %s",
                      ", ".join(str(x) for x in spreads))
    users = dict()
    for spread in spreads:
        for row in account.search(spread=spread):
            users[row["account_id"]] = {
                "uname": row["name"],
                "np_type": row["np_type"],
            }
    users = self._get_contact_info(users)
    users = self._get_password_info(users)
    users = self._get_membership_info(users)
    return users
def main(inargs=None):
    """Push today's diff LDIF to the LDAP server and rotate the dump files.

    Runs ldapmodify with the uit_diff_<YYYYMMDD> file from the configured
    LDAP dump directory, then replaces the full ldif with the temp copy.
    Raises SystemExit(1) if either step fails.
    """
    # Local imports: used only by this entry point.
    import shutil
    import subprocess

    parser = argparse.ArgumentParser(description="Update LDAP")
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    today = datetime.date.today().strftime('%Y%m%d')
    ldap_server = cereconf.LDAP['server']
    user = cereconf.LDAP['user']
    password = read_password(user, ldap_server)
    ldap_dump_dir = ldapconf(None, 'dump_dir')
    infile = os.path.join(ldap_dump_dir, 'uit_diff_%s' % (today, ))
    ldap_temp_file = os.path.join(ldap_dump_dir, "temp_uit_ldif")
    ldap_diff = os.path.join(ldap_dump_dir, "uit_ldif")

    # Pass arguments as a list with no shell so the password is never
    # subject to shell parsing/injection (it was previously interpolated
    # into an os.system() command line).  Note it remains visible in the
    # process list, as before.
    ret = subprocess.call([
        '/usr/bin/ldapmodify',
        '-x',
        '-H', 'ldaps://%s' % (ldap_server, ),
        '-D', 'cn=%s,dc=uit,dc=no' % (user, ),
        '-w', password,
        '-f', infile,
    ])
    if ret != 0:
        logger.error("Unable to update ldap server")
        raise SystemExit(1)

    # Rotate: the freshly generated temp ldif becomes the current ldif.
    try:
        shutil.move(ldap_temp_file, ldap_diff)
    except (EnvironmentError, shutil.Error):
        logger.error("Unable to copy tempfile")
        raise SystemExit(1)
def yield_users(self):
    """ Yield all users qualified for export to LDAP. """

    def _as_sequence(value):
        # LDAP attribute values are sequences; wrap scalars in a tuple.
        if isinstance(value, (list, set, tuple)):
            return value
        return (value,)

    optional_keys = ("cn", "sn", "givenName", "userPassword",
                     "uioMemberOf",)
    for user_id in self.users:
        attrs = self.users[user_id]
        entry = {
            "dn": (self._uname2dn(attrs["uname"]),),
            "uid": (attrs["uname"],),
            "eduPersonPrincipalName": (attrs["uname"],),
            "mail": (attrs["mail"],),
            "objectClass": ldapconf("USER", "objectClass"),
        }
        for key in optional_keys:
            if key in attrs:
                entry[key] = _as_sequence(attrs[key])
        yield object2utf8(entry)
def person_authn_levels(self):
    """Return (and lazily cache) per-person authentication levels.

    The result maps person entity_id to a set of
    (feide_service_id, authentication_level) tuples, for use by
    update_person_authn.  An empty dict is cached and returned when no
    norEduPersonAuthnMethod_selector is configured for PERSON.
    """
    if not hasattr(self, '_person_authn_levels'):
        selector = ldapconf('PERSON', 'norEduPersonAuthnMethod_selector',
                            {})
        if not selector:
            self._person_authn_levels = {}
        else:
            timer = make_timer(self.logger,
                               'Fetching authentication levels...')
            fse = FeideService(self.db)
            self._person_authn_levels = (
                fse.get_person_to_authn_level_map())
            timer("...authentication levels done.")
    return self._person_authn_levels
from collections import defaultdict
from Cerebrum.Utils import Factory
from Cerebrum.modules.LDIFutils import (ldapconf, entry_string,
                                        ldif_outfile, end_ldif_outfile,
                                        container_entry_string)

logger = Factory.get_logger("cronjob")
db = Factory.get('Database')()
co = Factory.get('Constants')(db)
group = Factory.get('Group')(db)
# person member_id -> list of DNs of the LDAP groups the person is in;
# filled as a side effect of dump_ldif().
mbr2grp = defaultdict(list)
top_dn = ldapconf('GROUP', 'dn')


def dump_ldif(file_handle):
    # Write one uioGroup entry per group carrying the LDAP group spread,
    # then record, for every person member, which group DNs it belongs to.
    group2dn = {}
    for row in group.search(spread=co.spread_ldap_group):
        dn = (u"cn={},{}".format(row['name'], top_dn))
        group2dn[row['group_id']] = dn
        file_handle.write(entry_string(dn, {
            'objectClass': ("top", "uioGroup"),
            'description': (row['description'],)
        }))
    for mbr in group.search_members(spread=co.spread_ldap_group,
                                    member_type=co.entity_person):
        mbr2grp[int(mbr["member_id"])].append(group2dn[mbr['group_id']])
def write_mail_dns():
    """Dump email domains and their mail hosts to the MAIL_DNS ldif."""
    f = ldif_outfile('MAIL_DNS')
    hosts, cnames, lower2host, hosts_only_mx = get_hosts_and_cnames()
    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)
    logger = Factory.get_logger('cronjob')
    email = Email.EmailDomain(db)
    email_domain = {}
    for dom_entry in email.list_email_domains():
        email_domain[int(dom_entry['domain_id'])] = dom_entry['domain']
    # Domains flagged "noexport" are dropped from the export entirely.
    for no_exp_dom in email.list_email_domains_with_category(co.email_domain_category_noexport):
        del email_domain[int(no_exp_dom['domain_id'])]
    domains = email_domain.values()
    domains.sort()
    domain_dict = {}
    for domain in domains:
        domain_dict[domain.lower()] = True
        # Verify that domains have a MX-record.
        for arg in cereconf.LDAP_MAIL_DNS['dig_args']:
            zone = arg[0]
            if domain.endswith(zone) and not (domain in hosts_only_mx
                                              or domain in hosts):
                logger.error("email domain without MX defined: %s" % domain)
        # Valid email domains only requires MX
        if domain in hosts_only_mx:
            del hosts_only_mx[domain]
    for host in hosts_only_mx:
        logger.warn("MX defined but no A/AAAA record or valid email domain: %s"
                    % host)

    def handle_domain_host(host):
        # Emit the canonical host name plus any cname aliases that are not
        # themselves email domains.  Consumes the host/cname entries so
        # each is written only once across the whole dump.
        f.write("host: %s\n" % lower2host[host])
        for cname in hosts[host]:
            if not domain_dict.has_key(cname):
                f.write("cn: %s\n" % lower2host[cname])
                del cnames[cname]
        del hosts[host]

    dn_suffix = ldapconf('MAIL_DNS', 'dn')
    f.write(container_entry_string('MAIL_DNS'))
    # One entry per email domain; attach host data when the domain name
    # resolves (directly or via cname) to a known mail host.
    for domain in domains:
        f.write("""dn: cn=%s,%s
objectClass: uioHost
cn: %s
""" % (domain, dn_suffix, domain))
        domain = domain.lower()
        if cnames.has_key(domain):
            f.write("cn: %s\n" % lower2host[cnames[domain]])
            handle_domain_host(cnames[domain])
        elif hosts.has_key(domain):
            handle_domain_host(domain)
        f.write('\n')
    # Remaining hosts were not consumed above: emit them as standalone
    # uioHost entries with their cname aliases.
    sorted_hosts = hosts.keys()
    sorted_hosts.sort()
    for host in sorted_hosts:
        f.write("""dn: host=%s,%s
objectClass: uioHost
host: %s
cn: %s
""" % (lower2host[host], dn_suffix, lower2host[host], lower2host[host]))
        for cname in hosts[host]:
            f.write("cn: %s\n" % lower2host[cname])
        f.write('\n')
# Finalize the MAIL_DNS ldif opened above (writes any configured trailer
# and closes the file).
end_ldif_outfile('MAIL_DNS', f)
def init_person_course(self):
    """Populate dicts with a person's course information.

    Loads the owner_id -> URN-list mapping produced by the course dump
    into self.ownerid2urnlist.
    """
    timer = make_timer(self.logger, 'Processing person courses...')
    path = join_paths(ldapconf(None, 'dump_dir'), "ownerid2urnlist.pickle")
    # Context manager closes the handle (the old file() call leaked it);
    # binary mode is what pickle expects.
    with open(path, 'rb') as picklefile:
        self.ownerid2urnlist = pickle.load(picklefile)
    timer("...person courses done.")
def init_person_groups(self):
    """Populate dicts with a person's group information.

    Loads the person_id -> group mapping produced by the group dump into
    self.person2group.
    """
    timer = make_timer(self.logger, 'Processing person groups...')
    path = join_paths(ldapconf(None, 'dump_dir'), "personid2group.pickle")
    # Context manager closes the handle (the old file() call leaked it);
    # binary mode is what pickle expects.
    with open(path, 'rb') as picklefile:
        self.person2group = pickle.load(picklefile)
    timer("...person groups done.")
from collections import defaultdict from Cerebrum.Utils import Factory from Cerebrum.modules.LDIFutils import (ldapconf, entry_string, ldif_outfile, end_ldif_outfile, container_entry_string) logger = Factory.get_logger("cronjob") db = Factory.get('Database')() ac = Factory.get('Account')(db) co = Factory.get('Constants')(db) group = Factory.get('Group')(db) mbr2grp = defaultdict(set) top_dn = ldapconf('GROUP', 'dn') def dump_ldif(file_handle): group2dn = {} for row in group.search(spread=co.spread_ldap_group): dn = ("cn={},{}".format(row['name'], top_dn)) group2dn[row['group_id']] = dn file_handle.write( entry_string( dn, { 'objectClass': ("top", "hiofGroup"), 'description': (row['description'], ), })) for group_id, group_dn in group2dn.items(): for mbr in group.search_members(group_id=group_id,
def _uname2dn(self, uname):
    """Build the DN for a user entry from its account name."""
    return "uid=%s,%s" % (uname, ldapconf("USER", "dn"))
import base64 import argparse from time import time as now import cereconf from Cerebrum.Utils import Factory from Cerebrum.modules import Email from Cerebrum.modules.LDIFutils import ldapconf, map_spreads, ldif_outfile, \ end_ldif_outfile, container_entry_string from Cerebrum import Errors logger = Factory.get_logger("cronjob") default_spam_level = 9999 default_spam_action = 0 mail_dn = ldapconf('MAIL', 'dn') def dict_to_ldif_string(d): """Stringify a dict LDIF-style. FIXME: Should this be moved to LDIFutils.py? Convert a dict with LDIF-attributes to a string that can be written directly to an LDIF file. @type d: dict (basestring to basestring/sequence of basestring) @param d: A dictionary with key,value pairs containing the attributes for some LDAP object. value-part can be either a scalar (a basestring) OR a sequence (list, tuple or set) thereof
def main(inargs=None):
    """Generate a guest accounts ldif and write it to file.

    Destination file, guest spread and base DN default to the GUESTS
    section of the guest LDAP config; each can be overridden on the
    command line.  Re-raises any export error after logging it.
    """
    parser = argparse.ArgumentParser(
        description='Generate a guest accounts ldif',
    )
    default_filename = ldapconf('GUESTS', 'file', None, guestconfig)
    default_spread = ldapconf('GUESTS', 'spread', None, guestconfig)
    default_base = ldapconf('GUESTS', 'dn', None, guestconfig)
    parser.add_argument(
        '-f', '--filename',
        default=default_filename,
        required=not default_filename,
        help='Destination file (default: %(default)s)',
        metavar='<filename>',
    )
    parser.add_argument(
        '-s', '--spread',
        default=default_spread,
        required=not default_spread,
        help='Guest spread (default: %(default)s)',
        metavar='<spread>',
    )
    parser.add_argument(
        '-b', '--base',
        default=default_base,
        required=not default_base,
        help='DN for guest user objects (default: %(default)s)',
        metavar='<dn>',
    )
    Cerebrum.logutils.options.install_subparser(parser)
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %s', repr(args))

    db = Factory.get('Database')()
    co = Factory.get('Constants')(db)

    filename = args.filename
    spread = get_spread(co, args.spread)
    base = args.base

    def entry_to_dn(entry):
        # Bug fix: the helper previously declared a `uid` parameter but
        # ignored it and read the caller's loop variable from the
        # enclosing scope; it now uses the entry it is given.
        return "uid=%s,%s" % (entry['uid'], base)

    logger.info("Configuring export")
    ldif = LDIFWriter('GUESTS', filename, module=guestconfig)
    # Initialized before the try so the summary below can never hit an
    # unbound name.
    count = 0
    try:
        exporter = GuestLDIF(db, ldif, spread=spread)
        logger.info("Starting guest account ldap export.")
        for entry in exporter.generate_guests():
            ldif.write_entry(entry_to_dn(entry), entry)
            count += 1
    except Exception as e:
        logger.error("Unable to export: %s", e, exc_info=True)
        raise
    finally:
        ldif.close()

    logger.info("%d accounts dumped to ldif", count)
    logger.info('Done %s', parser.prog)
def _gname2dn(self, gname):
    """Build the DN for a group entry from its group name."""
    return "cn=%s,%s" % (gname, ldapconf("GROUP", "dn"))