def generate_all(fname):
    """Generate user + group LDIF to fname.

    @type fname: str
    @param fname: The file where the ldif data will be written
    """
    logger.debug("Generating ldif into %s", fname)
    out = ldif_outfile("ORG", fname)
    out.write(container_entry_string("ORG"))
    helper = LDIFHelper(logger)
    logger.debug("Generating user ldif...")
    out.write(container_entry_string("USER"))
    for user in helper.yield_users():
        # The DN is carried as a single-element attribute in the entry;
        # pop it before serializing the remaining attributes.
        dn = user["dn"][0]
        del user["dn"]
        out.write(entry_string(dn, user, False))
    # NOTE(review): both arguments are the same stream; presumably the second
    # is the "final" output file expected by end_ldif_outfile -- confirm
    # against its signature.
    end_ldif_outfile("USER", out, out)
    logger.debug("Generating group ldif...")
    out.write(container_entry_string("GROUP"))
    for group in helper.yield_groups():
        dn = group["dn"][0]
        del group["dn"]
        out.write(entry_string(dn, group, False))
    end_ldif_outfile("GROUP", out)
    logger.debug("Done with group ldif (all done)")
def generate_automount(f):
    """Write automount master/map LDIF entries for all populated disks to f.

    Fixes: removed the unused ``co`` constant factory, and replaced the
    non-idiomatic ``not (...) in paths.keys()`` membership test.
    """
    db = Factory.get('Database')()
    d = Factory.get('Disk')(db)
    h = Factory.get('Host')(db)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        if disk['host_id'] not in hosts:
            hosts.append(disk['host_id'])
    h_id2name = {}
    # TBD: any point in filtering? does it just consume more resources than
    # listing all hosts?
    for host in h.search(host_id=hosts):
        h_id2name[host['host_id']] = host['name']
    # Map (top-level dir, sub dir) -> host_id; first disk seen wins.
    paths = {}
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        path = disk['path'].split('/')
        if (path[1], path[2]) not in paths:
            paths[(path[1], path[2])] = disk['host_id']
    f.write(container_entry_string('AUTOMOUNT_MASTER'))
    for path in paths:
        # 1) master-map entry pointing at the per-path auto map
        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn=%s,%s" % ("/%s/%s" % (path[0], path[1]),
                           ldapconf('AUTOMOUNT_MASTER', 'dn', None))
        entry['automountInformation'] = "ldap:ou=auto.%s-%s,%s" % (
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))
        # 2) the map container itself
        entry = {}
        entry['objectClass'] = ['top', 'automountMap']
        dn = "ou=auto.%s-%s,%s" % (path[1], path[0],
                                   ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))
        # 3) wildcard mount entry for the map
        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn=/,ou=auto.%s-%s,%s" % (path[1], path[0],
                                        ldapconf('AUTOMOUNT', 'dn', None))
        dns = 'uio.no'
        if path[0] == 'ifi':
            dns = 'ifi.uio.no'
        entry[
            'automountInformation'] = "-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,nosuid,noacl %s.%s:/%s/%s/&" % (
                h_id2name[paths[path]], dns, path[0], path[1])
        f.write(entry_string(dn, entry))
def generate_automount(f):
    """Write automount master/map LDIF entries for all populated disks to f.

    Fix: replaced the non-idiomatic ``not (...) in paths.keys()`` membership
    test with ``... not in paths``.
    """
    db = Factory.get('Database')()
    d = Factory.get('Disk')(db)
    h = Factory.get('Host')(db)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        if disk['host_id'] not in hosts:
            hosts.append(disk['host_id'])
    h_id2name = {}
    for host in h.search(host_id=hosts):
        h_id2name[host['host_id']] = host['name']
    # Map (top-level dir, sub dir) -> host_id; first disk seen wins.
    paths = {}
    for disk in disks:
        if disk['count'] <= 0:
            # Skip disks with no users
            continue
        path = disk['path'].split('/')
        if (path[1], path[2]) not in paths:
            paths[(path[1], path[2])] = disk['host_id']
    f.write(container_entry_string('AUTOMOUNT_MASTER'))
    for path in paths:
        # 1) master-map entry pointing at the per-path auto map
        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn={},{}".format(
            "/{}/{}".format(path[0], path[1]),
            ldapconf('AUTOMOUNT_MASTER', 'dn', None))
        entry['automountInformation'] = "ldap:ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))
        # 2) the map container itself
        entry = {}
        entry['objectClass'] = ['top', 'automountMap']
        dn = "ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        f.write(entry_string(dn, entry))
        # 3) wildcard mount entry for the map
        entry = {}
        entry['objectClass'] = ['top', 'automount']
        dn = "cn=/,ou=auto.{}-{},{}".format(
            path[1], path[0], ldapconf('AUTOMOUNT', 'dn', None))
        dns = 'uio.no'
        if path[0] == 'ifi':
            dns = 'ifi.uio.no'
        automount_opts = ("-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,"
                          "nosuid,noacl {}.{}:/{}/{}/&")
        entry['automountInformation'] = automount_opts.format(
            h_id2name[paths[path]], dns, path[0], path[1])
        f.write(entry_string(dn, entry))
def generate_automount(f):
    """Write automount master/map LDIF entries for all populated disks to f.

    Fixes: removed the unused ``co`` constant factory, and replaced the
    non-idiomatic ``not (...) in paths.keys()`` membership test.
    """
    db = Factory.get("Database")()
    d = Factory.get("Disk")(db)
    h = Factory.get("Host")(db)
    hosts = []
    disks = d.list(filter_expired=True)
    for disk in disks:
        if disk["count"] <= 0:
            # Skip disks with no users
            continue
        if disk["host_id"] not in hosts:
            hosts.append(disk["host_id"])
    h_id2name = {}
    # TBD: any point in filtering? does it just consume more resources than
    # listing all hosts?
    for host in h.search(host_id=hosts):
        h_id2name[host["host_id"]] = host["name"]
    # Map (top-level dir, sub dir) -> host_id; first disk seen wins.
    paths = {}
    for disk in disks:
        if disk["count"] <= 0:
            # Skip disks with no users
            continue
        path = disk["path"].split("/")
        if (path[1], path[2]) not in paths:
            paths[(path[1], path[2])] = disk["host_id"]
    f.write(container_entry_string("AUTOMOUNT_MASTER"))
    for path in paths:
        # 1) master-map entry pointing at the per-path auto map
        entry = {}
        entry["objectClass"] = ["top", "automount"]
        dn = "cn=%s,%s" % ("/%s/%s" % (path[0], path[1]),
                           ldapconf("AUTOMOUNT_MASTER", "dn", None))
        entry["automountInformation"] = "ldap:ou=auto.%s-%s,%s" % (
            path[1], path[0], ldapconf("AUTOMOUNT", "dn", None))
        f.write(entry_string(dn, entry))
        # 2) the map container itself
        entry = {}
        entry["objectClass"] = ["top", "automountMap"]
        dn = "ou=auto.%s-%s,%s" % (path[1], path[0],
                                   ldapconf("AUTOMOUNT", "dn", None))
        f.write(entry_string(dn, entry))
        # 3) wildcard mount entry for the map
        entry = {}
        entry["objectClass"] = ["top", "automount"]
        dn = "cn=/,ou=auto.%s-%s,%s" % (path[1], path[0],
                                        ldapconf("AUTOMOUNT", "dn", None))
        dns = "uio.no"
        if path[0] == "ifi":
            dns = "ifi.uio.no"
        entry["automountInformation"] = "-fstype=nfs,tcp,vers=3,rw,intr,hard,nodev,nosuid,noacl %s.%s:/%s/%s/&" % (
            h_id2name[paths[path]],
            dns,
            path[0],
            path[1],
        )
        f.write(entry_string(dn, entry))
def generate_user_netgroup_output(self, f_ldif, f_netgroup):
    """Write user netgroups as LDIF and/or netgroup-file lines."""
    if not self.opts.netgroup_spread:
        return
    self.find_groups('netgroup')
    self.netgroup_names = set(self.netgroups.values())
    for g_id in self.netgroups:
        group_members, user_members = map(
            sorted,
            self.expand_netgroup(g_id, self.co.entity_account,
                                 self.spreads.user))
        # netgroup triple syntax (host, user, domain); only user is set
        user_members = ["(,%s,)" % m for m in user_members]
        if f_ldif:
            dn, entry = self.ldif_netgroup(False, g_id, group_members,
                                           user_members)
            f_ldif.write(entry_string(dn, entry, False))
        if f_netgroup:
            f_netgroup.write(
                self._wrap_line(
                    self.netgroups[g_id],
                    # TODO: Drop the 'or's, which are equivalent to orig code?
                    " ".join((group_members or [""]) +
                             (user_members or [""])),
                    ' ',
                    self._make_tmp_netgroup_name,
                    is_ng=True))
    self.clear_groups()
def generate_host_netgroup_output(self, f_ldif, f_netgroup):
    """Write host netgroups as LDIF and/or netgroup-file lines."""
    if not self.opts.zone:
        return
    self._num_map = {}
    zone = self.zone.postfix
    # Negative slice offset used to strip the zone suffix below; the
    # `zone is not None` guard keeps the -0 corner case out.
    zone_offset = -len(zone or "")
    self.find_groups('host_netgroup')
    for g_id in self.host_netgroups:
        group_members, host_members = map(
            sorted,
            self.expand_netgroup(g_id, self.co.entity_dns_owner, None))
        # (host, -, ) triples: strip the trailing dot from each FQDN ...
        members = set("(%s,-,)" % m[:-1] for m in host_members)
        if zone is not None:
            # ... and also add the short name for hosts inside our own zone
            members.update("(%s,-,)" % m[:zone_offset]
                           for m in host_members
                           if m.endswith(zone))
        # NOTE(review): this guard tests self.opts.ldif while the sibling
        # user-netgroup method tests `if f_ldif:` -- presumably f_ldif is
        # always set when opts.ldif is; confirm.
        if self.opts.ldif:
            dn, entry = self.ldif_netgroup(True, g_id, group_members,
                                           members)
            f_ldif.write(entry_string(dn, entry, False))
        if f_netgroup:
            f_netgroup.write(
                self._wrap_line(self.host_netgroups[g_id],
                                " ".join(group_members) + " " +
                                " ".join(members),
                                ' ',
                                self._make_tmp_host_netgroup_name,
                                is_ng=True))
    self.clear_groups()
def dump_ldif(db, root_dn, file_handle):
    """Write group entries under root_dn; return {owner_id: [group DNs]}.

    Group membership of accounts is mapped back to the account's owner.
    """
    co = Factory.get('Constants')(db)
    group = Factory.get('Group')(db)
    ac = Factory.get('Account')(db)
    logger.debug('Processing groups...')
    group_to_dn = {}
    for row in group.search(spread=co.spread_ldap_group):
        dn = "cn={},{}".format(row['name'], root_dn)
        group_to_dn[row['group_id']] = dn
        file_handle.write(
            entry_string(
                dn, {
                    'objectClass': ("top", "uioUntypedObject"),
                    'description': (row['description'], ),
                }))
    logger.debug('Caching account ownership...')
    account_to_owner = {}
    for row in ac.search(expire_start=None, expire_stop=None):
        # TODO: Should probably filter out accounts without owner_type=person?
        account_to_owner[row['account_id']] = row['owner_id']
    logger.debug('Processing group memberships...')
    member_to_group = defaultdict(list)
    for row in group.search_members(spread=co.spread_ldap_group,
                                    member_type=co.entity_account):
        if row['member_id'] not in account_to_owner:
            continue
        owner_id = account_to_owner[int(row['member_id'])]
        member_to_group[owner_id].append(group_to_dn[row['group_id']])
    return dict(member_to_group)
def generate_voip_clients(sink, addr_id2dn, *args):
    """Write sipClient LDIF entries to sink.

    addr_id2dn maps voip_address entity ids to the DNs of already-written
    voipAddress entries; clients without a cached address are skipped.
    """
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string('VOIP_CLIENT'))
    for entry in vc.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            logger.debug("voip client %s refers to voip_address %s, but the "
                         "latter is not in the cache. Has %s been recently "
                         "created?", repr(entry), voip_address_id,
                         voip_address_id)
            continue
        entry['objectClass'] = ['top', 'sipClient']
        entry['sipVoipAddressDN'] = addr_id2dn[voip_address_id]
        # softphones are keyed on uid, hardphones on MAC address
        if entry["sipClientType"] == text_type(const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif entry["sipClientType"] == text_type(const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        entry["sipClientType"], repr(entry))
            continue
        dn = "{}={},{}".format(attr, entry[attr],
                               ldapconf('VOIP_CLIENT', 'dn', None))
        sink.write(entry_string(dn, entry))
def generate_voip_clients(sink, addr_id2dn, *args):
    """Write sipClient LDIF entries to sink.

    addr_id2dn maps voip_address entity ids to the DNs of already-written
    voipAddress entries; clients without a cached address are skipped.
    """
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string('VOIP_CLIENT'))
    for entry in vc.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            logger.debug(
                "voip client %s refers to voip_address %s, but the "
                "latter is not in the cache. Has %s been recently "
                "created?", repr(entry), voip_address_id, voip_address_id)
            continue
        entry['objectClass'] = ['top', 'sipClient']
        entry['sipVoipAddressDN'] = addr_id2dn[voip_address_id]
        # softphones are keyed on uid, hardphones on MAC address
        if entry["sipClientType"] == text_type(
                const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif entry["sipClientType"] == text_type(
                const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        entry["sipClientType"], repr(entry))
            continue
        dn = "{}={},{}".format(attr, entry[attr],
                               ldapconf('VOIP_CLIENT', 'dn', None))
        sink.write(entry_string(dn, entry))
def dump(self):
    """Write one hiofRadiusAccount LDIF entry per cached account."""
    fd = ldif_outfile('USER')
    fd.write(container_entry_string('USER'))
    noAuth = (None, None)
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        info = self.auth[account_id]
        uname = info[0]
        auth = info[1]
        ntAuth = self.md4_auth.get(account_id, noAuth)[1]
        if account_id in self.quarantines:
            qh = QuarantineHandler(self.db, self.quarantines[account_id])
            if qh.should_skip():
                continue
            if qh.is_locked():
                # locked accounts are exported without usable passwords
                auth = ntAuth = None
        dn = ','.join(('uid=' + uname, self.user_dn))
        entry = {
            'objectClass': ['top', 'account', 'hiofRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('13',),
            'radiusTunnelMediumType': ('6',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],)}
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        if ntAuth:
            entry['ntPassword'] = (ntAuth,)
        fd.write(entry_string(dn, entry, False))
    end_ldif_outfile('USER', fd)
def generate_user_output(self, f_ldif, f_passwd, f_shadow):
    """Write posix users as LDIF and/or passwd-file lines."""
    if not self.opts.user_spread:
        return
    if f_ldif:
        f_ldif.write_container('USER')
    elif not self.opts.passwd:
        return
    for data in ifilter(None, self.posix_users):
        if f_ldif:
            dn,entry = self.ldif_user(data)
            f_ldif.write(entry_string(dn, entry, False))
        if f_passwd:
            # TODO: shadow
            passwd = data.passwd or '*invalid'
            try:
                # With NOCRYPT, accounts that do have a crypt hash get 'x'
                # instead of the hash itself.
                if self.opts.auth_method == 'NOCRYPT':
                    a = data.account_id
                    m = self.co.auth_type_crypt3_des
                    if passwd == '*invalid' and self.auth_data[a][m]:
                        passwd = 'x'
            except KeyError:
                pass
            f_passwd.write(self.join((
                data.uname, passwd, data.uid, data.gid,
                data.gecos, data.home, data.shell)) + "\n")
    if f_passwd and self.opts.eof:
        f_passwd.write('E_O_F\n')
def generate_user_output(self, f_ldif, f_passwd, f_shadow):
    """Write posix users as LDIF and/or passwd-file lines."""
    if not self.opts.user_spread:
        return
    if f_ldif:
        f_ldif.write_container('USER')
    elif not self.opts.passwd:
        return
    for data in ifilter(None, self.posix_users):
        if f_ldif:
            dn, entry = self.ldif_user(data)
            f_ldif.write(entry_string(dn, entry, False))
        if f_passwd:
            # TODO: shadow
            passwd = data.passwd or '*invalid'
            try:
                # With NOCRYPT, accounts that do have a crypt hash get 'x'
                # instead of the hash itself.
                if self.opts.auth_method == 'NOCRYPT':
                    a = data.account_id
                    m = self.co.auth_type_crypt3_des
                    if passwd == '*invalid' and self.auth_data[a][m]:
                        passwd = 'x'
            except KeyError:
                pass
            f_passwd.write(
                self.join((data.uname, passwd, data.uid, data.gid,
                           data.gecos, data.home, data.shell)) + "\n")
    if f_passwd and self.opts.eof:
        f_passwd.write('E_O_F\n')
def generate_voip_clients(sink, addr_id2dn, encoding, *args):
    """Write sipClient LDIF entries to sink, re-encoded via object2encoding.

    addr_id2dn maps voip_address entity ids to the DNs of already-written
    voipAddress entries; clients without a cached address are skipped.
    """
    db = Factory.get("Database")()
    vc = VoipClient(db)
    const = Factory.get("Constants")()
    sink.write(container_entry_string("VOIP_CLIENT"))
    for entry in vc.list_voip_attributes(*args):
        voip_address_id = entry.pop("voip_address_id")
        if voip_address_id not in addr_id2dn:
            logger.debug(
                "voip client %s refers to voip_address %s, but the "
                "latter is not in the cache. Has %s been recently "
                "created?",
                repr(entry),
                voip_address_id,
                voip_address_id,
            )
            continue
        entry["objectClass"] = ["top", "sipClient"]
        entry["sipVoipAddressDN"] = addr_id2dn[voip_address_id]
        # softphones are keyed on uid, hardphones on MAC address
        if entry["sipClientType"] == str(const.voip_client_type_softphone):
            attr = "uid"
            assert attr in entry
        elif entry["sipClientType"] == str(const.voip_client_type_hardphone):
            attr = "sipMacAddress"
            assert "uid" not in entry
        else:
            logger.warn("Aiee! Unknown voip_client type: %s (entry: %s)",
                        entry["sipClientType"], repr(entry))
            continue
        dn = "%s=%s,%s" % (attr, entry[attr],
                           ldapconf("VOIP_CLIENT", "dn", None))
        sink.write(entry_string(object2encoding(dn, encoding),
                                object2encoding(entry, encoding)))
def gen_undervisningsaktivitet(cgi, sip, out):
    """Write one uioEduSection LDIF entry per teaching activity.

    Returns a dict mapping each offering URN to a dict of
    {group entity_id (as str): role} for that activity.

    Fix: ``keys = aktivitet_id.keys(); keys.sort()`` replaced with
    ``sorted()`` -- on Python 3 ``dict.keys()`` is a view without ``.sort()``.
    """
    timer = make_timer(logger, 'Starting gen_undervisningsaktivitet')
    # uioEduSection - teaching activity (instantiation of a group,
    #                 colloquium, lab, writing course, lecture)
    # access_FS.py:Undervisning.list_aktiviteter
    #
    # uioEduCourseCode - FS.emne.emnekode
    # uioEduCourseAdministrator - (FS.emne.*_reglement (6 digits)).
    # uioEduCourseLevel - (FS.emne.studienivakode)
    # uioEduCourseName - (FS.emne.emnenavn_bokmal)
    # uioEduCourseSectionName - (FS.undaktivitet.aktivitetsnavn)
    # uioEduCourseOffering - urn:mace:uit.no:section:<something>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsaktiviteter:
        try:
            emne = sip.emnekode2info[entry['emnekode']]
        except KeyError:
            logger.warn(
                "Undervisningsaktivitet %s er ikke knyttet til gyldig emne",
                entry['emnekode'])
            continue
        if 'emnenavn_bokmal' not in emne:
            logger.warn("Undervisningsaktivitet %s uten enhet?" % repr(entry))
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.extend((entry['aktivitetkode'], persontype))
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsaktivitet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uit.no:section:aktivitet-%s' % "_".join(keys)
        out.write(
            entry_string(
                "cn=ua-%i,%s" % (n, top_dn), {
                    'objectClass': ("top", "uioEduSection"),
                    'uioEduCourseCode': (entry['emnekode'], ),
                    'uioEduCourseAdministrator': (emne['sko'], ),
                    'uioEduCourseLevel': (emne['studienivakode'], ),
                    'uioEduCourseName': (emne['emnenavn_bokmal'], ),
                    'uioEduCourseSectionName': (entry['aktivitetsnavn'], ),
                    'uioEduCourseInstitution': (emne['institusjonsnr'], ),
                    'uioEduCourseVersion': (emne['versjonskode'], ),
                    'uioEduCourseSectionCode': (entry['aktivitetkode'], ),
                    'uioEduOfferingTermCode': (entry['terminkode'], ),
                    'uioEduOfferingYear': (entry['arstall'], ),
                    'uioEduOfferingTermNumber': (entry['terminnr'], ),
                    'uioEduCourseOffering': (urn, )
                }))
        n += 1
        ret[urn] = aktivitet_id
    timer('... done gen_undervisningsaktivitet')
    return ret
def dump_accounts(file_handle, spread, base):
    """Write one LDIF account entry per account with the given spread."""
    for row in ac.search(spread):
        # find() is required to read authentication data for each account
        ac.clear()
        ac.find(row['account_id'])
        dn = "uid=%s,%s" % (row['name'], base)
        file_handle.write(entry_string(dn, {
            'objectClass': ("top", "account"),
            'uid': (row['name'],),
            'userPassword': (ac.get_account_authentication(
                co.auth_type_md5_crypt),)}))
def gen_undervisningsaktivitet(cgi, sip, out):
    """Write one uioEduSection LDIF entry per teaching activity.

    Returns a dict mapping each offering URN to a dict of
    {group entity_id (as str): role} for that activity.

    Fix: ``keys = aktivitet_id.keys(); keys.sort()`` replaced with
    ``sorted()`` -- on Python 3 ``dict.keys()`` is a view without ``.sort()``.
    """
    # uioEduSection - teaching activity (instantiation of a group,
    #                 colloquium, lab, writing course, lecture)
    # access_FS.py:Undervisning.list_aktiviteter
    #
    # uioEduCourseCode - FS.emne.emnekode
    # uioEduCourseAdministrator - (FS.emne.*_reglement (6 digits)).
    # uioEduCourseLevel - (FS.emne.studienivakode)
    # uioEduCourseName - (FS.emne.emnenavn_bokmal)
    # uioEduCourseSectionName - (FS.undaktivitet.aktivitetsnavn)
    # uioEduCourseOffering - urn:mace:uio.no:section:<something>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsaktiviteter:
        try:
            emne = sip.emnekode2info[entry['emnekode']]
        except KeyError:
            logger.warn(
                "Undervisningsaktivitet %s er ikke knyttet til gyldig emne",
                entry['emnekode'])
            continue
        if 'emnenavn_bokmal' not in emne:
            logger.warn("Undervisningsaktivitet %s uten enhet?" % repr(entry))
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.extend((entry['aktivitetkode'], persontype))
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsaktivitet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uio.no:section:aktivitet-%s' % "_".join(keys)
        out.write(entry_string("cn=ua-%i,%s" % (n, top_dn), {
            'objectClass': ("top", "uioEduSection"),
            'uioEduCourseCode': (iso2utf(entry['emnekode']),),
            'uioEduCourseAdministrator': (iso2utf(emne['sko']),),
            'uioEduCourseLevel': (iso2utf(emne['studienivakode']),),
            'uioEduCourseName': (iso2utf(emne['emnenavn_bokmal']),),
            'uioEduCourseSectionName': (iso2utf(entry['aktivitetsnavn']),),
            'uioEduCourseInstitution': (iso2utf(emne['institusjonsnr']),),
            'uioEduCourseVersion': (iso2utf(emne['versjonskode']),),
            'uioEduCourseSectionCode': (iso2utf(entry['aktivitetkode']),),
            'uioEduOfferingTermCode': (iso2utf(entry['terminkode']),),
            'uioEduOfferingYear': (iso2utf(entry['arstall']),),
            'uioEduOfferingTermNumber': (iso2utf(entry['terminnr']),),
            'uioEduCourseOffering': (iso2utf(urn),)}))
        n += 1
        ret[urn] = aktivitet_id
    return ret
def dump_ldif(file_handle):
    """Write group entries; record person-member -> group DNs in mbr2grp."""
    for row in group.search(spread=co.spread_ldap_group):
        group.clear()
        group.find(int(row['group_id']))
        dn = "cn=%s,%s" % (row['name'], top_dn)
        for mbr in group.search_members(group_id=group.entity_id,
                                        member_type=co.entity_person):
            mbr2grp.setdefault(int(mbr["member_id"]), []).append(dn)
        file_handle.write(entry_string(dn, {
            'objectClass': ("top", "uioGroup"),
            'description': (iso2utf(row['description']),)}))
def dump_ldif(file_handle):
    """Write group entries; record person-member -> group DNs in mbr2grp."""
    group2dn = {}
    for row in group.search(spread=co.spread_ldap_group):
        dn = (u"cn={},{}".format(row['name'], top_dn))
        group2dn[row['group_id']] = dn
        file_handle.write(entry_string(dn, {
            'objectClass': ("top", "uioGroup"),
            'description': (row['description'],)
        }))
    # Single membership search across all groups (instead of one per group).
    # NOTE(review): mbr2grp appears to be a module-level defaultdict(list),
    # since .append() is called without setdefault -- confirm.
    for mbr in group.search_members(spread=co.spread_ldap_group,
                                    member_type=co.entity_person):
        mbr2grp[int(mbr["member_id"])].append(group2dn[mbr['group_id']])
def dump_accounts(file_handle, spread, base):
    """Write one LDIF account entry per account with the given spread."""
    for row in ac.search(spread):
        # find() is required to read authentication data for each account
        ac.clear()
        ac.find(row['account_id'])
        dn = "uid=%s,%s" % (row['name'], base)
        file_handle.write(
            entry_string(
                dn, {
                    'objectClass': ("top", "account"),
                    'uid': (row['name'], ),
                    'userPassword':
                    (ac.get_account_authentication(co.auth_type_md5_crypt), )
                }))
def generate_voip_addresses(sink, *args):
    """Write voipAddress entries; return {entity_id: dn} for the client pass."""
    va = VoipAddress(db)
    sink.write(container_entry_string('VOIP_ADDRESS'))
    addr_id2dn = dict()
    for entry in va.list_voip_attributes(*args):
        entry['objectClass'] = ['top', 'voipAddress']
        dn = "voipOwnerId={},{}".format(entry['voipOwnerId'],
                                        ldapconf('VOIP_ADDRESS', 'dn', None))
        entity_id = entry.pop("entity_id")
        addr_id2dn[entity_id] = dn
        if not entry.get("cn"):
            # normalize a missing/false cn to an empty attribute sequence
            entry["cn"] = ()
        sink.write(entry_string(dn, entry))
    return addr_id2dn
def dump_ldif(file_handle):
    """Write group entries; record person-member -> group DNs in mbr2grp."""
    for row in group.search(spread=co.spread_ldap_group):
        group.clear()
        group.find(int(row['group_id']))
        dn = "cn=%s,%s" % (row['name'], top_dn)
        for mbr in group.search_members(group_id=group.entity_id,
                                        member_type=co.entity_person):
            mbr2grp.setdefault(int(mbr["member_id"]), []).append(dn)
        file_handle.write(
            entry_string(
                dn, {
                    'objectClass': ("top", "uioGroup"),
                    'description': (iso2utf(row['description']), )
                }))
def generate_all(fname):
    """Write user + group LDIF to fname."""
    out = ldif_outfile("ORG", fname)
    logger.debug('writing to %r', out)
    out.write(container_entry_string("ORG"))
    helper = LDIFHelper(logger.getChild('LDIFHelper'))
    logger.info("Generating user ldif...")
    out.write(container_entry_string("USER"))
    for user in helper.yield_users():
        # The DN is carried as a single-element attribute in the entry;
        # pop it before serializing the remaining attributes.
        dn = user["dn"][0]
        del user["dn"]
        out.write(entry_string(dn, user, False))
    # NOTE(review): both arguments are the same stream; presumably the second
    # is the "final" output file expected by end_ldif_outfile -- confirm.
    end_ldif_outfile("USER", out, out)
    logger.debug("Generating group ldif...")
    out.write(container_entry_string("GROUP"))
    for group in helper.yield_groups():
        dn = group["dn"][0]
        del group["dn"]
        out.write(entry_string(dn, group, False))
    end_ldif_outfile("GROUP", out)
def generate_voip_addresses(sink, encoding, *args):
    """Write voipAddress entries (re-encoded); return {entity_id: dn}."""
    db = Factory.get("Database")()
    va = VoipAddress(db)
    sink.write(container_entry_string("VOIP_ADDRESS"))
    addr_id2dn = dict()
    for entry in va.list_voip_attributes(*args):
        entry["objectClass"] = ["top", "voipAddress"]
        dn = "voipOwnerId=%s,%s" % (entry["voipOwnerId"],
                                    ldapconf("VOIP_ADDRESS", "dn", None))
        entity_id = entry.pop("entity_id")
        addr_id2dn[entity_id] = dn
        entry = object2encoding(entry, encoding)
        if not entry.get("cn"):
            # normalize a missing/false cn to an empty attribute sequence
            entry["cn"] = ()
        sink.write(entry_string(object2encoding(dn, encoding), entry))
    return addr_id2dn
def generate_netgroup_output(self, f_ldif):
    """Write host netgroups as LDIF."""
    f_ldif.write_container('NETGROUP')
    zone = self.zone.postfix
    # Negative slice offset used to strip the zone suffix below; the
    # `zone is not None` guard keeps the -0 corner case out.
    zone_offset = -len(zone or "")
    self.find_groups()
    for g_id in self.host_netgroups:
        group_members, host_members = map(sorted, self.expand_netgroup(
            g_id, self.co.entity_dns_owner, None))
        # (host, -, ) triples: strip the trailing dot from each FQDN ...
        members = set("(%s,-,)" % m[:-1] for m in host_members)
        if zone is not None:
            # ... and also add the short name for hosts inside our own zone
            members.update("(%s,-,)" % m[:zone_offset]
                           for m in host_members
                           if m.endswith(zone))
        dn, entry = self.ldif_netgroup(g_id, group_members, members)
        f_ldif.write(entry_string(dn, entry, False))
    self.clear_groups()
def gen_undervisningsenhet(cgi, sip, out):
    """Write one uioEduOffering LDIF entry per course offering unit.

    Returns a dict mapping each offering URN to a dict of
    {group entity_id (as str): role} for that unit.

    Fix: ``keys = aktivitet_id.keys(); keys.sort()`` replaced with
    ``sorted()`` -- on Python 3 ``dict.keys()`` is a view without ``.sort()``.
    """
    timer = make_timer(logger, 'Starting gen_undervisningsenhet')
    # uioEduOffering - teaching unit (instantiation of a course)
    # access_FS.py:Undervisning.list_undervisningenheter
    #
    # uioEduCourseCode, uioEduCourseAdministrator, uioEduCourseLevel,
    # uioEduCourseName - as for teaching activities
    # uioEduCourseOffering - urn:mace:uit.no:offering:<something>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsenheter:
        emne = sip.emnekode2info.get(entry['emnekode'])
        if not emne:
            # warned earlier
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.append(persontype)
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsenhet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uit.no:offering:enhet-%s' % "_".join(keys)
        out.write(
            entry_string(
                "cn=ue-%i,%s" % (n, top_dn), {
                    'objectClass': ("top", "uioEduOffering"),
                    'uioEduCourseCode': (entry['emnekode'], ),
                    'uioEduCourseAdministrator': (emne['sko'], ),
                    'uioEduCourseLevel': (emne['studienivakode'], ),
                    'uioEduCourseName': (emne['emnenavn_bokmal'], ),
                    'uioEduCourseInstitution': (emne['institusjonsnr'], ),
                    'uioEduCourseVersion': (emne['versjonskode'], ),
                    'uioEduOfferingTermCode': (entry['terminkode'], ),
                    'uioEduOfferingYear': (entry['arstall'], ),
                    'uioEduOfferingTermNumber': (entry['terminnr'], ),
                    'uioEduCourseOffering': (urn, )
                }))
        n += 1
        ret[urn] = aktivitet_id
    timer('... done gen_undervisningsenhet')
    return ret
def generate_voip_addresses(sink, encoding, *args):
    """Write voipAddress entries (re-encoded); return {entity_id: dn}."""
    db = Factory.get("Database")()
    va = VoipAddress(db)
    sink.write(container_entry_string('VOIP_ADDRESS'))
    addr_id2dn = dict()
    for entry in va.list_voip_attributes(*args):
        entry['objectClass'] = ['top', 'voipAddress']
        dn = "voipOwnerId=%s,%s" % (entry['voipOwnerId'],
                                    ldapconf('VOIP_ADDRESS', 'dn', None))
        entity_id = entry.pop("entity_id")
        addr_id2dn[entity_id] = dn
        entry = object2encoding(entry, encoding)
        if not entry.get("cn"):
            # normalize a missing/false cn to an empty attribute sequence
            entry["cn"] = ()
        sink.write(entry_string(object2encoding(dn, encoding), entry))
    return addr_id2dn
def write_subnet_ldif():
    """Export all subnets as ipNetwork LDIF entries."""
    DN = ldapconf('SUBNETS', 'dn')
    startAttr, endAttr, objectClasses = ldapconf('SUBNETS', 'rangeSchema')
    objectClasses = ('top', 'ipNetwork') + tuple(objectClasses)
    db = Factory.get('Database')()
    f = ldif_outfile('SUBNETS')
    f.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        cn = "%s/%s" % (row['subnet_ip'], row['subnet_mask'])
        desc = row['description']
        f.write(entry_string("cn=%s,%s" % (cn, DN), {
            'objectClass': objectClasses,
            # description may be empty/NULL; then emit no value at all
            'description': (desc and (iso2utf(desc),) or ()),
            'ipNetworkNumber': (row['subnet_ip'],),
            'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']),),
            startAttr: (str(int(row['ip_min'])),),
            endAttr: (str(int(row['ip_max'])),)}))
    end_ldif_outfile('SUBNETS', f)
def gen_undervisningsenhet(cgi, sip, out):
    """Write one uioEduOffering LDIF entry per course offering unit.

    Returns a dict mapping each offering URN to a dict of
    {group entity_id (as str): role} for that unit.

    Fix: ``keys = aktivitet_id.keys(); keys.sort()`` replaced with
    ``sorted()`` -- on Python 3 ``dict.keys()`` is a view without ``.sort()``.
    """
    # uioEduOffering - teaching unit (instantiation of a course)
    # access_FS.py:Undervisning.list_undervisningenheter
    #
    # uioEduCourseCode, uioEduCourseAdministrator, uioEduCourseLevel,
    # uioEduCourseName - as for teaching activities
    # uioEduCourseOffering - urn:mace:uio.no:offering:<something>
    n = 0
    ret = {}
    top_dn = ldapconf('KURS', 'dn')
    for entry in sip.undervisningsenheter:
        emne = sip.emnekode2info.get(entry['emnekode'])
        if not emne:
            # warned earlier
            continue
        aktivitet_id = {}
        for persontype, role in interesting_fs_roles:
            args = [entry[x] for x in CerebrumGroupInfo.id_key_seq]
            args.append(persontype)
            args = [x.lower() for x in args]
            entity_id = cgi.find_group_by_undervisningsenhet(*args)
            if entity_id is not None:
                aktivitet_id["%i" % entity_id] = role
        keys = sorted(aktivitet_id)
        urn = 'urn:mace:uio.no:offering:enhet-%s' % "_".join(keys)
        out.write(entry_string("cn=ue-%i,%s" % (n, top_dn), {
            'objectClass': ("top", "uioEduOffering"),
            'uioEduCourseCode': (iso2utf(entry['emnekode']),),
            'uioEduCourseAdministrator': (iso2utf(emne['sko']),),
            'uioEduCourseLevel': (iso2utf(emne['studienivakode']),),
            'uioEduCourseName': (iso2utf(emne['emnenavn_bokmal']),),
            'uioEduCourseInstitution': (iso2utf(emne['institusjonsnr']),),
            'uioEduCourseVersion': (iso2utf(emne['versjonskode']),),
            'uioEduOfferingTermCode': (iso2utf(entry['terminkode']),),
            'uioEduOfferingYear': (iso2utf(entry['arstall']),),
            'uioEduOfferingTermNumber': (iso2utf(entry['terminnr']),),
            'uioEduCourseOffering': (iso2utf(urn),)}))
        n += 1
        ret[urn] = aktivitet_id
    return ret
def dump(self):
    """Write one uiaRadiusAccount LDIF entry per cached account."""
    fd = ldif_outfile('RADIUS')
    logger.debug('writing to %s', repr(fd))
    fd.write(container_entry_string('RADIUS'))
    logger.info('Generating export...')
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        try:
            uname = self.account_names[account_id]
        except KeyError:
            logger.error('No account name for account_id=%r', account_id)
            continue
        try:
            auth = self.user_password.get(account_id)
        except LookupError:
            auth = None
        try:
            ntauth = self.nt_password.get(account_id)
        except LookupError:
            ntauth = None
        if account_id in self.quarantines:
            qh = QuarantineHandler(self.db, self.quarantines[account_id])
            if qh.should_skip():
                continue
            if qh.is_locked():
                # locked accounts are exported without usable passwords
                auth = ntauth = None
        dn = ','.join(('uid=' + uname, self.user_dn))
        entry = {
            'objectClass': ['top', 'account', 'uiaRadiusAccount'],
            'uid': (uname, ),
            'radiusTunnelType': ('VLAN', ),
            'radiusTunnelMediumType': ('IEEE-802', ),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0], ),
            'radiusClass': (vlan_vpn[1], ),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = auth
        if ntauth:
            entry['ntPassword'] = (ntauth, )
        fd.write(entry_string(dn, entry, False))
    end_ldif_outfile('RADIUS', fd)
def generate_user_netgroup_output(self, f_ldif, f_netgroup):
    """Write user netgroups as LDIF and/or netgroup-file lines."""
    if not self.opts.netgroup_spread:
        return
    self.find_groups('netgroup')
    self.netgroup_names = set(self.netgroups.values())
    for g_id in self.netgroups:
        group_members, user_members = map(sorted, self.expand_netgroup(
            g_id, self.co.entity_account, self.spreads.user))
        # netgroup triple syntax (host, user, domain); only user is set
        user_members = ["(,%s,)" % m for m in user_members]
        if f_ldif:
            dn, entry = self.ldif_netgroup(False, g_id, group_members,
                                           user_members)
            f_ldif.write(entry_string(dn, entry, False))
        if f_netgroup:
            f_netgroup.write(self._wrap_line(
                self.netgroups[g_id],
                # TODO: Drop the 'or's, which are equivalent to orig code?
                " ".join((group_members or [""]) + (user_members or [""])),
                ' ',
                self._make_tmp_netgroup_name,
                is_ng=True))
    self.clear_groups()
def write_subnet_ldif():
    """Export all subnets as ipNetwork LDIF entries."""
    DN = ldapconf('SUBNETS', 'dn')
    startAttr, endAttr, objectClasses = ldapconf('SUBNETS', 'rangeSchema')
    objectClasses = ('top', 'ipNetwork') + tuple(objectClasses)
    db = Factory.get('Database')()
    f = ldif_outfile('SUBNETS')
    f.write(container_entry_string('SUBNETS'))
    for row in Subnet(db).search():
        cn = "%s/%s" % (row['subnet_ip'], row['subnet_mask'])
        desc = row['description']
        f.write(
            entry_string(
                "cn=%s,%s" % (cn, DN), {
                    'objectClass': objectClasses,
                    # description may be empty/NULL; then emit no value at all
                    'description': (desc and (iso2utf(desc), ) or ()),
                    'ipNetworkNumber': (row['subnet_ip'], ),
                    'ipNetmaskNumber': (netmask_to_ip(row['subnet_mask']), ),
                    startAttr: (str(int(row['ip_min'])), ),
                    endAttr: (str(int(row['ip_max'])), )
                }))
    end_ldif_outfile('SUBNETS', f)
def generate_filegroup_output(self, f_ldif, f_filegroup):
    """Write file groups as LDIF and/or group-file lines, ordered by gid.

    Fix: removed a redundant outer ``for gid in self.filegroups:`` loop.
    The inner loop already visits every filegroup once (gid-sorted); the
    outer loop merely re-ran it len(self.filegroups) times, duplicating
    every entry in the output.
    """
    if not self.opts.filegroup_spread:
        return
    if f_ldif:
        f_ldif.write_container('FILEGROUP')
    elif not self.opts.filegroup:
        return
    self.find_groups('filegroup')
    # Iterate (group_id, gid) sorted by numeric gid so output is sorted.
    for g_id, gid in sorted(self.g_id2gid.iteritems(), key=itemgetter(1)):
        if g_id not in self.filegroups:
            continue
        users = sorted(self.expand_filegroup(g_id))
        if f_ldif:
            dn, entry = self.ldif_filegroup(g_id, gid, users)
            f_ldif.write(entry_string(dn, entry, False))
        if f_filegroup:
            f_filegroup.write(self._wrap_line(
                self.filegroups[g_id], ",".join(users),
                ':*:%i:' % gid, self._make_tmp_filegroup_name))
    self.clear_groups()
def dump_ldif(file_handle):
    """Write hiofGroup entries; map person owners of members into mbr2grp."""
    group2dn = {}
    for row in group.search(spread=co.spread_ldap_group):
        dn = ("cn={},{}".format(row['name'], top_dn))
        group2dn[row['group_id']] = dn
        file_handle.write(entry_string(dn, {
            'objectClass': ("top", "hiofGroup"),
            'description': (row['description'],),
        }))
    for group_id, group_dn in group2dn.items():
        for mbr in group.search_members(group_id=group_id,
                                        indirect_members=True,
                                        member_type=(co.entity_person,
                                                     co.entity_account)):
            if mbr['member_type'] == co.entity_person:
                mbr2grp[int(mbr["member_id"])].add(group_dn)
            elif mbr['member_type'] == co.entity_account:
                # account members are mapped back to their owning person
                ac.clear()
                ac.find(mbr['member_id'])
                if ac.owner_type != co.entity_person:
                    continue
                mbr2grp[int(ac.owner_id)].add(group_dn)
def generate_filegroup_output(self, f_ldif, f_filegroup):
    """Write file groups as LDIF and/or group-file lines, ordered by gid.

    Fix: removed a redundant outer ``for gid in self.filegroups:`` loop.
    The inner loop already visits every filegroup once (gid-sorted); the
    outer loop merely re-ran it len(self.filegroups) times, duplicating
    every entry in the output.
    """
    if not self.opts.filegroup_spread:
        return
    if f_ldif:
        f_ldif.write_container('FILEGROUP')
    elif not self.opts.filegroup:
        return
    self.find_groups('filegroup')
    # Iterate (group_id, gid) sorted by numeric gid so output is sorted.
    for g_id, gid in sorted(self.g_id2gid.iteritems(), key=itemgetter(1)):
        if g_id not in self.filegroups:
            continue
        users = sorted(self.expand_filegroup(g_id))
        if f_ldif:
            dn, entry = self.ldif_filegroup(g_id, gid, users)
            f_ldif.write(entry_string(dn, entry, False))
        if f_filegroup:
            f_filegroup.write(
                self._wrap_line(self.filegroups[g_id], ",".join(users),
                                ':*:%i:' % gid,
                                self._make_tmp_filegroup_name))
    self.clear_groups()
def dump_ldif(file_handle):
    """Write hiofGroup entries; map person owners of members into mbr2grp."""
    group2dn = {}
    for row in group.search(spread=co.spread_ldap_group):
        dn = ("cn={},{}".format(row['name'], top_dn))
        group2dn[row['group_id']] = dn
        file_handle.write(
            entry_string(
                dn, {
                    'objectClass': ("top", "hiofGroup"),
                    'description': (row['description'], ),
                }))
    for group_id, group_dn in group2dn.items():
        for mbr in group.search_members(group_id=group_id,
                                        indirect_members=True,
                                        member_type=(co.entity_person,
                                                     co.entity_account)):
            if mbr['member_type'] == co.entity_person:
                mbr2grp[int(mbr["member_id"])].add(group_dn)
            elif mbr['member_type'] == co.entity_account:
                # account members are mapped back to their owning person
                ac.clear()
                ac.find(mbr['member_id'])
                if ac.owner_type != co.entity_person:
                    continue
                mbr2grp[int(ac.owner_id)].add(group_dn)
def dump_ldif(db, root_dn, file_handle):
    """Write uioGroup entries under root_dn; return {person_id: [group DNs]}."""
    co = Factory.get('Constants')(db)
    group = Factory.get('Group')(db)
    logger.debug('Processing groups...')
    group_to_dn = {}
    for row in group.search(spread=co.spread_ldap_group):
        dn = "cn={},{}".format(row['name'], root_dn)
        group_to_dn[row['group_id']] = dn
        file_handle.write(
            entry_string(
                dn, {
                    'objectClass': ("top", "uioGroup"),
                    'description': (row['description'], )
                }))
    logger.debug('Processing group memberships...')
    member_to_group = defaultdict(list)
    for row in group.search_members(spread=co.spread_ldap_group,
                                    member_type=co.entity_person):
        person_id = int(row['member_id'])
        member_to_group[person_id].append(group_to_dn[row['group_id']])
    return dict(member_to_group)
def generate_host_netgroup_output(self, f_ldif, f_netgroup):
    """Write host netgroups as LDIF and/or netgroup-file lines."""
    if not self.opts.zone:
        return
    self._num_map = {}
    zone = self.zone.postfix
    # Negative slice offset used to strip the zone suffix below; the
    # `zone is not None` guard keeps the -0 corner case out.
    zone_offset = -len(zone or "")
    self.find_groups('host_netgroup')
    for g_id in self.host_netgroups:
        group_members, host_members = map(sorted, self.expand_netgroup(
            g_id, self.co.entity_dns_owner, None))
        # (host, -, ) triples: strip the trailing dot from each FQDN ...
        members = set("(%s,-,)" % m[:-1] for m in host_members)
        if zone is not None:
            # ... and also add the short name for hosts inside our own zone
            members.update("(%s,-,)" % m[:zone_offset]
                           for m in host_members
                           if m.endswith(zone))
        # NOTE(review): this guard tests self.opts.ldif while the sibling
        # user-netgroup method tests `if f_ldif:` -- presumably f_ldif is
        # always set when opts.ldif is; confirm.
        if self.opts.ldif:
            dn, entry = self.ldif_netgroup(True, g_id, group_members,
                                           members)
            f_ldif.write(entry_string(dn, entry, False))
        if f_netgroup:
            f_netgroup.write(self._wrap_line(
                self.host_netgroups[g_id],
                " ".join(group_members) + " " + " ".join(members),
                ' ',
                self._make_tmp_host_netgroup_name,
                is_ng=True))
    self.clear_groups()