def dump(self):
    """Write all cached entries to the SAMSON3 LDIF file."""
    # LDIFWriter handles proper LDIF formatting, including the container.
    writer = LDIFutils.LDIFWriter("SAMSON3", cereconf.LDAP_SAMSON3["file"])
    writer.write_container()
    for item in self.entries:
        writer.write(LDIFutils.entry_string(item["dn"], item["entry"], False))
    writer.close()
def user_ldif(self, filename=None):
    """Generate posix-user."""
    self.init_user()
    outfile = LDIFutils.ldif_outfile('USER', filename, self.fd)
    self.write_user_objects_head(outfile)
    # Emit the USER container object before any user entries.
    outfile.write(LDIFutils.container_entry_string('USER'))

    # Collect (dn, entry) pairs for every posix user with the user spread;
    # rows that produce no dn are logged and skipped.
    user_objects = []
    for row in self.posuser.list_posix_users(
            spread=self.spread_d['user'],
            filter_expired=True):
        dn, entry = self.user_object(row)
        if dn:
            user_objects.append((dn, entry))
        else:
            logger.debug('no dn for account_id=%r', row['account_id'])

    # Sort by dn so the output file is deterministic.
    user_objects.sort(key=lambda pair: pair[0])
    for dn, entry in user_objects:
        try:
            outfile.write(LDIFutils.entry_string(dn, entry, False))
        except Exception:
            logger.error('Got error on dn=%r', dn)
            raise
    LDIFutils.end_ldif_outfile('USER', outfile, self.fd)
def write_user_objects_head(self, f):
    """Write the UiT system container, then the standard head."""
    # UiT: emit a dedicated system object at the top of the user tree.
    system_dn = "cn=system,dc=uit,dc=no"
    system_entry = {'objectClass': ['top', 'uioUntypedObject']}
    f.write(LDIFutils.entry_string(system_dn, system_entry))
    super(PosixLDIF_UiTMixin, self).write_user_objects_head(f)
def dump(self):
    """Dump cached SAMSON3 entries to the configured LDIF file."""
    # Delegate formatting (and the container object) to LDIFWriter.
    out = LDIFutils.LDIFWriter('SAMSON3', cereconf.LDAP_SAMSON3['file'])
    out.write_container()
    for entry in self.entries:
        out.write(LDIFutils.entry_string(entry['dn'], entry['entry'], False))
    out.close()
def dump(self):
    """Write one uiaRadiusAccount entry per user to the RADIUS LDIF file."""
    outfile = LDIFutils.ldif_outfile('RADIUS')
    outfile.write(LDIFutils.container_entry_string('RADIUS'))
    missing = (None, None)
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        info = self.auth[account_id]
        uname = LDIFutils.iso2utf(str(info[0]))
        auth = info[1]
        nt_auth = self.md4_auth.get(account_id, missing)[1]
        # Quarantined accounts may be skipped entirely; locked accounts
        # keep their entry but have the password attributes withheld.
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db, self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                auth = nt_auth = None
        dn = 'uid=%s,%s' % (uname, self.radius_dn)
        entry = {
            # NOTE: attribute set is not final ("Ikke endelig innhold")
            'objectClass': ['top', 'account', 'uiaRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('VLAN',),
            'radiusTunnelMediumType': ('IEEE-802',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        if nt_auth:
            entry['ntPassword'] = (nt_auth,)
        outfile.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('RADIUS', outfile)
def main(args=None):
    """Build the partial-employment LDIF from the SAP XML source.

    Persons with at least two unique employments (and a usable identifier)
    get one entry with a 'uioPersonPartialEmployment' attribute per
    employment.
    """
    args = make_parser().parse_args(args)
    logger.info("Start {0}".format(__file__))
    LDIFutils.needs_base64 = args.needs_base64

    xml_parser = system2parser('system_sap')(args.input_file, logger)
    show_ou = OUSelector('ORG_OU', cereconf.OU_USAGE_SPREAD)
    get_ous = OrgTree(xml_parser.iter_ou(), show_ou)
    use_lang = LanguageSelector(cereconf.LDAP['pref_languages'])
    aff_selector = AffSelector(
        cereconf.LDAP_PERSON['affiliation_selector'])

    stats = {
        'seen': 0,
        'excluded': 0,
        'included': 0
    }
    with atomic_or_stdout(args.output_file) as output:
        for person in xml_parser.iter_person():
            stats['seen'] += 1
            partial_affs = set()
            for emp in iterate_employments(person, aff_selector):
                aff = format_scoped_aff(emp, get_ous)
                titles = [format_title(t)
                          for t in iterate_employment_titles(emp)
                          if use_lang(t.language)]
                partial_affs.add('{0}#{1}'.format(aff, ';'.join(titles)))
            if len(partial_affs) < 2:
                # We want at least two unique employments to output person
                stats['excluded'] += 1
                continue
            try:
                identifier = get_identifier(person)
            except ValueError:
                logger.warn("Missing NIN: {0}".format(str(person)))
                stats['excluded'] += 1
                continue
            stats['included'] += 1
            # BUGFIX: write a sorted list instead of a redundant set copy,
            # so attribute value order in the LDIF output is deterministic.
            output.write(
                LDIFutils.entry_string(
                    identifier,
                    {'uioPersonPartialEmployment': sorted(partial_affs)},
                    add_rdn=False))
    logger.info("persons"
                " considered: {0[seen]:d},"
                " included: {0[included]:d},"
                " excluded: {0[excluded]:d}".format(stats))
    logger.info("Done {0}".format(__file__))
def user_ldif(self, filename=None, auth_meth=None):
    """Generate posix-user."""
    timer = make_timer(self.logger, 'Starting user_ldif...')
    self.init_user(auth_meth)
    outfile = LDIFutils.ldif_outfile('USER', filename, self.fd)
    outfile.write(LDIFutils.container_entry_string('USER'))
    rows = self.posuser.list_extended_posix_users(
        self.user_auth,
        spread=self.spread_d['user'],
        include_quarantines=False)
    for row in rows:
        dn, entry = self.user_object(row)
        if not dn:
            # Rows that produce no dn are silently skipped.
            continue
        outfile.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('USER', outfile, self.fd)
    timer('... done user_ldif')
def netgroup_ldif(self, filename=None):
    """Generate netgroup with only users."""
    timer = make_timer(self.logger, 'Starting netgroup_ldif...')
    if 'netgroup' not in self.spread_d:
        self.logger.warn("No valid netgroup-spread in cereconf or arg!")
        return
    self.init_netgroup()

    # Phase 1: cache one group object + netgroup object per group
    # carrying the netgroup spread.
    phase_timer = make_timer(self.logger, 'Caching netgroups...')
    for row in self.grp.search(spread=self.spread_d['netgroup'],
                               filter_expired=False):
        gid = row['group_id']
        self.create_group_object(gid, row['name'], row['description'])
        self.create_netgroup_object(gid)
    phase_timer('... done caching filegroups')
    self.cache_uncached_children()

    # Phase 2: resolve members into nisNetgroupTriple / memberNisNetgroup.
    phase_timer = make_timer(self.logger, 'Adding users and groups...')
    for gid, entry in self.netgroupcache.iteritems():
        users, groups = self.get_users_and_groups(gid, set(), set(),
                                                  add_persons=True)
        unames = self.userid2unames(users, gid)
        # Usernames containing '_' are excluded from the triples.
        entry['nisNetgroupTriple'] = ['(,%s,)' % uname
                                      for uname in unames
                                      if '_' not in uname]
        entry['memberNisNetgroup'] = [self.netgroupcache[g]['cn']
                                      for g in groups]
    phase_timer('... done adding users and groups')

    # Phase 3: write all cached netgroup entries to the LDIF file.
    phase_timer = make_timer(self.logger, 'Writing group objects...')
    outfile = LDIFutils.ldif_outfile('NETGROUP', filename, self.fd)
    outfile.write(LDIFutils.container_entry_string('NETGROUP'))
    for gid, entry in self.netgroupcache.iteritems():
        dn = 'cn=%s,%s' % (entry['cn'], self.ngrp_dn)
        outfile.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('NETGROUP', outfile, self.fd)
    phase_timer('... done writing group objects')
    self.netgroupcache = None
    timer('... done netgroup_ldif')
def filegroup_ldif(self, filename=None):
    """
    Generate filegroup.

    Groups without group and expanded members from both external and
    internal groups.
    """
    timer = make_timer(self.logger, 'Starting filegroup_ldif...')
    if 'filegroup' not in self.spread_d:
        self.logger.warn("No spread is given for filegroup!")
        return
    self.init_filegroup()

    timer2 = make_timer(self.logger, 'Caching filegroups...')
    for row in self.grp.search(spread=self.spread_d['filegroup'],
                               filter_expired=False):
        group_id = row['group_id']
        if group_id not in self.group2gid:
            # BUGFIX: the fallbacks ({} and []) were previously misplaced -
            # '[]' was passed as a stray third .format() argument and
            # getattr()/get() had no defaults, so a missing LDAP_FILEGROUP
            # setting would raise AttributeError inside this warn path.
            self.logger.warn(
                "Group id:{} has one of {} but no GID, skipping".format(
                    group_id,
                    getattr(cereconf, 'LDAP_FILEGROUP', {}).get('spread',
                                                                [])))
            continue
        self.create_group_object(group_id, row['name'], row['description'])
        self.create_filegroup_object(group_id)
        self.update_filegroup_entry(group_id)
    timer2('... done caching filegroups')
    self.cache_uncached_children()

    timer2 = make_timer(self.logger, 'Adding users and groups...')
    for group_id, entry in self.filegroupcache.iteritems():
        users = self.get_users(group_id, set())
        entry['memberUid'] = self.userid2unames(users, group_id)
    timer2('... done adding users')

    timer2 = make_timer(self.logger, 'Writing group objects...')
    f = LDIFutils.ldif_outfile('FILEGROUP', filename, self.fd)
    f.write(LDIFutils.container_entry_string('FILEGROUP'))
    for group_id, entry in self.filegroupcache.iteritems():
        dn = ','.join(('cn=' + entry['cn'], self.fgrp_dn))
        f.write(LDIFutils.entry_string(dn, entry, False))
    # Close the outfile before dropping the cache (matches netgroup_ldif).
    LDIFutils.end_ldif_outfile('FILEGROUP', f, self.fd)
    timer2('... done writing group objects')
    self.filegroupcache = None
    timer('... done filegroup_ldif')
def dump(self):
    """Write one LDIF account entry per user to the USER file."""
    outfile = LDIFutils.ldif_outfile('USER')
    outfile.write(LDIFutils.container_entry_string('USER'))
    for row in self.account.search():
        account_id = row['account_id']
        info = self.auth[account_id]
        uname = LDIFutils.iso2utf(str(info[0]))
        auth = info[1]
        # Quarantine handling: skip entirely, or withhold the password
        # when the account is locked.
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db,
                                        self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                auth = None
        dn = 'uid=%s,%s' % (uname, self.user_dn)
        entry = {
            'objectClass': ['account'],
            'uid': (uname,),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        outfile.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('USER', outfile)
def dump(self):
    """Dump account entries (with optional crypt password) as LDIF."""
    fh = LDIFutils.ldif_outfile('USER')
    fh.write(LDIFutils.container_entry_string('USER'))
    for row in self.account.search():
        acct_id = row['account_id']
        name_and_auth = self.auth[acct_id]
        uname = LDIFutils.iso2utf(str(name_and_auth[0]))
        crypt_hash = name_and_auth[1]
        if acct_id in self.quarantines:
            quarantine = QuarantineHandler(self.db,
                                           self.quarantines[acct_id])
            # Some quarantines exclude the account entirely; a lock only
            # removes the password so the entry still exists.
            if quarantine.should_skip():
                continue
            if quarantine.is_locked():
                crypt_hash = None
        entry = {'objectClass': ['account'], 'uid': (uname,)}
        if crypt_hash:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + crypt_hash,)
        fh.write(LDIFutils.entry_string(
            ','.join(('uid=' + uname, self.user_dn)), entry, False))
    LDIFutils.end_ldif_outfile('USER', fh)
def main(inargs=None):
    """Parse arguments and write the partial-employment LDIF file.

    Persons with at least two unique employments (and a usable identifier)
    get one entry with a 'uioPersonPartialEmployment' value per employment.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-i', '--input-file',
        default=DEFAULT_INPUT_FILE,
        help="sap2bas XML input file (default: %(default)s)",
        metavar='FILE',
    )
    parser.add_argument(
        '-o', '--output-file',
        default=DEFAULT_OUTPUT_FILE,
        help="LDIF output file, or '-' for stdout (default: %(default)s)",
        metavar='FILE',
    )
    parser.add_argument(
        '-u', '--utf8-data',
        dest='needs_base64',
        action='store_const',
        const=LDIFutils.needs_base64_readable,
        default=LDIFutils.needs_base64_safe,
        help="Allow utf-8 values in ldif",
    )
    Cerebrum.logutils.options.install_subparser(parser)
    parser.set_defaults(**{
        Cerebrum.logutils.options.OPTION_LOGGER_LEVEL: 'INFO',
    })
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)

    logger.info('Start %s', parser.prog)
    logger.debug('args: %s', repr(args))

    LDIFutils.needs_base64 = args.needs_base64
    xml_parser = system2parser('system_sap')(args.input_file, logger)
    show_ou = OUSelector('ORG_OU', cereconf.OU_USAGE_SPREAD)
    get_ous = OrgTree(xml_parser.iter_ou(), show_ou)
    use_lang = LanguageSelector(cereconf.LDAP['pref_languages'])
    aff_selector = AffSelector(cereconf.LDAP_PERSON['affiliation_selector'])

    stats = {
        'seen': 0,
        'excluded': 0,
        'included': 0,
    }
    with atomic_or_stdout(args.output_file) as output:
        for person in xml_parser.iter_person():
            stats['seen'] += 1
            partial_affs = set()
            for emp in iterate_employments(person, aff_selector):
                try:
                    aff = format_scoped_aff(emp, get_ous)
                except Exception as e:
                    logger.warning('Ignoring employment person=%r emp=%r: %s',
                                   person, emp, e)
                    continue
                titles = [
                    format_title(t)
                    for t in iterate_employment_titles(emp)
                    if use_lang(t.language)
                ]
                partial_affs.add('{0}#{1}'.format(aff, ';'.join(titles)))
            if len(partial_affs) < 2:
                # We want at least two unique employments to output person
                stats['excluded'] += 1
                continue
            try:
                identifier = get_identifier(person)
            except ValueError:
                # BUGFIX: logger.warn is a deprecated alias; use warning()
                # consistently with the rest of this function.
                logger.warning("Missing NIN: {0}".format(str(person)))
                stats['excluded'] += 1
                continue
            stats['included'] += 1
            output.write(
                LDIFutils.entry_string(
                    identifier,
                    {'uioPersonPartialEmployment': list(sorted(partial_affs))},
                    add_rdn=False))
    logger.info("persons"
                " considered: {0[seen]:d},"
                " included: {0[included]:d},"
                " excluded: {0[excluded]:d}".format(stats))
    logger.info("Done %s", parser.prog)