def user_ldif(self, filename=None):
    """Generate posix-user.

    Writes the USER container, then one LDIF entry per posix user,
    sorted by DN.  Output goes to *filename* or the preopened self.fd.
    """
    self.init_user()
    f = LDIFutils.ldif_outfile('USER', filename, self.fd)
    self.write_user_objects_head(f)
    # Write the USER container object
    f.write(LDIFutils.container_entry_string('USER'))

    def generate_users():
        # Yield (dn, entry) for every exportable posix user; users for
        # which user_object() returns no DN are logged and skipped.
        for row in self.posuser.list_posix_users(
                spread=self.spread_d['user'],
                filter_expired=True):
            account_id = row['account_id']
            dn, entry = self.user_object(row)
            if not dn:
                logger.debug('no dn for account_id=%r', account_id)
                continue
            yield dn, entry

    # Sort by DN so the output file is deterministic between runs.
    for dn, entry in sorted(generate_users(),
                            key=operator.itemgetter(0)):
        try:
            f.write(LDIFutils.entry_string(dn, entry, False))
        except Exception:
            # Log which DN broke serialization before re-raising.
            logger.error('Got error on dn=%r', dn)
            raise
    LDIFutils.end_ldif_outfile('USER', f, self.fd)
def dump(self):
    """Write every cached catalog entry to the SAMSON3 LDIF file."""
    # LDIFWriter handles proper LDIF formatting and the container head.
    writer = LDIFutils.LDIFWriter('SAMSON3', cereconf.LDAP_SAMSON3['file'])
    writer.write_container()
    for item in self.entries:
        dn, entry = item['dn'], item['entry']
        writer.write(LDIFutils.entry_string(dn, entry, False))
    writer.close()
def create_filegroup_object(self, group_id):
    """Build and cache the posixGroup LDAP entry for *group_id*."""
    assert group_id not in self.filegroupcache
    group_data = self.groupcache[group_id]
    entry = {}
    entry['objectClass'] = ('top', 'posixGroup')
    entry['cn'] = LDIFutils.iso2utf(group_data['name'])
    entry['gidNumber'] = self.group2gid[group_id]
    # Description is optional; store as a one-element tuple when present.
    if 'description' in group_data:
        entry['description'] = (LDIFutils.iso2utf(group_data['description']),)
    self.filegroupcache[group_id] = entry
def __init__(self, db, logger, u_sprd=None, g_sprd=None, n_sprd=None,
             fd=None):
    """Initiate database and import modules.

    Spreads are given in initiation and general constants which are
    used in more than one method.

    :param db: an open Cerebrum database connection
    :param logger: logger object
    :param u_sprd: user spread override (else from cereconf.LDAP_USER)
    :param g_sprd: filegroup spread override
    :param n_sprd: netgroup spread override
    :param fd: optional already-open output file descriptor
    """
    timer = make_timer(logger, 'Initing PosixLDIF...')
    from Cerebrum.modules import PosixGroup
    self.db = db
    self.logger = logger
    self.const = Factory.get('Constants')(self.db)
    self.grp = Factory.get('Group')(self.db)
    self.posuser = Factory.get('PosixUser')(self.db)
    self.posgrp = PosixGroup.PosixGroup(self.db)
    self.user_dn = LDIFutils.ldapconf('USER', 'dn', None)
    # This is an odd one -- if set to False, then id2uname should be
    # populated with users exported in the users export -- which makes the
    # group exports filter group members by *actually* exported users...
    self.get_name = True
    self.fd = fd
    self.spread_d = {}
    # Validate spread from arg or from cereconf
    for x, y in zip(['USER', 'FILEGROUP', 'NETGROUP'],
                    [u_sprd, g_sprd, n_sprd]):
        spread = LDIFutils.map_spreads(
            y or getattr(cereconf, 'LDAP_' + x).get('spread'), list)
        if spread:
            self.spread_d[x.lower()] = spread
    if 'user' not in self.spread_d:
        raise Errors.ProgrammingError(
            "Must specify spread-value as 'arg' or in cereconf")
    # Lookup caches, populated by the various cache_* helpers.
    self.account2name = dict()
    self.group2gid = dict()
    self.groupcache = defaultdict(dict)
    self.group2groups = defaultdict(set)
    self.group2users = defaultdict(set)
    self.group2persons = defaultdict(list)
    self.shell_tab = dict()
    self.quarantines = dict()
    self.user_exporter = UserExporter(self.db)
    # Only the first user spread decides which homedirs are exported.
    if len(self.spread_d['user']) > 1:
        logger.warning('Exporting users with multiple spreads, '
                       'ignoring homedirs from %r',
                       self.spread_d['user'][1:])
    self.homedirs = HomedirResolver(db, self.spread_d['user'][0])
    self.owners = OwnerResolver(db)
    auth_attr = LDIFutils.ldapconf('USER', 'auth_attr', None)
    self.user_password = AuthExporter.make_exporter(
        db,
        auth_attr['userPassword'])
    timer('... done initing PosixLDIF.')
def create_filegroup_object(self, group_id):
    """Build and cache the posixGroup LDAP entry for *group_id*.

    Requires that the group is already present in self.groupcache and
    self.group2gid, and not yet in self.filegroupcache.
    """
    assert group_id not in self.filegroupcache
    cache = self.groupcache[group_id]
    entry = {
        'objectClass': ('top', 'posixGroup'),
        'cn': LDIFutils.iso2utf(cache['name']),
        'gidNumber': self.group2gid[group_id],
    }
    # Description is optional; stored as a one-element tuple.
    if 'description' in cache:
        entry['description'] = (LDIFutils.iso2utf(cache['description']), )
    self.filegroupcache[group_id] = entry
def user_ldif(self, filename=None, auth_meth=None):
    """Generate posix-user.

    Writes the USER container and one LDIF entry per posix user to
    *filename* or the preopened self.fd.  *auth_meth* optionally
    overrides the configured authentication methods.
    """
    timer = make_timer(self.logger, 'Starting user_ldif...')
    self.init_user(auth_meth)
    f = LDIFutils.ldif_outfile('USER', filename, self.fd)
    f.write(LDIFutils.container_entry_string('USER'))
    for row in self.posuser.list_extended_posix_users(
            self.user_auth,
            spread=self.spread_d['user'],
            include_quarantines=False):
        dn, entry = self.user_object(row)
        # user_object() returns (None, None) for users that must be skipped.
        if dn:
            f.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('USER', f, self.fd)
    timer('... done user_ldif')
def __init__(self, *args, **kwargs):
    """Extend PosixLDIF init with a sambaNTPassword exporter."""
    super(PosixLDIFRadius, self).__init__(*args, **kwargs)
    # Reuse the USER auth_attr config to find the NT-hash source.
    auth_attr = LDIFutils.ldapconf('USER', 'auth_attr', None)
    self.samba_nt_password = AuthExporter.make_exporter(
        self.db, auth_attr['sambaNTPassword'])
def init_netgroup(self):
    """Initiate modules, constants and cache"""
    self.ngrp_dn = LDIFutils.ldapconf('NETGROUP', 'dn')
    # Populate lookup caches needed while building netgroup entries.
    self.cache_account2name()
    self.cache_groups_and_users()
    self.cache_group2persons()
    self.netgroupcache = defaultdict(dict)
def dump(self):
    """Write every cached catalog entry to the SAMSON3 LDIF file."""
    # This function uses LDIFWriter to properly format an LDIF file.
    fd = LDIFutils.LDIFWriter("SAMSON3", cereconf.LDAP_SAMSON3["file"])
    fd.write_container()
    for e in self.entries:
        fd.write(LDIFutils.entry_string(e["dn"], e["entry"], False))
    fd.close()
def write_user_objects_head(self, f):
    """Write the UiT system container object before the regular head."""
    # UiT: Add a system object
    system_dn = "cn=system,dc=uit,dc=no"
    system_entry = {'objectClass': ['top', 'uioUntypedObject']}
    f.write(LDIFutils.entry_string(system_dn, system_entry))
    super(PosixLDIF_UiTMixin, self).write_user_objects_head(f)
def netgroup_ldif(self, filename=None):
    """Generate netgroup with only users.

    Builds nisNetGroup entries (user triples plus member netgroups) and
    writes them to *filename* or the preopened self.fd.
    """
    timer = make_timer(self.logger, 'Starting netgroup_ldif...')
    if 'netgroup' not in self.spread_d:
        self.logger.warn("No valid netgroup-spread in cereconf or arg!")
        return
    self.init_netgroup()
    timer2 = make_timer(self.logger, 'Caching netgroups...')
    for row in self.grp.search(spread=self.spread_d['netgroup'],
                               filter_expired=False):
        group_id = row['group_id']
        self.create_group_object(group_id, row['name'],
                                 row['description'])
        self.create_netgroup_object(group_id)
    # Fix: this timer message used to say "filegroups" -- copy-paste
    # from filegroup_ldif(); this method caches *netgroups*.
    timer2('... done caching netgroups')
    self.cache_uncached_children()
    timer2 = make_timer(self.logger, 'Adding users and groups...')
    for group_id, entry in self.netgroupcache.iteritems():
        users, groups = self.get_users_and_groups(group_id, set(), set(),
                                                  add_persons=True)
        unames = self.userid2unames(users, group_id)
        triple = []
        for uname in unames:
            # Usernames with '_' are not valid NIS netgroup members.
            if '_' in uname:
                continue
            triple.append('(,%s,)' % uname)
        netgroup = []
        for g in groups:
            netgroup.append(self.netgroupcache[g]['cn'])
        entry['nisNetgroupTriple'] = triple
        entry['memberNisNetgroup'] = netgroup
    timer2('... done adding users and groups')
    timer2 = make_timer(self.logger, 'Writing group objects...')
    f = LDIFutils.ldif_outfile('NETGROUP', filename, self.fd)
    f.write(LDIFutils.container_entry_string('NETGROUP'))
    for group_id, entry in self.netgroupcache.iteritems():
        dn = ','.join(('cn=' + entry['cn'], self.ngrp_dn))
        f.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('NETGROUP', f, self.fd)
    timer2('... done writing group objects')
    # Release the cache; it is rebuilt by init_netgroup() on next run.
    self.netgroupcache = None
    timer('... done netgroup_ldif')
def filegroup_ldif(self, filename=None):
    """
    Generate filegroup.

    Groups without group and expanded members from both external and
    internal groups.
    """
    timer = make_timer(self.logger, 'Starting filegroup_ldif...')
    if 'filegroup' not in self.spread_d:
        self.logger.warn("No spread is given for filegroup!")
        return
    self.init_filegroup()
    timer2 = make_timer(self.logger, 'Caching filegroups...')
    for row in self.grp.search(spread=self.spread_d['filegroup'],
                               filter_expired=False):
        group_id = row['group_id']
        if group_id not in self.group2gid:
            # The group carries a filegroup spread but has no posix GID.
            # Fix: dropped a stray third `[]` argument to .format() --
            # the format string only has two placeholders, so it was
            # never rendered.
            self.logger.warn(
                "Group id:{} has one of {} but no GID, skipping".format(
                    group_id,
                    getattr(cereconf, 'LDAP_FILEGROUP').get('spread')))
            continue
        self.create_group_object(group_id, row['name'],
                                 row['description'])
        self.create_filegroup_object(group_id)
        self.update_filegroup_entry(group_id)
    timer2('... done caching filegroups')
    self.cache_uncached_children()
    timer2 = make_timer(self.logger, 'Adding users and groups...')
    for group_id, entry in self.filegroupcache.iteritems():
        users = self.get_users(group_id, set())
        unames = self.userid2unames(users, group_id)
        entry['memberUid'] = unames
    timer2('... done adding users')
    timer2 = make_timer(self.logger, 'Writing group objects...')
    f = LDIFutils.ldif_outfile('FILEGROUP', filename, self.fd)
    f.write(LDIFutils.container_entry_string('FILEGROUP'))
    for group_id, entry in self.filegroupcache.iteritems():
        dn = ','.join(('cn=' + entry['cn'], self.fgrp_dn))
        f.write(LDIFutils.entry_string(dn, entry, False))
    # Finish the output file before dropping the cache; this also makes
    # the ordering consistent with netgroup_ldif().
    LDIFutils.end_ldif_outfile('FILEGROUP', f, self.fd)
    timer2('... done writing group objects')
    self.filegroupcache = None
    timer('... done filegroup_ldif')
def auth_methods(self, auth_meth=None):
    """Decide which authentication methods to fetch.  Mixin-support.

    *auth_meth* (or cereconf.LDAP['auth_attr'] as fallback) may be a
    dict keyed on LDAP attribute, a list/tuple of constant names, or a
    single constant name string.  The primary method ends up in
    self.user_auth; the remaining methods are returned as a list for
    load_auth_tab.  Returns None if a dict config lacks 'userPassword'.
    """
    self.auth_format = {}
    auth_meth_l = []
    self.user_auth = None
    code = '_AuthenticationCode'
    # Priority is arg, else cereconf default value
    # auth_meth_l is a list sent to load_auth_tab and contains
    # all methods minus primary which is called by
    auth = auth_meth or cereconf.LDAP['auth_attr']
    if isinstance(auth, dict):
        if 'userPassword' not in auth:
            self.logger.warn("Only support 'userPassword'-attribute")
            return None
        # First (method, format?) pair is the primary auth method.
        default_auth = auth['userPassword'][:1][0]
        self.user_auth = LDIFutils.map_constants(code, default_auth[0])
        # NOTE(review): `format` shadows the builtin of the same name;
        # harmless here, but worth renaming in a behavior change.
        if len(default_auth) == 2:
            format = default_auth[1]
        else:
            format = None
        self.auth_format[int(self.user_auth)] = {
            'attr': 'userPassword',
            'format': format
        }
        # Remaining pairs are secondary methods for load_auth_tab.
        for entry in auth['userPassword'][1:]:
            auth_t = LDIFutils.map_constants(code, entry[0])
            if len(entry) == 2:
                format = entry[1]
            else:
                format = None
            auth_meth_l.append(auth_t)
            self.auth_format[int(auth_t)] = {
                'attr': 'userPassword',
                'format': format
            }
    # Deliberately `if`, not `elif`: a dict never matches list/tuple,
    # so the branches are mutually exclusive anyway.
    if isinstance(auth, (list, tuple)):
        self.user_auth = int(getattr(self.const, auth[:1][0]))
        for entry in auth[1:]:
            auth_meth_l.append(int(getattr(self.const, entry)))
    elif isinstance(auth, str):
        self.user_auth = int(getattr(self.const, auth))
    return auth_meth_l
def init_filegroup(self):
    """Set up the PosixGroup module, DN config and caches for filegroups."""
    from Cerebrum.modules import PosixGroup
    self.fgrp_dn = LDIFutils.ldapconf('FILEGROUP', 'dn')
    self.posgrp = PosixGroup.PosixGroup(self.db)
    self.filegroupcache = defaultdict(dict)
    # Populate the lookup caches used while building group entries.
    self.cache_account2name()
    self.cache_group2gid()
    self.cache_groups_and_users()
def main(args=None):
    """Script entry point: export partial-employment LDIF from SAP XML.

    Persons with at least two unique employments are written as
    uioPersonPartialEmployment entries to the output file.
    """
    args = make_parser().parse_args(args)
    logger.info("Start {0}".format(__file__))
    LDIFutils.needs_base64 = args.needs_base64
    xml_parser = system2parser('system_sap')(args.input_file, logger)
    show_ou = OUSelector('ORG_OU', cereconf.OU_USAGE_SPREAD)
    get_ous = OrgTree(xml_parser.iter_ou(), show_ou)
    use_lang = LanguageSelector(cereconf.LDAP['pref_languages'])
    aff_selector = AffSelector(
        cereconf.LDAP_PERSON['affiliation_selector'])
    stats = {
        'seen': 0,
        'excluded': 0,
        'included': 0
    }
    with atomic_or_stdout(args.output_file) as output:
        for person in xml_parser.iter_person():
            stats['seen'] += 1
            partial_affs = set()
            for emp in iterate_employments(person, aff_selector):
                aff = format_scoped_aff(emp, get_ous)
                titles = [format_title(t)
                          for t in iterate_employment_titles(emp)
                          if use_lang(t.language)]
                partial_affs.add('{0}#{1}'.format(aff, ';'.join(titles)))
            if len(partial_affs) < 2:
                # We want at least two unique employments to output person
                stats['excluded'] += 1
                continue
            try:
                identifier = get_identifier(person)
            except ValueError:
                logger.warn("Missing NIN: {0}".format(str(person)))
                stats['excluded'] += 1
                continue
            stats['included'] += 1
            output.write(
                LDIFutils.entry_string(
                    identifier,
                    {'uioPersonPartialEmployment': set(partial_affs)},
                    add_rdn=False))
    logger.info("persons"
                " considered: {0[seen]:d},"
                " included: {0[included]:d},"
                " excluded: {0[excluded]:d}".format(stats))
    logger.info("Done {0}".format(__file__))
def __init__(self):
    """Set up DB handles, password hashes and quarantines for the dump."""
    self.user_dn = LDIFutils.ldapconf('USER', 'dn', None)
    self.db = Factory.get('Database')()
    self.const = Factory.get('Constants')(self.db)
    self.account = Factory.get('Account')(self.db)
    # Merge hashes for both crypt variants; later types fill in gaps.
    self.auth = None
    for auth_type in (self.const.auth_type_crypt3_des,
                      self.const.auth_type_md5_crypt):
        self.auth = self.make_auths(auth_type, self.auth)
    # NOTE: method name has a typo ('quaratines') -- defined elsewhere.
    self.load_quaratines()
def create_netgroup_object(self, group_id):
    """Build and cache the nisNetGroup LDAP entry for *group_id*.

    Requires the group to be in self.groupcache and not yet in
    self.netgroupcache.
    """
    assert group_id not in self.netgroupcache
    cache = self.groupcache[group_id]
    entry = {'objectClass': ('top', 'nisNetGroup'),
             'cn': LDIFutils.iso2utf(cache['name'],)
             }
    # Description must be 7-bit (ISO 646-60) in NIS; trailing
    # whitespace stripped, stored as a one-element tuple.
    if 'description' in cache:
        entry['description'] = \
            latin1_to_iso646_60(cache['description']).rstrip(),
    self.netgroupcache[group_id] = entry
def __init__(self, db, logger, u_sprd=None, g_sprd=None, n_sprd=None,
             fd=None):
    """ Initiate database and import modules.

    Spreads are given in initiation and general constants which is
    used in more than one method.
    """
    timer = make_timer(logger, 'Initing PosixLDIF...')
    from Cerebrum.modules import PosixGroup
    self.db = db
    self.logger = logger
    self.const = Factory.get('Constants')(self.db)
    self.grp = Factory.get('Group')(self.db)
    self.posuser = Factory.get('PosixUser')(self.db)
    self.posgrp = PosixGroup.PosixGroup(self.db)
    self.user_dn = LDIFutils.ldapconf('USER', 'dn', None)
    # If False, group exports would filter members by actually-exported
    # users (see id2uname); True disables that filtering.
    self.get_name = True
    self.fd = fd
    self.spread_d = {}
    # Validate spread from arg or from cereconf
    for x, y in zip(['USER', 'FILEGROUP', 'NETGROUP'],
                    [u_sprd, g_sprd, n_sprd]):
        spread = LDIFutils.map_spreads(
            y or getattr(cereconf, 'LDAP_' + x).get('spread'), list)
        if spread:
            self.spread_d[x.lower()] = spread
    if 'user' not in self.spread_d:
        raise Errors.ProgrammingError(
            "Must specify spread-value as 'arg' or in cereconf")
    # Lookup caches, filled by the cache_* helpers.
    self.account2name = dict()
    self.groupcache = defaultdict(dict)
    self.group2groups = defaultdict(set)
    self.group2users = defaultdict(set)
    self.group2persons = defaultdict(list)
    timer('... done initing PosixLDIF.')
def create_netgroup_object(self, group_id):
    """Build and cache the nisNetGroup LDAP entry for *group_id*."""
    assert group_id not in self.netgroupcache
    group_data = self.groupcache[group_id]
    entry = {}
    entry['objectClass'] = ('top', 'nisNetGroup')
    entry['cn'] = LDIFutils.iso2utf(group_data['name'])
    # Description must be 7-bit safe for NIS; trailing space stripped.
    if 'description' in group_data:
        description = latin1_to_iso646_60(group_data['description'])
        entry['description'] = (description.rstrip(),)
    self.netgroupcache[group_id] = entry
def auth_methods(self, auth_meth=None):
    """Decide which authentication methods to fetch.  Mixin-support.

    *auth_meth* (or cereconf.LDAP['auth_attr'] as fallback) may be a
    dict keyed on LDAP attribute, a list/tuple of constant names, or a
    single constant name string.  The primary method is stored in
    self.user_auth; secondary methods are returned for load_auth_tab.
    Returns None if a dict config lacks 'userPassword'.
    """
    self.auth_format = {}
    auth_meth_l = []
    self.user_auth = None
    code = '_AuthenticationCode'
    # Priority is arg, else cereconf default value
    # auth_meth_l is a list sent to load_auth_tab and contains
    # all methods minus primary which is called by
    auth = auth_meth or cereconf.LDAP['auth_attr']
    if isinstance(auth, dict):
        if 'userPassword' not in auth:
            self.logger.warn("Only support 'userPassword'-attribute")
            return None
        # First (method, format?) pair is the primary auth method.
        default_auth = auth['userPassword'][:1][0]
        self.user_auth = LDIFutils.map_constants(code, default_auth[0])
        # NOTE(review): `format` shadows the builtin of the same name.
        if len(default_auth) == 2:
            format = default_auth[1]
        else:
            format = None
        self.auth_format[int(self.user_auth)] = {'attr': 'userPassword',
                                                 'format': format}
        for entry in auth['userPassword'][1:]:
            auth_t = LDIFutils.map_constants(code, entry[0])
            if len(entry) == 2:
                format = entry[1]
            else:
                format = None
            auth_meth_l.append(auth_t)
            self.auth_format[int(auth_t)] = {'attr': 'userPassword',
                                             'format': format}
    # Deliberately `if`, not `elif`: dict never matches list/tuple.
    if isinstance(auth, (list, tuple)):
        self.user_auth = int(getattr(self.const, auth[:1][0]))
        for entry in auth[1:]:
            auth_meth_l.append(int(getattr(self.const, entry)))
    elif isinstance(auth, str):
        self.user_auth = int(getattr(self.const, auth))
    return auth_meth_l
def dump(self):
    """Write one RADIUS account entry per user with a VLAN/VPN mapping.

    Quarantined accounts are either skipped entirely or exported
    without password hashes, depending on the quarantine rules.
    """
    fd = LDIFutils.ldif_outfile('RADIUS')
    fd.write(LDIFutils.container_entry_string('RADIUS'))
    noAuth = (None, None)
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        info = self.auth[account_id]
        uname = LDIFutils.iso2utf(str(info[0]))
        auth = info[1]
        # NT hash is optional; fall back to (None, None).
        ntAuth = self.md4_auth.get(account_id, noAuth)[1]
        if account_id in self.quarantines:
            qh = QuarantineHandler(self.db, self.quarantines[account_id])
            if qh.should_skip():
                continue
            if qh.is_locked():
                # Locked: export the account but withhold all hashes.
                auth = ntAuth = None
        dn = ','.join(('uid=' + uname, self.radius_dn))
        entry = {
            # Not final content (translated from Norwegian)
            'objectClass': ['top', 'account', 'uiaRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('VLAN',),
            'radiusTunnelMediumType': ('IEEE-802',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],)}
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        if ntAuth:
            entry['ntPassword'] = (ntAuth,)
        fd.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('RADIUS', fd)
def __init__(self):
    """Set up DB access, password hashes and quarantines for SAMSON3."""
    # Init a lot of the things we need
    self.samson3_dn = LDIFutils.ldapconf('SAMSON3', 'dn', None)
    self.db = Factory.get('Database')()
    self.const = Factory.get('Constants')(self.db)
    self.account = Factory.get('Account')(self.db)
    self.person = Factory.get('Person')(self.db)
    # Collect password hashes..
    self.auth = self.make_auths(self.const.auth_type_md5_crypt)
    # ..and quarantines..
    self.load_quaratines()
    # ..and define a place to store the catalogobjects before writing LDIF.
    self.entries = []
def __init__(self):
    """Prepare DB handles, password hashes and quarantines for export."""
    self.samson3_dn = LDIFutils.ldapconf("SAMSON3", "dn", None)
    self.db = Factory.get("Database")()
    self.const = Factory.get("Constants")(self.db)
    self.account = Factory.get("Account")(self.db)
    self.person = Factory.get("Person")(self.db)
    # Password hashes keyed by account entity id.
    self.auth = self.make_auths(self.const.auth_type_md5_crypt)
    # Quarantines, used later to withhold password hashes.
    self.load_quaratines()
    # Catalog objects are collected here before the LDIF is written.
    self.entries = []
def __init__(self):
    """Set up DB handles, hashes, quarantines and VLAN/VPN mappings."""
    self.radius_dn = LDIFutils.ldapconf('RADIUS', 'dn', None)
    self.db = Factory.get('Database')()
    self.const = Factory.get('Constants')(self.db)
    self.account = Factory.get('Account')(self.db)
    self.md4_auth = self.make_auths(self.const.auth_type_md4_nt)
    # Merge crypt-style hashes; later types fill in missing entries.
    self.auth = None
    for auth_type in (self.const.auth_type_crypt3_des,
                      self.const.auth_type_md5_crypt):
        self.auth = self.make_auths(auth_type, self.auth)
    self.load_quaratines()
    # Map each account to its (VLAN, VPN) pair.  Spreads are processed
    # in reverse so earlier-listed spreads win on overlap.
    self.id2vlan_vpn = {}
    for spread in reversed(cereconf.LDAP_RADIUS['spreads']):
        vlan_vpn = (cereconf.LDAP_RADIUS['spread2vlan'][spread],
                    "OU=%s;" % cereconf.LDAP_RADIUS['spread2vpn'][spread])
        spread = self.const.Spread(spread)
        for row in self.account.search(spread=spread):
            self.id2vlan_vpn[row['account_id']] = vlan_vpn
def init_user(self, *args, **kwargs):
    """Extend user init with mail attribute config and mail spreads."""
    self.__super.init_user(*args, **kwargs)
    timer = make_timer(self.logger, 'Starting PosixLDIFMail.init_user...')
    self.mail_attrs = cereconf.LDAP_USER.get('mail_attrs', ['mail'])
    from string import Template
    # Template used to synthesize an address for accounts without one.
    self.mail_template = Template(cereconf.LDAP_USER.get(
        'mail_default_template'))
    mail_spreads = LDIFutils.map_spreads(
        cereconf.LDAP_USER.get('mail_spreads'), list)
    # Accounts holding at least one mail spread.
    self.accounts_with_email = set()
    for account_id, spread in self.posuser.list_entity_spreads(
            entity_types=self.const.entity_account):
        if spread in mail_spreads:
            self.accounts_with_email.add(account_id)
    timer('...done PosixLDIFMail.init_user')
def get_ldif_info(db, stream):
    """Parse LDIF *stream* and collect contact info per person.

    :param db: open Cerebrum database connection
    :param stream: file-like object with LDIF data
    :return: dict mapping person_id -> {contact_constant: [values]}
    """
    co = Factory.get('Constants')(db)
    fax, fax_num = co.contact_fax, 'facsimiletelephonenumber'
    phone, ph_num = co.contact_phone, 'internationalisdnnumber'
    mobile, mob_num = int(co.contact_mobile_phone), 'mobile'
    con_info = {}
    lt = LDIFutils.ldif_parser(stream)
    # uid must look like 'name@uia.no'/'name@hia.no', optionally '/N'.
    # (Pattern made a raw string so the escapes are explicit.)
    r = re.compile(r"^(\w+)@[uh]ia\.no(\/(\w+))?")
    for val in lt.parse().values():
        if 'uid' not in val:
            continue
        # check for syntax in 'uid'
        m = r.match(val['uid'][0])
        if not m:
            continue
        # Iff '/x' the 'x' has to be a digit
        if m.group(2) and not m.group(3).isdigit():
            continue
        uname = m.group(1)
        try:
            acc = get_account(db, uname)
            if not acc.owner_type == int(co.entity_person):
                logger.debug("Owner (%d) for '%s' is not a person"
                             % (acc.owner_id, uname))
                continue
            pers_id = acc.owner_id
        except Errors.NotFoundError:
            logger.debug("Could not find account: %s" % uname)
            continue
        if pers_id not in con_info:
            con_info[pers_id] = {}
        # Bugfix: the original tested `ph_num not in val` and then read
        # val[ph_num][0], which raised KeyError whenever the branch was
        # taken.  Phone numbers must only be appended when present.
        if ph_num in val:
            con_info[pers_id].setdefault(phone, []).append(val[ph_num][0])
        if fax_num in val:
            con_info[pers_id][fax] = val[fax_num]
            con_info[pers_id][fax].sort()
        if mob_num in val:
            con_info[pers_id][mobile] = val[mob_num]
            con_info[pers_id][mobile].sort()
    return con_info
def write_mail_domains():
    """Collect all email domains and write them to the LDIF container."""
    logger = Factory.get_logger("cronjob")
    logger.debug("Reading domains...")
    writer = LDIFutils.LDIFWriter("MAIL_DOMAINS")
    suffix = writer.getconf("dn")
    writer.write_container()
    logger.debug("Writing domains...")
    for domain in sorted(get_email_domains()):
        entry = {
            "cn": domain,
            "host": domain,
            "objectClass": ("uioHost", ),
        }
        writer.write_entry("cn=%s,%s" % (domain, suffix), entry)
    logger.debug("Done.")
    writer.close()
def dump(self):
    """Write one account LDIF entry per user, honouring quarantines."""
    fd = LDIFutils.ldif_outfile('USER')
    fd.write(LDIFutils.container_entry_string('USER'))
    for row in self.account.search():
        account_id = row['account_id']
        info = self.auth[account_id]
        uname = LDIFutils.iso2utf(str(info[0]))
        auth = info[1]
        if account_id in self.quarantines:
            qh = QuarantineHandler(self.db, self.quarantines[account_id])
            if qh.should_skip():
                continue
            if qh.is_locked():
                # Export the account, but withhold the password hash.
                auth = None
        dn = ','.join(('uid=' + uname, self.user_dn))
        entry = {
            'objectClass': ['account'],
            'uid': (uname,),}
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        fd.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('USER', fd)
def dump(self):
    """Write one account LDIF entry per user, honouring quarantines."""
    out = LDIFutils.ldif_outfile('USER')
    out.write(LDIFutils.container_entry_string('USER'))
    for row in self.account.search():
        account_id = row['account_id']
        info = self.auth[account_id]
        uname = LDIFutils.iso2utf(str(info[0]))
        auth = info[1]
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db,
                                        self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                # Export the account but withhold the password hash.
                auth = None
        entry = {'objectClass': ['account'], 'uid': (uname, )}
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth, )
        dn = ','.join(('uid=' + uname, self.user_dn))
        out.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('USER', out)
def write_mail_dns():
    """ Gather data and dump to ldif.

    Cross-checks email domains against DNS data (A/AAAA, CNAME, MX)
    and writes one uioHost entry per domain and per remaining host.
    Raises CerebrumError if any email domain lacks an MX record.
    """
    logger = Factory.get_logger('cronjob')
    hosts, cnames, lower2host, hosts_only_mx = get_hosts_and_cnames()
    # email domains (lowercase -> domain), in alphabetical order
    domains = OrderedDict((d.lower(), d)
                          for d in sorted(get_email_domains()))
    domain_wo_mx = set()
    for domain in domains:
        # Verify that domains have an MX-record.
        for arg in cereconf.LDAP_MAIL_DNS['dig_args']:
            zone = arg[0]
            if domain.endswith(zone) and not (domain in hosts_only_mx
                                              or domain in hosts):
                logger.error("email domain without MX defined: %s" % domain)
                domain_wo_mx.add(domain.lower())
        # Valid email domains only requires MX
        if domain in hosts_only_mx:
            hosts_only_mx.remove(domain)
    for host in hosts_only_mx:
        logger.warn(
            "MX defined but no A/AAAA record or valid email domain: %s"
            % host)
    if domain_wo_mx:
        cause = "{0:d} email domains without mx".format(len(domain_wo_mx))
        logger.error("{0}, this must be rectified manually!".format(cause))
        raise CerebrumError(cause)

    def handle_domain_host(entry, host):
        # Fold the host's CNAMEs into the entry and consume the host
        # from the shared dicts so it is not written again below.
        entry["host"] = (lower2host[host], )
        for cname in hosts[host]:
            if cname not in domains:
                entry["cn"].add(lower2host[cname])
                del cnames[cname]
        del hosts[host]

    lw = LDIFutils.LDIFWriter('MAIL_DNS', filename=None)
    dn_suffix = lw.getconf('dn')
    lw.write_container()
    for domain, output in domains.items():
        dn = "cn=%s,%s" % (output, dn_suffix)
        entry = {"cn": set((output, )), "objectClass": ("uioHost", )}
        try:
            if domain in cnames:
                # This fails `if domain not in hosts`
                entry["cn"].add(lower2host[cnames[domain]])
                handle_domain_host(entry, cnames[domain])
            elif domain in hosts:
                handle_domain_host(entry, domain)
        except Exception:
            # Dump enough state to diagnose which lookup blew up.
            logger.error(
                "domain=%r, cnames[domain]=%r, "
                "in hosts=%r, in cnames=%r",
                domain, cnames.get(domain), domain in hosts,
                domain in cnames)
            raise
        lw.write_entry(dn, entry)
    # Remaining hosts were not consumed as email domains above.
    for host in sorted(hosts.keys()):
        l2h = lower2host[host]
        names = set(lower2host[cname] for cname in hosts[host])
        names.add(l2h)
        lw.write_entry("host=%s,%s" % (l2h, dn_suffix), {
            "host": (l2h, ),
            "cn": names,
            "objectClass": ("uioHost", )
        })
    lw.close()
def prep(self):
    """Collect the person/account data to export; enforce quarantines."""
    # This function collects the information that should be exported.
    # It also enforces quarantines.
    aff_to_select = [self.const.PersonAffiliation(x) for x in
                     cereconf.LDAP_SAMSON3['affiliation']]
    # We select all persons with a specific affiliation:
    for ac in self.account.list_accounts_by_type(
            affiliation=aff_to_select):
        # We get the account name and password hash
        self.account.clear()
        self.account.find(ac['account_id'])
        auth = self.auth[self.account.entity_id]
        auth = '{crypt}' + auth[1]
        # Set the hash to None if the account is quarantined.
        if ac['account_id'] in self.quarantines:
            # FIXME: jsama, 2012-03-21:
            # Commenting out these lines is a quick fix. Spreads might not
            # be totally sane as of this writing, so if anyone has a
            # quarantine, don't export the password hash. We don't care
            # about them rules defined in cereconf.
            # qh = QuarantineHandler(
            #         self.db, self.quarantines[ac['account_id']],
            #         spreads=[self.const.spread_ldap_account])
            # if qh.should_skip():
            #     continue
            # if qh.is_locked():
            #     auth = None
            auth = None
        # Get the persons names
        self.person.clear()
        self.person.find(self.account.owner_id)
        surname = self.person.get_name(self.const.system_cached,
                                       self.const.name_last)
        given_name = self.person.get_name(self.const.system_cached,
                                          self.const.name_first)
        common_name = self.person.get_name(self.const.system_cached,
                                           self.const.name_full)
        # We convert to utf
        surname = LDIFutils.iso2utf(surname)
        given_name = LDIFutils.iso2utf(given_name)
        common_name = LDIFutils.iso2utf(common_name)
        username = LDIFutils.iso2utf(self.account.account_name)
        # Get the email address
        email = self.account.get_primary_mailaddress()
        # Construct the distinguished name
        dn = ','.join(('uid=' + username, self.samson3_dn))
        # Stuff all data in a dict
        entry = {
            'uid': username,
            'mail': email,
            'cn': common_name,
            'sn': surname,
            'givenName': given_name,
            'objectClass': 'inetOrgPerson',
        }
        if auth:
            # Export password attribute unless quarantine
            entry['userPassword'] = auth
        # Put the DN and dict in a list, to be written to file later.
        self.entries.append({'dn': dn, 'entry': entry})
def user_object(self, row):
    """Build the posixAccount LDIF entry for one user row.

    :param row: db row from list_extended_posix_users
    :return: (dn, entry) or (None, None) if the user must be skipped
             (no shell, no homedir, quarantine skip, or duplicate).
    """
    account_id = int(row['account_id'])
    uname = row['entity_name']
    # Default to an unusable hash if no auth data is found at all.
    passwd = '{crypt}*Invalid'
    if row['auth_data']:
        if self.auth_format[self.user_auth]['format']:
            passwd = self.auth_format[self.user_auth]['format'] % \
                row['auth_data']
        else:
            passwd = row['auth_data']
    else:
        # Fall back through the secondary auth methods; last match wins.
        for uauth in [x for x in self.a_meth if x in self.auth_format]:
            try:
                if self.auth_format[uauth]['format']:
                    passwd = self.auth_format[uauth]['format'] % \
                        self.auth_data[account_id][uauth]
                else:
                    passwd = self.auth_data[account_id][uauth]
            except KeyError:
                # No data for this method/account; keep current passwd.
                pass
    if not row['shell']:
        self.logger.warn("User %s have no posix-shell!" % uname)
        return None, None
    else:
        shell = self.shell_tab[int(row['shell'])]
    if account_id in self.quarantines:
        self.qh.quarantines = self.quarantines[account_id]
        if self.qh.should_skip():
            return None, None
        if self.qh.is_locked():
            passwd = '{crypt}' + '*Locked'
        qshell = self.qh.get_shell()
        if qshell is not None:
            shell = qshell
    try:
        if row['disk_id']:
            disk_path = self.disk_tab[int(row['disk_id'])]
        else:
            disk_path = None
        home = self.posuser.resolve_homedir(account_name=uname,
                                            home=row['home'],
                                            disk_path=disk_path)
        # 22.07.2013: Jira, CRB-98
        # Quick fix, treat empty "home" as an error, to make
        # generate_posix_ldif complete
        if not home:
            # This event should be treated the same way as a disk_id
            # NotFoundError -- it means that a PosixUser has no home
            # directory set.
            raise Exception()
    # NOTE(review): Exception subsumes NotFoundError here, so *any*
    # error in homedir resolution is reported as "no home-directory".
    except (Errors.NotFoundError, Exception):
        self.logger.warn("User %s has no home-directory!" % uname)
        return None, None
    cn = row['name'] or row['gecos'] or uname
    # gecos must be 7-bit (ISO 646-60) for NIS compatibility.
    gecos = latin1_to_iso646_60(row['gecos'] or cn)
    entry = {'objectClass': ['top', 'account', 'posixAccount'],
             'cn': (LDIFutils.iso2utf(cn),),
             'uid': (uname,),
             'uidNumber': (str(int(row['posix_uid'])),),
             'gidNumber': (str(int(row['posix_gid'])),),
             'homeDirectory': (home,),
             'userPassword': (passwd,),
             'loginShell': (shell,),
             'gecos': (gecos,)}
    self.update_user_entry(account_id, entry, row)
    if not account_id in self.id2uname:
        self.id2uname[account_id] = uname
    else:
        self.logger.warn('Duplicate user-entry: (%s,%s)!',
                         account_id, uname)
        return None, None
    dn = ','.join((('uid=' + uname), self.user_dn))
    return dn, entry
def main(inargs=None):
    """Script entry point: export partial-employment LDIF from SAP XML.

    Persons with at least two unique employments are written as
    uioPersonPartialEmployment entries to the output file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-i', '--input-file',
        default=DEFAULT_INPUT_FILE,
        help="sap2bas XML input file (default: %(default)s)",
        metavar='FILE',
    )
    parser.add_argument(
        '-o', '--output-file',
        default=DEFAULT_OUTPUT_FILE,
        help="LDIF output file, or '-' for stdout (default: %(default)s)",
        metavar='FILE',
    )
    parser.add_argument(
        '-u', '--utf8-data',
        dest='needs_base64',
        action='store_const',
        const=LDIFutils.needs_base64_readable,
        default=LDIFutils.needs_base64_safe,
        help="Allow utf-8 values in ldif",
    )
    Cerebrum.logutils.options.install_subparser(parser)
    parser.set_defaults(**{
        Cerebrum.logutils.options.OPTION_LOGGER_LEVEL: 'INFO',
    })
    args = parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('cronjob', args)
    logger.info('Start %s', parser.prog)
    logger.debug('args: %s', repr(args))
    LDIFutils.needs_base64 = args.needs_base64
    xml_parser = system2parser('system_sap')(args.input_file, logger)
    show_ou = OUSelector('ORG_OU', cereconf.OU_USAGE_SPREAD)
    get_ous = OrgTree(xml_parser.iter_ou(), show_ou)
    use_lang = LanguageSelector(cereconf.LDAP['pref_languages'])
    aff_selector = AffSelector(cereconf.LDAP_PERSON['affiliation_selector'])
    stats = {
        'seen': 0,
        'excluded': 0,
        'included': 0,
    }
    with atomic_or_stdout(args.output_file) as output:
        for person in xml_parser.iter_person():
            stats['seen'] += 1
            partial_affs = set()
            for emp in iterate_employments(person, aff_selector):
                # Unresolvable employments are logged and skipped rather
                # than aborting the whole export.
                try:
                    aff = format_scoped_aff(emp, get_ous)
                except Exception as e:
                    logger.warning('Ignoring employment person=%r emp=%r: %s',
                                   person, emp, e)
                    continue
                titles = [
                    format_title(t)
                    for t in iterate_employment_titles(emp)
                    if use_lang(t.language)
                ]
                partial_affs.add('{0}#{1}'.format(aff, ';'.join(titles)))
            if len(partial_affs) < 2:
                # We want at least two unique employments to output person
                stats['excluded'] += 1
                continue
            try:
                identifier = get_identifier(person)
            except ValueError:
                logger.warn("Missing NIN: {0}".format(str(person)))
                stats['excluded'] += 1
                continue
            stats['included'] += 1
            # Sorted affiliations make the output deterministic.
            output.write(
                LDIFutils.entry_string(
                    identifier,
                    {'uioPersonPartialEmployment': list(sorted(partial_affs))},
                    add_rdn=False))
    logger.info("persons"
                " considered: {0[seen]:d},"
                " included: {0[included]:d},"
                " excluded: {0[excluded]:d}".format(stats))
    logger.info("Done %s", parser.prog)
def prep(self):
    """Collect the person/account data to export; enforce quarantines."""
    # This function collects the information that should be exported.
    # It also enforces quarantines.
    aff_to_select = [self.const.PersonAffiliation(x)
                     for x in cereconf.LDAP_SAMSON3["affiliation"]]
    # We select all persons with a specific affiliation:
    for ac in self.account.list_accounts_by_type(affiliation=aff_to_select):
        # We get the account name and password hash
        self.account.clear()
        self.account.find(ac["account_id"])
        auth = self.auth[self.account.entity_id]
        auth = "{crypt}" + auth[1]
        # Set the hash to None if the account is quarantined.
        if ac["account_id"] in self.quarantines:
            # FIXME: jsama, 2012-03-21:
            # Commenting out these lines is a quick fix. Spreads might not
            # be totally sane as of this writing, so if anyone has a
            # quarantine, don't export the password hash. We don't care
            # about them rules defined in cereconf.
            # qh = QuarantineHandler(
            #         self.db, self.quarantines[ac['account_id']],
            #         spreads=[self.const.spread_ldap_account])
            # if qh.should_skip():
            #     continue
            # if qh.is_locked():
            #     auth = None
            auth = None
        # Get the persons names
        self.person.clear()
        self.person.find(self.account.owner_id)
        surname = self.person.get_name(self.const.system_cached,
                                       self.const.name_last)
        given_name = self.person.get_name(self.const.system_cached,
                                          self.const.name_first)
        common_name = self.person.get_name(self.const.system_cached,
                                           self.const.name_full)
        # We convert to utf
        surname = LDIFutils.iso2utf(surname)
        given_name = LDIFutils.iso2utf(given_name)
        common_name = LDIFutils.iso2utf(common_name)
        username = LDIFutils.iso2utf(self.account.account_name)
        # Get the email address
        email = self.account.get_primary_mailaddress()
        # Construct the distinguished name
        dn = ",".join(("uid=" + username, self.samson3_dn))
        # Stuff all data in a dict
        entry = {
            "uid": username,
            "mail": email,
            "cn": common_name,
            "sn": surname,
            "givenName": given_name,
            "objectClass": "inetOrgPerson",
        }
        if auth:
            # Export password attribute unless quarantine
            entry["userPassword"] = auth
        # Put the DN and dict in a list, to be written to file later.
        self.entries.append({"dn": dn, "entry": entry})
def user_object(self, row):
    """Build the posixAccount LDIF entry for one user row.

    :param row: db row from list_extended_posix_users
    :return: (dn, entry) or (None, None) if the user must be skipped
             (no shell, no homedir, quarantine skip, or duplicate).
    """
    account_id = int(row['account_id'])
    uname = row['entity_name']
    # Default to an unusable hash if no auth data is found at all.
    passwd = '{crypt}*Invalid'
    if row['auth_data']:
        if self.auth_format[self.user_auth]['format']:
            passwd = self.auth_format[self.user_auth]['format'] % \
                row['auth_data']
        else:
            passwd = row['auth_data']
    else:
        # Fall back through the secondary auth methods; last match wins.
        for uauth in [x for x in self.a_meth if x in self.auth_format]:
            try:
                if self.auth_format[uauth]['format']:
                    passwd = self.auth_format[uauth]['format'] % \
                        self.auth_data[account_id][uauth]
                else:
                    passwd = self.auth_data[account_id][uauth]
            except KeyError:
                # No data for this method/account; keep current passwd.
                pass
    if not row['shell']:
        self.logger.warn("User %s have no posix-shell!" % uname)
        return None, None
    else:
        shell = self.shell_tab[int(row['shell'])]
    if account_id in self.quarantines:
        self.qh.quarantines = self.quarantines[account_id]
        if self.qh.should_skip():
            return None, None
        if self.qh.is_locked():
            passwd = '{crypt}' + '*Locked'
        qshell = self.qh.get_shell()
        if qshell is not None:
            shell = qshell
    try:
        if row['disk_id']:
            disk_path = self.disk_tab[int(row['disk_id'])]
        else:
            disk_path = None
        home = self.posuser.resolve_homedir(account_name=uname,
                                            home=row['home'],
                                            disk_path=disk_path)
        # 22.07.2013: Jira, CRB-98
        # Quick fix, treat empty "home" as an error, to make
        # generate_posix_ldif complete
        if not home:
            # This event should be treated the same way as a disk_id
            # NotFoundError -- it means that a PosixUser has no home
            # directory set.
            raise Exception()
    # NOTE(review): Exception subsumes NotFoundError here, so *any*
    # error in homedir resolution is reported as "no home-directory".
    except (Errors.NotFoundError, Exception):
        self.logger.warn("User %s has no home-directory!" % uname)
        return None, None
    cn = row['name'] or row['gecos'] or uname
    # gecos must be 7-bit (ISO 646-60) for NIS compatibility.
    gecos = latin1_to_iso646_60(row['gecos'] or cn)
    entry = {
        'objectClass': ['top', 'account', 'posixAccount'],
        'cn': (LDIFutils.iso2utf(cn), ),
        'uid': (uname, ),
        'uidNumber': (str(int(row['posix_uid'])), ),
        'gidNumber': (str(int(row['posix_gid'])), ),
        'homeDirectory': (home, ),
        'userPassword': (passwd, ),
        'loginShell': (shell, ),
        'gecos': (gecos, )
    }
    self.update_user_entry(account_id, entry, row)
    if not account_id in self.id2uname:
        self.id2uname[account_id] = uname
    else:
        self.logger.warn('Duplicate user-entry: (%s,%s)!',
                         account_id, uname)
        return None, None
    dn = ','.join((('uid=' + uname), self.user_dn))
    return dn, entry