def dump(self):
    """Write the RADIUS LDIF file for every account with VLAN/VPN data."""
    out = LDIFutils.ldif_outfile('RADIUS')
    out.write(LDIFutils.container_entry_string('RADIUS'))
    no_auth = (None, None)
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        info = self.auth[account_id]
        uname = LDIFutils.iso2utf(str(info[0]))
        auth = info[1]
        nt_auth = self.md4_auth.get(account_id, no_auth)[1]
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db,
                                        self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                # Keep the entry, but strip all password material.
                auth = nt_auth = None
        dn = 'uid=%s,%s' % (uname, self.radius_dn)
        # Attribute set is not final (orig. comment: "Ikke endelig innhold").
        entry = {
            'objectClass': ['top', 'account', 'uiaRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('VLAN',),
            'radiusTunnelMediumType': ('IEEE-802',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        if nt_auth:
            entry['ntPassword'] = (nt_auth,)
        out.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('RADIUS', out)
def dump(self):
    """Write the USER LDIF file with radius attributes for each account."""
    fd = ldif_outfile('USER')
    fd.write(container_entry_string('USER'))
    missing = (None, None)
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        info = self.auth[account_id]
        uname = info[0]
        auth = info[1]
        nt_auth = self.md4_auth.get(account_id, missing)[1]
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db,
                                        self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                # Locked accounts are exported without any passwords.
                auth = nt_auth = None
        dn = 'uid=%s,%s' % (uname, self.user_dn)
        entry = {
            'objectClass': ['top', 'account', 'hiofRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('13',),
            'radiusTunnelMediumType': ('6',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        if nt_auth:
            entry['ntPassword'] = (nt_auth,)
        fd.write(entry_string(dn, entry, False))
    end_ldif_outfile('USER', fd)
def generate_guests(self):
    """ Guest account generator.

    Yields accounts with the configured spread.
    """
    for row in self.ac.search(spread=self.spread):
        # NOTE: Will not consider expired accounts
        self.ac.clear()
        self.ac.find(row["account_id"])
        quarantine_types = [
            int(q["quarantine_type"])
            for q in self.ac.get_entity_quarantine(only_active=True)]
        qh = QuarantineHandler(self.db, quarantine_types)
        # No need for quarantined guest accounts in ldap
        # NOTE: We might want to export accounts that is_locked(), but
        #       without passwords.
        if qh.should_skip() or qh.is_locked():
            logger.debug(
                "Skipping %s, quarantined: %r",
                self.ac.account_name,
                [str(self.co.Quarantine(q)) for q in qh.quarantines])
            continue
        yield self.ac2entry(self.ac)
def generate_guests(self):
    """ Guest account generator.

    Yields accounts with the configured spread.
    """
    ac = Factory.get('Account')(self.db)
    co = Factory.get('Constants')(self.db)
    for row in ac.search(spread=self.spread):
        # NOTE: Will not consider expired accounts
        ac.clear()
        ac.find(row['account_id'])
        # Use a distinct comprehension variable: in Python 2 a list
        # comprehension leaks its variable into the enclosing scope, so
        # re-using `row` here silently clobbered the account row above.
        qh = QuarantineHandler(self.db, [
            int(q_row['quarantine_type'])
            for q_row in ac.get_entity_quarantine(only_active=True)
        ])
        # No need for quarantined guest accounts in ldap
        # NOTE: We might want to export accounts that is_locked(), but
        #       without passwords.
        if qh.should_skip() or qh.is_locked():
            logger.debug("Skipping %s, quarantined: %r",
                         ac.account_name,
                         [str(co.Quarantine(q)) for q in qh.quarantines])
            continue
        entry = self.ac2entry(ac)
        yield entry
def init_user(self, auth_meth=None):
    """Load the caches needed before exporting posix users.

    :param auth_meth: optional auth method(s) forwarded to
        load_auth_tab(); None means the configured default.
    """
    timer = make_timer(self.logger, 'Starting init_user...')
    self.get_name = False
    self.qh = QuarantineHandler(self.db, None)
    self.posuser = Factory.get('PosixUser')(self.db)
    # Populate lookup tables used by the per-user export; the order here
    # is kept as-is since later loaders may depend on earlier ones.
    self.load_disk_tab()
    self.load_shell_tab()
    # NOTE: method name kept as spelled ("quaratines") — it must match
    # the helper defined elsewhere in this class.
    self.load_quaratines()
    self.load_auth_tab(auth_meth)
    self.cache_account2name()
    self.id2uname = {}
    timer('... init_user done.')
def active_in_system(self, id_type, entity_id, system):
    """Check if a user is represented and active in a system.

    :type id_type: basestring
    :param id_type: The id-type to look-up by.

    :type entity_id: basestring
    :param entity_id: The entitys id.

    :type system: basestring
    :param system: The system to check."""
    # Check for existing quarantines on the entity that are locking the
    # entity out (if the quarantine is active), and also check if the
    # entity is in the system.
    # TODO: Should this evaluate the shell set by quarantine rules? Some
    # quarantines does not result in a locked-status, but has
    # nologin-shells associated with them..
    import mx
    from Cerebrum.QuarantineHandler import QuarantineHandler
    co = Factory.get('Constants')(self.db)
    # q[i] = {quarantine_type: int, creator_id: int, description: string,
    #         create_date: DateTime, start_date: DateTime,
    #         disable_until: DateTime, DateTime: end_date}
    e = Utils.get(self.db, 'entity', id_type, entity_id)
    # Fetch quarantines if applicable
    try:
        quars = e.get_entity_quarantine()
    except AttributeError:
        quars = []
    now = mx.DateTime.now()
    locked = False
    for q in quars:
        # Use code string for quarantine type
        qtype = co.map_const(q['quarantine_type'])
        handler = QuarantineHandler(self.db, [qtype])
        if not handler.is_locked():
            continue
        # A quarantine only locks the entity out while it is active:
        # started, not yet ended, and not temporarily disabled.
        if (q['start_date'] <= now
                and (q['end_date'] is None or q['end_date'] > now)
                and (q['disable_until'] is None
                     or q['disable_until'] <= now)):
            locked = True
    if locked:
        return False
    return bool(self.in_system(id_type, entity_id, system))
def main():
    """Export the configured SERVICES accounts (with passwords) to LDIF."""
    logger = Factory.get_logger("cronjob")
    db = Factory.get('Database')()
    const = Factory.get("Constants")(db)
    account = Factory.get('Account')(db)
    auth_prefix = "{crypt}"
    auth_method = int(const.auth_type_md5_crypt)
    ldif = LDIFWriter('SERVICES', None)
    dn = ldif.getconf('dn')
    ldif.write_container()
    for username in ldif.getconf('users'):
        account.clear()
        try:
            account.find_by_name(username)
        except Errors.NotFoundError:
            logger.error("User '%s' not found" % username)
            sys.exit(1)
        passwd = None
        qh = QuarantineHandler.check_entity_quarantines(
            db, account.entity_id)
        if not (qh.should_skip() or qh.is_locked()):
            try:
                passwd = account.get_account_authentication(auth_method)
            except Errors.NotFoundError:
                logger.warn("Password not found for user %s", username)
        entry = {
            'description': "Note: The password is maintained in Cerebrum.",
            'objectClass': ('applicationProcess', 'simpleSecurityObject'),
            'userPassword': auth_prefix + (passwd or "*locked"),
        }
        ldif.write_entry("cn=%s,%s" % (username, dn), entry)
    ldif.close()
def get(self, name):
    """Get account quarantines."""
    args = self.account_quarantines_filter.parse_args()
    spreads = None
    if args.context:
        try:
            spreads = [int(db.const.Spread(args.context))]
        except Errors.NotFoundError:
            abort(404,
                  message='Unknown context {!r}'.format(args.context))
    ac = find_account(name)
    qh = QuarantineHandler.check_entity_quarantines(
        db=db.connection,
        entity_id=ac.entity_id,
        spreads=spreads)
    locked = qh.is_locked()
    # TODO: Replace with list of hrefs to quarantines resource?
    quarantines = [
        _format_quarantine(q)
        for q in ac.get_entity_quarantine(only_active=True)]
    return {
        'locked': locked,
        'quarantines': quarantines,
    }
def main():
    """Write LDIF entries for each configured SERVICES user."""
    logger = Factory.get_logger("cronjob")
    db = Factory.get('Database')()
    const = Factory.get("Constants")(db)
    account = Factory.get('Account')(db)
    auth_prefix, auth_method = "{crypt}", int(const.auth_type_md5_crypt)
    ldif = LDIFWriter('SERVICES', None)
    dn = ldif.getconf('dn')
    ldif.write_container()
    for username in ldif.getconf('users'):
        account.clear()
        try:
            account.find_by_name(username)
        except Errors.NotFoundError:
            logger.error("User '%s' not found" % username)
            sys.exit(1)
        qh = QuarantineHandler.check_entity_quarantines(
            db, account.entity_id)
        locked = qh.should_skip() or qh.is_locked()
        passwd = None
        if not locked:
            try:
                passwd = account.get_account_authentication(auth_method)
            except Errors.NotFoundError:
                logger.warn("Password not found for user %s", username)
        ldif.write_entry(
            "cn=%s,%s" % (username, dn),
            {'description':
                 "Note: The password is maintained in Cerebrum.",
             'objectClass':
                 ('applicationProcess', 'simpleSecurityObject'),
             'userPassword': auth_prefix + (passwd or "*locked")})
    ldif.close()
def init_user(self):
    """Build all per-user caches needed by the posix user export.

    Must run before user entries are generated; later export steps read
    the attributes populated here.
    """
    self.get_name = False
    self.qh = QuarantineHandler(self.db, None)
    self.posuser = Factory.get('PosixUser')(self.db)
    # Shell code -> shell path lookup table.
    self.shell_tab = self.user_exporter.shell_codes()
    # account_id -> active quarantines, limited to the user spread.
    self.quarantines = self.user_exporter.make_quarantine_cache(
        self.spread_d['user']
    )
    self.owners.make_owner_cache()
    self.owners.make_name_cache()
    self.homedirs.make_home_cache()
    self.group2gid = self.user_exporter.make_posix_gid_cache()
    self.load_auth_tab()
    self.cache_account2name()
    self.id2uname = {}
def find_candidates(exclude_aff=None, grace=0, quarantine=None):
    """Find persons who should be quarantined and dequarantined.

    :param list exclude_aff:
        A list of affiliations/statuses that should be ignored when finding
        the candidates. Persons with only affiliations from this list will
        be considered as not affiliated. The list contains tuples, either
        with affiliation or affiliation- and status-codes.  Defaults to no
        exclusions.

    :param int grace:
        Defines a grace period for when affiliations are still considered
        active, after their end period.

    :param None/QuarantineCode/sequence(QuarantineCode) quarantine:
        If not None, will filter the `quarantined` return value only to
        have these quarantines.

    :rtype: dict
    :return:
        Three elements are included in the dict:

        - `affiliated`: A set with person-IDs for those considered
          affiliatied.
        - `not_affiliated`: A set with person-IDs for those *not*
          affiliatied.
        - `quarantined`: A set with account-IDs for all quarantined
          accounts.
    """
    # Guard instead of a mutable default argument (a shared list would
    # persist between calls).
    if exclude_aff is None:
        exclude_aff = []
    datelimit = DateTime.now() - int(grace)
    logger.debug2("Including affiliations deleted after: %s", datelimit)

    def is_aff_considered(row):
        """Check for if an affiliation should be considered or not."""
        # Exclude affiliations deleted before the datelimit:
        if row['deleted_date'] and row['deleted_date'] < datelimit:
            return False
        if (row['affiliation'], row['status']) in exclude_aff:
            return False
        if (row['affiliation'],) in exclude_aff:
            return False
        return True

    affs = filter(is_aff_considered,
                  pe.list_affiliations(include_deleted=True))
    affed = set(x['person_id'] for x in affs)
    logger.debug('Found %d persons with affiliations', len(affed))
    naffed = set(x['person_id'] for x in pe.list_persons()) - affed
    logger.debug('Found %d persons without affiliations', len(naffed))
    if quarantine is None:
        quarantined = QuarantineHandler.get_locked_entities(
            db, entity_types=co.entity_account)
    else:
        quarantined = set(
            x['entity_id'] for x in ac.list_entity_quarantines(
                entity_types=co.entity_account,
                only_active=True,
                quarantine_types=quarantine))
    logger.debug('Found %d quarantined accounts', len(quarantined))
    return {'affiliated': affed,
            'not_affiliated': naffed,
            'quarantined': quarantined}
def get(self, id):
    """Get account quarantines."""
    args = self.account_quarantines_filter.parse_args()
    if args.context:
        try:
            spreads = [int(db.const.Spread(args.context))]
        except Errors.NotFoundError:
            abort(404, message=u'Unknown context {!r}'.format(
                args.context))
    else:
        spreads = None
    ac = find_account(id)
    qh = QuarantineHandler.check_entity_quarantines(
        db=db.connection,
        entity_id=ac.entity_id,
        spreads=spreads)
    locked = qh.is_locked()
    quarantines = [
        {
            'type': q['quarantine_type'],
            # 'description': q['description'],
            'end': q['end_date'],
            'start': q['start_date'],
            # 'disable_until': q['disable_until'],
        }
        for q in ac.get_entity_quarantine(only_active=True)]
    return {
        'locked': locked,
        'quarantines': quarantines,
    }
def read_target_auth_data(self):
    """Cache password hashes; quarantined accounts get "*locked"."""
    # For the time being, remove passwords for all quarantined
    # accounts, regardless of quarantine type.
    locked_ids = QuarantineHandler.get_locked_entities(
        self._db, entity_types=self.const.entity_account)
    quarantines = dict((e_id, "*locked") for e_id in locked_ids)
    for row in self.acc.list_account_authentication():
        a_id = int(row['account_id'])
        self.e_id2passwd[a_id] = quarantines.get(a_id) or row['auth_data']
def read_target_auth_data(self):
    """Cache auth data per account, masking quarantined accounts."""
    # For the time being, remove passwords for all quarantined
    # accounts, regardless of quarantine type.
    locked = set(QuarantineHandler.get_locked_entities(
        self._db, entity_types=self.const.entity_account))
    for row in self.acc.list_account_authentication():
        a_id = int(row['account_id'])
        if a_id in locked:
            self.e_id2passwd[a_id] = "*locked"
        else:
            self.e_id2passwd[a_id] = row['auth_data']
def dump(self):
    """Write the RADIUS LDIF export for all accounts with VLAN/VPN data."""
    fd = ldif_outfile('RADIUS')
    logger.debug('writing to %s', repr(fd))
    fd.write(container_entry_string('RADIUS'))
    logger.info('Generating export...')
    for account_id, vlan_vpn in self.id2vlan_vpn.iteritems():
        try:
            uname = self.account_names[account_id]
        except KeyError:
            logger.error('No account name for account_id=%r', account_id)
            continue

        def lookup(cache):
            # Best-effort password lookup from a cache mapping.
            try:
                return cache.get(account_id)
            except LookupError:
                return None

        auth = lookup(self.user_password)
        ntauth = lookup(self.nt_password)
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db,
                                        self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                auth = ntauth = None
        dn = 'uid=%s,%s' % (uname, self.user_dn)
        entry = {
            'objectClass': ['top', 'account', 'uiaRadiusAccount'],
            'uid': (uname,),
            'radiusTunnelType': ('VLAN',),
            'radiusTunnelMediumType': ('IEEE-802',),
            'radiusTunnelPrivateGroupId': (vlan_vpn[0],),
            'radiusClass': (vlan_vpn[1],),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            # NOTE(review): unlike the sibling exporters this stores the
            # cached value directly (no '{crypt}' prefix, not wrapped in a
            # tuple) — presumably self.user_password already holds the
            # final attribute form; confirm before changing.
            entry['userPassword'] = auth
        if ntauth:
            entry['ntPassword'] = (ntauth,)
        fd.write(entry_string(dn, entry, False))
    end_ldif_outfile('RADIUS', fd)
def read_target_auth_data(self):
    """Cache auth data; mask locked accounts (email-only exempted)."""
    a = Factory.get('Account')(self._db)
    # Same as default, but omit co.quarantine_auto_emailonly
    locked_ids = QuarantineHandler.get_locked_entities(
        self._db,
        entity_types=self.const.entity_account,
        ignore_quarantine_types=self.const.quarantine_auto_emailonly)
    quarantines = dict((e_id, "*locked") for e_id in locked_ids)
    for row in a.list_account_authentication():
        a_id = int(row['account_id'])
        self.e_id2passwd[a_id] = quarantines.get(a_id) or row['auth_data']
def read_target_auth_data(self):
    """Populate e_id2passwd, masking quarantine-locked accounts."""
    account = Factory.get('Account')(self._db)
    # Same as default, but omit co.quarantine_auto_emailonly
    locked = set(QuarantineHandler.get_locked_entities(
        self._db,
        entity_types=self.const.entity_account,
        ignore_quarantine_types=self.const.quarantine_auto_emailonly))
    for row in account.list_account_authentication():
        a_id = int(row['account_id'])
        self.e_id2passwd[a_id] = (
            "*locked" if a_id in locked else row['auth_data'])
def filter_quarantines(self):
    """ Mark quarantined accounts for disabling/deletion. """
    locked = QuarantineHandler.get_locked_entities(
        self.db, entity_types=self.co.entity_account)
    # Flag every exported account that has a locking quarantine.
    for account_id in set(self.id2uname).intersection(locked):
        uname = self.id2uname[account_id]
        self.logger.debug("Quarantine flag is set for %s",
                          self.accounts[uname])
        self.accounts[uname].quarantined = True
def gather_user_data(self, row):
    """Build a PosixData object for one posix user row.

    :param row: a posix user db-row with keys 'account_id', 'posix_uid',
        'gid', 'shell' and 'gecos'.
    :return: a populated PosixData, or None when the user must be
        skipped (no shell, skip-worthy quarantine, or no resolvable home
        directory).
    """
    data = PosixData()
    data.account_id = int(row['account_id'])
    data.uname = self.e_id2name[data.account_id]
    data.uid = str(row['posix_uid'])
    data.gid = str(self.g_id2gid[row['gid']])
    if not row['shell']:
        self.logger.warn("User %s has no posix-shell!" % data.uname)
        return None
    data.shell = self.shell_tab[int(row['shell'])]
    # Quarantines may skip the user entirely, lock the password, and/or
    # override the shell.
    data.quarantined, data.passwd = False, None
    if data.account_id in self.quarantines:
        qh = QuarantineHandler(self.db, self.quarantines[data.account_id])
        if qh.should_skip():
            return None
        if qh.is_locked():
            data.quarantined, data.passwd = True, '*Locked'
        qshell = qh.get_shell()
        if qshell is not None:
            data.shell = qshell
    try:
        home = self.a_id2home[data.account_id]
        data.home = self.posix_user.resolve_homedir(
            account_name=data.uname,
            home=home[3],
            disk_path=self.disk_tab[home[1]])
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed.  Missing cache entries (KeyError) and
        # resolver failures still just skip the user, as before.
        self.logger.warn("User %s has no home-directory!" % data.uname)
        return None
    # cn prefers the owning person's name, then gecos, then username.
    cn = gecos = row['gecos']
    if data.account_id in self.a_id2owner:
        cn = self.p_id2name.get(self.a_id2owner[data.account_id], gecos)
    data.cn = cn or data.uname
    # The gecos field must be 7-bit safe (ISO 646-60).
    data.gecos = latin1_to_iso646_60(gecos or data.cn)
    return data
def dump(self):
    """Write the USER LDIF file: one minimal account entry per user."""
    out = LDIFutils.ldif_outfile('USER')
    out.write(LDIFutils.container_entry_string('USER'))
    for row in self.account.search():
        account_id = row['account_id']
        info = self.auth[account_id]
        uname = LDIFutils.iso2utf(str(info[0]))
        auth = info[1]
        if account_id in self.quarantines:
            handler = QuarantineHandler(self.db,
                                        self.quarantines[account_id])
            if handler.should_skip():
                continue
            if handler.is_locked():
                # Export the entry, but without a password.
                auth = None
        dn = 'uid=%s,%s' % (uname, self.user_dn)
        entry = {
            'objectClass': ['account'],
            'uid': (uname,),
        }
        if auth:
            entry['objectClass'].append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + auth,)
        out.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('USER', out)
def dump(self):
    """Export all accounts as minimal LDAP 'account' entries."""
    fd = LDIFutils.ldif_outfile('USER')
    fd.write(LDIFutils.container_entry_string('USER'))
    for row in self.account.search():
        a_id = row['account_id']
        info = self.auth[a_id]
        name = LDIFutils.iso2utf(str(info[0]))
        passwd = info[1]
        if a_id in self.quarantines:
            qh = QuarantineHandler(self.db, self.quarantines[a_id])
            if qh.should_skip():
                continue
            if qh.is_locked():
                passwd = None
        dn = ','.join(('uid=' + name, self.user_dn))
        classes = ['account']
        entry = {'objectClass': classes, 'uid': (name,)}
        if passwd:
            classes.append('simpleSecurityObject')
            entry['userPassword'] = ('{crypt}' + passwd,)
        fd.write(LDIFutils.entry_string(dn, entry, False))
    LDIFutils.end_ldif_outfile('USER', fd)
def set_password(self, uname, new_password, token, browser_token):
    """Set a new password for a user after token verification.

    :param uname: account name
    :param new_password: plaintext password to set
    :param token: confirmation token previously sent to the user
    :param browser_token: token identifying the browser session
    :return: True when the password was changed, False when the token
        check failed.
    :raise Errors.CerebrumRPCException:
        'password_invalid' if the password fails quality checks,
        'error_unknown' if the database update fails.
    """
    if not self.check_token(uname, token, browser_token):
        return False
    account = self.get_account(uname)
    try:
        check_password(new_password, account)
    except PasswordNotGoodEnough as e:
        m = text_type(e)
        raise Errors.CerebrumRPCException('password_invalid', m)
    # All data is good. Set password
    account.set_password(new_password)
    try:
        account.write_db()
        account._db.commit()
        logger.info("Password for %r altered", uname)
    except self.db.DatabaseError as m:
        logger.error("Error when setting password for %r: %s", uname, m)
        raise Errors.CerebrumRPCException('error_unknown')
    # Remove "weak password" quarantine
    for r in account.get_entity_quarantine():
        for qua in (self.co.quarantine_autopassord,
                    self.co.quarantine_svakt_passord):
            if int(r['quarantine_type']) == qua:
                # Commit per removed quarantine, mirroring the password
                # commit above.
                account.delete_entity_quarantine(qua)
                account.write_db()
                account._db.commit()
    # TODO: move these checks up and raise exceptions? Wouldn't happen,
    # since generate_token() checks this already, but might get other
    # authentication methods later.
    if account.is_deleted():
        logger.warning("user %r is deleted", uname)
    elif account.is_expired():
        logger.warning("user %r is expired", uname)
    elif QuarantineHandler.check_entity_quarantines(
            self.db, account.entity_id).is_locked():
        logger.info("user %r has an active quarantine", uname)
    return True
def list_voip_attributes(self, voippersons, primary2pid, sysadm_aid): """Fast version of search() + get_voip_attributes(). Simply put, with tens of thousands of objects, find() + get_voip_attributes() is unfeasible. This method has similar semantics, to the combination above, except it returns a generated yielding successful dicts, once for each voipClient. Each dict is similar to the one returned by get_voip_attributes. """ # So, a few things we need to cache const2str = dict() for i in ('Quarantine', 'VoipClientInfoCode', 'VoipClientTypeCode'): for cnst in self.const.fetch_constants(getattr(self.const, i)): assert int(cnst) not in const2str const2str[int(cnst)] = text_type(cnst) # entity_id -> {<auth type>: <auth_data>} client2auth = dict() for row in self.list_auth_data(self.const.voip_auth_sip_secret): client2auth.setdefault(row['entity_id'], {})[row['auth_method']] = row['auth_data'] # person_id -> uname, also cache user ids owner2uname = defaultdict(list) aid2owner = dict() account = Factory.get("Account")(self._db) for r in account.search(owner_type=self.const.entity_person, owner_id=voippersons, exclude_account_id=sysadm_aid): owner2uname[r["owner_id"]].append(r["name"]) aid2owner[r["account_id"]] = r["owner_id"] # Get account identificators that have a quarantine that should result # in the account beeing locked. 
quarantined_accounts = QuarantineHandler.get_locked_entities( self._db, entity_ids=aid2owner.keys()) # Populate account_id -> quarantine information dictionary aid2quarantine = dict() for row in account.list_entity_quarantines( entity_types=self.const.entity_account, only_active=True, entity_ids=quarantined_accounts): aid2quarantine[row["entity_id"]] = ( "{},{},{}".format(const2str[row['quarantine_type']], # __unicode__() returns ISO 8601 format text_type(row['start_date'])[0:10], row['description'])) # Make a owner2quarantine, to block hardphone is if primary users is blocked owner2quarantine = dict() for aid in aid2quarantine: # Of course some users have missing affiliations, thus no primaryid. # Check if they at least have less than two accounts, then the aid # is the primaryid. if aid in primary2pid or len(owner2uname[aid2owner[aid]]) < 2: owner2quarantine[aid2owner[aid]] = aid2quarantine[aid] # uname -> HA1 hashes, only for softphone for Account users aka persons. uname2ha1 = dict() uname2quarantine = dict() for row in account.list_account_authentication( self.const.auth_type_ha1_md5, account_id=aid2owner.keys()): if row['account_id'] in aid2quarantine: uname2quarantine[row['entity_name']] = aid2quarantine.get( row["account_id"]) uname2ha1[row['entity_name']] = row['auth_data'] for row in self.search(): entry = { "sipClientType": const2str[row["client_type"]], "sipClientInfo": const2str[row["client_info"]], "voip_address_id": row["voip_address_id"], } owner_id = row["owner_entity_id"] client_type = row["client_type"] if bool(row["sip_enabled"] == 'T'): entry["sipEnabled"] = "TRUE" else: entry["sipEnabled"] = "FALSE" # Create an extra softphone entry for each account if (client_type == self.const.voip_client_type_softphone and row["owner_entity_type"] == self.const.entity_person): for uid in owner2uname[owner_id]: e = entry.copy() e["uid"] = text_type(uid) if uid in uname2quarantine: e["sipQuarantine"] = uname2quarantine[uid] e["sipEnabled"] = "quarantined" 
e["ha1MD5password"] = uname2ha1.get(uid) or "missing" # XXX: will be altered in next revision when voip_softphone/softphone # becomes voip_hardhone/softphone. e["sipClientInfo"] = "sbc2phone" yield e entry["sipSecret"] = client2auth.get(row["entity_id"], {}).get( self.const.voip_auth_sip_secret) if row["owner_entity_type"] == self.const.entity_person: # Block if primary user is quarantined if owner_id in owner2quarantine: entry["sipEnabled"] = "quarantined" entry["sipQuarantine"] = owner2quarantine[owner_id] # Block if the person has no valid account elif not owner2uname[owner_id]: entry["sipEnabled"] = "noaccount" if client_type == self.const.voip_client_type_softphone: entry["uid"] = text_type(owner_id) elif client_type == self.const.voip_client_type_hardphone: mac = row["mac_address"] mac = mac.replace(":", "") entry["sipMacAddress"] = mac yield entry
def make_person_entry(self, row, person_id):
    # Return (dn, person entry, alias_info) for a person to output,
    # or (None, anything, anything) if the person should not be output.
    # bool(alias_info) == False means no alias will be output.
    # Receives a row from list_persons() as a parameter.
    # The row must have key 'account_id',
    # and if person_dn_primaryOU() is not overridden: 'ou_id'.
    account_id = int(row['account_id'])

    # Persons without affiliations are not exported at all.
    p_affiliations = self.affiliations.get(person_id)
    if not p_affiliations:
        self.logger.debug3("Omitting person id=%d, no affiliations",
                           person_id)
        return None, None, None

    names = self.person_names.get(person_id)
    if not names:
        self.logger.warn("Person %s got no names. Skipping.", person_id)
        return None, None, None
    name = iso2utf(names.get(int(self.const.name_full), '').strip())
    givenname = iso2utf(names.get(int(self.const.name_first), '').strip())
    lastname = iso2utf(names.get(int(self.const.name_last), '').strip())
    # Fall back to splitting the full name when first/last are missing;
    # a person without a derivable surname cannot be exported (sn is
    # mandatory for the person object class).
    if not (lastname and givenname):
        givenname, lastname = self.split_name(name, givenname, lastname)
        if not lastname:
            self.logger.warn("Person %s got no lastname. Skipping.",
                             person_id)
            return None, None, None
    if not name:
        name = " ".join(filter(None, (givenname, lastname)))

    entry = {
        'objectClass': ['top', 'person', 'organizationalPerson',
                        'inetOrgPerson', 'eduPerson'],
        'cn': (name,),
        'sn': (lastname,)}
    if givenname:
        entry['givenName'] = (givenname,)
    try:
        entry['uid'] = (self.acc_name[account_id],)
    except KeyError:
        pass

    # Quarantine handling: a skip-worthy quarantine drops the person; a
    # locking quarantine clears the password (passwd = 0 marks
    # "deliberately locked" as opposed to "missing").
    passwd = self.acc_passwd.get(account_id)
    qt = self.acc_quarantines.get(account_id)
    if qt:
        qh = QuarantineHandler(self.db, qt)
        if qh.should_skip():
            self.logger.debug3("Omitting person id=%d, quarantined",
                               person_id)
            return None, None, None
        if self.acc_locked_quarantines is not self.acc_quarantines:
            qt = self.acc_locked_quarantines.get(account_id)
            if qt:
                qh = QuarantineHandler(self.db, qt)
        if qt and qh.is_locked():
            passwd = 0
    if passwd:
        entry['userPassword'] = ("{crypt}" + passwd,)
    elif passwd != 0 and entry.get('uid'):
        self.logger.debug("User %s got no password-hash.",
                          entry['uid'][0])

    dn, primary_ou_dn = self.person_dn_primaryOU(entry, row, person_id)
    if not dn:
        self.logger.debug3("Omitting person id=%d, no DN", person_id)
        return None, None, None

    if self.org_dn:
        entry['eduPersonOrgDN'] = (self.org_dn,)
    if primary_ou_dn:
        entry['eduPersonPrimaryOrgUnitDN'] = (primary_ou_dn,)
    # edu_OUs = [primary_ou_dn] + [self.ou2DN.get(aff[2])
    #            for aff in p_affiliations]
    edu_OUs = self._calculate_edu_OUs(
        primary_ou_dn,
        [self.ou2DN.get(aff[2]) for aff in p_affiliations])
    entry['eduPersonOrgUnitDN'] = self.attr_unique(filter(None, edu_OUs))
    entry['eduPersonAffiliation'] = self.attr_unique(
        self.select_list(self.eduPersonAff_selector, person_id,
                         p_affiliations))

    # Contact info is only exported for persons matched by the contact
    # selector; others may get a labeledURI instead.
    if self.select_bool(self.contact_selector, person_id,
                        p_affiliations):
        # title:
        titles = self.person_titles.get(person_id)
        self.add_lang_names(entry, 'title', titles)
        # phone & fax:
        for attr, contact in self.attr2id2contacts:
            contact = contact.get(person_id)
            if contact:
                entry[attr] = contact
        # addresses:
        addrs = self.addr_info.get(person_id)
        post = addrs and addrs.get(int(self.const.address_post))
        if post:
            a_txt, p_o_box, p_num, city, country = post
            post = self.make_address("$", p_o_box, a_txt, p_num,
                                     city, country)
            if post:
                entry['postalAddress'] = (post,)
        street = addrs and addrs.get(int(self.const.address_street))
        if street:
            a_txt, p_o_box, p_num, city, country = street
            street = self.make_address(", ", None, a_txt, p_num,
                                       city, country)
            if street:
                entry['street'] = (street,)
    else:
        URIs = self.id2labeledURI.get(person_id)
        if URIs:
            entry['labeledURI'] = self.attr_unique(
                map(iso2utf, URIs), normalize_caseExactString)

    if self.account_mail:
        mail = self.account_mail(account_id)
        if mail:
            entry['mail'] = (mail,)
    else:
        if self.person_contact_mail:
            mail_source_id = person_id
        else:
            mail_source_id = account_id
        mail = self.get_contacts(entity_id=mail_source_id,
                                 contact_type=self.const.contact_email,
                                 verify=verify_IA5String,
                                 normalize=normalize_IA5String)
        if mail:
            entry['mail'] = mail

    # Visible persons get an alias under their primary OU; invisible
    # persons get the reduced attribute set and no alias.
    if self.is_person_visible(person_id):
        attrs, alias_info = self.visible_person_attrs, (primary_ou_dn,)
    else:
        attrs, alias_info = self.invisible_person_attrs, ()

    for key, values in attrs.items():
        if key in entry:
            entry[key].extend(values)
        else:
            entry[key] = list(values)

    self.update_person_entry(entry, row, person_id)
    return dn, entry, alias_info
def get_old_account_ids(self):
    """ Returns the ID of candidate accounts with old affiliations.

    :return set: A set with Account entity_ids.
    """
    def _with_aff(affiliation=None, max_age=None):
        # Collect accounts owned by persons with the given affiliation
        # whose newest password is older than max_age.  NOTE: also
        # mutates the enclosing `old_ids` set — accounts that pass the
        # special (per-affiliation) rule are removed from the default
        # result.
        old = set()
        person = Utils.Factory.get("Person")(self.db)
        aff_or_status = self.constants.human2constant(affiliation)
        if not aff_or_status:
            self.logger.error('Unknown affiliation "%s"', affiliation)
            return old
        # A '/' in the name means an affiliation *status* was given.
        lookup = {
            'status' if '/' in affiliation else 'affiliation':
                aff_or_status
        }
        for row in person.list_affiliations(**lookup):
            person_id = row['person_id']
            # if person_id in old_ids:
            #     continue
            person.clear()
            person.find(person_id)
            # account_id = person.get_primary_account()
            for account_row in person.get_accounts():
                # consider all accounts belonging to this person
                account_id = account_row['account_id']
                if account_id:
                    history = [
                        x['set_at'] for x in ph.get_history(account_id)
                    ]
                    if history and (self.today - max(history) > max_age):
                        old.add(account_id)
                    else:
                        # The account does not have an expired password
                        # according to the special rules.
                        # Remove it from old_ids if it was put there
                        # by the default rules.
                        try:
                            old_ids.remove(account_id)
                        except KeyError:
                            pass
        self.logger.info(
            'Accounts with affiliation %s with old password: %s',
            str(affiliation), len(old))
        return old

    ph = PasswordHistory(self.db)
    # Default rule: password older than the configured global maximum.
    self.logger.info('Fetching accounts with password older than %d days',
                     self.config.max_password_age)
    old_ids = set([
        int(x['account_id']) for x in ph.find_old_password_accounts((
            self.today -
            dt.DateTimeDelta(self.config.max_password_age)
        ).strftime(DATE_FORMAT))
    ])
    self.logger.info('Fetching accounts with no password history')
    old_ids.update(
        set([int(x['account_id'])
             for x in ph.find_no_history_accounts()]))
    # Do we have special rules for certain person affiliations?
    # We want to end with the smallest 'max_password_age'
    aff_mappings = sorted(self.config.affiliation_mappings,
                          key=lambda k: k['max_password_age'],
                          reverse=True)
    for aff_mapping in aff_mappings:
        self.logger.info(
            'Fetching accounts with affiliation %s '
            'with password older than %d days',
            str(aff_mapping['affiliation']),
            aff_mapping['max_password_age'])
        old_ids.update(
            _with_aff(affiliation=aff_mapping['affiliation'],
                      max_age=dt.DateTimeDelta(
                          aff_mapping['max_password_age'])))
    self.logger.info('Fetching quarantines')
    # TODO: Select only autopassword quarantines?
    # Already-quarantined accounts need no further action.
    quarantined_ids = QuarantineHandler.get_locked_entities(
        self.db,
        entity_types=self.constants.entity_account,
        entity_ids=old_ids)
    old_ids = old_ids - quarantined_ids
    return old_ids
class PosixLDIF(object):
    """
    Generates posix-user, -filegroups and -netgroups.

    Does not support hosts in netgroups.
    """

    # Cerebrum convention: auto_super provides automatic super()-dispatch
    # so mixin subclasses can chain method calls.
    __metaclass__ = auto_super

    def __init__(self, db, logger, u_sprd=None, g_sprd=None, n_sprd=None,
                 fd=None):
        """
        Initialize database access, configuration and caches.

        :param db: Cerebrum database connection.
        :param logger: logger for progress and warning messages.
        :param u_sprd: user spread override; falls back to
            cereconf.LDAP_USER['spread'].  Mandatory (from one of the
            two sources).
        :param g_sprd: filegroup spread override (cereconf.LDAP_FILEGROUP).
        :param n_sprd: netgroup spread override (cereconf.LDAP_NETGROUP).
        :param fd: optional already-open output file object.
        """
        timer = make_timer(logger, 'Initing PosixLDIF...')
        from Cerebrum.modules import PosixGroup
        self.db = db
        self.logger = logger
        self.const = Factory.get('Constants')(self.db)
        self.grp = Factory.get('Group')(self.db)
        self.posuser = Factory.get('PosixUser')(self.db)
        self.posgrp = PosixGroup.PosixGroup(self.db)
        self.user_dn = LDIFutils.ldapconf('USER', 'dn', None)
        # get_name == True means usernames are resolved via the
        # account2name cache; init_user() switches this off because the
        # user rows carry names already.
        self.get_name = True
        self.fd = fd
        self.spread_d = {}
        # Validate spread from arg or from cereconf
        for x, y in zip(['USER', 'FILEGROUP', 'NETGROUP'],
                        [u_sprd, g_sprd, n_sprd]):
            spread = LDIFutils.map_spreads(
                y or getattr(cereconf, 'LDAP_' + x).get('spread'), list)
            if spread:
                self.spread_d[x.lower()] = spread
        # A user spread is mandatory; group/netgroup spreads are optional.
        if 'user' not in self.spread_d:
            raise Errors.ProgrammingError(
                "Must specify spread-value as 'arg' or in cereconf")
        # Caches shared between the *_ldif() passes.
        self.account2name = dict()
        self.groupcache = defaultdict(dict)
        self.group2groups = defaultdict(set)
        self.group2users = defaultdict(set)
        self.group2persons = defaultdict(list)
        timer('... done initing PosixLDIF.')

    def user_ldif(self, filename=None, auth_meth=None):
        """Generate posix-user."""
        timer = make_timer(self.logger, 'Starting user_ldif...')
        self.init_user(auth_meth)
        f = LDIFutils.ldif_outfile('USER', filename, self.fd)
        f.write(LDIFutils.container_entry_string('USER'))
        for row in self.posuser.list_extended_posix_users(
                self.user_auth,
                spread=self.spread_d['user'],
                include_quarantines=False):
            dn, entry = self.user_object(row)
            if dn:
                f.write(LDIFutils.entry_string(dn, entry, False))
        LDIFutils.end_ldif_outfile('USER', f, self.fd)
        timer('... done user_ldif')

    def init_user(self, auth_meth=None):
        """Load the per-user caches needed by user_object()."""
        timer = make_timer(self.logger, 'Starting init_user...')
        # The rows from list_extended_posix_users() carry 'entity_name',
        # so the account2name cache is not needed for the user pass.
        self.get_name = False
        self.qh = QuarantineHandler(self.db, None)
        self.posuser = Factory.get('PosixUser')(self.db)
        self.load_disk_tab()
        self.load_shell_tab()
        self.load_quaratines()
        self.load_auth_tab(auth_meth)
        self.cache_account2name()
        self.id2uname = {}
        timer('... init_user done.')

    def cache_account2name(self):
        """Cache account_id to username.

        This one is a bit more lenient than what the self.id2uname
        dictionary contains, as it blindly adds users with correct
        spread.
        """
        # No-op when user rows already carry names (see init_user) or
        # when the cache was already populated.
        if not self.get_name:
            return
        if len(self.account2name) > 0:
            return
        timer = make_timer(self.logger, 'Starting cache_account2name...')
        self.account2name = dict([
            (x['entity_id'], x['entity_name'])
            for x in self.posuser.list_names(self.const.account_namespace,
                                             spreads=self.spread_d['user'])
        ])
        timer('... done cache_account2name')

    def cache_group2gid(self):
        """Cache group_id -> posix GID (as a string) for posix groups."""
        timer = make_timer(self.logger, 'Starting cache_group2gid...')
        self.group2gid = dict()
        for row in self.posgrp.list_posix_groups():
            self.group2gid[row['group_id']] = str(row['posix_gid'])
        timer('... done cache_group2gid')

    def cache_groups_and_users(self):
        """Cache group->subgroup and group->member-account mappings.

        Covers every group with a filegroup/netgroup spread, then walks
        group membership downwards so that all transitively referenced
        child groups are expanded as well.
        """
        # Shared between filegroup_ldif() and netgroup_ldif() -- only
        # build once.
        if len(self.group2groups) or len(self.group2users):
            return

        def get_children_not_in_group2groups():
            # Child groups referenced as members but not yet expanded.
            children = set()
            map(children.update, self.group2groups.itervalues())
            return children.difference(self.group2groups.keys())

        timer = make_timer(self.logger, 'Starting cache_groups_and_users...')
        spread = []
        for s in ('filegroup', 'netgroup'):
            if s in self.spread_d:
                spread += self.spread_d[s]
        assert spread
        for row in self.grp.search_members(member_type=self.const.entity_group,
                                           spread=spread):
            self.group2groups[row['group_id']].add(row['member_id'])
        for row in self.grp.search_members(
                member_type=self.const.entity_account,
                member_spread=self.spread_d['user'][0],
                spread=spread):
            self.group2users[row['group_id']].add(row['member_id'])
        # Iterate until every referenced child group has an entry.
        children_groups = get_children_not_in_group2groups()
        extra_groups = children_groups.copy()
        while children_groups:
            for group_id in children_groups:
                self.group2groups[group_id] = set()
            for row in self.grp.search_members(
                    member_type=self.const.entity_group,
                    group_id=children_groups):
                member_id = row['member_id']
                self.group2groups[row['group_id']].add(member_id)
                extra_groups.add(member_id)
            children_groups = get_children_not_in_group2groups()
        # Account members of the child groups discovered via membership
        # (these were not matched by the spread filter above).
        if extra_groups:
            for row in self.grp.search_members(
                    member_type=self.const.entity_account,
                    member_spread=self.spread_d['user'][0],
                    group_id=extra_groups):
                self.group2users[row['group_id']].add(row['member_id'])
        timer('... done cache_groups_and_users')

    def cache_group2persons(self):
        """Cache person members in groups. Not used in main module."""
        pass

    def auth_methods(self, auth_meth=None):
        """Decide which authentication methods to fetch (mixin support).

        The primary method ends up in self.user_auth; additional methods
        are returned as a list for load_auth_tab().  Per-method format
        strings are stored in self.auth_format.  Accepts a dict mapping
        'userPassword' to method specs, a list/tuple of constant names,
        or a single constant name; defaults to
        cereconf.LDAP['auth_attr'].
        """
        self.auth_format = {}
        auth_meth_l = []
        self.user_auth = None
        code = '_AuthenticationCode'
        # Priority is arg, else cereconf default value.
        # auth_meth_l is the list sent to load_auth_tab and contains all
        # methods except the primary one (self.user_auth).
        auth = auth_meth or cereconf.LDAP['auth_attr']
        if isinstance(auth, dict):
            if 'userPassword' not in auth:
                self.logger.warn("Only support 'userPassword'-attribute")
                return None
            # First spec is the primary method; each spec is
            # (constant-name,) or (constant-name, format-string).
            default_auth = auth['userPassword'][:1][0]
            self.user_auth = LDIFutils.map_constants(code, default_auth[0])
            if len(default_auth) == 2:
                format = default_auth[1]
            else:
                format = None
            self.auth_format[int(self.user_auth)] = {
                'attr': 'userPassword',
                'format': format
            }
            for entry in auth['userPassword'][1:]:
                auth_t = LDIFutils.map_constants(code, entry[0])
                if len(entry) == 2:
                    format = entry[1]
                else:
                    format = None
                auth_meth_l.append(auth_t)
                self.auth_format[int(auth_t)] = {
                    'attr': 'userPassword',
                    'format': format
                }
        if isinstance(auth, (list, tuple)):
            self.user_auth = int(getattr(self.const, auth[:1][0]))
            for entry in auth[1:]:
                auth_meth_l.append(int(getattr(self.const, entry)))
        elif isinstance(auth, str):
            self.user_auth = int(getattr(self.const, auth))
        return auth_meth_l

    def load_auth_tab(self, auth_meth=None):
        """Cache account_id -> {method: auth_data} for secondary methods."""
        timer = make_timer(self.logger, 'Starting load_auth_tab...')
        self.a_meth = self.auth_methods(auth_meth)
        if not self.a_meth:
            timer('... done load_auth_tab')
            return
        self.auth_data = defaultdict(dict)
        for x in self.posuser.list_account_authentication(
                auth_type=self.a_meth, spread=self.spread_d['user']):
            if not x['account_id'] or not x['method']:
                continue
            acc_id, meth = int(x['account_id']), int(x['method'])
            self.auth_data[acc_id][meth] = x['auth_data']
        timer('... done load_auth_tab')

    def load_disk_tab(self):
        """Cache disk_id -> disk path for homedir resolution."""
        timer = make_timer(self.logger, 'Starting load_disk_tab...')
        self.disk = Factory.get('Disk')(self.db)
        self.disk_tab = {}
        for hd in self.disk.list():
            self.disk_tab[int(hd['disk_id'])] = hd['path']
        timer('... done load_disk_tab')

    def load_shell_tab(self):
        """Cache shell code -> shell path."""
        timer = make_timer(self.logger, 'Starting load_shell_tab...')
        self.shell_tab = {}
        for sh in self.posuser.list_shells():
            self.shell_tab[int(sh['code'])] = sh['shell']
        timer('... done load_shell_tab')

    def load_quaratines(self):
        # (sic) method name keeps the historical misspelling; renaming
        # would break mixin subclasses.
        """Cache account_id -> list of active quarantine types."""
        timer = make_timer(self.logger, 'Starting load_quaratines...')
        self.quarantines = defaultdict(list)
        for row in self.posuser.list_entity_quarantines(
                entity_types=self.const.entity_account,
                only_active=True,
                spreads=self.spread_d['user']):
            self.quarantines[int(row['entity_id'])].append(
                int(row['quarantine_type']))
        timer('... done load_quaratines')

    def user_object(self, row):
        """Build (dn, entry) for one posix user row.

        Returns (None, None) when the user must be skipped (quarantine
        skip-rule, missing shell or homedir, duplicate account).
        """
        account_id = int(row['account_id'])
        uname = row['entity_name']
        # Placeholder hash that can never match a real password.
        passwd = '{crypt}*Invalid'
        if row['auth_data']:
            # Primary method delivered data directly on the row.
            if self.auth_format[self.user_auth]['format']:
                passwd = self.auth_format[self.user_auth]['format'] % \
                    row['auth_data']
            else:
                passwd = row['auth_data']
        else:
            # Fall back through the secondary methods in a_meth order;
            # the last one with data wins.
            for uauth in [x for x in self.a_meth if x in self.auth_format]:
                try:
                    if self.auth_format[uauth]['format']:
                        passwd = self.auth_format[uauth]['format'] % \
                            self.auth_data[account_id][uauth]
                    else:
                        passwd = self.auth_data[account_id][uauth]
                except KeyError:
                    pass
        if not row['shell']:
            self.logger.warn("User %s have no posix-shell!" % uname)
            return None, None
        else:
            shell = self.shell_tab[int(row['shell'])]
        if account_id in self.quarantines:
            self.qh.quarantines = self.quarantines[account_id]
            if self.qh.should_skip():
                return None, None
            if self.qh.is_locked():
                passwd = '{crypt}' + '*Locked'
            # The quarantine may also dictate an override shell.
            qshell = self.qh.get_shell()
            if qshell is not None:
                shell = qshell
        try:
            if row['disk_id']:
                disk_path = self.disk_tab[int(row['disk_id'])]
            else:
                disk_path = None
            home = self.posuser.resolve_homedir(account_name=uname,
                                                home=row['home'],
                                                disk_path=disk_path)
            # 22.07.2013: Jira, CRB-98
            # Quick fix, treat empty "home" as an error, to make
            # generate_posix_ldif complete
            if not home:
                # This event should be treated the same way as a disk_id
                # NotFoundError -- it means that a PosixUser has no home
                # directory set.
                raise Exception()
        # NOTE(review): 'Exception' makes NotFoundError redundant and
        # swallows *any* error from resolve_homedir as "no homedir" --
        # consider narrowing.
        except (Errors.NotFoundError, Exception):
            self.logger.warn("User %s has no home-directory!" % uname)
            return None, None
        cn = row['name'] or row['gecos'] or uname
        # gecos must be 7-bit (ISO 646-60); cn is exported as UTF-8.
        gecos = latin1_to_iso646_60(row['gecos'] or cn)
        entry = {
            'objectClass': ['top', 'account', 'posixAccount'],
            'cn': (LDIFutils.iso2utf(cn), ),
            'uid': (uname, ),
            'uidNumber': (str(int(row['posix_uid'])), ),
            'gidNumber': (str(int(row['posix_gid'])), ),
            'homeDirectory': (home, ),
            'userPassword': (passwd, ),
            'loginShell': (shell, ),
            'gecos': (gecos, )
        }
        # Allow mixins to adjust the entry before the duplicate check.
        self.update_user_entry(account_id, entry, row)
        if not account_id in self.id2uname:
            self.id2uname[account_id] = uname
        else:
            self.logger.warn('Duplicate user-entry: (%s,%s)!',
                             account_id, uname)
            return None, None
        dn = ','.join((('uid=' + uname), self.user_dn))
        return dn, entry

    def update_user_entry(self, account_id, entry, row):
        """Hook for mixin classes to modify a user entry in place.

        (Should consider support for multiple mixin.)
        """
        pass

    def filegroup_ldif(self, filename=None):
        """
        Generate filegroup.

        Groups without group and expanded members from both external and
        internal groups.
        """
        timer = make_timer(self.logger, 'Starting filegroup_ldif...')
        if 'filegroup' not in self.spread_d:
            self.logger.warn("No spread is given for filegroup!")
            return
        self.init_filegroup()
        timer2 = make_timer(self.logger, 'Caching filegroups...')
        for row in self.grp.search(spread=self.spread_d['filegroup'],
                                   filter_expired=False):
            group_id = row['group_id']
            if group_id not in self.group2gid:
                # NOTE(review): two placeholders, three format() args --
                # the trailing [] is silently ignored.
                self.logger.warn(
                    "Group id:{} has one of {} but no GID, skipping".format(
                        group_id,
                        getattr(cereconf, 'LDAP_FILEGROUP').get('spread'),
                        []))
                continue
            self.create_group_object(group_id, row['name'],
                                     row['description'])
            self.create_filegroup_object(group_id)
            self.update_filegroup_entry(group_id)
        timer2('... done caching filegroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.filegroupcache.iteritems():
            # memberUid is the fully-expanded (recursive) account list.
            users = self.get_users(group_id, set())
            unames = self.userid2unames(users, group_id)
            entry['memberUid'] = unames
        timer2('... done adding users')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('FILEGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('FILEGROUP'))
        for group_id, entry in self.filegroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.fgrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        timer2('... done writing group objects')
        # Free the cache; it is rebuilt on the next run.
        self.filegroupcache = None
        LDIFutils.end_ldif_outfile('FILEGROUP', f, self.fd)
        timer('... done filegroup_ldif')

    def init_filegroup(self):
        """Initiate modules and constants for posixgroup"""
        from Cerebrum.modules import PosixGroup
        self.posgrp = PosixGroup.PosixGroup(self.db)
        self.fgrp_dn = LDIFutils.ldapconf('FILEGROUP', 'dn')
        self.filegroupcache = defaultdict(dict)
        self.cache_account2name()
        self.cache_group2gid()
        self.cache_groups_and_users()

    def create_filegroup_object(self, group_id):
        """Build the posixGroup LDIF entry for one cached group."""
        assert group_id not in self.filegroupcache
        cache = self.groupcache[group_id]
        entry = {
            'objectClass': ('top', 'posixGroup'),
            'cn': LDIFutils.iso2utf(cache['name']),
            'gidNumber': self.group2gid[group_id],
        }
        if 'description' in cache:
            entry['description'] = (LDIFutils.iso2utf(cache['description']), )
        self.filegroupcache[group_id] = entry

    def update_filegroup_entry(self, group_id):
        """Future use of mixin-classes"""
        pass

    def netgroup_ldif(self, filename=None):
        """Generate netgroup with only users."""
        timer = make_timer(self.logger, 'Starting netgroup_ldif...')
        if 'netgroup' not in self.spread_d:
            self.logger.warn("No valid netgroup-spread in cereconf or arg!")
            return
        self.init_netgroup()
        timer2 = make_timer(self.logger, 'Caching netgroups...')
        for row in self.grp.search(spread=self.spread_d['netgroup'],
                                   filter_expired=False):
            group_id = row['group_id']
            self.create_group_object(group_id, row['name'],
                                     row['description'])
            self.create_netgroup_object(group_id)
        # NOTE(review): timer message says "filegroups", but this pass
        # caches netgroups (copy-paste).
        timer2('... done caching filegroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.netgroupcache.iteritems():
            users, groups = self.get_users_and_groups(group_id, set(), set(),
                                                      add_persons=True)
            unames = self.userid2unames(users, group_id)
            triple = []
            for uname in unames:
                # Usernames containing '_' are skipped -- presumably not
                # valid in NIS netgroup triples; confirm.
                if '_' in uname:
                    continue
                triple.append('(,%s,)' % uname)
            netgroup = []
            for g in groups:
                netgroup.append(self.netgroupcache[g]['cn'])
            entry['nisNetgroupTriple'] = triple
            entry['memberNisNetgroup'] = netgroup
        timer2('... done adding users and groups')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('NETGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('NETGROUP'))
        for group_id, entry in self.netgroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.ngrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        LDIFutils.end_ldif_outfile('NETGROUP', f, self.fd)
        timer2('... done writing group objects')
        self.netgroupcache = None
        timer('... done netgroup_ldif')

    def cache_uncached_children(self):
        """Load name/description for child groups found only via membership."""
        timer = make_timer(self.logger, 'Starting cache_uncached_children...')
        children = set()
        map(children.update, self.group2groups.itervalues())
        extra = children.difference(self.groupcache.keys())
        if extra:
            for row in self.grp.search(group_id=extra):
                self.create_group_object(row['group_id'], row['name'],
                                         row['description'])
        timer('... done cache_uncached_children')

    def get_users_and_groups(self, group_id, users, groups,
                             add_persons=False):
        """Recursive method to get members and groups in a group.

        Child groups that are themselves exported as netgroups are added
        to `groups`; all others are expanded recursively into `users`.
        """
        users.update(self.group2users[group_id])
        if add_persons:
            if group_id in self.group2persons:
                users.update(self.group2persons[group_id])
        for g_id in self.group2groups[group_id]:
            assert g_id in self.groupcache, \
                "g_id %s in group_id %s missing" % (g_id, group_id)
            if g_id in self.netgroupcache:
                groups.add(g_id)
            else:
                users, groups = self.get_users_and_groups(g_id, users,
                                                          groups,
                                                          add_persons)
        return users, groups

    def get_users(self, group_id, users, add_persons=False):
        """Recursive method to get members from a group."""
        users.update(self.group2users[group_id])
        if add_persons:
            if group_id in self.group2persons:
                users.update(self.group2persons[group_id])
        for g_id in self.group2groups[group_id]:
            assert g_id in self.groupcache, \
                "g_id %s in group_id %s missing" % (g_id, group_id)
            # NOTE(review): add_persons is not propagated into the
            # recursion, so person members are only added for the
            # top-level group -- confirm this is intended.
            users = self.get_users(g_id, users)
        return users

    def create_netgroup_object(self, group_id):
        """Build the nisNetGroup LDIF entry for one cached group."""
        assert group_id not in self.netgroupcache
        cache = self.groupcache[group_id]
        entry = {
            'objectClass': ('top', 'nisNetGroup'),
            'cn': LDIFutils.iso2utf(cache['name'], )
        }
        if 'description' in cache:
            # Trailing comma builds the required single-element tuple.
            entry['description'] = \
                latin1_to_iso646_60(cache['description']).rstrip(),
        self.netgroupcache[group_id] = entry

    def init_netgroup(self):
        """Initiate modules, constants and cache"""
        self.ngrp_dn = LDIFutils.ldapconf('NETGROUP', 'dn')
        self.cache_account2name()
        self.cache_groups_and_users()
        self.cache_group2persons()
        self.netgroupcache = defaultdict(dict)

    def create_group_object(self, group_id, name, description):
        """Cache name/description for a group (idempotent)."""
        if group_id in self.groupcache:
            return
        self.groupcache[group_id] = {'name': name}
        if description:
            self.groupcache[group_id]['description'] = description

    def userid2unames(self, users, group_id):
        """Map account ids to usernames via whichever cache is active."""
        unames = []
        for user_id in users:
            if self.get_name:
                try:
                    uname = self.account2name[user_id]
                # NOTE(review): bare except -- only KeyError is expected
                # here; anything else is silently logged as "not found".
                except:
                    self.logger.info(
                        "account2name user id=%s in "
                        "group id=%s not found", user_id, group_id)
                    continue
            else:
                try:
                    uname = self.id2uname[user_id]
                except:
                    self.logger.info(
                        "Cache enabled but user id=%s in "
                        "group id=%s not found", user_id, group_id)
                    continue
            unames.append(uname)
        return unames
def make_person_entry(self, row, person_id):
    # Return (dn, person entry, alias_info) for a person to output,
    # or (None, anything, anything) if the person should not be output.
    # bool(alias_info) == False means no alias will be output.
    # Receives a row from list_persons() as a parameter.
    # The row must have key 'account_id',
    # and if person_dn_primaryOU() is not overridden: 'ou_id'.
    account_id = int(row['account_id'])
    # Persons without affiliations or without names are never exported.
    p_affiliations = self.affiliations.get(person_id)
    if not p_affiliations:
        self.logger.debug3("Omitting person id=%d, no affiliations",
                           person_id)
        return None, None, None
    names = self.person_names.get(person_id)
    if not names:
        self.logger.warn("Person %s got no names. Skipping.", person_id)
        return None, None, None
    name = iso2utf(names.get(int(self.const.name_full), '').strip())
    givenname = iso2utf(names.get(int(self.const.name_first), '').strip())
    lastname = iso2utf(names.get(int(self.const.name_last), '').strip())
    # Fall back to splitting the full name when first/last is missing;
    # 'sn' is mandatory for the person objectClass.
    if not (lastname and givenname):
        givenname, lastname = self.split_name(name, givenname, lastname)
        if not lastname:
            self.logger.warn("Person %s got no lastname. Skipping.",
                             person_id)
            return None, None, None
    if not name:
        name = " ".join(filter(None, (givenname, lastname)))
    entry = {
        'objectClass': ['top', 'person', 'organizationalPerson',
                        'inetOrgPerson', 'eduPerson'],
        'cn': (name,),
        'sn': (lastname,)}
    if givenname:
        entry['givenName'] = (givenname,)
    # uid only when the account has a cached name.
    try:
        entry['uid'] = (self.acc_name[account_id],)
    except KeyError:
        pass
    passwd = self.acc_passwd.get(account_id)
    qt = self.acc_quarantines.get(account_id)
    if qt:
        qh = QuarantineHandler(self.db, qt)
        if qh.should_skip():
            self.logger.debug3("Omitting person id=%d, quarantined",
                               person_id)
            return None, None, None
    # A separate quarantine set may decide locking; when both sets are
    # the same object the first handler is reused.
    if self.acc_locked_quarantines is not self.acc_quarantines:
        qt = self.acc_locked_quarantines.get(account_id)
        if qt:
            qh = QuarantineHandler(self.db, qt)
    # passwd == 0 marks "locked" (distinct from None = "no hash").
    if qt and qh.is_locked():
        passwd = 0
    if passwd:
        entry['userPassword'] = ("{crypt}" + passwd,)
    elif passwd != 0 and entry.get('uid'):
        self.logger.debug("User %s got no password-hash.",
                          entry['uid'][0])
    dn, primary_ou_dn = self.person_dn_primaryOU(entry, row, person_id)
    if not dn:
        self.logger.debug3("Omitting person id=%d, no DN", person_id)
        return None, None, None
    if self.org_dn:
        entry['eduPersonOrgDN'] = (self.org_dn,)
    if primary_ou_dn:
        entry['eduPersonPrimaryOrgUnitDN'] = (primary_ou_dn,)
    # edu_OUs = [primary_ou_dn] + [self.ou2DN.get(aff[2])
    #                              for aff in p_affiliations]
    edu_OUs = self._calculate_edu_OUs(
        primary_ou_dn,
        [self.ou2DN.get(aff[2]) for aff in p_affiliations])
    entry['eduPersonOrgUnitDN'] = self.attr_unique(filter(None, edu_OUs))
    entry['eduPersonAffiliation'] = self.attr_unique(self.select_list(
        self.eduPersonAff_selector, person_id, p_affiliations))
    # Contact info (title/phone/fax/addresses) only for persons the
    # contact_selector accepts; others may get labeledURI instead.
    if self.select_bool(self.contact_selector, person_id, p_affiliations):
        # title:
        titles = self.person_titles.get(person_id)
        self.add_lang_names(entry, 'title', titles)
        # phone & fax:
        for attr, contact in self.attr2id2contacts:
            contact = contact.get(person_id)
            if contact:
                entry[attr] = contact
        # addresses:
        addrs = self.addr_info.get(person_id)
        post = addrs and addrs.get(int(self.const.address_post))
        if post:
            a_txt, p_o_box, p_num, city, country = post
            post = self.make_address("$", p_o_box, a_txt, p_num,
                                     city, country)
            if post:
                entry['postalAddress'] = (post,)
        street = addrs and addrs.get(int(self.const.address_street))
        if street:
            a_txt, p_o_box, p_num, city, country = street
            street = self.make_address(", ", None, a_txt, p_num,
                                       city, country)
            if street:
                entry['street'] = (street,)
    else:
        URIs = self.id2labeledURI.get(person_id)
        if URIs:
            entry['labeledURI'] = self.attr_unique(
                map(iso2utf, URIs), normalize_caseExactString)
    # Mail: either via the account_mail callable, or contact info from
    # the person/account depending on person_contact_mail.
    if self.account_mail:
        mail = self.account_mail(account_id)
        if mail:
            entry['mail'] = (mail,)
    else:
        if self.person_contact_mail:
            mail_source_id = person_id
        else:
            mail_source_id = account_id
        mail = self.get_contacts(
            entity_id=mail_source_id,
            contact_type=self.const.contact_email,
            verify=verify_IA5String,
            normalize=normalize_IA5String)
        if mail:
            entry['mail'] = mail
    # Visible persons get extra attributes and an alias on the primary
    # OU; invisible persons get the invisible attribute set and no alias.
    if self.is_person_visible(person_id):
        attrs, alias_info = self.visible_person_attrs, (primary_ou_dn,)
    else:
        attrs, alias_info = self.invisible_person_attrs, ()
    for key, values in attrs.items():
        if key in entry:
            entry[key].extend(values)
        else:
            entry[key] = list(values)
    self.update_person_entry(entry, row, person_id)
    return dn, entry, alias_info
# Remove "weak password" quarantine for r in account.get_entity_quarantine(): for qua in (self.co.quarantine_autopassord, self.co.quarantine_svakt_passord): if int(r['quarantine_type']) == qua: account.delete_entity_quarantine(qua) account.write_db() account._db.commit() # TODO: move these checks up and raise exceptions? Wouldn't happen, # since generate_token() checks this already, but might get other # authentication methods later. if account.is_deleted(): log.warning("user %s is deleted" % uname) elif account.is_expired(): log.warning("user %s is expired" % uname) elif QuarantineHandler.check_entity_quarantines( self.db, account.entity_id).is_locked(): log.info("user %s has an active quarantine" % uname) return True def get_person(self, id_type, ext_id): person = Factory.get('Person')(self.db) person.clear() if not hasattr(self.co, id_type): log.error("Wrong id_type: '%s'" % id_type) raise Errors.CerebrumRPCException('person_notfound') try: person.find_by_external_id(getattr(self.co, id_type), ext_id) return person except Errors.NotFoundError: log.debug("Couldn't find person with %s='%s'" % (id_type, ext_id))
def get_old_account_ids(self):
    """
    Return the IDs of candidate accounts with old passwords.

    Combines accounts whose password (per PasswordHistory) is older than
    ``config.max_password_age`` days, accounts with no password history,
    and per-affiliation overrides from ``config.affiliation_mappings``;
    quarantine-locked accounts are then removed from the result.

    :return set: A set with Account entity_ids.
    """
    def _with_aff(affiliation=None, max_age=None):
        # Collect accounts owned by persons with the given affiliation
        # (or affiliation *status*, when the string contains '/') whose
        # newest password-set event is older than max_age.
        # NOTE: reads `ph` and mutates `old_ids` from the enclosing
        # scope -- accounts that pass this special rule are *removed*
        # from old_ids even when the default rule added them.
        old = set()
        person = Utils.Factory.get("Person")(self.db)
        aff_or_status = self.constants.human2constant(affiliation)
        if not aff_or_status:
            self.logger.error('Unknown affiliation "%s"', affiliation)
            return old
        # 'AFF/status' strings select on status, plain 'AFF' on
        # affiliation.
        lookup = {'status' if '/' in affiliation else 'affiliation':
                  aff_or_status}
        for row in person.list_affiliations(**lookup):
            person_id = row['person_id']
            # if person_id in old_ids:
            #     continue
            person.clear()
            person.find(person_id)
            # account_id = person.get_primary_account()
            for account_row in person.get_accounts():
                # consider all accounts belonging to this person
                account_id = account_row['account_id']
                if account_id:
                    history = [x['set_at'] for x in ph.get_history(
                        account_id)]
                    if history and (self.today - max(history) > max_age):
                        old.add(account_id)
                    else:
                        # The account does not have an expired password
                        # according to the special rules.
                        # Remove it from old_ids if it was put there
                        # by the default rules.
                        try:
                            old_ids.remove(account_id)
                        except KeyError:
                            pass
        self.logger.info(
            'Accounts with affiliation %s with old password: %s',
            str(affiliation), len(old))
        return old

    ph = PasswordHistory(self.db)
    self.logger.info('Fetching accounts with password older than %d days',
                     self.config.max_password_age)
    # Default rule: anything last set before the cutoff date.
    old_ids = set(
        [int(x['account_id']) for x in ph.find_old_password_accounts((
            self.today - dt.DateTimeDelta(
                self.config.max_password_age)).strftime(DATE_FORMAT))])
    self.logger.info('Fetching accounts with no password history')
    old_ids.update(
        set([int(x['account_id']) for x in ph.find_no_history_accounts()]))
    # Do we have special rules for certain person affiliations?
    # We want to end with the smallest 'max_password_age': mappings are
    # applied from the largest age down, so the strictest mapping
    # processed last gets the final word.
    aff_mappings = sorted(self.config.affiliation_mappings,
                          key=lambda k: k['max_password_age'],
                          reverse=True)
    for aff_mapping in aff_mappings:
        self.logger.info(
            'Fetching accounts with affiliation %s '
            'with password older than %d days',
            str(aff_mapping['affiliation']),
            aff_mapping['max_password_age'])
        old_ids.update(_with_aff(
            affiliation=aff_mapping['affiliation'],
            max_age=dt.DateTimeDelta(aff_mapping['max_password_age'])))
    self.logger.info('Fetching quarantines')
    # TODO: Select only autopassword quarantines?
    # Already-locked accounts are excluded from the candidate set.
    quarantined_ids = QuarantineHandler.get_locked_entities(
        self.db,
        entity_types=self.constants.entity_account,
        entity_ids=old_ids)
    old_ids = old_ids - quarantined_ids
    return old_ids
class PosixLDIF(object): """ Generates posix-user, -filegroups and -netgroups. Does not support hosts in netgroups. """ __metaclass__ = auto_super def __init__(self, db, logger, u_sprd=None, g_sprd=None, n_sprd=None, fd=None): """ Initiate database and import modules. Spreads are given in initiation and general constants which is used in more than one method. """ timer = make_timer(logger, 'Initing PosixLDIF...') from Cerebrum.modules import PosixGroup self.db = db self.logger = logger self.const = Factory.get('Constants')(self.db) self.grp = Factory.get('Group')(self.db) self.posuser = Factory.get('PosixUser')(self.db) self.posgrp = PosixGroup.PosixGroup(self.db) self.user_dn = LDIFutils.ldapconf('USER', 'dn', None) self.get_name = True self.fd = fd self.spread_d = {} # Validate spread from arg or from cereconf for x, y in zip(['USER', 'FILEGROUP', 'NETGROUP'], [u_sprd, g_sprd, n_sprd]): spread = LDIFutils.map_spreads( y or getattr(cereconf, 'LDAP_' + x).get('spread'), list) if spread: self.spread_d[x.lower()] = spread if 'user' not in self.spread_d: raise Errors.ProgrammingError( "Must specify spread-value as 'arg' or in cereconf") self.account2name = dict() self.groupcache = defaultdict(dict) self.group2groups = defaultdict(set) self.group2users = defaultdict(set) self.group2persons = defaultdict(list) timer('... done initing PosixLDIF.') def user_ldif(self, filename=None, auth_meth=None): """Generate posix-user.""" timer = make_timer(self.logger, 'Starting user_ldif...') self.init_user(auth_meth) f = LDIFutils.ldif_outfile('USER', filename, self.fd) f.write(LDIFutils.container_entry_string('USER')) for row in self.posuser.list_extended_posix_users( self.user_auth, spread=self.spread_d['user'], include_quarantines=False): dn, entry = self.user_object(row) if dn: f.write(LDIFutils.entry_string(dn, entry, False)) LDIFutils.end_ldif_outfile('USER', f, self.fd) timer('... 
done user_ldif') def init_user(self, auth_meth=None): timer = make_timer(self.logger, 'Starting init_user...') self.get_name = False self.qh = QuarantineHandler(self.db, None) self.posuser = Factory.get('PosixUser')(self.db) self.load_disk_tab() self.load_shell_tab() self.load_quaratines() self.load_auth_tab(auth_meth) self.cache_account2name() self.id2uname = {} timer('... init_user done.') def cache_account2name(self): """Cache account_id to username. This one is a bit more lenient that what the self.id2uname dictionary contains, as it blindly adds users with correct spread.""" if not self.get_name: return if len(self.account2name) > 0: return timer = make_timer(self.logger, 'Starting cache_account2name...') self.account2name = dict( (r['account_id'], r['name']) for r in self.posuser.search(spread=self.spread_d['user'], expire_start=None, expire_stop=None)) timer('... done cache_account2name') def cache_group2gid(self): timer = make_timer(self.logger, 'Starting cache_group2gid...') self.group2gid = dict() for row in self.posgrp.list_posix_groups(): self.group2gid[row['group_id']] = text_type(row['posix_gid']) timer('... 
done cache_group2gid') def cache_groups_and_users(self): if len(self.group2groups) or len(self.group2users): return def get_children_not_in_group2groups(): children = set() map(children.update, self.group2groups.itervalues()) return children.difference(self.group2groups.keys()) timer = make_timer(self.logger, 'Starting cache_groups_and_users...') spread = [] for s in ('filegroup', 'netgroup'): if s in self.spread_d: spread += self.spread_d[s] assert spread for row in self.grp.search_members( member_type=self.const.entity_group, spread=spread): self.group2groups[row['group_id']].add(row['member_id']) for row in self.grp.search_members( member_type=self.const.entity_account, member_spread=self.spread_d['user'][0], spread=spread): self.group2users[row['group_id']].add(row['member_id']) children_groups = get_children_not_in_group2groups() extra_groups = children_groups.copy() while children_groups: for group_id in children_groups: self.group2groups[group_id] = set() for row in self.grp.search_members( member_type=self.const.entity_group, group_id=children_groups): member_id = row['member_id'] self.group2groups[row['group_id']].add(member_id) extra_groups.add(member_id) children_groups = get_children_not_in_group2groups() if extra_groups: for row in self.grp.search_members( member_type=self.const.entity_account, member_spread=self.spread_d['user'][0], group_id=extra_groups): self.group2users[row['group_id']].add(row['member_id']) timer('... done cache_groups_and_users') def cache_group2persons(self): """Cache person members in groups. Not used in main module.""" pass def auth_methods(self, auth_meth=None): """Which authentication methods to fetch. Mixin-support. If all only one entry, it will prefect any in auth_table. If None, it will use default API authentication (md5_crypt). 
""" self.auth_format = {} auth_meth_l = [] self.user_auth = None code = '_AuthenticationCode' # Priority is arg, else cereconf default value # auth_meth_l is a list sent to load_auth_tab and contains # all methods minus primary which is called by auth = auth_meth or cereconf.LDAP['auth_attr'] if isinstance(auth, dict): if 'userPassword' not in auth: self.logger.warn("Only support 'userPassword'-attribute") return None default_auth = auth['userPassword'][:1][0] self.user_auth = LDIFutils.map_constants(code, default_auth[0]) if len(default_auth) == 2: format = default_auth[1] else: format = None self.auth_format[int(self.user_auth)] = {'attr': 'userPassword', 'format': format} for entry in auth['userPassword'][1:]: auth_t = LDIFutils.map_constants(code, entry[0]) if len(entry) == 2: format = entry[1] else: format = None auth_meth_l.append(auth_t) self.auth_format[int(auth_t)] = {'attr': 'userPassword', 'format': format} if isinstance(auth, (list, tuple)): self.user_auth = int(getattr(self.const, auth[:1][0])) for entry in auth[1:]: auth_meth_l.append(int(getattr(self.const, entry))) elif isinstance(auth, str): self.user_auth = int(getattr(self.const, auth)) return auth_meth_l def load_auth_tab(self, auth_meth=None): timer = make_timer(self.logger, 'Starting load_auth_tab...') self.a_meth = self.auth_methods(auth_meth) if not self.a_meth: timer('... done load_auth_tab') return self.auth_data = defaultdict(dict) for x in self.posuser.list_account_authentication(auth_type=self.a_meth, spread=self.spread_d['user']): if not x['account_id'] or not x['method']: continue acc_id, meth = int(x['account_id']), int(x['method']) self.auth_data[acc_id][meth] = x['auth_data'] timer('... done load_auth_tab') def load_disk_tab(self): timer = make_timer(self.logger, 'Starting load_disk_tab...') self.disk = Factory.get('Disk')(self.db) self.disk_tab = {} for hd in self.disk.list(): self.disk_tab[int(hd['disk_id'])] = hd['path'] timer('... 
done load_disk_tab') def load_shell_tab(self): timer = make_timer(self.logger, 'Starting load_shell_tab...') self.shell_tab = {} for sh in self.posuser.list_shells(): self.shell_tab[int(sh['code'])] = sh['shell'] timer('... done load_shell_tab') def load_quaratines(self): timer = make_timer(self.logger, 'Starting load_quaratines...') self.quarantines = defaultdict(list) for row in self.posuser.list_entity_quarantines( entity_types=self.const.entity_account, only_active=True, spreads=self.spread_d['user']): self.quarantines[int(row['entity_id'])].append( int(row['quarantine_type'])) timer('... done load_quaratines') def user_object(self, row): account_id = int(row['account_id']) uname = row['entity_name'] passwd = '{crypt}*Invalid' if row['auth_data']: if self.auth_format[self.user_auth]['format']: passwd = self.auth_format[self.user_auth]['format'] % \ row['auth_data'] else: passwd = row['auth_data'] else: for uauth in [x for x in self.a_meth if x in self.auth_format]: try: if self.auth_format[uauth]['format']: passwd = self.auth_format[uauth]['format'] % \ self.auth_data[account_id][uauth] else: passwd = self.auth_data[account_id][uauth] except KeyError: pass if not row['shell']: self.logger.warn("User %s has no POSIX shell", uname) return None, None else: shell = self.shell_tab[int(row['shell'])] if account_id in self.quarantines: self.qh.quarantines = self.quarantines[account_id] if self.qh.should_skip(): return None, None if self.qh.is_locked(): passwd = '{crypt}' + '*Locked' qshell = self.qh.get_shell() if qshell is not None: shell = qshell if row['disk_id']: disk_path = self.disk_tab[int(row['disk_id'])] else: disk_path = None home = self.posuser.resolve_homedir(account_name=uname, home=row['home'], disk_path=disk_path) if not home: self.logger.warn("User %s has no home directory", uname) return None, None cn = row['name'] or row['gecos'] or uname gecos = transliterate.to_iso646_60(row['gecos'] or cn) entry = { 'objectClass': ['top', 'account', 
'posixAccount'], 'cn': (cn,), 'uid': (uname,), 'uidNumber': (text_type(row['posix_uid']),), 'gidNumber': (text_type(row['posix_gid']),), 'homeDirectory': (home,), 'userPassword': (passwd,), 'loginShell': (shell,), 'gecos': (gecos,) } self.update_user_entry(account_id, entry, row) if account_id not in self.id2uname: self.id2uname[account_id] = uname else: self.logger.warn('Duplicate user entry: (%s, %s)', account_id, uname) return None, None dn = ','.join((('uid=' + uname), self.user_dn)) return dn, entry def update_user_entry(self, account_id, entry, row): """ Called by user_object(). Inject additional data here. """ pass def filegroup_ldif(self, filename=None): """ Generate filegroup. Groups without group and expanded members from both external and internal groups. """ timer = make_timer(self.logger, 'Starting filegroup_ldif...') if 'filegroup' not in self.spread_d: self.logger.warn("No spread is given for filegroup!") return self.init_filegroup() timer2 = make_timer(self.logger, 'Caching filegroups...') for row in self.grp.search(spread=self.spread_d['filegroup'], filter_expired=False): group_id = row['group_id'] if group_id not in self.group2gid: self.logger.warn( "Group id:{} has one of {} but no GID, skipping".format( group_id, getattr(cereconf, 'LDAP_FILEGROUP').get('spread'), [])) continue self.create_group_object(group_id, row['name'], row['description']) self.create_filegroup_object(group_id) self.update_filegroup_entry(group_id) timer2('... done caching filegroups') self.cache_uncached_children() timer2 = make_timer(self.logger, 'Adding users and groups...') for group_id, entry in self.filegroupcache.iteritems(): users = self.get_users(group_id, set()) unames = self.userid2unames(users, group_id) entry['memberUid'] = unames timer2('... 
done adding users') timer2 = make_timer(self.logger, 'Writing group objects...') f = LDIFutils.ldif_outfile('FILEGROUP', filename, self.fd) f.write(LDIFutils.container_entry_string('FILEGROUP')) for group_id, entry in self.filegroupcache.iteritems(): dn = ','.join(('cn=' + entry['cn'], self.fgrp_dn)) f.write(LDIFutils.entry_string(dn, entry, False)) timer2('... done writing group objects') self.filegroupcache = None LDIFutils.end_ldif_outfile('FILEGROUP', f, self.fd) timer('... done filegroup_ldif') def init_filegroup(self): """Initiate modules and constants for posixgroup""" from Cerebrum.modules import PosixGroup self.posgrp = PosixGroup.PosixGroup(self.db) self.fgrp_dn = LDIFutils.ldapconf('FILEGROUP', 'dn') self.filegroupcache = defaultdict(dict) self.cache_account2name() self.cache_group2gid() self.cache_groups_and_users() def create_filegroup_object(self, group_id): assert group_id not in self.filegroupcache cache = self.groupcache[group_id] entry = { 'objectClass': ('top', 'posixGroup'), 'cn': cache['name'], 'gidNumber': self.group2gid[group_id], } if 'description' in cache: entry['description'] = (cache['description'],) self.filegroupcache[group_id] = entry def update_filegroup_entry(self, group_id): """Future use of mixin-classes""" pass def netgroup_ldif(self, filename=None): """Generate netgroup with only users.""" timer = make_timer(self.logger, 'Starting netgroup_ldif...') if 'netgroup' not in self.spread_d: self.logger.warn("No valid netgroup-spread in cereconf or arg!") return self.init_netgroup() timer2 = make_timer(self.logger, 'Caching netgroups...') for row in self.grp.search(spread=self.spread_d['netgroup'], filter_expired=False): group_id = row['group_id'] self.create_group_object(group_id, row['name'], row['description']) self.create_netgroup_object(group_id) timer2('... 
done caching filegroups') self.cache_uncached_children() timer2 = make_timer(self.logger, 'Adding users and groups...') for group_id, entry in self.netgroupcache.iteritems(): users, groups = self.get_users_and_groups(group_id, set(), set(), add_persons=True) unames = self.userid2unames(users, group_id) triple = [] for uname in unames: if '_' in uname: continue triple.append('(,%s,)' % uname) netgroup = [] for g in groups: netgroup.append(self.netgroupcache[g]['cn']) entry['nisNetgroupTriple'] = triple entry['memberNisNetgroup'] = netgroup timer2('... done adding users and groups') timer2 = make_timer(self.logger, 'Writing group objects...') f = LDIFutils.ldif_outfile('NETGROUP', filename, self.fd) f.write(LDIFutils.container_entry_string('NETGROUP')) for group_id, entry in self.netgroupcache.iteritems(): dn = ','.join(('cn=' + entry['cn'], self.ngrp_dn)) f.write(LDIFutils.entry_string(dn, entry, False)) LDIFutils.end_ldif_outfile('NETGROUP', f, self.fd) timer2('... done writing group objects') self.netgroupcache = None timer('... done netgroup_ldif') def cache_uncached_children(self): timer = make_timer(self.logger, 'Starting cache_uncached_children...') children = set() map(children.update, self.group2groups.itervalues()) extra = children.difference(self.groupcache.keys()) if extra: for row in self.grp.search(group_id=extra): self.create_group_object(row['group_id'], row['name'], row['description']) timer('... 
done cache_uncached_children') def get_users_and_groups(self, group_id, users, groups, add_persons=False): """Recursive method to get members and groups in a group.""" users.update(self.group2users[group_id]) if add_persons: if group_id in self.group2persons: users.update(self.group2persons[group_id]) for g_id in self.group2groups[group_id]: assert g_id in self.groupcache, "g_id %s in group_id %s missing" % \ (g_id, group_id) if g_id in self.netgroupcache: groups.add(g_id) else: users, groups = self.get_users_and_groups(g_id, users, groups, add_persons) return users, groups def get_users(self, group_id, users, add_persons=False): """Recursive method to get members from a group.""" users.update(self.group2users[group_id]) if add_persons: if group_id in self.group2persons: users.update(self.group2persons[group_id]) for g_id in self.group2groups[group_id]: assert g_id in self.groupcache, "g_id %s in group_id %s missing" % \ (g_id, group_id) users = self.get_users(g_id, users) return users def create_netgroup_object(self, group_id): assert group_id not in self.netgroupcache cache = self.groupcache[group_id] entry = { 'objectClass': ('top', 'nisNetGroup'), 'cn': cache['name'], } if 'description' in cache: entry['description'] = \ transliterate.to_iso646_60(cache['description']).rstrip(), self.netgroupcache[group_id] = entry def init_netgroup(self): """Initiate modules, constants and cache""" self.ngrp_dn = LDIFutils.ldapconf('NETGROUP', 'dn') self.cache_account2name() self.cache_groups_and_users() self.cache_group2persons() self.netgroupcache = defaultdict(dict) def create_group_object(self, group_id, name, description): if group_id in self.groupcache: return self.groupcache[group_id] = {'name': name} if description: self.groupcache[group_id]['description'] = description def userid2unames(self, users, group_id): unames = [] for user_id in users: if self.get_name: try: uname = self.account2name[user_id] except Exception: self.logger.info("account2name user id=%s in " 
"group id=%s not found", user_id, group_id) continue else: try: uname = self.id2uname[user_id] except Exception: self.logger.info("Cache enabled but user id=%s in " "group id=%s not found", user_id, group_id) continue unames.append(uname) return unames
def generate_people_info(exported_orgs):
    """Collect employee data for the SAP user export.

    :param exported_orgs:
        Collection of ou_ids for org units that should be exported;
        persons attached to other OUs are skipped.
    :return dict:
        Maps person_id -> dict of export fields for that employee.
    """
    # Track processed persons with a set (O(1) membership) instead of a list.
    seen_person_ids = set()
    employee_data = {}
    all_employee_ids = fetch_employee_data()
    # BUG FIX: previously this call was filtered with
    # entity_ids=[x['person_id'] ...], i.e. *person* ids, while the result
    # is compared against *account* ids below -- so no account ever matched
    # and every user was exported as enabled.  Fetch all locked accounts.
    quarantined_accounts = QuarantineHandler.get_locked_entities(
        db, entity_types=const.entity_account)
    for p in all_employee_ids:
        # Process each person only once; source data may repeat persons.
        if p['person_id'] in seen_person_ids:
            continue
        seen_person_ids.add(p['person_id'])
        person.clear()
        person.find(p['person_id'])
        ou.clear()
        ou.find(p['ou_id'])
        use_home_oun_id = '%02d%02d%02d' % (ou.fakultet, ou.institutt,
                                            ou.avdeling)
        if ou.entity_id not in exported_orgs:
            # A person connected to a non-exported org unit is not exported.
            logger.warn(
                "Person %s connected to non-exported org. unit %s, skipping",
                person.entity_id, use_home_oun_id)
            continue
        primary_account_id = person.get_primary_account()
        if not primary_account_id:
            continue
        account.clear()
        try:
            account.find(primary_account_id)
        except Errors.NotFoundError:
            logger.warn("Skipping %s, no valid account found", p['person_id'])
            continue
        no_sap_nr = person.get_external_id(
            source_system=const.system_sap,
            id_type=const.externalid_sap_ansattnr)[0]['external_id']
        try:
            email_address = account.get_primary_mailaddress()
        except Errors.NotFoundError:
            logger.info("No primary e-mail address found for %s, sending ''",
                        account.account_name)
            email_address = ''
        # 'use_enabled' flag: 0 when the primary account is quarantined.
        quarantined = 0 if primary_account_id in quarantined_accounts else 1
        person_name_full = person.get_name(const.system_cached,
                                           const.name_full)
        phones = person.get_contact_info(source=const.system_sap,
                                         type=const.contact_phone)
        if not phones:
            use_t1 = ''
        else:
            use_t1 = phones[0]['contact_value']
        fax = person.get_contact_info(source=const.system_sap,
                                      type=const.contact_fax)
        if not fax:
            use_t2 = ''
        else:
            use_t2 = fax[0]['contact_value']
        employee_data[p['person_id']] = {
            'use_uid': no_sap_nr,
            'use_home_oun_id': use_home_oun_id,
            'use_supervisor_uid': '',
            'use_name': account.account_name,
            'use_domain': '',
            'use_full_name': person_name_full,
            'use_email_address': email_address,
            'use_language_code': 'NO',
            'use_approval_limit': '',
            'use_approve_own': '',
            'use_send_email': '1',
            'use_move_to_substitute': '',
            'use_substitute_uid': '',
            'use_substitute_start_date': '',
            'use_substitute_end_date': '',
            'use_client_type': '2',
            'use_inherit_delivery_address': '1',
            # Empty value allows inheritance of delivery address instead of
            # use_home_oun_id.
            'use_delivery_add_id': '',
            'use_change_delivery_addr': '1',
            'use_edit_delivery_addr': '1',
            'use_inherit_invoicing_address': '1',
            'use_invoicing_add_id': '',
            'use_change_invoicing_addr': '0',
            'use_edit_invoicing_addr': '',
            'use_inherit_cost_center': '0',
            'use_cce_id': '',
            'use_change_cost_center': '1',
            'use_ugr_id': '',
            'use_enabled': quarantined,
            'use_superadmin': '',
            'use_personnel_number': '',
            'use_view_abstract_suplier': '0',
            'use_plan_approval_limit': '',
            'use_t1': use_t1,
            'use_t2': use_t2,
            'uro_user_uid': no_sap_nr,
            'uro_id': 'DUMMY1',
            'uro_oun_id': use_home_oun_id,
            'uro_is_self': ''
        }
    logger.debug("Fetched all relevant employee data.")
    return employee_data
def generate_people_info(db, exported_orgs):
    """ Generate user data.

    :param db: A Cerebrum database connection.
    :param exported_orgs:
        A list or tuple with ou_id of OUs to export (from
        get_exported_orgs())

    :return generator:
        A generator that yields user dicts
    """
    co = Factory.get('Constants')(db)
    context = ContextPool(db)

    logger.debug('fetching account quarantines ...')
    # All locked account ids, used to compute the 'use_enabled' flag.
    quarantined_accounts = QuarantineHandler.get_locked_entities(
        db, entity_types=co.entity_account)
    logger.debug('... got %d quarantines', len(quarantined_accounts))

    def get_primary_contact(person, contact_type):
        # Return the first (primary) SAP contact value, or '' if none exist.
        for row in person.get_contact_info(source=co.system_sap,
                                           type=contact_type):
            return row['contact_value']
        return ''

    def get_primary_email(account):
        # A missing primary address is exported as the empty string.
        try:
            return account.get_primary_mailaddress()
        except Errors.NotFoundError:
            return ''

    logger.debug('fetching employee data...')
    for person_id, ou_id in iter_employees(db):
        with context.ou.find(ou_id) as ou:
            use_home_oun_id = six.text_type(ou)
            if ou.entity_id not in exported_orgs:
                logger.warn("Skipping %s, connected to non-exported OU %s",
                            person_id, use_home_oun_id)
                # if a person is connected to a non-exported org unit,
                # do not export
                continue
        with context.person.find(person_id) as pe:
            primary_account_id = pe.get_primary_account()
            if not primary_account_id:
                logger.info("Skipping %s, no primary account found",
                            person_id)
                continue
            with context.account.find(primary_account_id) as account:
                account_name = account.account_name
                email_address = get_primary_email(account)
            if not email_address:
                logger.info("No primary e-mail address found for %s",
                            account_name)
            no_sap_nr = pe.get_external_id(
                source_system=co.system_sap,
                id_type=co.externalid_sap_ansattnr)[0]['external_id']
            # 1 when the primary account is not quarantined, else 0.
            enabled = int(primary_account_id not in quarantined_accounts)
            person_name_full = pe.get_name(co.system_cached, co.name_full)
            contact_phone = get_primary_contact(pe, co.contact_phone)
            contact_fax = get_primary_contact(pe, co.contact_fax)
        yield {
            'uro_id': 'DUMMY1',
            'uro_oun_id': use_home_oun_id,
            'uro_user_uid': no_sap_nr,
            'use_change_cost_center': '1',
            'use_change_delivery_addr': '1',
            'use_change_invoicing_addr': '0',
            'use_client_type': '2',
            'use_edit_delivery_addr': '1',
            'use_email_address': email_address,
            'use_enabled': six.text_type(enabled),
            'use_full_name': person_name_full,
            'use_home_oun_id': use_home_oun_id,
            'use_inherit_cost_center': '0',
            'use_inherit_delivery_address': '1',
            'use_inherit_invoicing_address': '1',
            'use_language_code': 'NO',
            'use_name': account_name,
            'use_send_email': '1',
            'use_t1': contact_phone,
            'use_t2': contact_fax,
            'use_uid': no_sap_nr,
            'use_view_abstract_suplier': '0',
        }
    logger.debug("done fetching employee data")
def find_candidates(exclude_aff=None, grace=0, quarantine=None):
    """Find persons who should be quarantined and dequarantined.

    :param list exclude_aff:
        A list of affiliations/statuses that should be ignored when finding
        the candidates. Persons with only affiliations from this list will
        be considered as not affiliated. The list contains tuples, either
        with affiliation or affiliation- and status-codes.  Defaults to an
        empty list (exclude nothing).

    :param int grace:
        Defines a grace period for when affiliations are still considered
        active, after their end period.

    :param None/QuarantineCode/sequence(QuarantineCode) quarantine:
        If not None, will filter the `quarantined` return value only to
        have these quarantines.

    :rtype: dict
    :return:
        Three elements are included in the dict:

        - `affiliated`: A set with person-IDs for those considered
          affiliatied.
        - `not_affiliated`: A set with person-IDs for those *not*
          affiliatied.
        - `quarantined`: A set with account-IDs for all quarantined
          accounts.
    """
    # Fix: the default used to be a mutable `[]` (shared across calls);
    # use the None sentinel and create a fresh list per call.
    if exclude_aff is None:
        exclude_aff = []
    datelimit = DateTime.now() - int(grace)
    logger.debug2("Including affiliations deleted after: %s", datelimit)

    def is_aff_considered(row):
        """Check for if an affiliation should be considered or not."""
        # Exclude affiliations deleted before the datelimit:
        if row['deleted_date'] and row['deleted_date'] < datelimit:
            return False
        if (row['affiliation'], row['status']) in exclude_aff:
            return False
        if (row['affiliation'], ) in exclude_aff:
            return False
        return True

    affs = filter(is_aff_considered,
                  pe.list_affiliations(include_deleted=True))
    affed = set(x['person_id'] for x in affs)
    logger.debug('Found %d persons with affiliations', len(affed))
    naffed = set(x['person_id'] for x in pe.list_persons()) - affed
    logger.debug('Found %d persons without affiliations', len(naffed))
    if quarantine is None:
        # No filter: all actively locked accounts.
        quarantined = QuarantineHandler.get_locked_entities(
            db, entity_types=co.entity_account)
    else:
        quarantined = set(
            x['entity_id'] for x in
            ac.list_entity_quarantines(entity_types=co.entity_account,
                                       only_active=True,
                                       quarantine_types=quarantine))
    logger.debug('Found %d quarantined accounts', len(quarantined))
    return {
        'affiliated': affed,
        'not_affiliated': naffed,
        'quarantined': quarantined,
    }
def read_quarantines(self):
    """Populate ``self.quarantined`` with all locked account entity ids."""
    # For the time being we fetch every quarantine, regardless of
    # quarantine type.
    locked = QuarantineHandler.get_locked_entities(
        self._db,
        entity_types=self.const.entity_account)
    self.quarantined.update(locked)
class PosixLDIF(object):
    """ Generates posix-user, -filegroups and -netgroups.

    Does not support hosts in netgroups.
    """

    __metaclass__ = auto_super

    def __init__(self, db, logger, u_sprd=None, g_sprd=None, n_sprd=None,
                 fd=None):
        """ Initiate database and import modules.

        Spreads are given in initiation and general constants which is
        used in more than one method.
        """
        timer = make_timer(logger, 'Initing PosixLDIF...')
        from Cerebrum.modules import PosixGroup
        self.db = db
        self.logger = logger
        self.const = Factory.get('Constants')(self.db)
        self.grp = Factory.get('Group')(self.db)
        self.posuser = Factory.get('PosixUser')(self.db)
        self.posgrp = PosixGroup.PosixGroup(self.db)
        self.user_dn = LDIFutils.ldapconf('USER', 'dn', None)
        # This is an odd one -- if set to False, then id2uname should be
        # populated with users exported in the users export -- which makes
        # the group exports filter group members by *actually* exported
        # users...
        self.get_name = True
        self.fd = fd
        self.spread_d = {}
        # Validate spread from arg or from cereconf
        for x, y in zip(['USER', 'FILEGROUP', 'NETGROUP'],
                        [u_sprd, g_sprd, n_sprd]):
            spread = LDIFutils.map_spreads(
                y or getattr(cereconf, 'LDAP_' + x).get('spread'), list)
            if spread:
                self.spread_d[x.lower()] = spread
        if 'user' not in self.spread_d:
            raise Errors.ProgrammingError(
                "Must specify spread-value as 'arg' or in cereconf")
        self.account2name = dict()
        self.group2gid = dict()
        self.groupcache = defaultdict(dict)
        self.group2groups = defaultdict(set)
        self.group2users = defaultdict(set)
        self.group2persons = defaultdict(list)
        self.shell_tab = dict()
        self.quarantines = dict()
        self.user_exporter = UserExporter(self.db)
        if len(self.spread_d['user']) > 1:
            # Only the first user spread decides which homedir to export.
            logger.warning('Exporting users with multiple spreads, '
                           'ignoring homedirs from %r',
                           self.spread_d['user'][1:])
        self.homedirs = HomedirResolver(db, self.spread_d['user'][0])
        self.owners = OwnerResolver(db)
        auth_attr = LDIFutils.ldapconf('USER', 'auth_attr', None)
        self.user_password = AuthExporter.make_exporter(
            db,
            auth_attr['userPassword'])
        timer('... done initing PosixLDIF.')

    def write_user_objects_head(self, f):
        """ Write additional objects before the USER object. """
        pass

    @clock_time
    def user_ldif(self, filename=None):
        """Generate posix-user."""
        self.init_user()
        f = LDIFutils.ldif_outfile('USER', filename, self.fd)
        self.write_user_objects_head(f)
        # Write the USER container object
        f.write(LDIFutils.container_entry_string('USER'))

        def generate_users():
            # Yield (dn, entry) for every exportable posix user.
            for row in self.posuser.list_posix_users(
                    spread=self.spread_d['user'],
                    filter_expired=True):
                account_id = row['account_id']
                dn, entry = self.user_object(row)
                if not dn:
                    logger.debug('no dn for account_id=%r', account_id)
                    continue
                yield dn, entry

        # Sort by dn for a stable, diffable output file.
        for dn, entry in sorted(generate_users(),
                                key=operator.itemgetter(0)):
            try:
                f.write(LDIFutils.entry_string(dn, entry, False))
            except Exception:
                logger.error('Got error on dn=%r', dn)
                raise
        LDIFutils.end_ldif_outfile('USER', f, self.fd)

    @clock_time
    def init_user(self):
        """Build all caches needed by user_object()."""
        self.get_name = False
        self.qh = QuarantineHandler(self.db, None)
        self.posuser = Factory.get('PosixUser')(self.db)
        self.shell_tab = self.user_exporter.shell_codes()
        self.quarantines = self.user_exporter.make_quarantine_cache(
            self.spread_d['user']
        )
        self.owners.make_owner_cache()
        self.owners.make_name_cache()
        self.homedirs.make_home_cache()
        self.group2gid = self.user_exporter.make_posix_gid_cache()
        self.load_auth_tab()
        self.cache_account2name()
        self.id2uname = {}

    @clock_time
    def cache_account2name(self):
        """Cache account_id to username.

        This one is a bit more lenient that what the self.id2uname
        dictionary contains, as it blindly adds users with correct spread.
        It should *not* be used for filtering!
        """
        if len(self.account2name) > 0:
            return
        # TODO> OMG! For some reason, Account.search() takes a *wildcard*
        # spread argument for filtering, but does not support filtering by
        # multiple spread values!
        if len(self.spread_d['user']) == 1:
            # Only look up account names with the given spread.
            spread = self.spread_d['user'][0]
        else:
            # We'll have to look up all names, for now.
            spread = None
        self.account2name = dict(
            (r['account_id'], r['name'])
            for r in self.posuser.search(spread=spread,
                                         expire_start=None,
                                         expire_stop=None))

    @clock_time
    def cache_groups_and_users(self):
        """Cache group->group and group->account membership graphs."""
        if len(self.group2groups) or len(self.group2users):
            return

        def get_children_not_in_group2groups():
            # Member groups seen as children but not yet expanded.
            children = set()
            map(children.update, self.group2groups.itervalues())
            return children.difference(self.group2groups.keys())

        spread = []
        for s in ('filegroup', 'netgroup'):
            if s in self.spread_d:
                spread += self.spread_d[s]
        assert spread
        for row in self.grp.search_members(
                member_type=self.const.entity_group,
                spread=spread):
            self.group2groups[row['group_id']].add(row['member_id'])
        for row in self.grp.search_members(
                member_type=self.const.entity_account,
                member_spread=self.spread_d['user'][0],
                spread=spread):
            self.group2users[row['group_id']].add(row['member_id'])
        # Expand nested member groups transitively until closure.
        children_groups = get_children_not_in_group2groups()
        extra_groups = children_groups.copy()
        while children_groups:
            for group_id in children_groups:
                self.group2groups[group_id] = set()
            for row in self.grp.search_members(
                    member_type=self.const.entity_group,
                    group_id=children_groups):
                member_id = row['member_id']
                self.group2groups[row['group_id']].add(member_id)
                extra_groups.add(member_id)
            children_groups = get_children_not_in_group2groups()
        if extra_groups:
            # Also cache account members of the indirectly reached groups.
            for row in self.grp.search_members(
                    member_type=self.const.entity_account,
                    member_spread=self.spread_d['user'][0],
                    group_id=extra_groups):
                self.group2users[row['group_id']].add(row['member_id'])

    def cache_group2persons(self):
        """Cache person members in groups.

        Not used in main module."""
        pass

    @clock_time
    def load_auth_tab(self):
        """Refresh the cached password/auth data for all users."""
        self.user_password.cache.update_all()

    def user_object(self, row):
        """Build the (dn, entry) pair for one posix user row.

        Returns (None, None) when the user must be skipped (no shell, no
        homedir, skip-worthy quarantine, or duplicate account_id).
        """
        account_id = int(row['account_id'])
        uname = self.account2name[account_id]
        try:
            passwd = self.user_password.get(account_id)
        except LookupError:
            # No usable auth data -> unusable password hash.
            passwd = '{crypt}*Invalid'
        if not row['shell']:
            self.logger.warn("User %s has no POSIX shell", uname)
            return None, None
        else:
            shell = self.shell_tab[int(row['shell'])]
        if account_id in self.quarantines:
            self.qh.quarantines = self.quarantines[account_id]
            if self.qh.should_skip():
                return None, None
            if self.qh.is_locked():
                passwd = '{crypt}*Locked'
            # Quarantine may also override the login shell.
            qshell = self.qh.get_shell()
            if qshell is not None:
                shell = qshell
        home = self.homedirs.get_homedir(row['account_id'],
                                         allow_no_disk=True)
        if not home:
            self.logger.warn("User %s has no home directory", uname)
            return None, None
        owner_id = self.owners.get_owner_id(row['account_id'])
        fullname = self.owners.get_name(row['account_id'])
        cn = fullname or row['gecos'] or uname
        # gecos must be 7-bit (ISO 646-60) safe.
        gecos = transliterate.to_iso646_60(row['gecos'] or cn)
        posix_gid = self.group2gid[row['gid']]
        entry = {
            'objectClass': ['top', 'account', 'posixAccount'],
            'cn': (cn,),
            'uid': (uname,),
            'uidNumber': (text_type(row['posix_uid']),),
            'gidNumber': (text_type(posix_gid),),
            'homeDirectory': (home,),
            'userPassword': (passwd,),
            'loginShell': (shell,),
            'gecos': (gecos,)
        }
        self.update_user_entry(account_id, entry, owner_id)
        if account_id not in self.id2uname:
            self.id2uname[account_id] = uname
        else:
            self.logger.warn('Duplicate user entry: (%s, %s)',
                             account_id, uname)
            return None, None
        dn = ','.join((('uid=' + uname), self.user_dn))
        return dn, entry

    def update_user_entry(self, account_id, entry, owner_id):
        """ Called by user_object(). Inject additional data here. """
        pass

    @clock_time
    def filegroup_ldif(self, filename=None):
        """ Generate filegroup.

        Groups without group and expanded members from both external and
        internal groups.
        """
        if 'filegroup' not in self.spread_d:
            self.logger.warn("No spread is given for filegroup!")
            return
        self.init_filegroup()
        timer2 = make_timer(self.logger, 'Caching filegroups...')
        for row in self.grp.search(spread=self.spread_d['filegroup'],
                                   filter_expired=True):
            group_id = row['group_id']
            if group_id not in self.group2gid:
                self.logger.warn(
                    "Group id:{} has one of {} but no GID, skipping".format(
                        group_id,
                        getattr(cereconf, 'LDAP_FILEGROUP').get('spread'),
                        []))
                continue
            self.create_group_object(group_id, row['name'],
                                     row['description'])
            self.create_filegroup_object(group_id)
            self.update_filegroup_entry(group_id)
        timer2('... done caching filegroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.filegroupcache.iteritems():
            # Expand (recursively) all account members of the group.
            users = self.get_users(group_id, set())
            unames = self.userid2unames(users, group_id)
            entry['memberUid'] = unames
        timer2('... done adding users')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('FILEGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('FILEGROUP'))
        for group_id, entry in self.filegroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.fgrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        timer2('... done writing group objects')
        self.filegroupcache = None
        LDIFutils.end_ldif_outfile('FILEGROUP', f, self.fd)

    def init_filegroup(self):
        """Initiate modules and constants for posixgroup"""
        from Cerebrum.modules import PosixGroup
        self.posgrp = PosixGroup.PosixGroup(self.db)
        self.fgrp_dn = LDIFutils.ldapconf('FILEGROUP', 'dn')
        self.filegroupcache = defaultdict(dict)
        self.cache_account2name()
        self.group2gid = self.user_exporter.make_posix_gid_cache()
        self.cache_groups_and_users()

    def create_filegroup_object(self, group_id):
        """Create the LDIF entry dict for one filegroup."""
        assert group_id not in self.filegroupcache
        cache = self.groupcache[group_id]
        entry = {
            'objectClass': ('top', 'posixGroup'),
            'cn': cache['name'],
            'gidNumber': text_type(self.group2gid[group_id]),
        }
        if 'description' in cache:
            entry['description'] = (cache['description'],)
        self.filegroupcache[group_id] = entry

    def update_filegroup_entry(self, group_id):
        """Future use of mixin-classes"""
        pass

    @clock_time
    def netgroup_ldif(self, filename=None):
        """Generate netgroup with only users."""
        if 'netgroup' not in self.spread_d:
            self.logger.warn("No valid netgroup-spread in cereconf or arg!")
            return
        self.init_netgroup()
        timer2 = make_timer(self.logger, 'Caching netgroups...')
        for row in self.grp.search(spread=self.spread_d['netgroup'],
                                   filter_expired=True):
            group_id = row['group_id']
            self.create_group_object(group_id, row['name'],
                                     row['description'])
            self.create_netgroup_object(group_id)
        timer2('... done caching filegroups')
        self.cache_uncached_children()
        timer2 = make_timer(self.logger, 'Adding users and groups...')
        for group_id, entry in self.netgroupcache.iteritems():
            users, groups = self.get_users_and_groups(group_id,
                                                      set(),
                                                      set(),
                                                      add_persons=True)
            unames = self.userid2unames(users, group_id)
            triple = []
            for uname in unames:
                # Usernames with '_' are not valid in NIS netgroup triples.
                if '_' in uname:
                    continue
                triple.append('(,%s,)' % uname)
            netgroup = []
            for g in groups:
                netgroup.append(self.netgroupcache[g]['cn'])
            entry['nisNetgroupTriple'] = triple
            entry['memberNisNetgroup'] = netgroup
        timer2('... done adding users and groups')
        timer2 = make_timer(self.logger, 'Writing group objects...')
        f = LDIFutils.ldif_outfile('NETGROUP', filename, self.fd)
        f.write(LDIFutils.container_entry_string('NETGROUP'))
        for group_id, entry in self.netgroupcache.iteritems():
            dn = ','.join(('cn=' + entry['cn'], self.ngrp_dn))
            f.write(LDIFutils.entry_string(dn, entry, False))
        LDIFutils.end_ldif_outfile('NETGROUP', f, self.fd)
        timer2('... done writing group objects')
        self.netgroupcache = None

    @clock_time
    def cache_uncached_children(self):
        """Cache member groups that were not selected by spread."""
        children = set()
        map(children.update, self.group2groups.itervalues())
        extra = children.difference(self.groupcache.keys())
        if extra:
            for row in self.grp.search(group_id=extra):
                self.create_group_object(row['group_id'], row['name'],
                                         row['description'])

    def get_users_and_groups(self, group_id, users, groups,
                             add_persons=False):
        """Recursive method to get members and groups in a group."""
        users.update(self.group2users[group_id])
        if add_persons:
            if group_id in self.group2persons:
                users.update(self.group2persons[group_id])
        for g_id in self.group2groups[group_id]:
            assert g_id in self.groupcache, \
                "g_id %s in group_id %s missing" % (g_id, group_id)
            if g_id in self.netgroupcache:
                # Exported netgroups are referenced, not expanded.
                groups.add(g_id)
            else:
                users, groups = self.get_users_and_groups(g_id, users,
                                                          groups,
                                                          add_persons)
        return users, groups

    def get_users(self, group_id, users, add_persons=False):
        """Recursive method to get members from a group."""
        users.update(self.group2users[group_id])
        if add_persons:
            if group_id in self.group2persons:
                users.update(self.group2persons[group_id])
        for g_id in self.group2groups[group_id]:
            assert g_id in self.groupcache, \
                "g_id %s in group_id %s missing" % (g_id, group_id)
            users = self.get_users(g_id, users)
        return users

    def create_netgroup_object(self, group_id):
        """Create the LDIF entry dict for one netgroup."""
        assert group_id not in self.netgroupcache
        cache = self.groupcache[group_id]
        entry = {
            'objectClass': ('top', 'nisNetGroup'),
            'cn': cache['name'],
        }
        if 'description' in cache:
            entry['description'] = \
                transliterate.to_iso646_60(cache['description']).rstrip(),
        self.netgroupcache[group_id] = entry

    def init_netgroup(self):
        """Initiate modules, constants and cache"""
        self.ngrp_dn = LDIFutils.ldapconf('NETGROUP', 'dn')
        self.cache_account2name()
        self.cache_groups_and_users()
        self.cache_group2persons()
        self.netgroupcache = defaultdict(dict)

    def create_group_object(self, group_id, name, description):
        """Cache name/description for a group (idempotent)."""
        if group_id in self.groupcache:
            return
        self.groupcache[group_id] = {'name': name}
        if description:
            self.groupcache[group_id]['description'] = description

    def userid2unames(self, users, group_id):
        """Map account ids to usernames, skipping unknown ids."""
        unames = []
        for user_id in users:
            if self.get_name:
                # Lenient lookup: any account with the right spread.
                try:
                    uname = self.account2name[user_id]
                except Exception:
                    self.logger.info("account2name user id=%s in "
                                     "group id=%s not found",
                                     user_id, group_id)
                    continue
            else:
                # Strict lookup: only users actually exported by user_ldif.
                try:
                    uname = self.id2uname[user_id]
                except Exception:
                    self.logger.info("Cache enabled but user id=%s in "
                                     "group id=%s not found",
                                     user_id, group_id)
                    continue
            unames.append(uname)
        return unames
raise Errors.CerebrumRPCException('error_unknown') # Remove "weak password" quarantine for r in account.get_entity_quarantine(): for qua in (self.co.quarantine_autopassord, self.co.quarantine_svakt_passord): if int(r['quarantine_type']) == qua: account.delete_entity_quarantine(qua) account.write_db() account._db.commit() # TODO: move these checks up and raise exceptions? Wouldn't happen, # since generate_token() checks this already, but might get other # authentication methods later. if account.is_deleted(): log.warning("user %s is deleted" % uname) elif account.is_expired(): log.warning("user %s is expired" % uname) elif QuarantineHandler.check_entity_quarantines( self.db, account.entity_id).is_locked(): log.info("user %s has an active quarantine" % uname) return True def get_person(self, id_type, ext_id): person = Factory.get('Person')(self.db) person.clear() if not hasattr(self.co, id_type): log.error("Wrong id_type: '%s'" % id_type) raise Errors.CerebrumRPCException('person_notfound') try: person.find_by_external_id(getattr(self.co, id_type), ext_id) return person except Errors.NotFoundError: log.debug("Couldn't find person with %s='%s'" % (id_type, ext_id))
def generate_people_info(exported_orgs):
    """Build the employee export dict.

    For every row from fetch_employee_data() (deduplicated on person_id),
    look up the person, their org unit and primary account, and assemble
    the per-person export record.

    Persons are skipped when their org unit is not in `exported_orgs`,
    when they have no primary account, or when the primary account cannot
    be found.

    :param exported_orgs: collection of exported OU entity_ids.
    :return: dict mapping person_id -> dict of export fields.
    """
    employee_data = {}
    all_employee_ids = fetch_employee_data()
    # NOTE(review): entity_ids here are *person* ids while entity_types is
    # account, and the membership test further down uses the primary
    # *account* id -- confirm that get_locked_entities() resolves this as
    # intended.
    quarantined_accounts = QuarantineHandler.get_locked_entities(
        db,
        entity_types=const.entity_account,
        entity_ids=[x['person_id'] for x in all_employee_ids])
    # Set instead of list: O(1) duplicate detection.
    seen_person_ids = set()
    for p in all_employee_ids:
        if p['person_id'] in seen_person_ids:
            continue
        seen_person_ids.add(p['person_id'])
        person.clear()
        person.find(p['person_id'])
        ou.clear()
        ou.find(p['ou_id'])
        use_home_oun_id = '%02d%02d%02d' % (ou.fakultet, ou.institutt,
                                            ou.avdeling)
        if ou.entity_id not in exported_orgs:
            # Persons connected to a non-exported org unit are not exported.
            logger.warn("Person %s connected to non-exported org. unit %s, "
                        "skipping", person.entity_id, use_home_oun_id)
            continue
        primary_account_id = person.get_primary_account()
        if not primary_account_id:
            continue
        account.clear()
        try:
            account.find(primary_account_id)
        except Errors.NotFoundError:
            logger.warn("Skipping %s, no valid account found", p['person_id'])
            continue
        # Raises IndexError when the person has no SAP employee number;
        # kept as-is so bad source data fails loudly.
        no_sap_nr = person.get_external_id(
            source_system=const.system_sap,
            id_type=const.externalid_sap_ansattnr)[0]['external_id']
        try:
            email_address = account.get_primary_mailaddress()
        except Errors.NotFoundError:
            logger.info("No primary e-mail address found for %s, sending ''",
                        account.account_name)
            email_address = ''
        # use_enabled: 0 disables quarantined accounts, 1 enables the rest.
        quarantined = 0 if primary_account_id in quarantined_accounts else 1
        person_name_full = person.get_name(const.system_cached,
                                           const.name_full)
        phones = person.get_contact_info(source=const.system_sap,
                                         type=const.contact_phone)
        use_t1 = phones[0]['contact_value'] if phones else ''
        fax = person.get_contact_info(source=const.system_sap,
                                      type=const.contact_fax)
        use_t2 = fax[0]['contact_value'] if fax else ''
        employee_data[p['person_id']] = {
            'use_uid': no_sap_nr,
            'use_home_oun_id': use_home_oun_id,
            'use_supervisor_uid': '',
            'use_name': account.account_name,
            'use_domain': '',
            'use_full_name': person_name_full,
            'use_email_address': email_address,
            'use_language_code': 'NO',
            'use_approval_limit': '',
            'use_approve_own': '',
            'use_send_email': '1',
            'use_move_to_substitute': '',
            'use_substitute_uid': '',
            'use_substitute_start_date': '',
            'use_substitute_end_date': '',
            'use_client_type': '2',
            'use_inherit_delivery_address': '1',
            # Empty value allows inheritance of the delivery address
            # instead of use_home_oun_id.
            'use_delivery_add_id': '',
            'use_change_delivery_addr': '1',
            'use_edit_delivery_addr': '1',
            'use_inherit_invoicing_address': '1',
            'use_invoicing_add_id': '',
            'use_change_invoicing_addr': '0',
            'use_edit_invoicing_addr': '',
            'use_inherit_cost_center': '0',
            'use_cce_id': '',
            'use_change_cost_center': '1',
            'use_ugr_id': '',
            'use_enabled': quarantined,
            'use_superadmin': '',
            'use_personnel_number': '',
            'use_view_abstract_suplier': '0',
            'use_plan_approval_limit': '',
            'use_t1': use_t1,
            'use_t2': use_t2,
            'uro_user_uid': no_sap_nr,
            'uro_id': 'DUMMY1',
            'uro_oun_id': use_home_oun_id,
            'uro_is_self': ''}
    logger.debug("Fetched all relevant employee data.")
    return employee_data
def process(check_trait, set_trait, days, phone_types, message, only_aff):
    """Send reminder SMS to accounts carrying a fresh `check_trait`.

    Accounts that already got the reminder trait (`set_trait`), are
    expired, quarantined, outside the `only_aff` affiliations, or have
    already changed their password are skipped.  After a successful SMS
    the `set_trait` is set on the account.
    """
    logger.info("SMS-reminder started")
    if commit:
        logger.info("In commit, will send out SMS")
    else:
        logger.info("In dryrun, will not send SMS")
    cutoff = now() - days
    logger.debug('Matching only traits newer than: %s', cutoff)
    account = Factory.get('Account')(db)
    person = Factory.get('Person')(db)
    # Candidates: traits newer than the cutoff but older than 24 hours.
    candidates = set(
        trait['entity_id']
        for trait in account.list_traits(code=check_trait)
        if cutoff <= trait['date'] < (now() - 1))
    logger.debug('Found %d traits of type %s from last %d days to check',
                 len(candidates), check_trait, days)
    already_reminded = set(
        trait['entity_id']
        for trait in account.list_traits(code=set_trait)
        if trait['date'] >= cutoff)
    logger.debug('Found %d already set traits of type %s from last %d days',
                 len(already_reminded), set_trait, days)
    candidates -= already_reminded
    logger.debug('Then %d traits of type %s remains to be checked',
                 len(candidates), check_trait)
    affiliated = set()
    if only_aff:
        for aff in only_aff:
            affiliated.update(
                row['person_id']
                for row in person.list_affiliations(affiliation=aff))
        logger.debug('Found %d person affiliations to filter by',
                     len(affiliated))
    else:
        logger.debug('No only_aff specified, so no filtering on affiliation')
    processed = 0
    for account_id in candidates:
        account.clear()
        try:
            account.find(account_id)
        except Errors.NotFoundError:
            logger.error("Could not find user with entity_id: %s, skipping",
                         account_id)
            continue
        if account.is_expired():
            logger.info("Account %s is expired, skipping",
                        account.account_name)
            continue
        if QuarantineHandler.check_entity_quarantines(
                db, account.entity_id).is_locked():
            logger.info("Account %s is quarantined, skipping",
                        account.account_name)
            continue
        if affiliated and account.owner_id not in affiliated:
            logger.info('Account %s without given person affiliation, '
                        'skipping', account.account_name)
            continue
        if have_changed_password(account):
            logger.info("Account %s already changed password, skipping",
                        account.account_name)
            continue
        # Everything ready, should send the SMS
        if not send_sms(account, person, phone_types, message=message):
            logger.warn('Failed to send SMS to %s', account.account_name)
            continue
        account.populate_trait(code=set_trait, date=now())
        account.write_db()
        if commit:
            db.commit()
        else:
            db.rollback()
        logger.debug("Trait set for %s", account.account_name)
        processed += 1
    logger.info("SMS-reminder done, %d accounts processed" % processed)