def remove_plaintext_passwords(self):
    """Remove plaintext passwords from old password change log entries."""
    cutoff = datetime.date.today() - datetime.timedelta(
        days=self.password_age)
    started_at = time.time()
    count = 0
    logger.info("Fetching password change log entries...")
    events = self.db.get_log_events_date(
        type=int(self.clconst.account_password),
        edate=cutoff)
    for event in events:
        # Entries without change params carry nothing to strip.
        if not event['change_params']:
            continue
        params = json.loads(event['change_params'])
        if 'password' not in params:
            continue
        del params['password']
        if self.commit:
            self.db.update_log_event(event['change_id'], params)
        logger.info("Removed password for id:{:d}".format(
            event['subject_entity']))
        count += 1
    logger.info("Spent {} seconds".format(int(time.time() - started_at)))
    logger.info("Removed {} passwords older than {}".format(count, cutoff))
    maybe_commit(self)
def process_changelog(evt_key, classes):
    """Process the entries from changelog identifying previous events
    by evt_key, and using events and callback methods in classes
    """
    callbacks_by_type = {}
    for cls in classes:
        for trigger in cls.get_triggers():
            change_type = int(getattr(cl_const, trigger))
            callbacks_by_type.setdefault(change_type, []).append(
                getattr(cls, "notify_%s" % trigger))
    handler = CLHandler.CLHandler(Factory.get('Database')())
    for evt in handler.get_events(evt_key, callbacks_by_type.keys()):
        results = []
        for callback in callbacks_by_type[int(evt.fields.change_type_id)]:
            # Each callback gets its own freshly-deserialized params dict.
            if evt['change_params']:
                params = json.loads(evt['change_params'])
            else:
                params = {}
            logger.debug2("Callback %i -> %s", evt['change_id'], callback)
            results.append(callback(evt, params))
        # Only confirm the event when every callback reported success.
        if all(results):
            handler.confirm_event(evt)
    handler.commit_confirmations()
def check_phone(self, phone_no, numbers, person, account):
    """Check if given phone_no belongs to person.

    The phone number is only searched for in source systems that the
    person has active affiliations from and contact types as defined in
    INDIVIDUATION_PHONE_TYPES. Other numbers are ignored.

    Set delays are also checked, to avoid that changed phone numbers
    are used for some period.
    """
    is_fresh = self.entity_is_fresh(person, account)
    for num in numbers:
        if not self.number_match(stored=num['number'], given=phone_no):
            continue
        if is_fresh:
            # delay is ignored for fresh entities
            return True
        delay = self.get_delay(num['system_name'], num['type'])
        for row in self.db.get_log_events(
                types=self.clconst.entity_cinfo_add,
                any_entity=person.entity_id,
                sdate=delay):
            # BUG FIX: log rows may lack change params entirely;
            # json.loads(None) would raise TypeError.
            if not row['change_params']:
                continue
            data = json.loads(row['change_params'])
            # .get() avoids KeyError on params without a 'value' key
            if num['number'] == data.get('value'):
                logger.info('person_id=%r recently changed phoneno',
                            person.entity_id)
                self.mail_warning(
                    person=person, account=account,
                    reason=("Your phone number has recently been"
                            " changed. Due to security reasons, it"
                            " can not be used by the password service"
                            " for a few days."))
                raise Errors.CerebrumRPCException('fresh_phonenumber')
        return True
    return False
def process_changelog(evt_key, classes):
    """Process the entries from changelog identifying previous events
    by evt_key, and using events and callback methods in classes
    """
    dispatch = {}
    for handler_class in classes:
        for trigger_name in handler_class.get_triggers():
            key = int(getattr(cl_const, trigger_name))
            notify = getattr(handler_class, "notify_%s" % trigger_name)
            dispatch.setdefault(key, []).append(notify)
    cl_handler = CLHandler.CLHandler(Factory.get('Database')())
    for evt in cl_handler.get_events(evt_key, dispatch.keys()):
        outcomes = []
        for notify in dispatch[int(evt.fields.change_type_id)]:
            # Deserialize params per callback so mutations don't leak
            # between callbacks.
            if evt['change_params']:
                params = json.loads(evt['change_params'])
            else:
                params = {}
            logger.debug2("Callback %i -> %s", evt['change_id'], notify)
            outcomes.append(notify(evt, params))
        # Confirm only when all callbacks returned a true value.
        if all(outcomes):
            cl_handler.confirm_event(evt)
    cl_handler.commit_confirmations()
def _get_old_homeMDB(self):
    """Return the most recent homeMDB value this account once had.

    homeMDB values are stored as EntityTraits.  After 2009-08-XX the
    trait values are written to the ChangeLog when deleted; look the
    value up there.  Returns None when no old value can be found.
    """
    candidates = {}
    rows = self._db.get_log_events(subject_entity=self.entity_id,
                                   types=(self.clconst.trait_del,))
    for row in rows:
        if not row['change_params']:
            continue
        try:
            params = json.loads(row['change_params'])
            if params['code'] != self.const.trait_homedb_info:
                continue
            strval = params.get('strval')
            if strval:
                # There may be several hits; key them by timestamp.
                candidates[row['tstamp']] = strval
        except Exception:
            # Best effort: skip malformed entries.
            continue
    if not candidates:
        return None
    # The entry with the greatest (most recent) timestamp wins.
    return candidates[max(candidates)]
def _get_old_homeMDB(self):
    """Look up the last homeMDB value this account had, if any.

    homeMDB values are stored as EntityTraits.  After 2009-08-XX the
    trait values are stored in the ChangeLog when deleted; try to fetch
    that value.  When several old values exist, the most recent one is
    returned; otherwise None.
    """
    found = {}
    for row in self._db.get_log_events(
            subject_entity=self.entity_id,
            types=(self.clconst.trait_del,)):
        raw = row['change_params']
        if not raw:
            continue
        try:
            params = json.loads(raw)
            if params['code'] != self.const.trait_homedb_info:
                continue
            value = params.get('strval')
            if value:
                # More than one hit is possible; remember each by tstamp.
                found[row['tstamp']] = value
        except Exception:
            # Skip entries we cannot interpret.
            continue
    if found:
        # Sorting the timestamps puts the most recent one last.
        stamps = sorted(found.keys())
        return found[stamps[-1]]
    return None
def check_phone(self, phone_no, numbers, person, account):
    """Check if given phone_no belongs to person.

    The phone number is only searched for in source systems that the
    person has active affiliations from and contact types as defined in
    INDIVIDUATION_PHONE_TYPES. Other numbers are ignored.

    Set delays are also checked, to avoid that changed phone numbers
    are used for some period.
    """
    is_fresh = self.entity_is_fresh(person, account)
    for num in numbers:
        if not self.number_match(stored=num['number'], given=phone_no):
            continue
        if is_fresh:
            # delay is ignored for fresh entities
            return True
        delay = self.get_delay(num['system_name'], num['type'])
        for row in self.db.get_log_events(
                types=self.clconst.entity_cinfo_add,
                any_entity=person.entity_id,
                sdate=delay):
            # BUG FIX: guard against rows with NULL change_params --
            # json.loads(None) raises TypeError.
            if not row['change_params']:
                continue
            data = json.loads(row['change_params'])
            # .get() avoids a KeyError when 'value' is missing
            if num['number'] == data.get('value'):
                logger.info('person_id=%r recently changed phoneno',
                            person.entity_id)
                self.mail_warning(
                    person=person, account=account,
                    reason=("Your phone number has recently been"
                            " changed. Due to security reasons, it"
                            " can not be used by the password service"
                            " for a few days."))
                raise Errors.CerebrumRPCException('fresh_phonenumber')
        return True
    return False
def remove_plaintext_passwords(self):
    """Removes plaintext passwords older than the configured age."""
    today = datetime.date.today()
    cutoff = today - datetime.timedelta(days=self.password_age)
    t0 = time.time()
    removed = 0
    logger.info("Fetching password change log entries...")
    for row in self.db.get_log_events_date(
            type=int(self.clconst.account_password),
            edate=cutoff):
        if not row['change_params']:
            # Nothing to remove
            continue
        data = json.loads(row['change_params'])
        if 'password' in data:
            del data['password']
            if self.commit:
                self.db.update_log_event(row['change_id'], data)
            logger.info("Removed password for id:{:d}".format(
                row['subject_entity']))
            removed += 1
    logger.info("Spent {} seconds".format(int(time.time() - t0)))
    logger.info("Removed {} passwords older than {}".format(
        removed, cutoff))
    maybe_commit(self)
def event_info(self, operator, event_id):
    """Return a dict describing a single event (postmaster only)."""
    if not self.ba.is_postmaster(operator.get_entity_id()):
        raise PermissionDenied('No access to event')
    try:
        ev = self.db.get_event(event_id)
    except ValueError:
        raise CerebrumError("Error: Event id must be an integer")
    except Errors.NotFoundError:
        raise CerebrumError('Error: No such event exists!')
    # For certain keys, convert constants to human-readable representations
    if ev['change_params']:
        params = json.loads(ev['change_params'])
        params = self._make_constants_human_readable(params)
        params = repr(params)
    else:
        params = None
    ret = {
        'event_id': ev['event_id'],
        'event_type': six.text_type(
            self.clconst.map_const(ev['event_type'])),
        'target_system': six.text_type(
            self.const.map_const(ev['target_system'])),
        'failed': ev['failed'],
        'tstamp': ev['tstamp'],
        'taken_time': ev['taken_time'],
        'subject_entity': ev['subject_entity'],
        'dest_entity': ev['dest_entity'],
        'change_params': params,
    }
    # Look up types and names for subject and destination entities
    en = Factory.get('Entity')(self.db)
    for key in ('subject_entity', 'dest_entity'):
        if not ev[key]:
            continue
        try:
            en.clear()
            en.find(ev[key])
            etype = six.text_type(self.const.map_const(en.entity_type))
            ename = self._get_entity_name(en.entity_id, en.entity_type)
            ret[key] = '{} {} (id:{:d})'.format(etype, ename, en.entity_id)
        except Exception:
            # Best effort: leave the raw id when the lookup fails.
            pass
    return ret
def event_search(self, operator, target_sys, *args):
    """Search for events in the database.

    :param str target_sys: Target system to search
    :param str args: Pattern(s) to search for.
    """
    if not self.ba.is_postmaster(operator.get_entity_id()):
        raise PermissionDenied('No access to event')
    # TODO: Fetch an ACL of which target systems can be searched by this
    ts = self._validate_target_system(operator, target_sys)
    params = self._parse_search_params(*args)
    if not params:
        raise CerebrumError('Must specify search pattern.')
    params['target_system'] = ts
    # Fetch information about the matching event ids and present it.
    results = []
    for hit in self._search_events(**params):
        ev = self.db.get_event(hit['event_id'])
        try:
            types = self.db.get_event_target_type(hit['event_id'])
        except Errors.NotFoundError:
            # Either the subject-/destination-entity has been deleted,
            # or the event carries no subject/destination information.
            types = []
        change_params = ev['change_params']
        if change_params:
            change_params = self._make_constants_human_readable(
                json.loads(change_params))
        entry = {
            'id': ev['event_id'],
            'type': six.text_type(
                self.clconst.map_const(ev['event_type'])),
            'taken': ev['taken_time'],
            'failed': ev['failed'],
            'params': repr(change_params),
            'dest_type': None,
            'subject_type': None,
        }
        if 'dest_type' in types:
            entry['dest_type'] = six.text_type(
                self.const.map_const(types['dest_type']))
        if 'subject_type' in types:
            entry['subject_type'] = six.text_type(
                self.const.map_const(types['subject_type']))
        results.append(entry)
    return results
def check_changelog_for_quarantine_triggers(logger, sendmail): """ Scans the changelog for changes related to the quarantines defined in cereconf.QUARANTINE_NOTIFY_DATA. This dict also contains which actions should trigger an email notification for specific quarantines, as well as the email sender/recipient and a quarantine-specific CLHandler-key. If sendmail is enabled, events will be confirmed in CLHandler, and emails will be sent instead of outputted to the logger instance. :param logger: Factory-generated logger-instance :param sendmail: Turns on event confirming to CLHandler and email sending :type: bool """ for quarantine in cereconf.QUARANTINE_NOTIFY_DATA: logger.info('Checking changelog for triggers for quarantine %s' % quarantine) quar_data = cereconf.QUARANTINE_NOTIFY_DATA[quarantine] triggers = tuple( getattr(clconst, trigger) for trigger in quar_data['triggers']) for event in cl.get_events(quar_data['cl_key'], triggers): change_params = {} if event['change_params']: change_params = json.loads(event['change_params']) if change_params['q_type'] == int(co.Quarantine(quarantine)): # Generate dicts with relevant info for email quar_info = generate_quarantine_info(quarantine, quar_data) event_info = generate_event_info(event) logger.info('Found trigger for quarantine %s in change_ID %d' % (quarantine, event_info['change_id'])) try: if sendmail: generate_mail_notification(quar_info, event_info) logger.info( 'Email for change-ID: %d generated and sent.' % event_info['change_id']) cl.confirm_event(event) logger.info('change-ID %d confirmed in CLHandler.' % event_info['change_id']) else: logger.debug('Mail output for change-ID %d:' % event_info['change_id']) logger.debug( generate_mail_notification(quar_info, event_info, debug=True)) except Exception, e: logger.exception(e) raise else: # Irrelevant quarantines should simply be confirmed if sendmail: cl.confirm_event(event) if sendmail: cl.commit_confirmations()
def check_changelog_for_quarantine_triggers(logger, sendmail): """ Scans the changelog for changes related to the quarantines defined in cereconf.QUARANTINE_NOTIFY_DATA. This dict also contains which actions should trigger an email notification for specific quarantines, as well as the email sender/recipient and a quarantine-specific CLHandler-key. If sendmail is enabled, events will be confirmed in CLHandler, and emails will be sent instead of outputted to the logger instance. :param logger: Factory-generated logger-instance :param sendmail: Turns on event confirming to CLHandler and email sending :type: bool """ for quarantine in cereconf.QUARANTINE_NOTIFY_DATA: logger.info('Checking changelog for triggers for quarantine %s' % quarantine) quar_data = cereconf.QUARANTINE_NOTIFY_DATA[quarantine] triggers = tuple( getattr(clconst, trigger) for trigger in quar_data['triggers']) for event in cl.get_events(quar_data['cl_key'], triggers): change_params = {} if event['change_params']: change_params = json.loads(event['change_params']) if change_params['q_type'] == int(co.Quarantine(quarantine)): # Generate dicts with relevant info for email quar_info = generate_quarantine_info(quarantine, quar_data) event_info = generate_event_info(event) logger.info('Found trigger for quarantine %s in change_ID %d' % (quarantine, event_info['change_id'])) try: if sendmail: generate_mail_notification(quar_info, event_info) logger.info( 'Email for change-ID: %d generated and sent.' % event_info['change_id']) cl.confirm_event(event) logger.info('change-ID %d confirmed in CLHandler.' % event_info['change_id']) else: logger.debug('Mail output for change-ID %d:' % event_info['change_id']) logger.debug(generate_mail_notification(quar_info, event_info, debug=True)) except Exception, e: logger.exception(e) raise else: # Irrelevant quarantines should simply be confirmed if sendmail: cl.confirm_event(event) if sendmail: cl.commit_confirmations()
def misc_history(self, operator, days):
    """List create/password events from the last ``days`` days.

    Restricted to superusers.  Returns a list of dicts with timestamp,
    change type, subject name and (where applicable) person id.
    """
    if not self.ba.is_superuser(operator.get_entity_id()):
        raise PermissionDenied("Currently limited to superusers")
    types = (self.clconst.account_create, self.clconst.account_password,
             self.clconst.ou_create, self.clconst.person_create)
    sdate = mx.DateTime.now() - mx.DateTime.oneDay * int(days)
    # Collect in a dict to remove duplicates etc.
    by_entity = {}
    for r in self.db.get_log_events(sdate=sdate, types=types):
        by_entity.setdefault(int(r['subject_entity']), {})[
            int(r['change_type_id'])] = r
    ret = []
    for entity_id, changes in by_entity.items():
        if (int(self.clconst.account_password) in changes and
                int(self.clconst.account_create) not in changes):
            # TBD: is it really OK to show the password here?
            del changes[int(self.clconst.account_password)]
        for k, v in changes.items():
            change_type = self.clconst.ChangeType(int(k))
            params = ''
            # BUG FIX: changelog constants live on self.clconst (as used
            # everywhere else in this function), not self.const.
            if k == self.clconst.account_password:
                if v['change_params']:
                    params = json.loads(v['change_params'])
                    params = params.get('password', '')
            entry = {
                'tstamp': v['tstamp'],
                'change_type': text_type(change_type),
                'misc': params,
            }
            entity = self._get_entity(ident=int(v['subject_entity']))
            if entity.entity_type == int(self.const.entity_person):
                person = self._get_person("entity_id", entity.entity_id)
                name = person.get_name(self.const.system_cached,
                                       self.const.name_full)
                entry['person_id'] = int(person.entity_id)
            elif entity.entity_type == int(self.const.entity_account):
                account = self.Account_class(self.db)
                account.find(entity.entity_id)
                name = account.account_name
                entry['person_id'] = int(account.owner_id)
            else:
                self.ou.clear()
                self.ou.find(entity.entity_id)
                name = self.ou.get_name_with_language(
                    name_variant=self.const.ou_name,
                    name_language=self.const.language_nb,
                    default="")
            entry['name'] = name
            ret.append(entry)
    return ret
def misc_history(self, operator, days):
    """Show recent create/password events (superusers only).

    :param operator: the calling session
    :param days: how many days back to look
    :return: list of dicts (tstamp, change_type, misc, name, person_id)
    """
    if not self.ba.is_superuser(operator.get_entity_id()):
        raise PermissionDenied("Currently limited to superusers")
    types = (self.clconst.account_create, self.clconst.account_password,
             self.clconst.ou_create, self.clconst.person_create)
    sdate = mx.DateTime.now() - mx.DateTime.oneDay * int(days)
    # Collect in a dict to remove duplicates etc.
    collected = {}
    for r in self.db.get_log_events(sdate=sdate, types=types):
        collected.setdefault(int(r['subject_entity']), {})[
            int(r['change_type_id'])] = r
    ret = []
    for entity_id, changes in collected.items():
        if (int(self.clconst.account_password) in changes and
                int(self.clconst.account_create) not in changes):
            # TBD: is it really OK to show the password here?
            del changes[int(self.clconst.account_password)]
        for k, v in changes.items():
            change_type = self.clconst.ChangeType(int(k))
            params = ''
            # BUG FIX: compare against self.clconst.account_password --
            # all other changelog constants here come from clconst.
            if k == self.clconst.account_password:
                if v['change_params']:
                    params = json.loads(v['change_params'])
                    params = params.get('password', '')
            entry = {
                'tstamp': v['tstamp'],
                'change_type': text_type(change_type),
                'misc': params,
            }
            entity = self._get_entity(ident=int(v['subject_entity']))
            if entity.entity_type == int(self.const.entity_person):
                person = self._get_person("entity_id", entity.entity_id)
                name = person.get_name(self.const.system_cached,
                                       self.const.name_full)
                entry['person_id'] = int(person.entity_id)
            elif entity.entity_type == int(self.const.entity_account):
                account = self.Account_class(self.db)
                account.find(entity.entity_id)
                name = account.account_name
                entry['person_id'] = int(account.owner_id)
            else:
                self.ou.clear()
                self.ou.find(entity.entity_id)
                name = self.ou.get_name_with_language(
                    name_variant=self.const.ou_name,
                    name_language=self.const.language_nb,
                    default="")
            entry['name'] = name
            ret.append(entry)
    return ret
def load_params(self, event):
    """Get the change params of an event.

    :type event: dbrow
    :param event: The db row returned by Change- or EventLog.

    :rtype: dict or None
    :return: The change params.
    """
    raw = event['change_params']
    # Strings are still serialized JSON; anything else is passed through.
    if not isinstance(raw, basestring):
        return raw
    return json.loads(raw)
def handler(event):
    """Deserialize the event's params and dispatch it to ``function``."""
    raw = event['change_params']
    args = json.loads(raw) if raw is not None else None
    logger.debug('Handler(%s) called for %s', tp, args)
    eid = int(event['subject_entity'])
    datum = data[tp].get(eid)
    try:
        if datum is None:
            # Lazily populate the cache entry for this subject.
            datum = filler(eid, db, co, data)
        function(event, db, co, datum, args)
    except NotFoundError:
        # Subject has disappeared; nothing to do for this event.
        pass
def proc_sympa_create(request):
    """Execute the request for creating a sympa mailing list.

    @type request: ??
    @param request: An object describing the sympa list creation request.
    """
    try:
        listname = get_address(request["entity_id"])
    except Errors.NotFoundError:
        # BUG FIX: ``listname`` is unbound when get_address() raises, so
        # logging it here used to raise NameError; log the entity id.
        logger.info("Sympa list address id:%s is deleted! No need to create",
                    request["entity_id"])
        return True
    try:
        state = json.loads(request["state_data"])
    except ValueError:
        state = None
    # Remove this when there's no chance of pickled data
    if state is None:
        try:
            state = pickle.loads(request["state_data"])
        except Exception:
            pass
    if state is None:
        logger.error("Cannot parse request state for sympa list=%s: %s",
                     listname, request["state_data"])
        return True
    try:
        host = state["runhost"]
        profile = state["profile"]
        description = state["description"]
        admins = ",".join(state["admins"])
    except KeyError:
        logger.error("No host/profile/description specified for sympa list %s",
                     listname)
        return True
    # 2008-08-01 IVR FIXME: Safe quote everything fished out from state.
    cmd = [
        cereconf.SYMPA_SCRIPT,
        host,
        'newlist',
        listname,
        admins,
        profile,
        description,
    ]
    return spawn_and_log_output(cmd) == EXIT_SUCCESS
def typeset_change_log_row(row, database):
    """Render one change_log row as a one-line human-readable string."""
    account = get_account(row["subject_entity"], database)
    clconst = Factory.get("CLConstants")()
    if account is not None:
        entity = "account: %s/id=%s" % (account.account_name,
                                        account.entity_id)
    else:
        entity = "id=%s" % row["subject_entity"]
    if row.get("confirmation_key") is not None:
        magic = "with request key %s " % str(row["confirmation_key"])
    else:
        magic = ""
    if row["change_params"] is not None:
        suffix = ", params=%s." % repr(json.loads(row["change_params"]))
    else:
        suffix = "."
    return "Event %s %sregistered @ %s for %s%s" % (
        str(clconst.ChangeType(row["change_type_id"])),
        magic,
        row["tstamp"].strftime("%F %R"),
        entity,
        suffix)
def proc_sympa_create(request):
    """Execute the request for creating a sympa mailing list.

    :type request: db_row
    :param request: A dict-like object describing the sympa list creation
        request.
    """
    try:
        listname = get_address(request["entity_id"])
    except Errors.NotFoundError:
        logger.info("Sympa list address id:%s is deleted! No need to create",
                    request["entity_id"])
        return True
    raw_state = request["state_data"]
    try:
        state = json.loads(raw_state)
    except ValueError:
        state = None
    if state is None:
        # Legacy fallback; remove when no pickled data can remain.
        try:
            state = pickle.loads(raw_state)
        except Exception:
            pass
    if state is None:
        logger.error("Cannot parse request state for sympa list=%s: %s",
                     listname, raw_state)
        return True
    try:
        host = state["runhost"]
        profile = state["profile"]
        description = state["description"]
        admins = ",".join(state["admins"])
    except KeyError:
        logger.error("No host/profile/description specified for sympa list %s",
                     listname)
        return True
    # 2008-08-01 IVR FIXME: Safe quote everything fished out from state.
    command = [cereconf.SYMPA_SCRIPT, host, 'newlist', listname,
               admins, profile, description]
    return Utils.spawn_and_log_output(command) == EXIT_SUCCESS
def delete_stale_events(cl_events, db):
    """Remove all events of type cl_events older than GRACE_PERIOD.

    cl_events is an iterable listing change_log event types that we want
    expunged.  These events cannot require any state change in Cerebrum
    (other than their own deletion).  It is the caller's responsibility
    to check that this is so.
    """
    if not isinstance(cl_events, (list, tuple, set)):
        cl_events = [cl_events, ]
    clconst = Factory.get("CLConstants")()
    typeset_request = ", ".join(str(clconst.ChangeType(x))
                                for x in cl_events)
    logger.debug("Deleting stale requests: %s", typeset_request)
    for event in db.get_log_events(types=cl_events):
        tstamp = event["tstamp"]
        timeout = cereconf.GRACE_PERIOD
        try:
            # BUG FIX: events without change_params used to crash with
            # TypeError from json.loads(None), which the KeyError
            # handler below did not catch.
            if event["change_params"]:
                params = json.loads(event["change_params"])
            else:
                params = {}
            if params['timeout'] is not None:
                timeout = DateTimeDelta(params['timeout'])
                logger.debug('Timeout set to %s for %s',
                             (now() + timeout).strftime('%Y-%m-%d'),
                             event['change_id'])
                if timeout > cereconf.MAX_INVITE_PERIOD:
                    logger.warning('Too long timeout (%s) for for %s',
                                   timeout.strftime('%Y-%m-%d'),
                                   event['change_id'])
                    timeout = cereconf.MAX_INVITE_PERIOD
        except KeyError:
            # No explicit timeout in the params; keep GRACE_PERIOD.
            pass
        if now() - tstamp <= timeout:
            continue
        logger.debug("Deleting stale event %s (@%s) for entity %s (id=%s)",
                     str(clconst.ChangeType(event["change_type_id"])),
                     event["tstamp"].strftime("%Y-%m-%d"),
                     fetch_name(event["subject_entity"], db),
                     event["subject_entity"])
        db.remove_log_event(event["change_id"])
        db.commit()
    logger.debug("Deleted all stale requests: %s", typeset_request)
def process_events(self, start_date=0, end_date=0):
    """Main 'counting' function.

    Extracts desired events from database and places entity IDs into
    'self._entity_ids', where they later can be counted and extracted
    for further purposes.

    We are (at this point) only interested in accounts that have been
    modified so that their expire_date has been changed to a time in
    the future, so we override the super-class'es method.

    @param start_date: Earliest date for events to include
    @type start_date: mx.DateTime

    @param end_date: Latest date for events to include
    @type end_date: mx.DateTime
    """
    for row in db.get_log_events_date(sdate=start_date,
                                      edate=end_date,
                                      type=self._log_event):
        if row["change_params"]:
            params = json.loads(row["change_params"])
        else:
            params = {}
        if "expire_date" not in params:
            # Event does not modify expire_date; ignore.
            continue
        if params["expire_date"] is None:
            # Expire_date unset; include it!
            self._entity_ids.append(row["subject_entity"])
            continue
        account.clear()
        account.find(row["subject_entity"])
        if (account.expire_date is None
                or account.expire_date > mx.DateTime.now()):
            # Account set to expire at some point in the future or not
            # at all; include it!
            self._entity_ids.append(row["subject_entity"])
        else:
            # Account (already? still?) expired; no need to include
            logger.debug("%s: is expired; skipping",
                         account.account_name)
    logger.info(
        "Number of events dealing with valid expire_dates for '%s' is %i",
        self._description, self._get_total())
def typeset_change_log_row(row, database):
    """Format a change_log row for log/report output."""
    account = get_account(row["subject_entity"], database)
    clconst = Factory.get("CLConstants")()
    # Describe the subject by account name when we can resolve it.
    entity = ("id=%s" % row["subject_entity"])
    if account is not None:
        entity = "account: %s/id=%s" % (account.account_name,
                                        account.entity_id)
    confirmation_key = row.get("confirmation_key")
    magic = ("" if confirmation_key is None
             else "with request key %s " % str(confirmation_key))
    params_text = "."
    if row["change_params"] is not None:
        params_text = ", params=%s." % repr(json.loads(row["change_params"]))
    return "Event %s %sregistered @ %s for %s%s" % (
        str(clconst.ChangeType(row["change_type_id"])),
        magic,
        row["tstamp"].strftime("%F %R"),
        entity,
        params_text)
def row_to_record(self, row):
    """ Build a DbAuditRecord from a change_log db_row. """
    def int_or_none(value):
        # Preserve NULLs, coerce everything else to int.
        return value if value is None else int(value)

    # A NULL change_by means the change predates real operators; fall
    # back to the bootstrap account.
    if row['change_by'] is None:
        change_by = self.initial_account_id
    else:
        change_by = int(row['change_by'])

    change_params = row['change_params']
    try:
        if change_params:
            change_params = json.loads(change_params)
        else:
            change_params = {}
    except Exception:
        logger.warn("unable to deserialize change_params=%r",
                    change_params)
        raise

    record_data = self.builder(
        int(row['subject_entity']),
        int(row['change_type_id']),
        int_or_none(row['dest_entity']),
        change_params,
        change_by,
        row['change_program'],
    ).to_dict()
    record_data['record_id'] = int(row['change_id'])
    record_data['timestamp'] = mx_to_datetime(row['tstamp'])
    return record.DbAuditRecord.from_dict(record_data)
def spread_change(self, key, event):
    """Handle a spread add/remove event for a person.

    Updates the person's primary account and removes any other accounts
    when the changed spread matches the datasource's spread.
    """
    # BUG FIX: guard against NULL change_params -- json.loads(None)
    # raises TypeError (the sibling handlers guard the same way).
    if event['change_params']:
        change_params = json.loads(event['change_params'])
    else:
        change_params = {}
    if change_params.get('spread', 0) != int(self.datasource.spread):
        raise UnrelatedEvent
    pe = Factory.get('Person')(self.db)
    try:
        pe.find(event['subject_entity'])
    except NotFoundError:
        raise UnrelatedEvent
    primary = None
    if self.datasource.is_eligible(pe.entity_id):
        primary = pe.get_primary_account()
    # Make sure the current primary account exists
    if primary:
        self.update_user(key, event, pe.entity_id)
    # Delete all other accounts
    self.delete_users_for_person(key, event, pe.entity_id,
                                 except_account_id=primary)
def proc_sympa_remove(request):
    """Execute the request for removing a sympa mailing list.

    @type request: ??
    @param request: A dict-like object containing all the parameters for
      sympa list removal.
    """
    raw_state = request["state_data"]
    try:
        state = json.loads(raw_state)
    except ValueError:
        state = None
    if state is None:
        # Legacy fallback; remove when no pickled data can remain.
        try:
            state = pickle.loads(raw_state)
        except Exception:
            pass
    if state is None:
        logger.error("Cannot parse request state for sympa request %s: %s",
                     request["request_id"], raw_state)
        return True
    try:
        listname = state["listname"]
        host = state["run_host"]
    except KeyError:
        logger.error("No listname/runhost specified for request %s",
                     request["request_id"])
        return True
    command = [cereconf.SYMPA_SCRIPT, host, 'rmlist', listname]
    return spawn_and_log_output(command) == EXIT_SUCCESS
def proc_sympa_remove(request):
    """Execute the request for removing a sympa mailing list.

    :type request: db_row
    :param request: A dict-like object containing all the parameters for
        sympa list removal.
    """
    state = None
    try:
        state = json.loads(request["state_data"])
    except ValueError:
        pass
    if state is None:
        # Remove this when there's no chance of pickled data
        try:
            state = pickle.loads(request["state_data"])
        except Exception:
            pass
    if state is None:
        logger.error("Cannot parse request state for sympa request %s: %s",
                     request["request_id"], request["state_data"])
        return True
    try:
        listname = state["listname"]
        host = state["run_host"]
    except KeyError:
        logger.error("No listname/runhost specified for request %s",
                     request["request_id"])
        return True
    cmd = [cereconf.SYMPA_SCRIPT, host, 'rmlist', listname]
    return Utils.spawn_and_log_output(cmd) == EXIT_SUCCESS
def process_log(self):
    """Scan the whole change log, pruning aged and toggled entries."""
    started_at = time.time()
    last_seen = {}
    unknown_type = {}
    processed = aged = toggled = 0
    logger.info("Fetching change log...")
    # Use a separate cursor for fetching
    fetch_db = get_db()
    for entry in fetch_db.get_log_events():
        processed += 1
        if processed % 100000 == 0:
            logger.debug('processed = %s', processed)
        change_type = int(entry['change_type_id'])
        # Skip unknown change types, but count them for the summary.
        if change_type not in self.trigger_map:
            unknown_type[change_type] = unknown_type.get(change_type, 0) + 1
            continue
        # Keep all data newer than minimum_age
        age = started_at - entry['tstamp'].ticks()
        if age < self.minimum_age:
            continue
        max_age = self.max_ages.get(change_type, self.default_age)
        if max_age != self.forever and age > max_age:
            logger.info("Removed due to age: {!r}".format(
                self.format_for_logging(entry)))
            aged += 1
            if self.commit:
                self.db.remove_log_event(entry['change_id'])
        # Determine a unique key for this event to check togglability
        toggler = self.trigger_map[change_type]
        if toggler is None:
            continue
        key_parts = ["{:d}".format(toggler['id'])]
        for column in toggler['columns']:
            key_parts.append("{:d}".format(entry[column]))
        if 'change_params' in toggler:
            if entry['change_params']:
                params = json.loads(entry['change_params'])
            else:
                params = {}
            for name in toggler['change_params']:
                key_parts.append("{}".format(params.get(name)))
        key = "-".join(key_parts)
        # Has something been toggled?
        if key in last_seen:
            logger.info("Removed toggle {!r}, {!r} toggled by {!r}".format(
                key, last_seen[key], self.format_for_logging(entry)))
            toggled += 1
            if self.commit:
                self.db.remove_log_event(last_seen[key])
        last_seen[key] = int(entry['change_id'])
        if self.commit and processed % 1000 == 0:
            self.db.commit()
    for change_type, num in unknown_type.items():
        logger.warn("Unknown change type id:{} ({}) for {} entries".format(
            change_type,
            text_type(self.clconst.human2constant(change_type)),
            num))
    maybe_commit(self)
    logger.info("Entries processed: {}".format(processed))
    logger.info("Entries removed due to age: {}".format(aged))
    logger.info("Entries removed due to being toggled: {}".format(toggled))
    logger.info("Spent {} seconds".format(int(time.time() - started_at)))
def _unpack(msg): cp = json.loads(msg.get('change_params')) msg['change_params'] = cp return msg
def process_log(self):
    """Walk the change log once, removing entries that are too old or
    that have been toggled by a later event of the same kind."""
    t0 = time.time()
    seen_keys = {}
    unknown_counts = {}
    processed = aged = toggled = 0
    logger.info("Fetching change log...")
    # Use a separate cursor for fetching
    reader = get_db()
    for event in reader.get_log_events():
        processed += 1
        if (processed % 100000) == 0:
            logger.debug('processed = %s', processed)
        change_type = int(event['change_type_id'])
        if change_type not in self.trigger_map:
            # Unknown change type: tally it and move on.
            unknown_counts.setdefault(change_type, 0)
            unknown_counts[change_type] += 1
            continue
        # Anything newer than minimum_age is always kept.
        age = t0 - event['tstamp'].ticks()
        if age < self.minimum_age:
            continue
        limit = self.max_ages.get(change_type, self.default_age)
        if limit != self.forever and age > limit:
            logger.info("Removed due to age: {!r}".format(
                self.format_for_logging(event)))
            aged += 1
            if self.commit:
                self.db.remove_log_event(event['change_id'])
        # Build the uniqueness key used to detect toggled events.
        toggler = self.trigger_map[change_type]
        if toggler is None:
            continue
        parts = ["{:d}".format(toggler['id'])]
        for column in toggler['columns']:
            parts.append("{:d}".format(event[column]))
        if 'change_params' in toggler:
            data = (json.loads(event['change_params'])
                    if event['change_params'] else {})
            for param_name in toggler['change_params']:
                parts.append("{}".format(data.get(param_name)))
        key = "-".join(parts)
        # A previously seen key means the earlier event was toggled.
        if key in seen_keys:
            logger.info("Removed toggle {!r}, {!r} toggled by {!r}".format(
                key, seen_keys[key], self.format_for_logging(event)))
            toggled += 1
            if self.commit:
                self.db.remove_log_event(seen_keys[key])
        seen_keys[key] = int(event['change_id'])
        if self.commit and (processed % 1000) == 0:
            self.db.commit()
    for change_type, num in unknown_counts.items():
        logger.warn(
            "Unknown change type id:{} ({}) for {} entries".format(
                change_type,
                text_type(self.clconst.human2constant(change_type)),
                num))
    maybe_commit(self)
    logger.info("Entries processed: {}".format(processed))
    logger.info("Entries removed due to age: {}".format(aged))
    logger.info("Entries removed due to being toggled: {}".format(toggled))
    logger.info("Spent {} seconds".format(int(time.time() - t0)))