def purge_trigger_instances(timestamp=None):
    """
    Purge trigger instance objects whose ``occurrence_time`` is older than the
    provided timestamp.

    :param timestamp: Cut-off datetime. Instances which occurred before this
                      value are deleted. If falsy, an error is logged and the
                      function returns without purging anything.
    """
    if not timestamp:
        LOG.error('Specify a valid timestamp to purge.')
        return

    # Use lazy %-style logging args so formatting work is skipped when the
    # log level is disabled.
    LOG.info('Purging trigger instances older than timestamp: %s',
             timestamp.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))

    # XXX: Think about paginating this call.
    # NOTE(review): ``timestamp`` is a datetime (strftime is called on it
    # above), yet it is passed through ``isotime.parse`` which conventionally
    # expects an ISO 8601 string -- confirm ``isotime.parse`` accepts datetime
    # objects; otherwise the datetime should be used in the filter directly.
    filters = {'occurrence_time__lt': isotime.parse(timestamp)}
    instances = TriggerInstance.query(**filters)

    LOG.info('#### Total number of trigger instances to delete: %d',
             len(instances))

    # Purge the trigger instance models
    # (previous comment incorrectly referred to execution/liveaction models)
    for instance in instances:
        _purge_model(instance)

    # Print stats
    LOG.info('#### Total trigger instances deleted: %d', DELETED_COUNT)
def test_migrate_triggers(self):
    """
    End to end test for the trigger instance payload field migration.

    Inserts documents using the old (plain BSON object) payload field type,
    runs the migration, and verifies that only eligible documents (processed
    status, inside the migration time window) are converted to the new
    binary (binData) field type.
    """
    # Allow subclassing the model so we can define a variant which still
    # uses the old, non-binary payload field type.
    TriggerInstanceDB._meta["allow_inheritance"] = True

    class TriggerInstanceDB_OldFieldType(TriggerInstanceDB):
        # Old field type: escaped dict stored as a BSON object, not binData
        payload = stormbase.EscapedDictField()

    # Sanity check: collection starts with no non-binary payload documents
    trigger_instance_dbs = TriggerInstance.query(
        __raw__={"payload": {
            "$not": {
                "$type": "binData",
            },
        }})
    self.assertEqual(len(trigger_instance_dbs), 0)

    # ... and no plain-object payload documents either
    trigger_instance_dbs = TriggerInstance.query(__raw__={
        "payload": {
            "$type": "object",
        },
    })
    self.assertEqual(len(trigger_instance_dbs), 0)

    # 1. Insert data in old format
    # Processed + recent occurrence time -> eligible for migration
    trigger_instance_1_db = TriggerInstanceDB_OldFieldType()
    trigger_instance_1_db.payload = MOCK_PAYLOAD_1
    trigger_instance_1_db.status = TRIGGER_INSTANCE_PROCESSED
    trigger_instance_1_db.occurrence_time = datetime.datetime.utcnow()
    trigger_instance_1_db = TriggerInstance.add_or_update(
        trigger_instance_1_db, publish=False)

    # Pending status -> should NOT be migrated
    trigger_instance_2_db = TriggerInstanceDB_OldFieldType()
    trigger_instance_2_db.payload = MOCK_PAYLOAD_2
    trigger_instance_2_db.status = TRIGGER_INSTANCE_PENDING
    trigger_instance_2_db.occurrence_time = datetime.datetime.utcnow()
    trigger_instance_2_db = TriggerInstance.add_or_update(
        trigger_instance_2_db, publish=False)

    # This object is older than the default threshold so it should not be migrated
    trigger_instance_3_db = TriggerInstanceDB_OldFieldType()
    trigger_instance_3_db.payload = MOCK_PAYLOAD_2
    trigger_instance_3_db.status = TRIGGER_INSTANCE_PROCESSED
    trigger_instance_3_db.occurrence_time = datetime.datetime.utcfromtimestamp(
        0)
    trigger_instance_3_db = TriggerInstance.add_or_update(
        trigger_instance_3_db, publish=False)

    # Verify data has been inserted in old format
    trigger_instance_dbs = TriggerInstance.query(
        __raw__={"payload": {
            "$not": {
                "$type": "binData",
            },
        }})
    self.assertEqual(len(trigger_instance_dbs), 3)

    trigger_instance_dbs = TriggerInstance.query(__raw__={
        "payload": {
            "$type": "object",
        },
    })
    self.assertEqual(len(trigger_instance_dbs), 3)

    # Update inserted documents and remove special _cls field added by
    # mongoengine. We need to do that here due to how mongoengine works with
    # subclasses (otherwise the documents would deserialize as the old
    # subclass instead of the real TriggerInstanceDB model).
    TriggerInstance.query(__raw__={
        "payload": {
            "$type": "object",
        },
    }).update(set___cls="TriggerInstanceDB")

    # 2. Run migration
    # Window covers the last two hours -> includes instances 1 and 2 but
    # excludes instance 3 (epoch occurrence time).
    start_dt = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc) - datetime.timedelta(hours=2)
    end_dt = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc)
    migration_module.migrate_triggers(start_dt=start_dt, end_dt=end_dt)

    # 3. Verify data has been migrated - only 1 item should have been migrated since it's in a
    # completed state
    trigger_instance_dbs = TriggerInstance.query(
        __raw__={"payload": {
            "$not": {
                "$type": "binData",
            },
        }})
    # TODO: Also verify raw as_pymongo() bin field value
    self.assertEqual(len(trigger_instance_dbs), 2)

    trigger_instance_dbs = TriggerInstance.query(__raw__={
        "payload": {
            "$type": "object",
        },
    })
    self.assertEqual(len(trigger_instance_dbs), 2)

    # Migrated documents must still round-trip to the original payloads
    trigger_instance_1_db_retrieved = TriggerInstance.get_by_id(
        trigger_instance_1_db.id)
    self.assertEqual(trigger_instance_1_db_retrieved.payload, MOCK_PAYLOAD_1)

    trigger_instance_2_db_retrieved = TriggerInstance.get_by_id(
        trigger_instance_2_db.id)
    self.assertEqual(trigger_instance_2_db_retrieved.payload, MOCK_PAYLOAD_2)
def _append_view_properties(self, rule_enforcement_apis):
    """
    Method which appends corresponding execution (if available) and trigger
    instance object properties.

    :param rule_enforcement_apis: List of rule enforcement API dicts. Each may
                                  contain ``trigger_instance_id`` and / or
                                  ``execution_id`` keys.
    :return: The same list, with ``trigger_instance`` and ``execution`` keys
             populated (empty dict when the referenced object is not found).
    """
    # Collect unique ids so each object is only fetched once
    trigger_instance_ids = set()
    execution_ids = set()

    for rule_enforcement_api in rule_enforcement_apis:
        if rule_enforcement_api.get('trigger_instance_id', None):
            trigger_instance_ids.add(str(rule_enforcement_api['trigger_instance_id']))

        if rule_enforcement_api.get('execution_id', None):
            execution_ids.add(rule_enforcement_api['execution_id'])

    # 1. Retrieve corresponding execution objects
    # NOTE: Executions contain a lot of fields and could contain a lot of data
    # so we only retrieve fields we need
    only_fields = [
        'id',
        'action.ref',
        'action.parameters',
        'runner.name',
        'runner.runner_parameters',
        'parameters',
        'status'
    ]
    execution_dbs = ActionExecution.query(id__in=list(execution_ids),
                                          only_fields=only_fields)
    execution_dbs_by_id = {
        str(execution_db.id): execution_db for execution_db in execution_dbs
    }

    # 2. Retrieve corresponding trigger instance objects
    trigger_instance_dbs = TriggerInstance.query(id__in=list(trigger_instance_ids))
    trigger_instance_dbs_by_id = {
        str(trigger_instance_db.id): trigger_instance_db
        for trigger_instance_db in trigger_instance_dbs
    }

    # Amend rule enforcement objects with additional data
    for rule_enforcement_api in rule_enforcement_apis:
        # Default to empty dicts so consumers can rely on the keys existing
        rule_enforcement_api['trigger_instance'] = {}
        rule_enforcement_api['execution'] = {}

        trigger_instance_id = rule_enforcement_api.get('trigger_instance_id', None)
        execution_id = rule_enforcement_api.get('execution_id', None)

        trigger_instance_db = trigger_instance_dbs_by_id.get(trigger_instance_id, None)
        execution_db = execution_dbs_by_id.get(execution_id, None)

        if trigger_instance_db:
            trigger_instance_api = TriggerInstanceAPI.from_model(trigger_instance_db)
            rule_enforcement_api['trigger_instance'] = trigger_instance_api

        if execution_db:
            execution_api = ActionExecutionAPI.from_model(execution_db)
            rule_enforcement_api['execution'] = execution_api

    return rule_enforcement_apis