def new_event(self):
    """Construct and return a fresh Event using this instance's harmonization."""
    event = message.Event(harmonization=self.harmonization)
    return event
def main():
    """Interactive console tool to inspect and recover intelmq dump files.

    Without a ``botid`` argument, lists every ``*.dump`` file found in
    DEFAULT_LOGGING_PATH and asks the user which one to process.  Then
    loops offering the actions defined in ACTIONS (quit, delete entries,
    recover/re-inject entries, delete the whole file, show entries) until
    the user quits or removes the dump file.
    """
    parser = argparse.ArgumentParser(
        prog=APPNAME,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
    )
    parser.add_argument('botid', metavar='botid', nargs='?', default=None,
                        help='botid to inspect dumps of')
    args = parser.parse_args()

    if args.botid is None:
        # No bot given: present a numbered menu of all dump files.
        filenames = glob.glob(os.path.join(DEFAULT_LOGGING_PATH, '*.dump'))
        if not filenames:
            print(green('Nothing to recover from, no dump files found!'))
            exit(0)
        # Pairs of (full path, short bot id): strip the directory prefix
        # and the trailing '.dump' (5 characters).
        filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5])
                     for fname in sorted(filenames)]
        length = max(len(value[1]) for value in filenames)
        print(bold("{c:>3}: {s:{l}} {i}".format(c='id', s='name (bot id)',
                                                i='content', l=length)))
        for count, (fname, shortname) in enumerate(filenames):
            info = dump_info(fname)
            print("{c:3}: {s:{l}} {i}".format(c=count, s=shortname, i=info,
                                              l=length))
        botid = input(inverted('Which dump file to process (id or name)? '))
        botid = botid.strip()
        if botid == 'q' or not botid:
            exit(0)
        try:
            fname, botid = filenames[int(botid)]
        except ValueError:
            # Answer was not a number: treat it as a bot id directly.
            fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'
    else:
        botid = args.botid
        fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'

    if not os.path.isfile(fname):
        print(bold('Given file does not exist: {}'.format(fname)))
        exit(1)

    while True:
        info = dump_info(fname)
        print('Processing {}: {}'.format(bold(botid), info))
        try:
            with io.open(fname, 'rt') as handle:
                content = json.load(handle)
            meta = load_meta(content)
        except ValueError:
            # Corrupt or unparsable dump: offer only the actions whose
            # ACTIONS entry is flagged as usable without loaded content.
            available_opts = [item[0] for item in ACTIONS.values() if item[2]]
            print(bold('Could not load file:') + '\n{}\nRestricted actions.'
                  ''.format(traceback.format_exc()))
        else:
            available_opts = [item[0] for item in ACTIONS.values()]
            for count, line in enumerate(meta):
                print('{:3}: {} {}'.format(count, *line))
        answer = input(inverted(', '.join(available_opts) + '? ')).split()
        if not answer:
            continue
        # Parse the optional comma-separated id list.  Guard against a
        # bare action character: the original code indexed answer[1]
        # unconditionally and crashed with IndexError.
        ids = []
        if answer[0] in AVAILABLE_IDS and len(answer) > 1:
            ids = [int(item) for item in answer[1].split(',')]
        queue_name = None
        if answer[0] == 'a':
            # recover all -> recover all by ids
            answer[0] = 'r'
            ids = range(len(meta))
            if len(answer) > 1:
                queue_name = answer[1]
        if answer[0] == 'q':
            break
        elif answer[0] == 'e':
            # Delete entries
            for entry in ids:
                del content[meta[entry][0]]
            save_file(fname, content)
        elif answer[0] == 'r':
            # Recover entries: serialize each selected message and
            # re-inject it into a pipeline queue.
            for key, entry in [item for (count, item)
                               in enumerate(content.items())
                               if count in ids]:
                if type(entry['message']) is dict:
                    if '__type' in entry['message']:
                        msg = json.dumps(entry['message'])
                    # backwards compat: dumps had no type info
                    elif '-parser' in entry['bot_id']:
                        msg = message.Report(entry['message']).serialize()
                    else:
                        msg = message.Event(entry['message']).serialize()
                elif issubclass(type(entry['message']),
                                (six.binary_type, six.text_type)):
                    msg = entry['message']
                elif entry['message'] is None:
                    print(bold('No message here, deleting directly.'))
                    del content[key]
                    save_file(fname, content)
                    continue
                else:
                    print(bold('Unhandable type of message: {!r}'
                               ''.format(type(entry['message']))))
                    continue
                print(entry['source_queue'])
                default = utils.load_configuration(DEFAULTS_CONF_FILE)
                runtime = utils.load_configuration(RUNTIME_CONF_FILE)
                params = utils.load_parameters(default, runtime)
                pipe = pipeline.PipelineFactory.create(params)
                if queue_name is None:
                    # An optional third token names an explicit
                    # destination queue.  Fixed off-by-one: the original
                    # tested len(answer) == 2 but then indexed answer[2],
                    # which always raised IndexError.
                    if len(answer) == 3:
                        queue_name = answer[2]
                    else:
                        queue_name = entry['source_queue']
                try:
                    pipe.set_queues(queue_name, 'destination')
                    pipe.connect()
                    pipe.send(msg)
                except exceptions.PipelineError:
                    print(red('Could not reinject into queue {}: {}'
                              ''.format(queue_name, traceback.format_exc())))
                else:
                    # Only drop the entry from the dump after a
                    # successful re-injection.
                    del content[key]
                    save_file(fname, content)
        elif answer[0] == 'd':
            # delete dumpfile
            os.remove(fname)
            print('Deleted file {}'.format(fname))
            break
        elif answer[0] == 's':
            # Show entries by id
            for count, (key, value) in enumerate(content.items()):
                if count not in ids:
                    continue
                print('=' * 100, '\nShowing id {} {}\n'.format(count, key),
                      '-' * 50)
                if isinstance(value['message'],
                              (six.binary_type, six.text_type)):
                    value['message'] = json.loads(value['message'])
                    # Truncate huge raw payloads for readable output.
                    if ('raw' in value['message'] and
                            len(value['message']['raw']) > 1000):
                        value['message']['raw'] = value['message'][
                            'raw'][:1000] + '...[truncated]'
                value['traceback'] = value['traceback'].splitlines()
                pprint.pprint(value)
def new_event(self, *args, **kwargs):
    """Construct a libmessage.Event, forwarding all arguments unchanged
    and supplying this instance's harmonization configuration.

    NOTE(review): because ``harmonization`` is passed explicitly before
    ``**kwargs``, a caller also passing ``harmonization=`` raises a
    TypeError (duplicate keyword) — presumably intentional; confirm.
    """
    return libmessage.Event(*args, harmonization=self.harmonization, **kwargs)
def test_event_init_check_tuple(self):
    """ Test if initialization method checks fields from tuple. """
    bad_fields = (('__type', 'Event'), ('source.asn', 'foo'))
    with self.assertRaises(exceptions.InvalidValue):
        message.Event(bad_fields, harmonization=HARM)
def test_event_init_check(self):
    """ Test if initialization method checks fields. """
    invalid_event = {'__type': 'Event', 'source.asn': 'foo'}
    with self.assertRaises(exceptions.InvalidValue):
        message.Event(invalid_event, harmonization=HARM)
def test_event_from_report(self):
    """ Test conversion of a Report carrying feed fields into an Event. """
    report = self.new_report()
    # Unbound dict.update — presumably to bypass the message class's own
    # validating update(); confirm against the Message implementation.
    dict.update(report, FEED_FIELDS)
    converted = message.Event(report, harmonization=HARM)
    self.assertDictContainsSubset(converted, FEED_FIELDS)
def test_invalid_harm_key(self):
    """ Test if error is raised when using an invalid key. """
    # Keys with an empty path segment (leading/trailing/double dot)
    # must be rejected.
    for bad_key in ('foo..bar', 'foo.bar.'):
        with self.assertRaises(exceptions.InvalidKey):
            message.Event(harmonization={'event': {bad_key: {}}})