Example #1
	def begin_new_run(self, current_time):
		print "\n" * 10
		subprocess.call('clear')
		msg = "# Running tests at %s  " % (time.strftime("%H:%M:%S", current_time))

		print >> sys.stderr, termstyle.inverted(termstyle.bold(msg))
		print >> sys.stderr, ""
Example #2
    def process(self, event):
        if isinstance(event, TestRun):
            print "\n" * 10
            subprocess.call('clear')
            msg = "# Running tests at %s  " % (time.strftime("%H:%M:%S"), )

            print >> sys.stderr, termstyle.inverted(termstyle.bold(msg))
            print >> sys.stderr, ""
Example #3
	def process(self, event):
		if isinstance(event, TestRun):
			print "\n" * 10
			subprocess.call('clear')
			msg = "# Running tests at %s  " % (time.strftime("%H:%M:%S"),)
	
			print >> sys.stderr, termstyle.inverted(termstyle.bold(msg))
			print >> sys.stderr, ""
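
The three snippets above use Python 2 print-statement syntax. A minimal Python 3 equivalent of the same banner logic might look like the following sketch, assuming the same termstyle helpers and that a TestRun event class is provided by the host test framework:

import subprocess
import sys
import time

import termstyle


class RunBanner:
    """Clears the terminal and prints a highlighted banner when a test run starts."""

    def process(self, event):
        if isinstance(event, TestRun):  # TestRun is assumed to come from the test framework
            print("\n" * 10)
            subprocess.call('clear')
            msg = "# Running tests at %s  " % time.strftime("%H:%M:%S")
            print(termstyle.inverted(termstyle.bold(msg)), file=sys.stderr)
            print("", file=sys.stderr)
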
Example #4
def main():
    parser = argparse.ArgumentParser(
        prog=APPNAME,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
    )

    parser.add_argument('botid',
                        metavar='botid',
                        nargs='?',
                        default=None,
                        help='botid to inspect dumps of')
    args = parser.parse_args()
    ctl = intelmqctl.IntelMQController()

    if args.botid is None:
        filenames = glob.glob(os.path.join(DEFAULT_LOGGING_PATH, '*.dump'))
        if not len(filenames):
            print(green('Nothing to recover from, no dump files found!'))
            exit(0)
        filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5])
                     for fname in sorted(filenames)]

        length = max([len(value[1]) for value in filenames])
        print(
            bold("{c:>3}: {s:{l}} {i}".format(c='id',
                                              s='name (bot id)',
                                              i='content',
                                              l=length)))
        for count, (fname, shortname) in enumerate(filenames):
            info = dump_info(fname)
            print("{c:3}: {s:{l}} {i}".format(c=count,
                                              s=shortname,
                                              i=info,
                                              l=length))
        try:
            botid = input(
                inverted('Which dump file to process (id or name)?') + ' ')
        except EOFError:
            exit(0)
        else:
            botid = botid.strip()
            if botid == 'q' or not botid:
                exit(0)
        try:
            fname, botid = filenames[int(botid)]
        except ValueError:
            fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'
    else:
        botid = args.botid
        fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'

    if not os.path.isfile(fname):
        print(bold('Given file does not exist: {}'.format(fname)))
        exit(1)

    answer = None
    while True:
        info = dump_info(fname)
        print('Processing {}: {}'.format(bold(botid), info))

        if info.startswith(str(red)):
            available_opts = [item[0] for item in ACTIONS.values() if item[2]]
            print('Restricted actions.')
        else:
            # don't display list after 'show' and 'recover' command
            if not (answer and isinstance(answer, list)
                    and answer[0] in ['s', 'r']):
                with open(fname, 'rt') as handle:
                    content = json.load(handle)
                meta = load_meta(content)
                available_opts = [item[0] for item in ACTIONS.values()]
                for count, line in enumerate(meta):
                    print('{:3}: {} {}'.format(count, *line))

        # Determine bot status
        bot_status = ctl.bot_status(botid)
        if bot_status == 'running':
            print(red('Attention: This bot is currently running!'))
        elif bot_status == 'error':
            print(red('Attention: This bot is not defined!'))

        try:
            answer = input(inverted(', '.join(available_opts) + '?') +
                           ' ').split()
        except EOFError:
            break
        else:
            if not answer:
                continue
        if any([answer[0] == char
                for char in AVAILABLE_IDS]) and len(answer) > 1:
            ids = [int(item) for item in answer[1].split(',')]
        else:
            ids = []
        queue_name = None
        if answer[0] == 'a':
            # recover all -> recover all by ids
            answer[0] = 'r'
            ids = range(len(meta))
            if len(answer) > 1:
                queue_name = answer[1]
        if answer[0] == 'q':
            break
        elif answer[0] == 'e':
            # Delete entries
            for entry in ids:
                del content[meta[entry][0]]
            save_file(fname, content)
        elif answer[0] == 'r':
            if bot_status == 'running':
                # See https://github.com/certtools/intelmq/issues/574
                print(red('Recovery for running bots not possible.'))
                continue
            # recover entries
            default = utils.load_configuration(DEFAULTS_CONF_FILE)
            runtime = utils.load_configuration(RUNTIME_CONF_FILE)
            params = utils.load_parameters(default, runtime)
            pipe = pipeline.PipelineFactory.create(params)
            try:
                for i, (key, entry) in enumerate([
                        item for (count, item) in enumerate(content.items())
                        if count in ids
                ]):
                    if entry['message']:
                        msg = entry['message']
                    else:
                        print('No message here, deleting entry.')
                        del content[key]
                        continue

                    if queue_name is None:
                        if len(answer) == 3:
                            queue_name = answer[2]
                        else:
                            queue_name = entry['source_queue']
                    try:
                        pipe.set_queues(queue_name, 'destination')
                        pipe.connect()
                        pipe.send(msg)
                    except exceptions.PipelineError:
                        print(
                            red('Could not reinject into queue {}: {}'
                                ''.format(queue_name, traceback.format_exc())))
                    else:
                        del content[key]
                        print(green('Recovered dump {}.'.format(i)))
            finally:
                save_file(fname, content)
            if not content:
                os.remove(fname)
                print('Deleted empty file {}'.format(fname))
                break
        elif answer[0] == 'd':
            # delete dumpfile
            os.remove(fname)
            print('Deleted file {}'.format(fname))
            break
        elif answer[0] == 's':
            # Show entries by id
            for count, (key, value) in enumerate(content.items()):
                if count not in ids:
                    continue
                print('=' * 100, '\nShowing id {} {}\n'.format(count, key),
                      '-' * 50)
                if isinstance(value['message'], (bytes, str)):
                    value['message'] = json.loads(value['message'])
                    if ('raw' in value['message']
                            and len(value['message']['raw']) > 1000):
                        value['message']['raw'] = value['message'][
                            'raw'][:1000] + '...[truncated]'
                if type(value['traceback']) is not list:
                    value['traceback'] = value['traceback'].splitlines()
                pprint.pprint(value)
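
For orientation: the .dump files handled here are plain JSON objects, and the code above only relies on each entry carrying the fields bot_id, source_queue, message and traceback. A hypothetical, minimal entry consistent with that usage could look like this; the keys and values below are placeholders, not taken from a real dump:

example_dump = {
    "2016-01-01T00:00:00": {                      # entries are keyed by a sortable string
        "bot_id": "example-parser",
        "source_queue": "example-parser-queue",
        "message": '{"__type": "Event", "raw": "..."}',   # serialized message (placeholder)
        "traceback": "Traceback (most recent call last): ...",
    },
}
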
Example #5
    def send(self, taxonomy, contact, query, incident_id, requestor=None):
        if not query:
            self.logger.error('No data!')
            return False
        if not requestor:
            requestor = contact

        # PREPARATION
        query = self.shrink_dict(query)
        ids = list(str(row['id']) for row in query)

        if self.subject:
            subject = self.subject
        else:
            subject = ('{tax} incidents in your network: {date}'
                       ''.format(
                           date=datetime.datetime.now().strftime('%Y-%m-%d'),
                           tax=lib.SUBJECT[taxonomy]))
        text = self.get_text(taxonomy)
        csvfile = io.StringIO()
        writer = csv.DictWriter(csvfile,
                                fieldnames=lib.CSV_FIELDS,
                                quoting=csv.QUOTE_MINIMAL,
                                delimiter=str(";"),
                                extrasaction='ignore',
                                lineterminator='\n')
        writer.writeheader()
        query_unicode = query
        writer.writerows(query)
        # note this might contain UTF-8 chars! let's ignore utf-8 errors. sorry.
        attachment_text = csvfile.getvalue()
        attachment_lines = attachment_text.splitlines()

        if self.verbose:
            self.logger.info(text)

        showed_text = '=' * 100 + '''
To: {to}
Subject: {subj}

{text}
    '''.format(to=requestor, subj=subject, text=text)
        showed_text_len = showed_text.count('\n')

        # SHOW DATA
        if self.table_mode:
            if self.quiet:
                height = 80  # assume anything for quiet mode
            else:
                height = lib.getTerminalHeight() - 3 - showed_text_len
            csvfile.seek(0)
            if len(query) > height:
                with tempfile.NamedTemporaryFile(mode='w+') as handle:
                    handle.write(showed_text + '\n')
                    handle.write(
                        tabulate.tabulate(query,
                                          headers='keys',
                                          tablefmt='psql'))
                    handle.seek(0)
                    subprocess.call(['less', handle.name])
            else:
                self.logger.info(showed_text)
                self.logger.info(
                    tabulate.tabulate(query_unicode,
                                      headers='keys',
                                      tablefmt='psql'))
        else:
            if self.quiet:
                height = 80
            else:
                height = lib.getTerminalHeight() - 4
            if 5 + len(query) > height:  # cut query too, 5 is length of text
                self.logger.info('\n'.join(showed_text.splitlines()[:5]))
                self.logger.info('...')
                self.logger.info('\n'.join(attachment_lines[:height - 5]))
                self.logger.info('...')
            elif showed_text_len + len(query) > height > 5 + len(query):
                self.logger.info('\n'.join(
                    showed_text.splitlines()[:height - len(query)]))
                self.logger.info('...')
                self.logger.info(attachment_text)
            else:
                self.logger.info(showed_text)
                self.logger.info(attachment_text)
        self.logger.info('-' * 100)

        # MENU
        if self.batch and requestor:
            answer = 's'
        else:
            answer = 'q'
            if self.batch:
                self.logger.error('You need to set a valid requestor!')
            else:
                answer = input(
                    '{i}{b}[a]{i}utomatic, {b}[n]{i}ext, {i}{b}[s]{i}end, show '
                    '{b}[t]{i}able, change {b}[r]{i}equestor or {b}[q]{i}uit?{r} '
                    ''.format(b=bold, i=myinverted, r=reset)).strip()
        if answer == 'q':
            exit(0)
        elif answer == 'n':
            return False
        elif answer == 'a':
            self.batch = True
        elif answer == 't':
            self.table_mode = bool((self.table_mode + 1) % 2)
            return self.send(taxonomy, contact, query, incident_id, requestor)
        elif answer == 'r':
            answer = input(inverted('New requestor address:') + ' ').strip()
            if len(answer) == 0:
                requestor = contact
            else:
                requestor = answer
            return self.send(taxonomy, contact, query, incident_id, requestor)
        elif answer != 's':
            self.logger.error('Unknown command %r.', answer)
            return self.send(taxonomy, contact, query, incident_id, requestor)

        if text is None:
            self.logger.error('I won\'t send with a missing text!')
            return False

        # INVESTIGATION
        if self.dryrun:
            self.logger.info('Simulate creation of investigation.')
            investigation_id = -1
        else:
            investigation_id = self.rt.create_ticket(
                Queue='Investigations',
                Subject=subject,
                Owner=self.config['rt'].get('investigation_owner',
                                            self.config['rt']['user']),
                Requestor=requestor)

            if investigation_id == -1:
                self.logger.error('Could not create Investigation %r.',
                                  subject)
                return False

            self.logger.info('Created Investigation %d.', investigation_id)
            if not self.rt.edit_link(incident_id, 'HasMember',
                                     investigation_id):
                self.logger.error('Could not link Investigation to Incident.')
                return False

            self.executemany(
                "UPDATE events SET rtir_investigation_id = %s WHERE id = %s",
                [(investigation_id, evid) for evid in ids],
                extend=False)
            self.logger.info('Linked events to investigation.')

        # CORRESPOND
        filename = '%s-%s.csv' % (datetime.datetime.now().strftime('%Y-%m-%d'),
                                  taxonomy)
        if self.zipme or len(query) > self.config['rt']['zip_threshold']:
            attachment = io.BytesIO()
            ziphandle = zipfile.ZipFile(attachment,
                                        mode='w',
                                        compression=zipfile.ZIP_DEFLATED)
            data = csvfile.getvalue()
            ziphandle.writestr('events.csv', data)
            ziphandle.close()
            attachment.seek(0)
            filename += '.zip'
            mimetype = 'application/octet-stream'
        else:
            attachment = csvfile
            attachment.seek(0)
            mimetype = 'text/csv'

        try:
            # TODO: CC
            if self.dryrun:
                self.logger.info('Simulate creation of correspondence.')
            else:
                correspond = self.rt.reply(investigation_id,
                                           text=text,
                                           files=[(filename, attachment,
                                                   mimetype)])
                if not correspond:
                    self.logger.error(
                        'Could not correspond with text and file.')
                    return False
                self.logger.info('Correspondence added to Investigation.')

            self.execute(
                "UPDATE events SET sent_at = LOCALTIMESTAMP WHERE "
                "rtir_investigation_id = %s", (investigation_id, ),
                extend=False)
            self.logger.info('Marked events as sent.')
        except Exception:
            self.con.rollback()
            raise
        else:
            self.con.commit()

            # RESOLVE
            try:
                if not self.dryrun and not self.rt.edit_ticket(
                        investigation_id, Status='resolved'):
                    self.logger.error('Could not close investigation %d.',
                                      investigation_id)
            except IndexError:
                # Bug in RT/python-rt
                pass

        if requestor != contact:
            asns = set(str(row['source.asn']) for row in query)
            answer = input(
                inverted('Save recipient {!r} for ASNs {!s}? [Y/n] '
                         ''.format(requestor, ', '.join(asns)))).strip()
            if answer.strip().lower() in ('', 'y', 'j'):
                self.executemany(lib.QUERY_UPDATE_CONTACT,
                                 [(requestor, asn) for asn in asns],
                                 extend=False)
                self.con.commit()
                if self.cur.rowcount == 0:
                    self.query_insert_contact(asns=asns, contact=requestor)

        return True
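
The CSV attachment in this example is built entirely in memory: a csv.DictWriter writing into an io.StringIO buffer, with extrasaction='ignore' silently dropping any dict keys that are not listed in fieldnames. Isolated from the rest of the method, the pattern is plain standard-library usage (the field names below are made up for illustration):

import csv
import io

rows = [
    {"id": 1, "source.ip": "192.0.2.1", "extra": "dropped"},
    {"id": 2, "source.ip": "192.0.2.2", "extra": "dropped"},
]

csvfile = io.StringIO()
writer = csv.DictWriter(csvfile,
                        fieldnames=["id", "source.ip"],  # keys not listed here are ignored
                        quoting=csv.QUOTE_MINIMAL,
                        delimiter=";",
                        extrasaction="ignore",
                        lineterminator="\n")
writer.writeheader()
writer.writerows(rows)
print(csvfile.getvalue())
# id;source.ip
# 1;192.0.2.1
# 2;192.0.2.2
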
Example #6
def main():
    parser = argparse.ArgumentParser(
        prog=APPNAME,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
    )

    parser.add_argument('botid', metavar='botid', nargs='?',
                        default=None, help='botid to inspect dumps of')
    args = parser.parse_args()
    ctl = intelmqctl.IntelMQController()

    if args.botid is None:
        filenames = glob.glob(os.path.join(DEFAULT_LOGGING_PATH, '*.dump'))
        if not len(filenames):
            print(green('Nothing to recover from, no dump files found!'))
            exit(0)
        filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5])
                     for fname in sorted(filenames)]

        length = max([len(value[1]) for value in filenames])
        print(bold("{c:>3}: {s:{l}} {i}".format(c='id', s='name (bot id)',
                                                i='content', l=length)))
        for count, (fname, shortname) in enumerate(filenames):
            info = dump_info(fname)
            print("{c:3}: {s:{l}} {i}".format(c=count, s=shortname, i=info,
                                              l=length))
        try:
            botid = input(inverted('Which dump file to process (id or name)?') +
                          ' ')
        except EOFError:
            exit(0)
        else:
            botid = botid.strip()
            if botid == 'q' or not botid:
                exit(0)
        try:
            fname, botid = filenames[int(botid)]
        except ValueError:
            fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'
    else:
        botid = args.botid
        fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'

    if not os.path.isfile(fname):
        print(bold('Given file does not exist: {}'.format(fname)))
        exit(1)

    answer = None
    while True:
        info = dump_info(fname)
        available_answers = ACTIONS.keys()
        print('Processing {}: {}'.format(bold(botid), info))

        if info.startswith(str(red)):
            available_opts = [item[0] for item in ACTIONS.values() if item[2]]
            available_answers = [k for k, v in ACTIONS.items() if v[2]]
            print('Restricted actions.')
        else:
            # don't display list after 'show' and 'recover' command
            if not (answer and isinstance(answer, list) and answer[0] in ['s', 'r']):
                with open(fname, 'rt') as handle:
                    content = json.load(handle)
                meta = load_meta(content)
                available_opts = [item[0] for item in ACTIONS.values()]
                for count, line in enumerate(meta):
                    print('{:3}: {} {}'.format(count, *line))

        # Determine bot status
        bot_status = ctl.bot_status(botid)
        if bot_status == 'running':
            print(red('Attention: This bot is currently running!'))
        elif bot_status == 'error':
            print(red('Attention: This bot is not defined!'))

        try:
            answer = input(inverted(', '.join(available_opts) + '?') + ' ').split()
        except EOFError:
            break
        else:
            if not answer:
                continue
        if len(answer) == 0 or answer[0] not in available_answers:
            print('Action not allowed.')
            continue
        if any([answer[0] == char for char in AVAILABLE_IDS]) and len(answer) > 1:
            ids = [int(item) for item in answer[1].split(',')]
        else:
            ids = []
        queue_name = None
        if answer[0] == 'a':
            # recover all -> recover all by ids
            answer[0] = 'r'
            ids = range(len(meta))
            if len(answer) > 1:
                queue_name = answer[1]
        if answer[0] == 'q':
            break
        elif answer[0] == 'e':
            # Delete entries
            for entry in ids:
                del content[meta[entry][0]]
            save_file(fname, content)
        elif answer[0] == 'r':
            if bot_status == 'running':
                # See https://github.com/certtools/intelmq/issues/574
                print(red('Recovery for running bots not possible.'))
                continue
            # recover entries
            default = utils.load_configuration(DEFAULTS_CONF_FILE)
            runtime = utils.load_configuration(RUNTIME_CONF_FILE)
            params = utils.load_parameters(default, runtime)
            pipe = pipeline.PipelineFactory.create(params)
            try:
                for i, (key, entry) in enumerate([item for (count, item)
                                                  in enumerate(content.items()) if count in ids]):
                    if entry['message']:
                        msg = entry['message']
                    else:
                        print('No message here, deleting entry.')
                        del content[key]
                        continue

                    if queue_name is None:
                        if len(answer) == 3:
                            queue_name = answer[2]
                        else:
                            queue_name = entry['source_queue']
                    try:
                        pipe.set_queues(queue_name, 'destination')
                        pipe.connect()
                        pipe.send(msg)
                    except exceptions.PipelineError:
                        print(red('Could not reinject into queue {}: {}'
                                  ''.format(queue_name, traceback.format_exc())))
                    else:
                        del content[key]
                        print(green('Recovered dump {}.'.format(i)))
            finally:
                save_file(fname, content)
            if not content:
                os.remove(fname)
                print('Deleted empty file {}'.format(fname))
                break
        elif answer[0] == 'd':
            # delete dumpfile
            os.remove(fname)
            print('Deleted file {}'.format(fname))
            break
        elif answer[0] == 's':
            # Show entries by id
            for count, (key, value) in enumerate(content.items()):
                if count not in ids:
                    continue
                print('=' * 100, '\nShowing id {} {}\n'.format(count, key),
                      '-' * 50)
                if isinstance(value['message'], (bytes, str)):
                    value['message'] = json.loads(value['message'])
                    if ('raw' in value['message'] and
                            len(value['message']['raw']) > 1000):
                        value['message']['raw'] = value['message'][
                            'raw'][:1000] + '...[truncated]'
                if type(value['traceback']) is not list:
                    value['traceback'] = value['traceback'].splitlines()
                pprint.pprint(value)
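
Compared to the previous example, this variant also validates the typed command against an ACTIONS mapping: the keys are the accepted single-character commands, item[0] is the label shown in the prompt, and item[2] marks the actions that remain available when the dump file could not be parsed ("Restricted actions."). The real mapping is defined elsewhere in intelmqdump; a purely hypothetical shape that is consistent with how it is used above would be:

# Hypothetical illustration only -- the actual ACTIONS dict ships with intelmqdump.
ACTIONS = {
    'r': ('(r)ecover by ids', None, False),   # item[1] is not used in the code shown here
    'a': ('recover (a)ll', None, False),
    'e': ('delete (e)ntries', None, False),
    'd': ('(d)elete file', None, True),       # item[2]: still allowed for unreadable dumps
    's': ('(s)how by ids', None, False),
    'q': ('(q)uit', None, True),
}

available_opts = [item[0] for item in ACTIONS.values()]                # full menu
restricted_opts = [item[0] for item in ACTIONS.values() if item[2]]    # broken-dump menu
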
Example #7
def main():
    parser = argparse.ArgumentParser(
        prog=APPNAME,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
    )

    parser.add_argument('botid',
                        metavar='botid',
                        nargs='?',
                        default=None,
                        help='botid to inspect dumps of')
    args = parser.parse_args()

    # Try to get log_level from defaults_configuration, else use default
    try:
        log_level = utils.load_configuration(
            DEFAULTS_CONF_FILE)['logging_level']
    except Exception:
        log_level = DEFAULT_LOGGING_LEVEL

    try:
        logger = utils.log('intelmqdump', log_level=log_level)
    except (FileNotFoundError, PermissionError) as exc:
        logger = utils.log('intelmqdump', log_level=log_level, log_path=False)
        logger.error('Not logging to file: %s', exc)

    ctl = intelmqctl.IntelMQController()
    readline.parse_and_bind("tab: complete")
    readline.set_completer_delims('')

    pipeline_config = utils.load_configuration(PIPELINE_CONF_FILE)
    pipeline_pipes = {}
    for bot, pipes in pipeline_config.items():
        pipeline_pipes[pipes.get('source-queue', '')] = bot

    if args.botid is None:
        filenames = glob.glob(os.path.join(DEFAULT_LOGGING_PATH, '*.dump'))
        if not len(filenames):
            print(green('Nothing to recover from, no dump files found!'))
            sys.exit(0)
        filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5])
                     for fname in sorted(filenames)]

        length = max([len(value[1]) for value in filenames])
        print(
            bold("{c:>3}: {s:{length}} {i}".format(c='id',
                                                   s='name (bot id)',
                                                   i='content',
                                                   length=length)))
        for count, (fname, shortname) in enumerate(filenames):
            info = dump_info(fname)
            print("{c:3}: {s:{length}} {i}".format(c=count,
                                                   s=shortname,
                                                   i=info,
                                                   length=length))
        try:
            bot_completer = Completer(
                possible_values=[f[1] for f in filenames])
            readline.set_completer(bot_completer.complete)
            botid = input(
                inverted('Which dump file to process (id or name)?') + ' ')
        except EOFError:
            sys.exit(0)
        else:
            botid = botid.strip()
            if botid == 'q' or not botid:
                exit(0)
        try:
            fname, botid = filenames[int(botid)]
        except ValueError:
            fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'
    else:
        botid = args.botid
        fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'

    if not os.path.isfile(fname):
        print(bold('Given file does not exist: {}'.format(fname)))
        exit(1)

    answer = None
    delete_file = False
    while True:
        with open(fname, 'r+') as handle:
            try:
                fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except BlockingIOError:
                print(red('Dump file is currently locked. Stopping.'))
                break
            info = dump_info(fname, file_descriptor=handle)
            handle.seek(0)
            available_answers = ACTIONS.keys()
            print('Processing {}: {}'.format(bold(botid), info))

            if info.startswith(str(red)):
                available_opts = [
                    item[0] for item in ACTIONS.values() if item[2]
                ]
                available_answers = [k for k, v in ACTIONS.items() if v[2]]
                print('Restricted actions.')
            else:
                # don't display list after 'show', 'recover' & edit commands
                if not (answer and isinstance(answer, list)
                        and answer[0] in ['s', 'r', 'v']):
                    content = json.load(handle)
                    handle.seek(0)
                    content = OrderedDict(
                        sorted(content.items(),
                               key=lambda t: t[0]))  # sort by key here, #1280
                    meta = load_meta(content)

                    available_opts = [item[0] for item in ACTIONS.values()]
                    for count, line in enumerate(meta):
                        print('{:3}: {} {}'.format(count, *line))

            # Determine bot status
            try:
                bot_status = ctl.bot_status(botid)
                if bot_status[1] == 'running':
                    print(
                        red('This bot is currently running, the dump file is now locked and '
                            'the bot can\'t write it.'))
            except KeyError:
                bot_status = 'error'
                print(red('Attention: This bot is not defined!'))
                available_opts = [
                    item[0] for item in ACTIONS.values() if item[2]
                ]
                available_answers = [k for k, v in ACTIONS.items() if v[2]]
                print('Restricted actions.')

            try:
                possible_answers = list(available_answers)
                for id_action in ['r', 'a']:
                    if id_action in possible_answers:
                        possible_answers[possible_answers.index(
                            id_action)] = id_action + ' '
                action_completer = Completer(possible_answers,
                                             queues=pipeline_pipes.keys())
                readline.set_completer(action_completer.complete)
                answer = input(
                    inverted(', '.join(available_opts) + '?') + ' ').split()
            except EOFError:
                break
            else:
                if not answer:
                    continue
            if len(answer) == 0 or answer[0] not in available_answers:
                print('Action not allowed.')
                continue
            if any([answer[0] == char
                    for char in AVAILABLE_IDS]) and len(answer) > 1:
                ids = [int(item) for item in answer[1].split(',')]
            else:
                ids = []
            queue_name = None
            if answer[0] == 'a':
                # recover all -> recover all by ids
                answer[0] = 'r'
                ids = range(len(meta))
                if len(answer) > 1:
                    queue_name = answer[1]
            if answer[0] == 'q':
                break
            elif answer[0] == 'e':
                # Delete entries
                for entry in ids:
                    del content[meta[entry][0]]
                save_file(handle, content)
            elif answer[0] == 'r':
                # recover entries
                default = utils.load_configuration(DEFAULTS_CONF_FILE)
                runtime = utils.load_configuration(RUNTIME_CONF_FILE)
                params = utils.load_parameters(default, runtime)
                pipe = pipeline.PipelineFactory.create(params, logger)
                try:
                    for i, (key, entry) in enumerate([
                            item
                            for (count, item) in enumerate(content.items())
                            if count in ids
                    ]):
                        if entry['message']:
                            msg = copy.copy(
                                entry['message']
                            )  # otherwise the message field gets converted
                            if isinstance(msg, dict):
                                msg = json.dumps(msg)
                        else:
                            print('No message here, deleting entry.')
                            del content[key]
                            continue

                        if queue_name is None:
                            if len(answer) == 3:
                                queue_name = answer[2]
                            else:
                                queue_name = entry['source_queue']
                        if queue_name in pipeline_pipes:
                            if runtime[pipeline_pipes[queue_name]][
                                    'group'] == 'Parser' and json.loads(
                                        msg)['__type'] == 'Event':
                                print(
                                    'Event converted to Report automatically.')
                                msg = message.Report(
                                    message.MessageFactory.unserialize(
                                        msg)).serialize()
                        try:
                            pipe.set_queues(queue_name, 'destination')
                            pipe.connect()
                            pipe.send(msg)
                        except exceptions.PipelineError:
                            print(
                                red('Could not reinject into queue {}: {}'
                                    ''.format(queue_name,
                                              traceback.format_exc())))
                        else:
                            del content[key]
                            print(green('Recovered dump {}.'.format(i)))
                finally:
                    save_file(handle, content)
                if not content:
                    delete_file = True
                    print('Deleting empty file {}'.format(fname))
                    break
            elif answer[0] == 'd':
                # delete dumpfile
                delete_file = True
                print('Deleting empty file {}'.format(fname))
                break
            elif answer[0] == 's':
                # Show entries by id
                for count, (key, orig_value) in enumerate(content.items()):
                    value = copy.copy(
                        orig_value)  # otherwise the raw field gets truncated
                    if count not in ids:
                        continue
                    print('=' * 100, '\nShowing id {} {}\n'.format(count, key),
                          '-' * 50)
                    if value.get('message_type') == 'base64':
                        if len(value['message']) > 1000:
                            value['message'] = value[
                                'message'][:1000] + '...[truncated]'
                    else:
                        if isinstance(value['message'], (bytes, str)):
                            value['message'] = json.loads(value['message'])
                            if ('raw' in value['message']
                                    and len(value['message']['raw']) > 1000):
                                value['message']['raw'] = value['message'][
                                    'raw'][:1000] + '...[truncated]'
                    if type(value['traceback']) is not list:
                        value['traceback'] = value['traceback'].splitlines()
                    pprint.pprint(value)
            elif answer[0] == 'v':
                # edit given id
                if not ids:
                    print(red('Edit mode needs an id'))
                    continue
                for entry in ids:
                    if content[meta[entry][0]].get('message_type') == 'base64':
                        with tempfile.NamedTemporaryFile(
                                mode='w+b', suffix='.txt') as tmphandle:
                            filename = tmphandle.name
                            tmphandle.write(
                                base64.b64decode(
                                    content[meta[entry][0]]['message']))
                            tmphandle.flush()
                            proc = subprocess.call(
                                ['sensible-editor', filename])
                            if proc != 0:
                                print(red('Calling editor failed.'))
                            else:
                                tmphandle.seek(0)
                                new_content = tmphandle.read()
                                try:
                                    new_content = new_content.decode()
                                except UnicodeDecodeError as exc:
                                    print(
                                        red("Could not write the new message because of the following error:"
                                            ))
                                    print(
                                        red(
                                            exceptions.DecodingError(
                                                exception=exc)))
                                else:
                                    del content[meta[entry][0]]['message_type']
                                    content[meta[entry]
                                            [0]]['message'] = new_content
                                    save_file(handle, content)
                    else:
                        with tempfile.NamedTemporaryFile(
                                mode='w+t', suffix='.json') as tmphandle:
                            filename = tmphandle.name
                            utils.write_configuration(
                                configuration_filepath=filename,
                                content=json.loads(
                                    content[meta[entry][0]]['message']),
                                new=True,
                                backup=False)
                            proc = subprocess.call(
                                ['sensible-editor', filename])
                            if proc != 0:
                                print(red('Calling editor failed.'))
                            else:
                                tmphandle.seek(0)
                                content[meta[entry]
                                        [0]]['message'] = tmphandle.read()
                                save_file(handle, content)

    if delete_file:
        os.remove(fname)
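
New in this variant is the advisory lock on the dump file, so that intelmqdump and a still-running bot do not write it at the same time. On its own, the locking pattern is just fcntl.flock with LOCK_EX | LOCK_NB, which raises BlockingIOError immediately instead of waiting when another process already holds the lock (POSIX only; a minimal sketch):

import fcntl

def try_lock(path):
    """Open path and take an exclusive, non-blocking advisory lock on it."""
    handle = open(path, 'r+')
    try:
        fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except BlockingIOError:
        handle.close()
        return None   # somebody else (e.g. the bot) holds the lock
    return handle     # the lock is released when the file object is closed
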
Example #8
def main():
    parser = argparse.ArgumentParser(
        prog=APPNAME,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
    )

    parser.add_argument('botid', metavar='botid', nargs='?',
                        default=None, help='botid to inspect dumps of')
    args = parser.parse_args()

    if args.botid is None:
        filenames = glob.glob(os.path.join(DEFAULT_LOGGING_PATH, '*.dump'))
        if not len(filenames):
            print(green('Nothing to recover from, no dump files found!'))
            exit(0)
        filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5])
                     for fname in sorted(filenames)]

        length = max([len(value[1]) for value in filenames])
        print(bold("{c:>3}: {s:{l}} {i}".format(c='id', s='name (bot id)',
                                                i='content', l=length)))
        for count, (fname, shortname) in enumerate(filenames):
            info = dump_info(fname)
            print("{c:3}: {s:{l}} {i}".format(c=count, s=shortname, i=info,
                                              l=length))
        botid = input(inverted('Which dump file to process (id or name)? '))
        botid = botid.strip()
        if botid == 'q' or not botid:
            exit(0)
        try:
            fname, botid = filenames[int(botid)]
        except ValueError:
            fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'
    else:
        botid = args.botid
        fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'

    if not os.path.isfile(fname):
        print(bold('Given file does not exist: {}'.format(fname)))
        exit(1)
    while True:
        info = dump_info(fname)
        print('Processing {}: {}'.format(bold(botid), info))
        try:
            with io.open(fname, 'rt') as handle:
                content = json.load(handle)
            meta = load_meta(content)
        except ValueError:
            available_opts = [item[0] for item in ACTIONS.values() if item[2]]
            print(bold('Could not load file:') + '\n{}\nRestricted actions.'
                  ''.format(traceback.format_exc()))
        else:
            available_opts = [item[0] for item in ACTIONS.values()]
            for count, line in enumerate(meta):
                print('{:3}: {} {}'.format(count, *line))
        answer = input(inverted(', '.join(available_opts) + '? ')).split()
        if not answer:
            continue
        if any([answer[0] == char for char in AVAILABLE_IDS]):
            ids = [int(item) for item in answer[1].split(',')]
        queue_name = None
        if answer[0] == 'a':
            # recover all -> recover all by ids
            answer[0] = 'r'
            ids = range(len(meta))
            if len(answer) > 1:
                queue_name = answer[1]
        if answer[0] == 'q':
            break
        elif answer[0] == 'e':
            # Delete entries
            for entry in ids:
                del content[meta[entry][0]]
            save_file(fname, content)
        elif answer[0] == 'r':
            # recover entries
            for key, entry in [item for (count, item)
                               in enumerate(content.items()) if count in ids]:
                if type(entry['message']) is dict:
                    if '__type' in entry['message']:
                        msg = json.dumps(entry['message'])
                    # backwards compat: dumps had no type info
                    elif '-parser' in entry['bot_id']:
                        msg = message.Report(entry['message']).serialize()
                    else:
                        msg = message.Event(entry['message']).serialize()
                elif issubclass(type(entry['message']), (six.binary_type,
                                                         six.text_type)):
                    msg = entry['message']
                elif entry['message'] is None:
                    print(bold('No message here, deleting directly.'))
                    del content[key]
                    save_file(fname, content)
                    continue
                else:
                    print(bold('Unhandable type of message: {!r}'
                               ''.format(type(entry['message']))))
                    continue
                print(entry['source_queue'])

                default = utils.load_configuration(DEFAULTS_CONF_FILE)
                runtime = utils.load_configuration(RUNTIME_CONF_FILE)
                params = utils.load_parameters(default, runtime)
                pipe = pipeline.PipelineFactory.create(params)
                if queue_name is None:
                    if len(answer) == 3:
                        queue_name = answer[2]
                    else:
                        queue_name = entry['source_queue']
                try:
                    pipe.set_queues(queue_name, 'destination')
                    pipe.connect()
                    pipe.send(msg)
                except exceptions.PipelineError:
                    print(red('Could not reinject into queue {}: {}'
                              ''.format(queue_name, traceback.format_exc())))
                else:
                    del content[key]
                    save_file(fname, content)
        elif answer[0] == 'd':
            # delete dumpfile
            os.remove(fname)
            print('Deleted file {}'.format(fname))
            break
        elif answer[0] == 's':
            # Show entries by id
            for count, (key, value) in enumerate(content.items()):
                if count not in ids:
                    continue
                print('=' * 100, '\nShowing id {} {}\n'.format(count, key),
                      '-' * 50)
                if isinstance(value['message'], (six.binary_type,
                                                 six.text_type)):
                    value['message'] = json.loads(value['message'])
                    if ('raw' in value['message'] and
                            len(value['message']['raw']) > 1000):
                        value['message']['raw'] = value['message'][
                            'raw'][:1000] + '...[truncated]'
                value['traceback'] = value['traceback'].splitlines()
                pprint.pprint(value)
Example #9
    def send(self, taxonomy, contact, query, incident_id, requestor=None):
        if not query:
            self.logger.error("No data!")
            return False
        if not requestor:
            requestor = contact

        # PREPARATION
        query = self.shrink_dict(query)
        ids = list(str(row["id"]) for row in query)

        subject = "{tax} in your network: {date}" "".format(
            date=datetime.datetime.now().strftime("%Y-%m-%d"), tax=lib.SUBJECT[taxonomy]
        )
        text = self.get_text(taxonomy)
        if six.PY2:
            csvfile = io.BytesIO()
        else:
            csvfile = io.StringIO()
        if lib.CSV_FIELDS:
            fieldnames = lib.CSV_FIELDS
        else:
            fieldnames = query[0].keys()  # send all
        writer = csv.DictWriter(
            csvfile,
            fieldnames=fieldnames,
            quoting=csv.QUOTE_MINIMAL,
            delimiter=str(";"),
            extrasaction="ignore",
            lineterminator="\n",
        )
        writer.writeheader()
        query_unicode = query
        if six.PY2:
            query = [
                {key: utils.encode(val) if isinstance(val, six.text_type) else val for key, val in row.items()}
                for row in query
            ]
        writer.writerows(query)
        # note this might contain UTF-8 chars! let's ignore utf-8 errors. sorry.
        if six.PY2:
            data = unicode(csvfile.getvalue(), "utf-8")
        else:
            data = csvfile.getvalue()
        attachment_text = data.encode("ascii", "ignore")
        attachment_lines = attachment_text.splitlines()

        if self.verbose:
            self.logger.info(text)

        showed_text = (
            "=" * 100
            + """
To: {to}
Subject: {subj}

{text}
    """.format(
                to=requestor, subj=subject, text=text
            )
        )
        showed_text_len = showed_text.count("\n")

        # SHOW DATA
        if self.table_mode and six.PY2:
            self.logger.error("Sorry, no table mode for ancient python versions!")
            self.table_mode = False
        elif self.table_mode and not six.PY2:
            if self.quiet:
                height = 80  # assume anything for quiet mode
            else:
                height = lib.getTerminalHeight() - 3 - showed_text_len
            csvfile.seek(0)
            if len(query) > height:
                with tempfile.NamedTemporaryFile(mode="w+") as handle:
                    handle.write(showed_text + "\n")
                    handle.write(tabulate.tabulate(query, headers="keys", tablefmt="psql"))
                    handle.seek(0)
                    subprocess.call(["less", handle.name])
            else:
                self.logger.info(showed_text)
                self.logger.info(tabulate.tabulate(query_unicode, headers="keys", tablefmt="psql"))
        else:
            if self.quiet:
                height = 80
            else:
                height = lib.getTerminalHeight() - 4
            if 5 + len(query) > height:  # cut query too, 5 is length of text
                self.logger.info("\n".join(showed_text.splitlines()[:5]))
                self.logger.info("...")
                self.logger.info("\n".join(attachment_lines[: height - 5]))
                self.logger.info("...")
            elif showed_text_len + len(query) > height > 5 + len(query):
                self.logger.info("\n".join(showed_text.splitlines()[: height - len(query)]))
                self.logger.info("...")
                self.logger.info(attachment_text)
            else:
                self.logger.info(showed_text)
                self.logger.info(attachment_text)
        self.logger.info("-" * 100)

        # MENU
        if self.batch and requestor:
            answer = "s"
        else:
            answer = "q"
            if self.batch:
                self.logger.error("You need to set a valid requestor!")
            else:
                answer = input(
                    "{i}{b}[a]{i}utomatic, {b}[n]{i}ext, {i}{b}[s]{i}end, show "
                    "{b}[t]{i}able, change {b}[r]{i}equestor or {b}[q]{i}uit?{r} "
                    "".format(b=bold, i=myinverted, r=reset)
                ).strip()
        if answer == "q":
            exit(0)
        elif answer == "n":
            return False
        elif answer == "a":
            self.batch = True
        elif answer == "t":
            self.table_mode = bool((self.table_mode + 1) % 2)
            return self.send(taxonomy, contact, query, incident_id, requestor)
        elif answer == "r":
            answer = input(inverted("New requestor address:") + " ").strip()
            if len(answer) == 0:
                requestor = contact
            else:
                requestor = answer
            return self.send(taxonomy, contact, query, incident_id, requestor)
        elif answer != "s":
            self.logger.error("Unknow command {!r}.".format(answer))
            return self.send(taxonomy, contact, query, incident_id, requestor)

        if text is None:
            self.logger.error("I won't send with a missing text!")
            return False

        # INVESTIGATION
        if self.dryrun:
            self.logger.info("Simulate creation of investigation.")
            investigation_id = -1
        else:
            investigation_id = self.rt.create_ticket(
                Queue="Investigations", Subject=subject, Owner=self.config["rt"]["user"], Requestor=requestor
            )

            if investigation_id == -1:
                self.logger.error("Could not create Investigation.")
                return False

            self.logger.info("Created Investigation {}.".format(investigation_id))
            if not self.rt.edit_link(incident_id, "HasMember", investigation_id):
                self.logger.error("Could not link Investigation to Incident.")
                return False

            self.executemany(
                "UPDATE events SET rtir_investigation_id = %s WHERE id = %s", [(investigation_id, evid) for evid in ids]
            )
            self.logger.info("Linked events to investigation.")

        # CORRESPOND
        filename = "%s-%s.csv" % (datetime.datetime.now().strftime("%Y-%m-%d"), taxonomy)
        if self.zipme or len(query) > self.config["rt"]["zip_threshold"]:
            attachment = io.BytesIO()
            ziphandle = zipfile.ZipFile(attachment, mode="w", compression=zipfile.ZIP_DEFLATED)
            data = csvfile.getvalue()
            if six.PY2:
                data = unicode(data, "utf-8")
            ziphandle.writestr("events.csv", data.encode("utf-8"))
            ziphandle.close()
            attachment.seek(0)
            filename += ".zip"
            mimetype = "application/octet-stream"
        else:
            attachment = csvfile
            attachment.seek(0)
            mimetype = "text/csv"

        try:
            # TODO: CC
            if self.dryrun:
                self.logger.info("Simulate creation of correspondence.")
            else:
                correspond = self.rt.reply(investigation_id, text=text, files=[(filename, attachment, mimetype)])
                if not correspond:
                    self.logger.error("Could not correspond with text and file.")
                    return False
                self.logger.info("Correspondence added to Investigation.")

            self.execute(
                "UPDATE events SET sent_at = LOCALTIMESTAMP WHERE " "rtir_investigation_id = %s", (investigation_id,)
            )
            self.logger.info("Marked events as sent.")
        except Exception:
            self.con.rollback()
            raise
        else:
            self.con.commit()

            # RESOLVE
            try:
                if not self.dryrun and not self.rt.edit_ticket(investigation_id, Status="resolved"):
                    self.logger.error("Could not close investigation {}.".format(investigation_id))
            except IndexError:
                # Bug in RT/python-rt
                pass

        if requestor != contact:
            asns = set(str(row["source.asn"]) for row in query)
            answer = input(
                inverted("Save recipient {!r} for ASNs {!s}? [Y/n] " "".format(requestor, ", ".join(asns)))
            ).strip()
            if answer.strip().lower() in ("", "y", "j"):
                self.executemany(lib.QUERY_UPDATE_CONTACT, [(requestor, asn) for asn in asns])
                self.con.commit()
                if self.cur.rowcount == 0:
                    self.query_insert_contact(asns=asns, contact=requestor)

        return True
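
The zip branch above compresses the CSV into an in-memory buffer before handing it to RT as an attachment. Stripped of the py2/py3 compatibility shims, the core is zipfile writing into io.BytesIO (a standalone sketch; csv_data stands in for csvfile.getvalue()):

import io
import zipfile

csv_data = "id;source.ip\n1;192.0.2.1\n"  # placeholder for the generated CSV text

attachment = io.BytesIO()
with zipfile.ZipFile(attachment, mode="w", compression=zipfile.ZIP_DEFLATED) as ziphandle:
    ziphandle.writestr("events.csv", csv_data.encode("utf-8"))
attachment.seek(0)  # rewind so the consumer reads from the start
# attachment can now be passed as a file-like object, e.g. in files=[(filename, attachment, mimetype)]
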
Example #10
def main():
    parser = argparse.ArgumentParser(
        prog=APPNAME,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
    )

    parser.add_argument('botid',
                        metavar='botid',
                        nargs='?',
                        default=None,
                        help='botid to inspect dumps of')
    args = parser.parse_args()

    if args.botid is None:
        filenames = glob.glob(os.path.join(DEFAULT_LOGGING_PATH, '*.dump'))
        if not len(filenames):
            print(green('Nothing to recover from, no dump files found!'))
            exit(0)
        filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5])
                     for fname in sorted(filenames)]

        length = max([len(value[1]) for value in filenames])
        print(
            bold("{c:>3}: {s:{l}} {i}".format(c='id',
                                              s='name (bot id)',
                                              i='content',
                                              l=length)))
        for count, (fname, shortname) in enumerate(filenames):
            info = dump_info(fname)
            print("{c:3}: {s:{l}} {i}".format(c=count,
                                              s=shortname,
                                              i=info,
                                              l=length))
        botid = input(inverted('Which dump file to process (id or name)? '))
        botid = botid.strip()
        if botid == 'q' or not botid:
            exit(0)
        try:
            fname, botid = filenames[int(botid)]
        except ValueError:
            fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'
    else:
        botid = args.botid
        fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'

    if not os.path.isfile(fname):
        print(bold('Given file does not exist: {}'.format(fname)))
        exit(1)
    while True:
        info = dump_info(fname)
        print('Processing {}: {}'.format(bold(botid), info))
        try:
            with io.open(fname, 'rt') as handle:
                content = json.load(handle)
            meta = load_meta(content)
        except ValueError:
            available_opts = [item[0] for item in ACTIONS.values() if item[2]]
            print(
                bold('Could not load file:') + '\n{}\nRestricted actions.'
                ''.format(traceback.format_exc()))
        else:
            available_opts = [item[0] for item in ACTIONS.values()]
            for count, line in enumerate(meta):
                print('{:3}: {} {}'.format(count, *line))
        answer = input(inverted(', '.join(available_opts) + '? ')).split()
        if not answer:
            continue
        if any([answer[0] == char for char in AVAILABLE_IDS]):
            ids = [int(item) for item in answer[1].split(',')]
        queue_name = None
        if answer[0] == 'a':
            # recover all -> recover all by ids
            answer[0] = 'r'
            ids = range(len(meta))
            if len(answer) > 1:
                queue_name = answer[1]
        if answer[0] == 'q':
            break
        elif answer[0] == 'e':
            # Delete entries
            for entry in ids:
                del content[meta[entry][0]]
            save_file(fname, content)
        elif answer[0] == 'r':
            # recover entries
            for key, entry in [
                    item for (count, item) in enumerate(content.items())
                    if count in ids
            ]:
                if type(entry['message']) is dict:
                    if '__type' in entry['message']:
                        msg = json.dumps(entry['message'])
                    # backwards compat: dumps had no type info
                    elif '-parser' in entry['bot_id']:
                        msg = message.Report(entry['message']).serialize()
                    else:
                        msg = message.Event(entry['message']).serialize()
                elif issubclass(type(entry['message']),
                                (six.binary_type, six.text_type)):
                    msg = entry['message']
                elif entry['message'] is None:
                    print(bold('No message here, deleting directly.'))
                    del content[key]
                    save_file(fname, content)
                    continue
                else:
                    print(
                        bold('Unhandled type of message: {!r}'
                             ''.format(type(entry['message']))))
                    continue
                print(entry['source_queue'])

                default = utils.load_configuration(DEFAULTS_CONF_FILE)
                runtime = utils.load_configuration(RUNTIME_CONF_FILE)
                params = utils.load_parameters(default, runtime)
                pipe = pipeline.PipelineFactory.create(params)
                if queue_name is None:
                    if len(answer) == 3:
                        queue_name = answer[2]
                    else:
                        queue_name = entry['source_queue']
                try:
                    pipe.set_queues(queue_name, 'destination')
                    pipe.connect()
                    pipe.send(msg)
                except exceptions.PipelineError:
                    print(
                        red('Could not reinject into queue {}: {}'
                            ''.format(queue_name, traceback.format_exc())))
                else:
                    del content[key]
                    save_file(fname, content)
        elif answer[0] == 'd':
            # delete dumpfile
            os.remove(fname)
            print('Deleted file {}'.format(fname))
            break
        elif answer[0] == 's':
            # Show entries by id
            for count, (key, orig_value) in enumerate(content.items()):
                if count not in ids:
                    continue
                value = dict(orig_value)  # show a copy so the stored entry is not modified
                print('=' * 100, '\nShowing id {} {}\n'.format(count, key),
                      '-' * 50)
                if isinstance(value['message'],
                              (six.binary_type, six.text_type)):
                    value['message'] = json.loads(value['message'])
                    if ('raw' in value['message']
                            and len(value['message']['raw']) > 1000):
                        value['message']['raw'] = value['message'][
                            'raw'][:1000] + '...[truncated]'
                if not isinstance(value['traceback'], list):
                    value['traceback'] = value['traceback'].splitlines()
                pprint.pprint(value)
Exemplo n.º 11
0
def main():
    parser = argparse.ArgumentParser(
        prog=APPNAME,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
    )

    parser.add_argument('botid', metavar='botid', nargs='?',
                        default=None, help='botid to inspect dumps of')
    args = parser.parse_args()

    # Try to get the log level from the defaults configuration, else use the built-in default
    try:
        log_level = utils.load_configuration(DEFAULTS_CONF_FILE)['logging_level']
    except Exception:
        log_level = DEFAULT_LOGGING_LEVEL

    try:
        logger = utils.log('intelmqdump', log_level=log_level)
    except (FileNotFoundError, PermissionError) as exc:
        logger = utils.log('intelmqdump', log_level=log_level, log_path=False)
        logger.error('Not logging to file: %s', exc)

    ctl = intelmqctl.IntelMQController()
    readline.parse_and_bind("tab: complete")
    readline.set_completer_delims('')

    pipeline_config = utils.load_configuration(PIPELINE_CONF_FILE)
    pipeline_pipes = {}
    for bot, pipes in pipeline_config.items():
        pipeline_pipes[pipes.get('source-queue', '')] = bot

    if args.botid is None:
        filenames = glob.glob(os.path.join(DEFAULT_LOGGING_PATH, '*.dump'))
        if not len(filenames):
            print(green('Nothing to recover from, no dump files found!'))
            sys.exit(0)
        filenames = [(fname, fname[len(DEFAULT_LOGGING_PATH):-5])
                     for fname in sorted(filenames)]

        length = max([len(value[1]) for value in filenames])
        print(bold("{c:>3}: {s:{length}} {i}".format(c='id', s='name (bot id)',
                                                     i='content',
                                                     length=length)))
        for count, (fname, shortname) in enumerate(filenames):
            info = dump_info(fname)
            print("{c:3}: {s:{length}} {i}".format(c=count, s=shortname, i=info,
                                                   length=length))
        try:
            bot_completer = Completer(possible_values=[f[1] for f in filenames])
            readline.set_completer(bot_completer.complete)
            botid = input(inverted('Which dump file to process (id or name)?') +
                          ' ')
        except EOFError:
            sys.exit(0)
        else:
            botid = botid.strip()
            if botid == 'q' or not botid:
                sys.exit(0)
        try:
            fname, botid = filenames[int(botid)]
        except ValueError:
            fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'
    else:
        botid = args.botid
        fname = os.path.join(DEFAULT_LOGGING_PATH, botid) + '.dump'

    if not os.path.isfile(fname):
        print(bold('Given file does not exist: {}'.format(fname)))
        sys.exit(1)

    answer = None
    delete_file = False
    while True:
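        # Open the dump and take an exclusive, non-blocking lock so a running
        # bot cannot write the file while it is being edited here.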
        with open(fname, 'r+') as handle:
            try:
                fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except BlockingIOError:
                print(red('Dump file is currently locked. Stopping.'))
                break
            info = dump_info(fname, file_descriptor=handle)
            handle.seek(0)
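            # Each ACTIONS value carries the option text shown in the prompt
            # (item[0]) and, in item[2], whether the action stays available
            # for unreadable dumps or unknown bots.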
            available_answers = ACTIONS.keys()
            print('Processing {}: {}'.format(bold(botid), info))

            if info.startswith(str(red)):
                available_opts = [item[0] for item in ACTIONS.values() if item[2]]
                available_answers = [k for k, v in ACTIONS.items() if v[2]]
                print('Restricted actions.')
            else:
                # don't display list after 'show' and 'recover' command
                if not (answer and isinstance(answer, list) and answer[0] in ['s', 'r']):
                    content = json.load(handle)
                    handle.seek(0)
                    content = OrderedDict(sorted(content.items(), key=lambda t: t[0]))  # sort by key here, #1280
                    meta = load_meta(content)

                    available_opts = [item[0] for item in ACTIONS.values()]
                    for count, line in enumerate(meta):
                        print('{:3}: {} {}'.format(count, *line))

            # Determine bot status
            try:
                bot_status = ctl.bot_status(botid)
                if bot_status[1] == 'running':
                    print(red('This bot is currently running, the dump file is now locked and '
                              'the bot can\'t write it.'))
            except KeyError:
                bot_status = 'error'
                print(red('Attention: This bot is not defined!'))
                available_opts = [item[0] for item in ACTIONS.values() if item[2]]
                available_answers = [k for k, v in ACTIONS.items() if v[2]]
                print('Restricted actions.')

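            # Offer tab completion for the action letters and, for 'r'/'a',
            # for the known source queue names.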
            try:
                possible_answers = list(available_answers)
                for id_action in ['r', 'a']:
                    if id_action in possible_answers:
                        possible_answers[possible_answers.index(id_action)] = id_action + ' '
                action_completer = Completer(possible_answers, queues=pipeline_pipes.keys())
                readline.set_completer(action_completer.complete)
                answer = input(inverted(', '.join(available_opts) + '?') + ' ').split()
            except EOFError:
                break
            else:
                if not answer:
                    continue
            if len(answer) == 0 or answer[0] not in available_answers:
                print('Action not allowed.')
                continue
            if any([answer[0] == char for char in AVAILABLE_IDS]) and len(answer) > 1:
                ids = [int(item) for item in answer[1].split(',')]
            else:
                ids = []
            queue_name = None
            if answer[0] == 'a':
                # recover all -> recover all by ids
                answer[0] = 'r'
                ids = range(len(meta))
                if len(answer) > 1:
                    queue_name = answer[1]
            if answer[0] == 'q':
                break
            elif answer[0] == 'e':
                # Delete entries
                for entry in ids:
                    del content[meta[entry][0]]
                save_file(handle, content)
            elif answer[0] == 'r':
                # recover entries
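                # The pipeline is built once from the defaults and runtime
                # configuration and reused for every selected entry.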
                default = utils.load_configuration(DEFAULTS_CONF_FILE)
                runtime = utils.load_configuration(RUNTIME_CONF_FILE)
                params = utils.load_parameters(default, runtime)
                pipe = pipeline.PipelineFactory.create(params, logger)
                try:
                    for i, (key, entry) in enumerate([item for (count, item)
                                                      in enumerate(content.items()) if count in ids]):
                        if entry['message']:
                            msg = copy.copy(entry['message'])  # otherwise the message field gets converted
                            if isinstance(msg, dict):
                                msg = json.dumps(msg)
                        else:
                            print('No message here, deleting entry.')
                            del content[key]
                            continue

                        if queue_name is None:
                            if len(answer) == 3:
                                queue_name = answer[2]
                            else:
                                queue_name = entry['source_queue']
                        if queue_name in pipeline_pipes:
                            if (runtime[pipeline_pipes[queue_name]]['group'] == 'Parser' and
                                    json.loads(msg)['__type'] == 'Event'):
                                print('Event converted to Report automatically.')
                                msg = message.Report(
                                    message.MessageFactory.unserialize(msg)).serialize()
                        try:
                            pipe.set_queues(queue_name, 'destination')
                            pipe.connect()
                            pipe.send(msg)
                        except exceptions.PipelineError:
                            print(red('Could not reinject into queue {}: {}'
                                      ''.format(queue_name, traceback.format_exc())))
                        else:
                            del content[key]
                            print(green('Recovered dump {}.'.format(i)))
                finally:
                    save_file(handle, content)
                if not content:
                    delete_file = True
                    print('Deleting empty file {}'.format(fname))
                    break
            elif answer[0] == 'd':
                # delete dumpfile
                delete_file = True
                print('Deleting file {}'.format(fname))
                break
            elif answer[0] == 's':
                # Show entries by id
                for count, (key, orig_value) in enumerate(content.items()):
                    value = copy.copy(orig_value)  # otherwise the raw field gets truncated
                    if count not in ids:
                        continue
                    print('=' * 100, '\nShowing id {} {}\n'.format(count, key),
                          '-' * 50)
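                    # Long 'raw' fields are shortened to 1000 characters so
                    # the pprint output stays readable.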
                    if isinstance(value['message'], (bytes, str)):
                        value['message'] = json.loads(value['message'])
                        if ('raw' in value['message'] and
                                len(value['message']['raw']) > 1000):
                            value['message']['raw'] = value['message'][
                                'raw'][:1000] + '...[truncated]'
                    if type(value['traceback']) is not list:
                        value['traceback'] = value['traceback'].splitlines()
                    pprint.pprint(value)

    if delete_file:
        os.remove(fname)
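
Note: both variants above call helpers such as dump_info, load_meta and save_file that are not part of these excerpts. Purely for orientation, here is a minimal sketch of what load_meta and save_file might look like, inferred only from how they are used above (a list of (key, description) pairs consumed by the id listing, and an in-place JSON rewrite of the open dump); the actual intelmq implementations may differ.

import json


def load_meta(content):
    # Sketch only: one (key, short description) pair per dump entry, matching
    # the "{:3}: {} {}".format(count, *line) listing above.
    meta = []
    for key, entry in content.items():
        error = entry.get('traceback', '')
        if isinstance(error, list):
            error = error[-1] if error else ''
        meta.append((key, error.strip().split('\n')[-1]))
    return meta


def save_file(handle, content):
    # Sketch only: rewrite the already-open dump file in place as JSON
    # (this signature matches the second example, which passes the handle).
    handle.seek(0)
    handle.truncate()
    json.dump(content, handle, indent=4, sort_keys=True)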