Example #1
def main(args):
    """
    Try to cover possible logging scenarios. For each scenario covered, here are the expected args and the outcome:
    - Log without enabling: ['<level>']
        * logging when being imported - should never print
    - Log as JSON without enabling: ['as-json', '<level>']
        * logging as JSON when being imported - should never print
    - Enable and log: ['enable', '<level>']
        * logging when being run as script - should log messages
    - Enable and log as JSON: ['as-json', 'enable', '<level>']
        * logging as JSON when being run as script - should log messages as JSON
    - Enable, log as JSON and throw: ['enable', 'as-json', 'throw', '<level>']
        * should produce JSON-compatible output, even after an unhandled exception
    """

    # the level should always be the last argument passed
    level = args[-1]
    use_json = 'as-json' in args
    throw = 'throw' in args
    percent_autoformat = '%-autoformat' in args

    log_helper_test_imported.logger.setLevel(logging.ERROR)

    if 'enable' in args:
        log_helper.enable_logging(use_json, level, stream=sys.stdout)

    do_log(percent_autoformat)

    if throw:
        raise Exception('An exception occurred before ending the logging')

    log_helper.end_logging()
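The docstring above enumerates the argument combinations this test covers. As a rough illustration, a driver along the following lines could exercise each scenario as a separate script run; the module path used here is a placeholder, not part of oletools.

# Hypothetical driver for the scenarios listed in the docstring above.
# The module path 'tests.test_log_helper_script' is a placeholder.
import subprocess
import sys

SCENARIOS = [
    ['debug'],                                # log without enabling
    ['as-json', 'debug'],                     # log as JSON without enabling
    ['enable', 'debug'],                      # enable and log
    ['as-json', 'enable', 'debug'],           # enable and log as JSON
    ['enable', 'as-json', 'throw', 'debug'],  # enable, log as JSON and throw
]

for scenario in SCENARIOS:
    proc = subprocess.run(
        [sys.executable, '-m', 'tests.test_log_helper_script'] + scenario,
        capture_output=True, text=True)
    print(scenario, '-> exit code', proc.returncode)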
Example #2
def test():
    """ Main function, called when running file as script

    see module doc for more info
    """
    log_helper.enable_logging(False, 'debug')   # level is given as a string
    if len(sys.argv) != 2:
        print(u'To test this code, give me a single file as arg')
        return 2

    # test get_type
    print('Detected type: ' + get_type(sys.argv[1]))

    # test complete parsing
    parser = XmlParser(sys.argv[1])
    for subfile, elem, depth in parser.iter_xml():
        if depth < 4:
            print(u'{0} {1}{2}'.format(subfile, '  ' * depth, debug_str(elem)))
    for index, (subfile, content_type, _) in enumerate(parser.iter_non_xml()):
        print(u'Non-XML subfile: {0} of type {1}'
              .format(subfile, content_type or u'unknown'))
        if index > 100:
            print(u'...')
            break

    log_helper.end_logging()

    return 0
Example #3
def test():
    """
    Test xml parsing; called when running this file as a script.

    Prints every element found in input file (to be given as command line arg).
    """
    log_helper.enable_logging(False, 'debug')
    if len(sys.argv) != 2:
        print(u'To test this code, give me a single file as arg')
        return 2

    # test get_type
    print('Detected type: ' + get_type(sys.argv[1]))

    # test complete parsing
    parser = XmlParser(sys.argv[1])
    for subfile, elem, depth in parser.iter_xml():
        if depth < 4:
            print(u'{0} {1}{2}'.format(subfile, '  ' * depth, debug_str(elem)))
    for index, (subfile, content_type, _) in enumerate(parser.iter_non_xml()):
        print(u'Non-XML subfile: {0} of type {1}'.format(
            subfile, content_type or u'unknown'))
        if index > 100:
            print(u'...')
            break

    log_helper.end_logging()

    return 0
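Both test() helpers above return an exit code and read the input path from sys.argv. A minimal sketch of how such an entry point is usually wired up when the module is run directly; placing it at module level is an assumption about the surrounding file.

# Minimal wiring sketch for the test() entry point above (assumed placement).
if __name__ == '__main__':
    import sys
    sys.exit(test())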
Example #4
def init_logging_and_log(args):
    """
    Try to cover possible logging scenarios. For each scenario covered, here are the expected args and the outcome:
    - Log without enabling: ['<level>']
        * logging when being imported - should never print
    - Log as JSON without enabling: ['as-json', '<level>']
        * logging as JSON when being imported - should never print
    - Enable and log: ['enable', '<level>']
        * logging when being run as script - should log messages
    - Enable and log as JSON: ['as-json', 'enable', '<level>']
        * logging as JSON when being run as script - should log messages as JSON
    - Enable, log as JSON and throw: ['enable', 'as-json', 'throw', '<level>']
        * should produce JSON-compatible output, even after an unhandled exception
    """

    # the level should always be the last argument passed
    level = args[-1]
    use_json = 'as-json' in args
    throw = 'throw' in args

    if 'enable' in args:
        log_helper.enable_logging(use_json, level, stream=sys.stdout)

    _log()

    if throw:
        raise Exception('An exception occurred before ending the logging')

    log_helper.end_logging()
Example #5
def main(cmd_line_args=None):
    """ Main function, called if this file is called as a script

    Optional argument: command line arguments to be forwarded to ArgumentParser
    in process_args. Per default (cmd_line_args=None), sys.argv is used. Option
    mainly added for unit-testing
    """
    args = process_args(cmd_line_args)

    # Setup logging to the console:
    # here we use stdout instead of stderr by default, so that the output
    # can be redirected properly.
    log_helper.enable_logging(args.json, args.loglevel, stream=sys.stdout)

    if args.nounquote:
        global NO_QUOTES
        NO_QUOTES = True

    logger.print_str(BANNER)
    logger.print_str('Opening file: %s' % args.filepath)

    text = ''
    return_code = 1
    try:
        text = process_file(args.filepath, args.field_filter_mode)
        return_code = 0
    except Exception as exc:
        logger.exception(str(exc))  # exc.message does not exist in Python 3

    logger.print_str('DDE Links:')
    logger.print_str(text)

    log_helper.end_logging()

    return return_code
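main() forwards cmd_line_args to process_args, so unit tests can pass an explicit argument list instead of patching sys.argv. The sketch below shows one way such a process_args could look; the option names simply mirror the attributes used above (json, loglevel, nounquote, filepath, field_filter_mode) and are not necessarily the real interface.

# Illustrative sketch only - not the actual process_args used above.
import argparse

def process_args(cmd_line_args=None):
    parser = argparse.ArgumentParser(description='Analyze DDE links in a file')
    parser.add_argument('filepath', help='file to analyze')
    parser.add_argument('-j', '--json', action='store_true',
                        help='output results as JSON')
    parser.add_argument('--nounquote', action='store_true',
                        help='do not unquote values')
    parser.add_argument('--loglevel', default='warning',
                        help='logging level debug/info/warning/error/critical')
    parser.add_argument('--field-filter-mode', dest='field_filter_mode',
                        default=None, help='how to filter field codes')
    # parse_args(None) falls back to sys.argv[1:], which is exactly the
    # behaviour described in the docstring of main() above.
    return parser.parse_args(cmd_line_args)

A test can then call main(['--json', 'sample.doc']) without touching sys.argv (the file name is a placeholder).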
Example #6
def run_print():
    """This is called from test_read* tests as script. Prints & logs unicode"""
    from oletools.common.io_encoding import ensure_stdout_handles_unicode
    from oletools.common.log_helper import log_helper
    ensure_stdout_handles_unicode()
    print(u'Check: \u2713')  # print check mark

    # check logging as well
    logger = log_helper.get_or_create_silent_logger('test_encoding_handler')
    log_helper.enable_logging(False, 'debug', stream=sys.stdout)
    logger.info(u'Check: \u2713')
    return 0
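ensure_stdout_handles_unicode() is there so the check mark can be printed even when the stream encoding cannot represent it. A rough sketch of what such a safeguard can look like; this is not the actual oletools implementation.

# Rough sketch, not the actual oletools code: rewrap sys.stdout so that
# characters the stream encoding cannot represent are replaced instead of
# raising UnicodeEncodeError.
import io
import sys

def ensure_stdout_handles_unicode_sketch():
    encoding = getattr(sys.stdout, 'encoding', None) or 'ascii'
    if 'utf' in encoding.lower():
        return  # stream can already encode any character
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding=encoding,
                                  errors='replace', line_buffering=True)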
Example #7
def main():
    """Called when running this file as script. Shows all info on input file."""
    # print banner with version
    print('oleid %s - http://decalage.info/oletools' % __version__)
    print('THIS IS WORK IN PROGRESS - Check updates regularly!')
    print('Please report any issue at '
          'https://github.com/decalage2/oletools/issues')
    print('')

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('input',
                        type=str,
                        nargs='*',
                        metavar='FILE',
                        help='Name of files to process')
    # parser.add_argument('-o', '--ole', action='store_true', dest='ole',
    #                   help='Parse an OLE file (e.g. Word, Excel) to look for '
    #                        'SWF in each stream')

    args = parser.parse_args()

    # Print help if no arguments are passed
    if len(args.input) == 0:
        parser.print_help()
        return

    log_helper.enable_logging()

    for filename in args.input:
        print('Filename:', filename)
        oleid = OleID(filename)
        indicators = oleid.check()

        #TODO: add description
        #TODO: highlight suspicious indicators
        table = tablestream.TableStream(
            [20, 20, 10, 26],
            header_row=['Indicator', 'Value', 'Risk', 'Description'],
            style=tablestream.TableStyleSlimSep)
        for indicator in indicators:
            if not (indicator.hide_if_false and not indicator.value):
                #print '%s: %s' % (indicator.name, indicator.value)
                color = risk_color.get(indicator.risk, None)
                table.write_row((indicator.name, indicator.value,
                                 indicator.risk, indicator.description),
                                colors=(color, color, color, None))
        table.close()
Example #8
def test(filenames,
         ole_file_class=OleRecordFile,
         must_parse=None,
         do_per_record=None,
         verbose=False):
    """ parse all given file names and print rough structure

    if an error occurs while parsing a stream of type in must_parse, the error
    will be raised. Otherwise a message is printed
    """
    log_helper.enable_logging(False, 'debug' if verbose else 'info')
    if do_per_record is None:

        def do_per_record(record):  # pylint: disable=function-redefined
            pass  # do nothing

    if not filenames:
        logger.info('need file name[s]')
        return 2
    for filename in filenames:
        logger.info('checking file {0}'.format(filename))
        if not olefile.isOleFile(filename):
            logger.info('not an ole file - skip')
            continue
        ole = ole_file_class(filename)

        for stream in ole.iter_streams():
            logger.info('  parse ' + str(stream))
            try:
                for record in stream.iter_records():
                    logger.info('    ' + str(record))
                    do_per_record(record)
            except Exception:
                if not must_parse:
                    raise
                elif isinstance(stream, must_parse):
                    raise
                else:
                    logger.info('  failed to parse', exc_info=True)

    log_helper.end_logging()
    return 0
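Since test() accepts both a stream-type filter and a per-record callback, it can be reused for quick ad-hoc analyses. A hedged usage example follows; the file name and the collected attribute are placeholders for illustration only.

# Hypothetical call of test() above; 'workbook.xls' and the 'size'
# attribute are placeholders, not guaranteed record fields.
sizes = []

def collect_size(record):
    sizes.append(getattr(record, 'size', None))

status = test(['workbook.xls'],
              must_parse=None,            # with None, any parse error is re-raised
              do_per_record=collect_size,
              verbose=True)
print(status, len(sizes), 'records seen')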
Example #9
def main():
    """
    Main function, called when mraptor is run from the command line
    """
    DEFAULT_LOG_LEVEL = "warning" # Default log level

    usage = 'usage: mraptor [options] <filename> [filename2 ...]'
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-r", action="store_true", dest="recursive",
                      help='find files recursively in subdirectories.')
    parser.add_option("-z", "--zip", dest='zip_password', type='str', default=None,
                      help='if the file is a zip archive, open all files from it, using the provided password (requires Python 2.6+)')
    parser.add_option("-f", "--zipfname", dest='zip_fname', type='str', default='*',
                      help='if the file is a zip archive, file(s) to be opened within the zip. Wildcards * and ? are supported. (default:*)')
    parser.add_option('-l', '--loglevel', dest="loglevel", action="store", default=DEFAULT_LOG_LEVEL,
                            help="logging level debug/info/warning/error/critical (default=%default)")
    parser.add_option("-m", '--matches', action="store_true", dest="show_matches",
                      help='Show matched strings.')

    # TODO: add logfile option

    (options, args) = parser.parse_args()

    # Print help if no arguments are passed
    if len(args) == 0:
        print('MacroRaptor %s - http://decalage.info/python/oletools' % __version__)
        print('This is work in progress, please report issues at %s' % URL_ISSUES)
        print(__doc__)
        parser.print_help()
        print('\nAn exit code is returned based on the analysis result:')
        for result in (Result_NoMacro, Result_NotMSOffice, Result_MacroOK, Result_Error, Result_Suspicious):
            print(' - %d: %s' % (result.exit_code, result.name))
        sys.exit()

    # print banner with version
    print('MacroRaptor %s - http://decalage.info/python/oletools' % __version__)
    print('This is work in progress, please report issues at %s' % URL_ISSUES)

    log_helper.enable_logging(level=options.loglevel)
    # enable logging in the modules:
    olevba.enable_logging()

    t = tablestream.TableStream(style=tablestream.TableStyleSlim,
            header_row=['Result', 'Flags', 'Type', 'File'],
            column_width=[10, 5, 4, 56])

    exitcode = -1
    global_result = None
    # TODO: handle errors in xglob, to continue processing the next files
    for container, filename, data in xglob.iter_files(args, recursive=options.recursive,
                                                      zip_password=options.zip_password, zip_fname=options.zip_fname):
        # ignore directory names stored in zip files:
        if container and filename.endswith('/'):
            continue
        full_name = '%s in %s' % (filename, container) if container else filename
        # try:
        #     # Open the file
        #     if data is None:
        #         data = open(filename, 'rb').read()
        # except:
        #     log.exception('Error when opening file %r' % full_name)
        #     continue
        if isinstance(data, Exception):
            result = Result_Error
            t.write_row([result.name, '', '', full_name],
                        colors=[result.color, None, None, None])
            t.write_row(['', '', '', str(data)],
                        colors=[None, None, None, result.color])
        else:
            filetype = '???'
            try:
                vba_parser = olevba.VBA_Parser(filename=filename, data=data, container=container)
                filetype = TYPE2TAG[vba_parser.type]
            except Exception as e:
                # log.error('Error when parsing VBA macros from file %r' % full_name)
                # TODO: distinguish actual errors from non-MSOffice files
                result = Result_Error
                t.write_row([result.name, '', filetype, full_name],
                            colors=[result.color, None, None, None])
                t.write_row(['', '', '', str(e)],
                            colors=[None, None, None, result.color])
                continue
            if vba_parser.detect_vba_macros():
                vba_code_all_modules = ''
                try:
                    vba_code_all_modules = vba_parser.get_vba_code_all_modules()
                except Exception as e:
                    # log.error('Error when parsing VBA macros from file %r' % full_name)
                    result = Result_Error
                    t.write_row([result.name, '', TYPE2TAG[vba_parser.type], full_name],
                                colors=[result.color, None, None, None])
                    t.write_row(['', '', '', str(e)],
                                colors=[None, None, None, result.color])
                    continue
                mraptor = MacroRaptor(vba_code_all_modules)
                mraptor.scan()
                if mraptor.suspicious:
                    result = Result_Suspicious
                else:
                    result = Result_MacroOK
                t.write_row([result.name, mraptor.get_flags(), filetype, full_name],
                            colors=[result.color, None, None, None])
                if mraptor.matches and options.show_matches:
                    t.write_row(['', '', '', 'Matches: %r' % mraptor.matches])
            else:
                result = Result_NoMacro
                t.write_row([result.name, '', filetype, full_name],
                            colors=[result.color, None, None, None])
        if result.exit_code > exitcode:
            global_result = result
            exitcode = result.exit_code

    log_helper.end_logging()
    print('')
    print('Flags: A=AutoExec, W=Write, X=Execute')
    print('Exit code: %d - %s' % (exitcode, global_result.name))
    sys.exit(exitcode)
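The per-file loop above keeps the worst result seen so far by comparing exit codes. The same aggregation pattern in isolation, with stand-in Result objects and made-up exit codes instead of mraptor's Result_* constants:

# Isolated sketch of the "keep the worst result" aggregation used above.
# Result values and exit codes here are stand-ins, not mraptor's.
from collections import namedtuple

Result = namedtuple('Result', ['exit_code', 'name'])
RESULT_NO_MACRO = Result(0, 'No macro')
RESULT_ERROR = Result(10, 'Error')
RESULT_SUSPICIOUS = Result(20, 'SUSPICIOUS')

def worst_result(results):
    exitcode, global_result = -1, None
    for result in results:
        if result.exit_code > exitcode:
            exitcode, global_result = result.exit_code, result
    return exitcode, global_result

print(worst_result([RESULT_NO_MACRO, RESULT_SUSPICIOUS, RESULT_ERROR]))
# -> (20, Result(exit_code=20, name='SUSPICIOUS'))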