def ascii_strings(self, file_data, min_len):
    string_list = ''
    if HAVE_FLOSS:
        for s in strings.extract_ascii_strings(file_data, n=min_len):
            # s is a namedtuple with fields s (the extracted string) and offset
            string_list += '\n{0}\t{1}'.format(s.offset, s.s)
    else:
        # fall back to a regex over the printable ASCII range plus tab;
        # a raw string keeps the backslash escapes intact for the regex engine
        chars = r" !\"#\$%&\'\(\)\*\+,-\./0123456789:;<=>\?@ABCDEFGHIJKLMNOPQRSTUVWXYZ\[\]\^_`abcdefghijklmnopqrstuvwxyz\{\|\}\\\~\t"
        regexp = '[%s]{%d,}' % (chars, min_len)
        pattern = re.compile(regexp)
        for s in pattern.finditer(file_data):
            string_list += '\n{0}\t{1}'.format(s.start(), s.group())
    return string_list
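The method above is an excerpt from a larger module: it assumes re has been imported and that a HAVE_FLOSS flag records whether FLOSS is available. A minimal sketch of that surrounding context, assuming FLOSS exposes its static extractor as floss.strings (the import guard itself is not part of the original file):

import re

try:
    from floss import strings  # provides extract_ascii_strings(buf, n=...)
    HAVE_FLOSS = True
except ImportError:
    HAVE_FLOSS = False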
Example #3
def print_static_strings(file_buf, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param file_buf: the file buffer
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    static_ascii_strings = strings.extract_ascii_strings(file_buf, min_length)
    static_unicode_strings = strings.extract_unicode_strings(file_buf, min_length)

    if not quiet:
        print("FLOSS static ASCII strings")
    for s in static_ascii_strings:
        print("%s" % s.s)
    if not quiet:
        print("")

    if not quiet:
        print("FLOSS static Unicode strings")
    for s in static_unicode_strings:
        print("%s" % s.s)
    if not quiet:
        print("")
Example #4
def main(argv=None):
    """
    :param argv: optional command line arguments, like sys.argv[1:]
    :return: 0 on success, non-zero on failure
    """
    logging.basicConfig(level=logging.WARNING)

    parser = make_parser()
    if argv is not None:
        options, args = parser.parse_args(argv[1:])
    else:
        options, args = parser.parse_args()

    set_log_config(options.debug, options.verbose)

    if options.list_plugins:
        print_plugin_list()
        return 0

    sample_file_path = parse_sample_file_path(parser, args)
    min_length = parse_min_length_option(options.min_length)

    # expert profile settings
    if options.expert:
        options.save_workspace = True
        options.group_functions = True
        options.quiet = False

    # static strings are only collected for non-workspace inputs; default to an empty list otherwise
    static_strings = []
    if not is_workspace_file(sample_file_path):
        if not options.no_static_strings and not options.functions:
            floss_logger.info("Extracting static strings...")
            if os.path.getsize(sample_file_path) > sys.maxsize:
                floss_logger.warning("File too large, strings listings may be truncated.")
                floss_logger.warning("FLOSS cannot handle files larger than 4GB on 32bit systems.")

            file_buf = get_file_as_mmap(sample_file_path)
            print_static_strings(file_buf, min_length=min_length, quiet=options.quiet)
            static_ascii_strings = strings.extract_ascii_strings(file_buf, min_length)
            static_unicode_strings = strings.extract_unicode_strings(file_buf, min_length)
            static_strings = chain(static_ascii_strings, static_unicode_strings)
            del file_buf
        else:
            static_strings = []

        if options.no_decoded_strings and options.no_stack_strings and not options.should_show_metainfo:
            if options.json_output_file:
                create_json_output_static_only(options, sample_file_path, static_strings)
            # we are done
            return 0

    if os.path.getsize(sample_file_path) > MAX_FILE_SIZE:
        floss_logger.error("FLOSS cannot extract obfuscated strings or stackstrings from files larger than"
                           " %d bytes" % MAX_FILE_SIZE)
        if options.json_output_file:
            create_json_output_static_only(options, sample_file_path, static_strings)
        return 1

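    # load an existing .viv workspace or build a new vivisect workspace for the sample;
    # the shellcode options control how raw code is mapped and where analysis starts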
    try:
        vw = load_vw(sample_file_path, options.save_workspace, options.verbose, options.is_shellcode,
                     options.shellcode_entry_point, options.shellcode_base)
    except WorkspaceLoadError:
        if options.json_output_file:
            create_json_output_static_only(options, sample_file_path, static_strings)
        return 1

    try:
        selected_functions = select_functions(vw, options.functions)
    except Exception as e:
        floss_logger.error(str(e))
        return 1

    floss_logger.debug("Selected the following functions: %s", get_str_from_func_list(selected_functions))

    selected_plugin_names = select_plugins(options.plugins)
    floss_logger.debug("Selected the following plugins: %s", ", ".join(map(str, selected_plugin_names)))
    selected_plugins = filter(lambda p: str(p) in selected_plugin_names, get_all_plugins())

    if options.should_show_metainfo:
        meta_functions = None
        if options.functions:
            meta_functions = selected_functions
        print_file_meta_info(vw, meta_functions)

    time0 = time()

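    # score functions with the selected identification plugins, then emulate the
    # most likely candidates to recover strings decoded at runtime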
    if not options.no_decoded_strings:
        floss_logger.info("Identifying decoding functions...")
        decoding_functions_candidates = im.identify_decoding_functions(vw, selected_plugins, selected_functions)
        if options.expert:
            print_identification_results(sample_file_path, decoding_functions_candidates)

        floss_logger.info("Decoding strings...")
        decoded_strings = decode_strings(vw, decoding_functions_candidates, min_length, options.no_filter,
                                         options.max_instruction_count, options.max_address_revisits + 1)
        # TODO: de-duplication is imperfect: it happens both here and in print_decoding_results,
        # TODO: and it operates on non-sanitized strings.
        if not options.expert:
            decoded_strings = filter_unique_decoded(decoded_strings)
        print_decoding_results(decoded_strings, options.group_functions, quiet=options.quiet, expert=options.expert)
    else:
        decoded_strings = []

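    # recover stackstrings by emulating the selected functions and inspecting
    # the strings they build on the stack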
    if not options.no_stack_strings:
        floss_logger.info("Extracting stackstrings...")
        stack_strings = stackstrings.extract_stackstrings(vw, selected_functions, min_length, options.no_filter)
        stack_strings = list(stack_strings)
        if not options.expert:
            # remove duplicate entries
            stack_strings = set(stack_strings)
        print_stack_strings(stack_strings, quiet=options.quiet, expert=options.expert)
    else:
        stack_strings = []

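    # optionally emit annotations for external tools (x64dbg database, IDA,
    # radare2 and Binary Ninja scripts) based on the recovered strings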
    if options.x64dbg_database_file:
        imagebase = list(vw.filemeta.values())[0]['imagebase']
        floss_logger.info("Creating x64dbg database...")
        create_x64dbg_database(sample_file_path, options.x64dbg_database_file, imagebase, decoded_strings)

    if options.ida_python_file:
        floss_logger.info("Creating IDA script...")
        create_ida_script(sample_file_path, options.ida_python_file, decoded_strings, stack_strings)

    if options.radare2_script_file:
        floss_logger.info("Creating r2script...")
        create_r2_script(sample_file_path, options.radare2_script_file, decoded_strings, stack_strings)

    if options.binja_script_file:
        floss_logger.info("Creating Binary Ninja script...")
        create_binja_script(sample_file_path, options.binja_script_file, decoded_strings, stack_strings)

    time1 = time()
    if not options.quiet:
        print("\nFinished execution after %f seconds" % (time1 - time0))

    if options.json_output_file:
        create_json_output(options, sample_file_path,
                           decoded_strings=decoded_strings,
                           stack_strings=stack_strings,
                           static_strings=static_strings)
        floss_logger.info("Wrote JSON file to %s\n" % options.json_output_file)

    return 0
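If this main() lives in a standalone script, the conventional entry point would look like the sketch below (not taken from the original file); main() slices argv[1:] itself, so sys.argv can be passed as-is:

import sys

if __name__ == "__main__":
    sys.exit(main(sys.argv))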