Example #1
def print_all_strings(path, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param path: input file
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    with open(path, "rb") as f:
        b = f.read()

    if quiet:
        for s in strings.extract_ascii_strings(b, n=min_length):
            print("%s" % (s.s))
        for s in strings.extract_unicode_strings(b, n=min_length):
            print("%s" % (s.s))
    else:
        ascii_strings = list(strings.extract_ascii_strings(b, n=min_length))
        print("Static ASCII strings")
        if len(ascii_strings) == 0:
            print("none.")
        else:
            print(tabulate.tabulate(
                [(hex(s.offset), s.s) for s in ascii_strings],
                headers=["Offset", "String"]))
        print("")

        uni_strings = list(strings.extract_unicode_strings(b, n=min_length))
        print("Static UTF-16 strings")
        if len(uni_strings) == 0:
            print("none.")
        else:
            print(tabulate.tabulate(
                [(hex(s.offset), s.s) for s in uni_strings],
                headers=["Offset", "String"]))
        print("")
Example #2
def print_all_strings(path, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param path: input file
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    with open(path, "rb") as f:
        b = f.read()

    if quiet:
        for s in strings.extract_ascii_strings(b, n=min_length):
            print("%s" % (s.s))
        for s in strings.extract_unicode_strings(b, n=min_length):
            print("%s" % (s.s))
    else:
        ascii_strings = list(strings.extract_ascii_strings(b, n=min_length))  # materialize the generator so len() below works
        print("Static ASCII strings")
        if len(ascii_strings) == 0:
            print("none.")
        else:
            print(tabulate.tabulate(
                [(hex(s.offset), s.s) for s in ascii_strings],
                headers=["Offset", "String"]))
        print("")

        uni_strings = list(strings.extract_unicode_strings(b, n=min_length))
        print("Static UTF-16 strings")
        if len(uni_strings) == 0:
            print("none.")
        else:
            print(tabulate.tabulate(
                [(hex(s.offset), s.s) for s in uni_strings],
                headers=["Offset", "String"]))
        print("")
Example #3
def print_static_strings(path, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param path: input file
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    with open(path, "rb") as f:
        b = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)

        if os.path.getsize(path) > MAX_FILE_SIZE:
            # for large files, there might be a huge number of strings,
            # so don't worry about forming everything into a perfect table
            if not quiet:
                print("FLOSS static ASCII strings")
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s" % s.s)
            if not quiet:
                print("")

            if not quiet:
                print("FLOSS static Unicode strings")
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s" % s.s)
            if not quiet:
                print("")

            if os.path.getsize(path) > sys.maxint:
                floss_logger.warning(
                    "File too large, strings listings may be truncated.")
                floss_logger.warning(
                    "FLOSS cannot handle files larger than 4GB on 32bit systems."
                )

        else:
            # for reasonably sized files, we can read all the strings at once
            if not quiet:
                print("FLOSS static ASCII strings")
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s" % (s.s))
            if not quiet:
                print("")

            if not quiet:
                print("FLOSS static UTF-16 strings")
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s" % (s.s))
            if not quiet:
                print("")
Example #4
def extract_strings(b):
    '''
    Extract the ASCII and UTF-16 strings from a bytestring.

    :type b: decoding_manager.DecodedString
    :param b: The data from which to extract the strings. Note it's a
      DecodedString instance that tracks extra metadata beyond the
      bytestring contents.
    :rtype: Sequence[decoding_manager.DecodedString]
    '''
    ret = []
    for s in strings.extract_ascii_strings(b.s):
        if s.s == "A" * len(s.s):
            # ignore strings of all "A", which is likely taint data
            continue
        ret.append(
            DecodedString(b.va + s.offset, s.s, b.decoded_at_va, b.fva,
                          b.global_address))
    for s in strings.extract_unicode_strings(b.s):
        if s.s == "A" * len(s.s):
            continue
        ret.append(
            DecodedString(b.va + s.offset, s.s, b.decoded_at_va, b.fva,
                          b.global_address))
    return ret
Example #5
def print_static_strings(path, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param path: input file
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    with open(path, "rb") as f:
        b = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)

        if not quiet:
            print("FLOSS static ASCII strings")
        for s in strings.extract_ascii_strings(b, n=min_length):
            print("%s" % s.s)
        if not quiet:
            print("")

        if not quiet:
            print("FLOSS static UTF-16 strings")
        for s in strings.extract_unicode_strings(b, n=min_length):
            print("%s" % s.s)
        if not quiet:
            print("")

        if os.path.getsize(path) > sys.maxint:
            floss_logger.warning(
                "File too large, strings listings may be truncated.")
            floss_logger.warning(
                "FLOSS cannot handle files larger than 4GB on 32bit systems.")
Example #6
def extract_strings(b):
    '''
    Extract the ASCII and UTF-16 strings from a bytestring.

    :type b: decoding_manager.DecodedString
    :param b: The data from which to extract the strings. Note it's a
      DecodedString instance that tracks extra metadata beyond the
      bytestring contents.
    :rtype: Sequence[decoding_manager.DecodedString]
    '''
    ret = []
    # ignore strings like: pVA, pVAAA, AAAA
    # which come from vivisect uninitialized taint tracking
    filter = re.compile("^p?V?A+$")
    for s in strings.extract_ascii_strings(b.s):
        if filter.match(s.s):
            continue
        ret.append(
            DecodedString(b.va + s.offset, s.s, b.decoded_at_va, b.fva,
                          b.characteristics))
    for s in strings.extract_unicode_strings(b.s):
        if filter.match(s.s):
            continue
        ret.append(
            DecodedString(b.va + s.offset, s.s, b.decoded_at_va, b.fva,
                          b.characteristics))
    return ret
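
The ^p?V?A+$ pattern above is a tighter version of the all-"A" check used in some of the other examples: it discards vivisect taint artifacts such as pVA, pVAAA and AAAA while keeping ordinary strings that merely start with "A". A small self-contained check follows; the sample strings are made up for illustration.

import re

# Same pattern as in the example above: vivisect's uninitialized taint data
# tends to surface as runs of "A", optionally prefixed with "p" and/or "V".
taint_filter = re.compile("^p?V?A+$")

for s in ["AAAA", "pVA", "pVAAA", "VAAAA", "ADVAPI32.dll", "pVAlue", "hello"]:
    print("%-14s %s" % (s, "filtered" if taint_filter.match(s) else "kept"))
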
Example #7
def extract_strings(b):
    '''
    Extract the ASCII and UTF-16 strings from a bytestring.

    :type b: decoding_manager.DecodedString
    :param b: The data from which to extract the strings. Note its a
      DecodedString instance that tracks extra metadata beyond the
      bytestring contents.
    :rtype: Sequence[decoding_manager.DecodedString]
    '''
    ret = []
    # ignore strings like: pVA, pVAAA, AAAA
    # which come from vivisect uninitialized taint tracking
    filter = re.compile("^p?V?A+$")
    for s in strings.extract_ascii_strings(b.s):
        if filter.match(s.s):
            continue
        ret.append(DecodedString(b.va + s.offset, s.s, b.decoded_at_va,
                                 b.fva, b.characteristics))
    for s in strings.extract_unicode_strings(b.s):
        if filter.match(s.s):
            continue
        ret.append(DecodedString(b.va + s.offset, s.s, b.decoded_at_va,
                                 b.fva, b.characteristics))
    return ret
Example #8
def extract_stackstrings(vw, selected_functions):
    '''
    Extracts the stackstrings from functions in the given workspace.

    :param vw: The vivisect workspace from which to extract stackstrings.
    :rtype: Generator[StackString]
    '''
    logger.debug('extracting stackstrings from %d functions', len(selected_functions))
    for fva in selected_functions:
        logger.debug('extracting stackstrings from function: 0x%x', fva)
        seen = set([])
        filter = re.compile("^p?V?A+$")
        for ctx in extract_call_contexts(vw, fva):
            logger.debug('extracting stackstrings at checkpoint: 0x%x stacksize: 0x%x', ctx.pc, ctx.init_sp - ctx.sp)
            for s in strings.extract_ascii_strings(ctx.stack_memory):
                if filter.match(s.s):
                    # ignore strings like: pVA, pVAAA, AAAA
                    # which come from vivisect uninitialized taint tracking
                    continue
                if s.s not in seen:
                    frame_offset = (ctx.init_sp - ctx.sp) - s.offset - getPointerSize(vw)
                    yield(StackString(fva, s.s, ctx.pc, ctx.sp, ctx.init_sp, s.offset, frame_offset))
                    seen.add(s.s)
            for s in strings.extract_unicode_strings(ctx.stack_memory):
                if s.s == "A" * len(s.s):
                    # ignore vivisect taint strings
                    continue
                if s.s not in seen:
                    frame_offset = (ctx.init_sp - ctx.sp) - s.offset - getPointerSize(vw)
                    yield(StackString(fva, s.s, ctx.pc, ctx.sp, ctx.init_sp, s.offset, frame_offset))
                    seen.add(s.s)
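
The frame_offset arithmetic in the stackstring examples converts a string's position inside the dumped stack buffer into an offset relative to the top of the captured stack region. Assuming ctx.stack_memory spans the init_sp - sp bytes between the checkpoint stack pointer and the initial stack pointer (which is what the stacksize debug message suggests), the computation with made-up 32-bit values looks like this:

# Made-up values purely to illustrate the arithmetic; getPointerSize(vw)
# would return 4 for a 32-bit workspace.
init_sp = 0x0012FF80     # stack pointer when emulation of the function began
sp = 0x0012FF00          # stack pointer at the call-site checkpoint
offset = 0x30            # string offset inside ctx.stack_memory
ptr_size = 4

stack_size = init_sp - sp                       # 0x80 bytes of stack captured
frame_offset = stack_size - offset - ptr_size   # 0x4c
print("%s %s" % (hex(stack_size), hex(frame_offset)))
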
Example #9
def extract_stackstrings(vw, selected_functions):
    '''
    Extracts the stackstrings from functions in the given workspace.

    :param vw: The vivisect workspace from which to extract stackstrings.
    :rtype: Generator[StackString]
    '''
    for fva in selected_functions:
        seen = set([])
        filter = re.compile("^p?V?A+$")
        for ctx in extract_call_contexts(vw, fva):
            for s in strings.extract_ascii_strings(ctx.stack_memory):
                if filter.match(s.s):
                    # ignore strings like: pVA, pVAAA, AAAA
                    # which come from vivisect uninitialized taint tracking
                    continue
                if s.s not in seen:
                    frame_offset = (ctx.init_sp -
                                    ctx.sp) - s.offset - getPointerSize(vw)
                    yield (StackString(fva, s.s, ctx.pc, ctx.sp, ctx.init_sp,
                                       s.offset, frame_offset))
                    seen.add(s.s)
            for s in strings.extract_unicode_strings(ctx.stack_memory):
                if s.s == "A" * len(s.s):
                    # ignore vivisect taint strings
                    continue
                if s.s not in seen:
                    frame_offset = (ctx.init_sp -
                                    ctx.sp) - s.offset - getPointerSize(vw)
                    yield (StackString(fva, s.s, ctx.pc, ctx.sp, ctx.init_sp,
                                       s.offset, frame_offset))
                    seen.add(s.s)
Example #10
def extract_stackstrings(vw):
    '''
    Extracts the stackstrings from functions in the given workspace.

    :param vw: The vivisect workspace from which to extract stackstrings.
    :rtype: Generator[StackString]
    '''
    for fva in vw.getFunctions():
        seen = set([])
        for ctx in extract_call_contexts(vw, fva):
            for s in strings.extract_ascii_strings(ctx.stack_memory):
                if s.s == "A" * len(s.s):
                    # ignore vivisect taint strings
                    continue
                if s.s not in seen:
                    frame_offset = (ctx.init_sp - ctx.sp) - s.offset - getPointerSize(vw)
                    yield(StackString(fva, s.s, ctx.pc, ctx.sp, ctx.init_sp, s.offset, frame_offset))
                    seen.add(s.s)
            for s in strings.extract_unicode_strings(ctx.stack_memory):
                if s.s == "A" * len(s.s):
                    # ignore vivisect taint strings
                    continue
                if s.s not in seen:
                    frame_offset = (ctx.init_sp - ctx.sp) - s.offset - getPointerSize(vw)
                    yield(StackString(fva, s.s, ctx.pc, ctx.sp, ctx.init_sp, s.offset, frame_offset))
                    seen.add(s.s)
Example #11
def print_static_strings(path, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param path: input file
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    with open(path, "rb") as f:
        b = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)

        if os.path.getsize(path) > MAX_FILE_SIZE:
            # for large files, there might be a huge number of strings,
            # so don't worry about forming everything into a perfect table
            if not quiet:
                print("FLOSS static ASCII strings")
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s" % s.s)
            if not quiet:
                print("")

            if not quiet:
                print("FLOSS static Unicode strings")
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s" % s.s)
            if not quiet:
                print("")

            if os.path.getsize(path) > sys.maxint:
                floss_logger.warning("File too large, strings listings may be truncated.")
                floss_logger.warning("FLOSS cannot handle files larger than 4GB on 32bit systems.")

        else:
            # for reasonably sized files, we can read all the strings at once
            if not quiet:
                print("FLOSS static ASCII strings")
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s" % (s.s))
            if not quiet:
                print("")

            if not quiet:
                print("FLOSS static UTF-16 strings")
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s" % (s.s))
            if not quiet:
                print("")
Example #12
def extract_stackstrings(vw, selected_functions, min_length, no_filter=False):
    '''
    Extracts the stackstrings from functions in the given workspace.

    :param vw: The vivisect workspace from which to extract stackstrings.
    :param selected_functions: list of selected functions
    :param min_length: minimum string length
    :param no_filter: do not filter deobfuscated stackstrings
    :rtype: Generator[StackString]
    '''
    logger.debug('extracting stackstrings from %d functions',
                 len(selected_functions))
    bb_ends = get_basic_block_ends(vw)
    for fva in selected_functions:
        logger.debug('extracting stackstrings from function: 0x%x', fva)
        seen = set([])
        for ctx in extract_call_contexts(vw, fva, bb_ends):
            logger.debug(
                'extracting stackstrings at checkpoint: 0x%x stacksize: 0x%x',
                ctx.pc, ctx.init_sp - ctx.sp)
            for s in strings.extract_ascii_strings(ctx.stack_memory):
                if len(s.s) > MAX_STRING_LENGTH:
                    continue

                if no_filter:
                    decoded_string = s.s
                elif not is_fp_string(s.s):
                    decoded_string = strip_string(s.s)
                else:
                    continue

                if decoded_string not in seen and len(
                        decoded_string) >= min_length:
                    frame_offset = (ctx.init_sp -
                                    ctx.sp) - s.offset - getPointerSize(vw)
                    yield (StackString(fva, decoded_string, ctx.pc, ctx.sp,
                                       ctx.init_sp, s.offset, frame_offset))
                    seen.add(decoded_string)
            for s in strings.extract_unicode_strings(ctx.stack_memory):
                if len(s.s) > MAX_STRING_LENGTH:
                    continue

                if no_filter:
                    decoded_string = s.s
                elif not is_fp_string(s.s):
                    decoded_string = strip_string(s.s)
                else:
                    continue

                if decoded_string not in seen and len(
                        decoded_string) >= min_length:
                    frame_offset = (ctx.init_sp -
                                    ctx.sp) - s.offset - getPointerSize(vw)
                    yield (StackString(fva, decoded_string, ctx.pc, ctx.sp,
                                       ctx.init_sp, s.offset, frame_offset))
                    seen.add(decoded_string)
Example #13
def extract_all_strings(stack_buf):
    """
    Extract ASCII and UTF-16 strings from buffer.
    :param stack_buf: memory buffer
    :return: Extracted String namedtuples
    """
    for s in strings.extract_ascii_strings(stack_buf):
        yield s
    for s in strings.extract_unicode_strings(stack_buf):
        yield s
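
A hypothetical invocation of the generator above. It assumes the floss strings module is importable at module scope (as in the surrounding examples) and that the extractor's default minimum length is small enough to catch the sample strings; the buffer contents are invented for illustration.

stack_buf = b"\x00\x00temp.dat\x00\x00" + "config".encode("utf-16-le") + b"\x00\x00"
for s in extract_all_strings(stack_buf):
    print("%s %s" % (hex(s.offset), s.s))
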
Example #14
def extract_stackstrings(vw, selected_functions, min_length, no_filter=False):
    '''
    Extracts the stackstrings from functions in the given workspace.

    :param vw: The vivisect workspace from which to extract stackstrings.
    :param selected_functions: list of selected functions
    :param min_length: minimum string length
    :param no_filter: do not filter deobfuscated stackstrings
    :rtype: Generator[StackString]
    '''
    logger.debug('extracting stackstrings from %d functions', len(selected_functions))
    bb_ends = get_basic_block_ends(vw)
    for fva in selected_functions:
        logger.debug('extracting stackstrings from function: 0x%x', fva)
        seen = set([])
        for ctx in extract_call_contexts(vw, fva, bb_ends):
            logger.debug('extracting stackstrings at checkpoint: 0x%x stacksize: 0x%x', ctx.pc, ctx.init_sp - ctx.sp)
            for s in strings.extract_ascii_strings(ctx.stack_memory):
                if len(s.s) > MAX_STRING_LENGTH:
                    continue

                if no_filter:
                    decoded_string = s.s
                elif not is_fp_string(s.s):
                    decoded_string = strip_string(s.s)
                else:
                    continue

                if decoded_string not in seen and len(decoded_string) >= min_length:
                    frame_offset = (ctx.init_sp - ctx.sp) - s.offset - getPointerSize(vw)
                    yield(StackString(fva, decoded_string, ctx.pc, ctx.sp, ctx.init_sp, s.offset, frame_offset))
                    seen.add(decoded_string)
            for s in strings.extract_unicode_strings(ctx.stack_memory):
                if len(s.s) > MAX_STRING_LENGTH:
                    continue

                if no_filter:
                    decoded_string = s.s
                elif not is_fp_string(s.s):
                    decoded_string = strip_string(s.s)
                else:
                    continue

                if decoded_string not in seen and len(decoded_string) >= min_length:
                    frame_offset = (ctx.init_sp - ctx.sp) - s.offset - getPointerSize(vw)
                    yield(StackString(fva, decoded_string, ctx.pc, ctx.sp, ctx.init_sp, s.offset, frame_offset))
                    seen.add(decoded_string)
Example #15
def extract_strings(b, min_length, no_filter):
    '''
    Extract the ASCII and UTF-16 strings from a bytestring.

    :type b: decoding_manager.DecodedString
    :param b: The data from which to extract the strings. Note it's a
      DecodedString instance that tracks extra metadata beyond the
      bytestring contents.
    :param min_length: minimum string length
    :param no_filter: do not filter decoded strings
    :rtype: Sequence[decoding_manager.DecodedString]
    '''
    ret = []
    for s in strings.extract_ascii_strings(b.s):
        if len(s.s) > MAX_STRING_LENGTH:
            continue

        if no_filter:
            decoded_string = s.s
        elif not is_fp_string(s.s):
            decoded_string = strip_string(s.s)
        else:
            continue

        if len(decoded_string) >= min_length:
            ret.append(
                DecodedString(b.va + s.offset, decoded_string, b.decoded_at_va,
                              b.fva, b.characteristics))
    for s in strings.extract_unicode_strings(b.s):
        if len(s.s) > MAX_STRING_LENGTH:
            continue

        if no_filter:
            decoded_string = s.s
        elif not is_fp_string(s.s):
            decoded_string = strip_string(s.s)
        else:
            continue

        if len(decoded_string) >= min_length:
            ret.append(
                DecodedString(b.va + s.offset, decoded_string, b.decoded_at_va,
                              b.fva, b.characteristics))
    return ret
Example #16
def extract_stackstrings(vw, selected_functions):
    '''
    Extracts the stackstrings from functions in the given workspace.

    :param vw: The vivisect workspace from which to extract stackstrings.
    :rtype: Generator[StackString]
    '''
    logger.debug('extracting stackstrings from %d functions',
                 len(selected_functions))
    bb_ends = get_basic_block_ends(vw)
    for fva in selected_functions:
        logger.debug('extracting stackstrings from function: 0x%x', fva)
        seen = set([])
        for ctx in extract_call_contexts(vw, fva, bb_ends):
            logger.debug(
                'extracting stackstrings at checkpoint: 0x%x stacksize: 0x%x',
                ctx.pc, ctx.init_sp - ctx.sp)
            for s in strings.extract_ascii_strings(ctx.stack_memory):
                if FP_FILTER.match(s.s):
                    # ignore strings like: pVA, pVAAA, AAAA
                    # which come from vivisect uninitialized taint tracking
                    continue
                s_stripped = re.sub(FP_FILTER_SUB, "", s.s)
                if s_stripped not in seen:
                    frame_offset = (ctx.init_sp -
                                    ctx.sp) - s.offset - getPointerSize(vw)
                    yield (StackString(fva, s_stripped, ctx.pc, ctx.sp,
                                       ctx.init_sp, s.offset, frame_offset))
                    seen.add(s_stripped)
            for s in strings.extract_unicode_strings(ctx.stack_memory):
                if FP_FILTER.match(s.s):
                    # ignore strings like: pVA, pVAAA, AAAA
                    # which come from vivisect uninitialized taint tracking
                    continue
                s_stripped = re.sub(FP_FILTER_SUB, "", s.s)
                if s_stripped not in seen:
                    frame_offset = (ctx.init_sp -
                                    ctx.sp) - s.offset - getPointerSize(vw)
                    yield (StackString(fva, s_stripped, ctx.pc, ctx.sp,
                                       ctx.init_sp, s.offset, frame_offset))
                    seen.add(s_stripped)
Example #17
def extract_strings(b, min_length, no_filter):
    '''
    Extract the ASCII and UTF-16 strings from a bytestring.

    :type b: decoding_manager.DecodedString
    :param b: The data from which to extract the strings. Note it's a
      DecodedString instance that tracks extra metadata beyond the
      bytestring contents.
    :param min_length: minimum string length
    :param no_filter: do not filter decoded strings
    :rtype: Sequence[decoding_manager.DecodedString]
    '''
    ret = []
    for s in strings.extract_ascii_strings(b.s):
        if len(s.s) > MAX_STRING_LENGTH:
            continue

        if no_filter:
            decoded_string = s.s
        elif not is_fp_string(s.s):
            decoded_string = strip_string(s.s)
        else:
            continue

        if len(decoded_string) >= min_length:
            ret.append(DecodedString(b.va + s.offset, decoded_string, b.decoded_at_va, b.fva, b.characteristics))
    for s in strings.extract_unicode_strings(b.s):
        if len(s.s) > MAX_STRING_LENGTH:
            continue

        if no_filter:
            decoded_string = s.s
        elif not is_fp_string(s.s):
            decoded_string = strip_string(s.s)
        else:
            continue

        if len(decoded_string) >= min_length:
            ret.append(DecodedString(b.va + s.offset, decoded_string, b.decoded_at_va, b.fva, b.characteristics))
    return ret
Example #18
def get_static_jsons(path, min_length):
    """
        Return dictionary with static ASCII and UTF-16 strings from provided file.
        :param path: input file
        :param min_length: minimum string length
        """
    with open(path, "rb") as f:
        b = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)

        ascii_strings = [
            s.s for s in strings.extract_ascii_strings(b, n=min_length)
        ]
        utf16_strings = [
            s.s for s in strings.extract_unicode_strings(b, n=min_length)
        ]

        if os.path.getsize(path) > sys.maxint:
            floss_logger.warning(
                "File too large, strings listings may be truncated.")
            floss_logger.warning(
                "FLOSS cannot handle files larger than 4GB on 32bit systems.")

        return {'ascii_strings': ascii_strings, 'utf16_strings': utf16_strings}
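
A hypothetical call of the function above; "sample.exe" is a placeholder path, and json is used here only to display the returned dictionary.

import json

result = get_static_jsons("sample.exe", min_length=4)   # placeholder input file
print(json.dumps(result, indent=2))
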
Example #19
def extract_strings(b):
    '''
    Extract the ASCII and UTF-16 strings from a bytestring.

    :type b: decoding_manager.DecodedString
    :param b: The data from which to extract the strings. Note it's a
      DecodedString instance that tracks extra metadata beyond the
      bytestring contents.
    :rtype: Sequence[decoding_manager.DecodedString]
    '''
    ret = []
    for s in strings.extract_ascii_strings(b.s):
        if s.s == "A" * len(s.s):
            # ignore strings of all "A", which is likely taint data
            continue
        ret.append(DecodedString(b.va + s.offset, s.s, b.decoded_at_va,
                                 b.fva, b.characteristics))
    for s in strings.extract_unicode_strings(b.s):
        if s.s == "A" * len(s.s):
            continue
        ret.append(DecodedString(b.va + s.offset, s.s, b.decoded_at_va,
                                 b.fva, b.characteristics))
    return ret
Example #20
def print_static_strings(path, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param path: input file
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    with open(path, "rb") as f:
        b = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)

        if quiet:
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s" % (s.s))
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s" % (s.s))

        elif os.path.getsize(path) > MAX_FILE_SIZE:
            # for large files, there might be a huge number of strings,
            # so don't worry about forming everything into a perfect table
            print("Static ASCII strings")
            print("Offset   String")
            print("------   ------")
            has_string = False
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s %s" % (hex(s.offset), s.s))
                has_string = True
            if not has_string:
                print("none.")
            print("")

            print("Static Unicode strings")
            print("Offset   String")
            print("------   ------")
            has_string = False
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s %s" % (hex(s.offset), s.s))
                has_string = True
            if not has_string:
                print("none.")
            print("")

            if os.path.getsize(path) > sys.maxint:
                floss_logger.warning("File too large, strings listings may be trucated.")
                floss_logger.warning("FLOSS cannot handle files larger than 4GB on 32bit systems.")

        else:
            # for reasonably sized files, we can read all the strings at once
            # and format them nicely in a table.
            ascii_strings = list(strings.extract_ascii_strings(b, n=min_length))
            print("Static ASCII strings")
            if len(ascii_strings) == 0:
                print("none.")
            else:
                print(tabulate.tabulate(
                    [(hex(s.offset), s.s) for s in ascii_strings],
                    headers=["Offset", "String"]))
            print("")

            uni_strings = list(strings.extract_unicode_strings(b, n=min_length))
            print("Static UTF-16 strings")
            if len(uni_strings) == 0:
                print("none.")
            else:
                print(tabulate.tabulate(
                    [(hex(s.offset), s.s) for s in uni_strings],
                    headers=["Offset", "String"]))
            print("")
Example #21
File: main.py, Project: zdone/flare-floss
def print_static_strings(path, min_length, quiet=False):
    """
    Print static ASCII and UTF-16 strings from provided file.
    :param path: input file
    :param min_length: minimum string length
    :param quiet: print strings only, suppresses headers
    """
    with open(path, "rb") as f:
        b = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)

        if quiet:
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s" % (s.s))
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s" % (s.s))

        elif os.path.getsize(path) > MAX_FILE_SIZE:
            # for large files, there might be a huge number of strings,
            # so don't worry about forming everything into a perfect table
            print("Static ASCII strings")
            print("Offset   String")
            print("------   ------")
            has_string = False
            for s in strings.extract_ascii_strings(b, n=min_length):
                print("%s %s" % (hex(s.offset), s.s))
                has_string = True
            if not has_string:
                print("none.")
            print("")

            print("Static Unicode strings")
            print("Offset   String")
            print("------   ------")
            has_string = False
            for s in strings.extract_unicode_strings(b, n=min_length):
                print("%s %s" % (hex(s.offset), s.s))
                has_string = True
            if not has_string:
                print("none.")
            print("")

            if os.path.getsize(path) > sys.maxint:
                floss_logger.warning(
                    "File too large, strings listings may be trucated.")
                floss_logger.warning(
                    "FLOSS cannot handle files larger than 4GB on 32bit systems."
                )

        else:
            # for reasonably sized files, we can read all the strings at once
            # and format them nicely in a table.
            ascii_strings = list(strings.extract_ascii_strings(b,
                                                               n=min_length))
            print("Static ASCII strings")
            if len(ascii_strings) == 0:
                print("none.")
            else:
                print(
                    tabulate.tabulate([(hex(s.offset), s.s)
                                       for s in ascii_strings],
                                      headers=["Offset", "String"]))
            print("")

            uni_strings = list(strings.extract_unicode_strings(b,
                                                               n=min_length))
            print("Static UTF-16 strings")
            if len(uni_strings) == 0:
                print("none.")
            else:
                print(
                    tabulate.tabulate([(hex(s.offset), s.s)
                                       for s in uni_strings],
                                      headers=["Offset", "String"]))
            print("")