def main():
    """Crash triage pipeline for the GraphicsMagick ('gm identify') target.

    Steps: dedupe/rename the raw AFL crash files, bucket them by signal,
    analyze output/exploitability, minimize interesting inputs with afl-tmin,
    then re-bucket and re-analyze the minimized inputs, and clean up.

    NOTE(review): this file defines main() several times; in Python only the
    last definition takes effect, so this earlier variant is dead code.
    """
    #Read the README before you start.
    
    Logger.info("Setting up configuration")

    # GDB batch script for 64-bit targets (registers/disassembly use $rip).
    # NOTE(review): unused in this variant -- only gdb_script_32bit is passed
    # to the config below; presumably kept for manual switching.
    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
"""
    # GDB batch script for 32-bit targets ($eip). The 'exploitable' command
    # requires the GDB exploitable plugin to be loaded -- TODO confirm setup.
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
"""
    where_this_python_script_lives = os.path.dirname(os.path.realpath(__file__))
    
    gdb_command = "gdb"
    # NOTE(review): unused here; presumably substituted for gdb_command when
    # triaging on OSX -- confirm.
    gdb_command_osx = "/opt/local/bin/gdb-apple"
    
    # GraphicsMagick target in three builds: afl-instrumented (for afl-tmin),
    # plain, and ASAN. Effective invocation shape: gm identify <crashfile>
    config_gm = CrashAnalysisConfig(where_this_python_script_lives, 
                            target_binary_instrumented=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-afl/utilities/gm", 
                            args_before="identify", 
                            args_after="", 
                            target_binary_plain=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-plain/utilities/gm", 
                            target_binary_asan=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-asan/utilities/gm",
                            env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
                            crash_dir=where_this_python_script_lives+"/test-cases/gm/crashes",
                            gdb_script=gdb_script_32bit,
                            gdb_binary=gdb_command
                            )
    
#    config_ffmpeg = CrashAnalysisConfig(where_this_python_script_lives, 
#                        target_binary_instrumented=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-afl/ffmpeg", 
#                        args_before="-i", 
#                        args_after="-loglevel quiet", 
#                        target_binary_plain=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-plain/ffmpeg", 
##                        target_binary_asan=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-asan/ffmpeg",
#                        env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
#                        crash_dir=where_this_python_script_lives+"/test-cases/ffmpeg/crashes",
#                        gdb_script=gdb_script_32bit,
#                        gdb_binary=gdb_command
#                        )

    #
    Logger.info("Input crashes directory operations")
    #
    
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(config_gm, config_gm.original_crashes_directory)
    fdf.remove_readmes()
    
    Logger.info("Removing duplicates from original crashes folder (same file size + MD5)")
    fdf.delete_duplicates_recursively()
    
    Logger.info("Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions")
    fdf.rename_same_name_files()
    
    #
    Logger.info("Finding interesting signals (all crashes)")
    #
    # Existence of the signal directory is used as the "already done" marker,
    # so reruns skip this (and later) expensive steps.
    sf_all_crashes = SignalFinder(config_gm)
    if os.path.exists(config_gm.default_signal_directory):
        Logger.warning("Seems like all crashes were already categorized by signal, skipping. If you want to rerun: rm -r", config_gm.default_signal_directory)
    else:
        Logger.debug("Dividing files to output folder according to their signal")
        sf_all_crashes.divide_by_signal()
    
    #Interestings signals: negative on OSX, 129 and above for Linux
    #Uninteresting signals: We usually don't care about signals 0, 1, 2, etc. up to 128
    uninteresting_signals = range(0,129)
    
    analyze_output_and_exploitability(config_gm, sf_all_crashes, uninteresting_signals, message_prefix="Interesting signals /")
        
    Logger.info("Interesting signals / Minimizing input (afl-tmin)")
    if os.path.exists(config_gm.default_minimized_crashes_directory):
        Logger.warning("Seems like crashes were already minimized, skipping. If you want to rerun: rm -r", config_gm.default_minimized_crashes_directory)
    else:
        # Only minimize folders for signals NOT in the uninteresting set.
        for signal, signal_folder in sf_all_crashes.get_folder_paths_for_signals_if_exist(uninteresting_signals):
            Logger.debug("Minimizing inputs resulting in signal %i" % signal)
            im = InputMinimizer(config_gm, signal_folder)
            im.minimize_testcases()
        
        # Deduplication intentionally only runs when minimization just ran.
        Logger.info("Interesting signals / Minimized inputs / Deduplication")
        fdf_minimized = FileDuplicateFinder(config_gm, config_gm.default_minimized_crashes_directory)
        fdf_minimized.delete_duplicates_recursively()
        
    #
    Logger.info("Interesting signals / Minimized inputs / Finding interesting signals")
    #
    sf_minimized_crashes = SignalFinder(config_gm, config_gm.default_minimized_crashes_directory, os.path.join(config_gm.output_dir, "minimized-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning("Seems like minimized crashes were already categorized by signal, skipping. If you want to rerun: rm -r", sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info("Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)
    
    
    analyze_output_and_exploitability(config_gm, sf_minimized_crashes, uninteresting_signals, message_prefix="Interesting signals / Minimized inputs /")
    
    
#     # If you are in the mood to waste a little CPU time, run this
#     Logger.info("Found interesting_signals (interesting interesting_signals) / Minimized inputs (interested interesting_signals) / Feeling lucky auto exploitation")
#     #
#     fle = FeelingLuckyExploiter(config_gm, sf_minimized_crashes.output_dir)
#     #os.mkdir(fle.output_dir)
#     fle.run_forest_run()
    
#TODO: develop
#- peruvian were rabbit?
#- exploitable script, something along: less `grep -l 'Exploitability Classification: EXPLOITABLE' output/per-signal/*/*gdb*`

    cleanup(config_gm)
def main():
    """Crash triage pipeline, target selected via testcases.<target>.Config.

    Same flow as the hard-coded variant (dedupe/rename crashes, bucket by
    signal, analyze exploitability, afl-tmin minimize, re-bucket, re-analyze,
    cleanup), but the CrashAnalysisConfig is built by the imported
    create_config() and sanity-checked before use.

    NOTE(review): this file defines main() several times; only the last
    definition takes effect at runtime.
    """
    #Read the README before you start.
    
    Logger.info("Setting up configuration")

    # GDB batch script for 64-bit targets ($rip).
    # NOTE(review): unused in this variant -- gdb_script_32bit is passed below.
    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
printf "[+] list\n"
list
"""
    # GDB batch script for 32-bit targets ($eip); this is the one handed to
    # create_config() below.
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
printf "[+] list\n"
list
"""

    #TODO: Make sure gdb script doesn't abort on error
    #ignoring errors in gdb scripts: http://stackoverflow.com/questions/17923865/gdb-stops-in-a-command-file-if-there-is-an-error-how-to-continue-despite-the-er
    # Experimental GDB/Python wrapper that swallows per-command errors.
    # NOTE(review): unused in this variant.
    gdb_script_32bit_noerror = r"""python
def my_ignore_errors(arg):
  try:
    gdb.execute("print \"" + "Executing command: " + arg + "\"")
    gdb.execute (arg)
  except:
    gdb.execute("print \"" + "ERROR: " + arg + "\"")

my_ignore_errors("p p")
my_ignore_errors("p p->v1")
gdb.execute("quit")
    """

    where_this_python_script_lives = os.path.dirname(os.path.realpath(__file__))
    
    gdb_command = "/usr/bin/gdb"
    #gdb_command_osx = "/opt/local/bin/gdb-apple"
    
    #TODO: For some reason the ASAN environment variables are not correctly set when given to the subprocess module... so let's just set it in parent process already:
    # Workaround: export ASAN settings in this process so children inherit
    # them, in addition to passing the same values via env= below.
    os.environ['ASAN_SYMBOLIZER_PATH'] = "/usr/bin/llvm-symbolizer-3.4"
    os.environ['ASAN_OPTIONS'] = "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"
    env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"}
    
    ###
    #This import decides which testcase/binary we want to run!
    ###
    from testcases.gm.Config import create_config
    #from testcases.ffmpeg.Config import create_config
    #see CrashAnalysisConfig for more options that get passed on by create_config
    chosen_config = create_config(where_this_python_script_lives, env=env, gdb_script=gdb_script_32bit, gdb_binary=gdb_command)
    chosen_config.sanity_check()
    
    #
    Logger.info("Input crashes directory operations")
    #
    
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(chosen_config, chosen_config.original_crashes_directory)
    fdf.remove_readmes()
    
    Logger.info("Removing duplicates from original crashes folder (same file size + MD5)")
    fdf.delete_duplicates_recursively()
    
    Logger.info("Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions")
    fdf.rename_same_name_files()
    #OR:
    #Logger.info("Renaming all files to numeric values, as some programs prefer no special chars in filenames and might require a specific file extension")
    #fdf.rename_all_files(".png")
    
    #
    Logger.info("Finding interesting signals (all crashes)")
    #
    # Existing signal directory doubles as the "already done" marker.
    sf_all_crashes = SignalFinder(chosen_config)
    if os.path.exists(chosen_config.default_signal_directory):
        Logger.warning("Seems like all crashes were already categorized by signal, skipping. If you want to rerun: rm -r", chosen_config.default_signal_directory)
    else:
        Logger.debug("Dividing files to output folder according to their signal")
        sf_all_crashes.divide_by_signal()
    
    #Interestings signals: negative on OSX, 129 and above sometimes for Linux on the shell (depending on used mechanism)
    #Uninteresting signals: We usually don't care about signals 0, 1, 2, etc. up to 128
    uninteresting_signals = range(0, 129)
    
    analyze_output_and_exploitability(chosen_config, sf_all_crashes, uninteresting_signals, message_prefix="Interesting signals /")
        
    Logger.info("Interesting signals / Minimizing input (afl-tmin)")
    if os.path.exists(chosen_config.default_minimized_crashes_directory):
        Logger.warning("Seems like crashes were already minimized, skipping. If you want to rerun: rm -r", chosen_config.default_minimized_crashes_directory)
    else:
        # Minimize only folders for signals NOT in the uninteresting set.
        for signal, signal_folder in sf_all_crashes.get_folder_paths_for_signals_if_exist(uninteresting_signals):
            Logger.debug("Minimizing inputs resulting in signal %i" % signal)
            im = InputMinimizer(chosen_config, signal_folder)
            im.minimize_testcases()
        
        # Dedup intentionally only runs when minimization just ran.
        Logger.info("Interesting signals / Minimized inputs / Deduplication")
        fdf_minimized = FileDuplicateFinder(chosen_config, chosen_config.default_minimized_crashes_directory)
        fdf_minimized.delete_duplicates_recursively()
        
    #
    Logger.info("Interesting signals / Minimized inputs / Finding interesting signals")
    #
    sf_minimized_crashes = SignalFinder(chosen_config, chosen_config.default_minimized_crashes_directory, os.path.join(chosen_config.output_dir, "minimized-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning("Seems like minimized crashes were already categorized by signal, skipping. If you want to rerun: rm -r", sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info("Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)
    
    
    analyze_output_and_exploitability(chosen_config, sf_minimized_crashes, uninteresting_signals, message_prefix="Interesting signals / Minimized inputs /")
    
#TODO:
#- Make (some) modules work as standalone applications with command line parsing
#- The FeelingLuckyExplotier thing. Need to get a small test sample where I know it should work.
#     # If you are in the mood to waste a little CPU time, run this
#     Logger.info("Found interesting_signals (interesting interesting_signals) / Minimized inputs (interested interesting_signals) / Feeling lucky auto exploitation")
#     #
#     fle = FeelingLuckyExploiter(chosen_config, sf_minimized_crashes.output_dir)
#     #os.mkdir(fle.output_dir)
#     fle.run_forest_run()


    cleanup(chosen_config)
def main():
    """Crash triage pipeline for GraphicsMagick (older API variant).

    In this variant FileDuplicateFinder is constructed with the config only
    and the directory to operate on is passed to each call; signals of
    interest are a hard-coded tuple; get_output_for_signals() replaces the
    analyze_output_and_exploitability() helper used by the other variants.

    NOTE(review): this file defines main() several times; only the last
    definition takes effect at runtime.
    """
    #Read the README before you start.
    
    Logger.info("Setting up configuration")

    # GDB batch script for 64-bit targets ($rip).
    # NOTE(review): unused in this variant -- gdb_script_32bit is passed below.
    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
"""
    # GDB batch script for 32-bit targets ($eip).
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
"""
    where_this_python_script_lives = os.path.dirname(os.path.realpath(__file__))
    
    gdb_command = "gdb"
    # NOTE(review): unused here; presumably the OSX substitute for gdb_command.
    gdb_command_osx = "/opt/local/bin/gdb-apple"
    
    # GraphicsMagick target in afl/plain/ASAN builds: gm identify <crashfile>
    config_gm = CrashAnalysisConfig(where_this_python_script_lives, 
                            target_binary_instrumented=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-afl/utilities/gm", 
                            args_before="identify", 
                            args_after="", 
                            target_binary_plain=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-plain/utilities/gm", 
                            target_binary_asan=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-asan/utilities/gm",
                            env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
                            crash_dir=where_this_python_script_lives+"/test-cases/gm/crashes",
                            gdb_script=gdb_script_32bit,
                            gdb_binary=gdb_command
                            )
    
#    config_ffmpeg = CrashAnalysisConfig(where_this_python_script_lives, 
#                        target_binary_instrumented=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-afl/ffmpeg", 
#                        args_before="-i", 
#                        args_after="-loglevel quiet", 
#                        target_binary_plain=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-plain/ffmpeg", 
##                        target_binary_asan=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-asan/ffmpeg",
#                        env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
#                        crash_dir=where_this_python_script_lives+"/test-cases/ffmpeg/crashes",
#                        gdb_script=gdb_script_32bit,
#                        gdb_binary=gdb_command
#                        )

    #
    Logger.info("Input crashes directory operations")
    #
    
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(config_gm)
    fdf.remove_readmes(config_gm.original_crashes_directory)
    
    Logger.info("Removing duplicates from original crashes folder (same file size + MD5)")
    fdf.delete_duplicates_recursively(config_gm.original_crashes_directory)
    
    Logger.info("Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions")
    fdf.rename_same_name_files(config_gm.original_crashes_directory)
    
    #
    Logger.info("Finding signals for all crash files")
    #
    # Existing output directory doubles as the "already done" marker.
    sf = SignalFinder(config_gm)
    if os.path.exists(sf.output_dir):
        Logger.warning("Seems like all crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:", sf.output_dir)
    else:
        Logger.info("Dividing files to output folder according to their signal")
        os.mkdir(sf.output_dir)
        sf.divide_by_signal(0)
        
    
    #
    Logger.info("Running binaries to discover stdout/stderr, gdb and ASAN output for crash files that result in interesting signals")
    #
    #signals, negative on OSX, 129 and above for Linux. No harm if we go on with all of them.
    signals = (-4, -6, -11, 132, 134, 139)
    get_output_for_signals(config_gm, sf, signals)

    
    #
    Logger.info("Minimizing input files that result in interesting signals (and removing duplicates from the results)")
    #
    im = InputMinimizer(config_gm)
    if os.path.exists(im.output_dir):
        Logger.warning("Seems like minimized crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:", im.output_dir)
    else:
        os.mkdir(im.output_dir)
        for signal in signals:
            Logger.info("Processing minimized folder for crash-minimizer for signal %i" % signal)
            signal_folder = sf.get_folder_path_for_signal(signal)
            # NOTE(review): this rebinds the outer `im`; after the loop,
            # im.output_dir refers to the LAST per-signal minimizer. Confirm
            # that matches the intended overall minimized-output directory.
            im = InputMinimizer(config_gm, signal_folder)
            if os.path.exists(signal_folder):
                Logger.info("Minimizing inputs resulting in signal %i" % signal)
                im.minimize_testcases()
            else:
                Logger.warning("Seems that none of the crashes results in a %i signal" % signal)
        Logger.info("Removing duplicates from minimized tests")
        fdf.delete_duplicates_recursively(im.output_dir)
        
    #
    Logger.info("Finding signals for minimized crash files")
    #
    sf_minimized_crashes = SignalFinder(config_gm, im.output_dir, os.path.join(config_gm.output_dir, "minimized-inputs-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning("Seems like crashes were already categorized by signal, skipping.")
        Logger.warning("Remove output directory or remove this folder if you want to rerun:", sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info("Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)
        
    
    #
    Logger.info("Running binaries to discover stdout/stderr, gdb and ASAN output for minimized input files that result in interesting signals")
    #
    get_output_for_signals(config_gm, sf_minimized_crashes, signals)
# --- Ejemplo n.º 4 (scraped listing separator; not Python code) ---
def main():
    """Crash triage pipeline (auto-formatted variant of the create_config
    version), here wired to the ffmpeg testcase config.

    Flow: dedupe/rename crashes, bucket by signal, analyze exploitability,
    afl-tmin minimize, re-bucket, re-analyze, cleanup.

    NOTE(review): this file defines main() several times; only the last
    definition takes effect at runtime.
    """
    # Read the README before you start.

    Logger.info("Setting up configuration")

    # GDB batch script for 64-bit targets ($rip).
    # NOTE(review): unused in this variant -- gdb_script_32bit is passed below.
    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
printf "[+] list\n"
list
"""
    # GDB batch script for 32-bit targets ($eip); handed to create_config().
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
printf "[+] list\n"
list
"""

    # TODO: Make sure gdb script doesn't abort on error
    # ignoring errors in gdb scripts: http://stackoverflow.com/questions/17923865/gdb-stops-in-a-command-file-if-there-is-an-error-how-to-continue-despite-the-er
    # Experimental GDB/Python wrapper that swallows per-command errors.
    # NOTE(review): unused in this variant.
    gdb_script_32bit_noerror = r"""python
def my_ignore_errors(arg):
  try:
    gdb.execute("print \"" + "Executing command: " + arg + "\"")
    gdb.execute (arg)
  except:
    gdb.execute("print \"" + "ERROR: " + arg + "\"")

my_ignore_errors("p p")
my_ignore_errors("p p->v1")
gdb.execute("quit")
    """

    where_this_python_script_lives = os.path.dirname(
        os.path.realpath(__file__))

    gdb_command = "/usr/bin/gdb"
    #gdb_command_osx = "/opt/local/bin/gdb-apple"

    #TODO: For some reason the ASAN environment variables are not correctly set when given to the subprocess module... so let's just set it in parent process already:
    # Workaround: export ASAN settings in this process so children inherit
    # them, in addition to passing the same values via env= below.
    os.environ['ASAN_SYMBOLIZER_PATH'] = "/usr/bin/llvm-symbolizer-3.4"
    os.environ[
        'ASAN_OPTIONS'] = "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"
    env = {
        "ASAN_SYMBOLIZER_PATH":
        "/usr/bin/llvm-symbolizer-3.4",
        "ASAN_OPTIONS":
        "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"
    }

    ###
    # This import decides which testcase/binary we want to run!
    ###
    # NOTE(review): the active import and the commented one below are the
    # same line -- the ffmpeg target is selected in this variant.
    from testcases.ffmpeg.Config import create_config
    #from testcases.ffmpeg.Config import create_config
    #see CrashAnalysisConfig for more options that get passed on by create_config
    chosen_config = create_config(where_this_python_script_lives,
                                  env=env,
                                  gdb_script=gdb_script_32bit,
                                  gdb_binary=gdb_command)
    chosen_config.sanity_check()

    #
    Logger.info("Input crashes directory operations")
    #

    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(chosen_config,
                              chosen_config.original_crashes_directory)
    fdf.remove_readmes()

    Logger.info(
        "Removing duplicates from original crashes folder (same file size + MD5)"
    )
    fdf.delete_duplicates_recursively()

    Logger.info(
        "Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions"
    )
    fdf.rename_same_name_files()
    #OR:
    #Logger.info("Renaming all files to numeric values, as some programs prefer no special chars in filenames and might require a specific file extension")
    #fdf.rename_all_files(".png")

    #
    Logger.info("Finding interesting signals (all crashes)")
    #
    # Existing signal directory doubles as the "already done" marker.
    sf_all_crashes = SignalFinder(chosen_config)
    if os.path.exists(chosen_config.default_signal_directory):
        Logger.warning(
            "Seems like all crashes were already categorized by signal, skipping. If you want to rerun: rm -r",
            chosen_config.default_signal_directory)
    else:
        Logger.debug(
            "Dividing files to output folder according to their signal")
        sf_all_crashes.divide_by_signal()

    #Interestings signals: negative on OSX, 129 and above sometimes for Linux on the shell (depending on used mechanism)
    #Uninteresting signals: We usually don't care about signals 0, 1, 2, etc. up to 128
    uninteresting_signals = range(0, 129)

    analyze_output_and_exploitability(chosen_config,
                                      sf_all_crashes,
                                      uninteresting_signals,
                                      message_prefix="Interesting signals /")

    Logger.info("Interesting signals / Minimizing input (afl-tmin)")
    if os.path.exists(chosen_config.default_minimized_crashes_directory):
        Logger.warning(
            "Seems like crashes were already minimized, skipping. If you want to rerun: rm -r",
            chosen_config.default_minimized_crashes_directory)
    else:
        # Minimize only folders for signals NOT in the uninteresting set.
        for signal, signal_folder in sf_all_crashes.get_folder_paths_for_signals_if_exist(
                uninteresting_signals):
            Logger.debug("Minimizing inputs resulting in signal %i" % signal)
            im = InputMinimizer(chosen_config, signal_folder)
            im.minimize_testcases()

        # Dedup intentionally only runs when minimization just ran.
        Logger.info("Interesting signals / Minimized inputs / Deduplication")
        fdf_minimized = FileDuplicateFinder(
            chosen_config, chosen_config.default_minimized_crashes_directory)
        fdf_minimized.delete_duplicates_recursively()

    #
    Logger.info(
        "Interesting signals / Minimized inputs / Finding interesting signals")
    #
    sf_minimized_crashes = SignalFinder(
        chosen_config, chosen_config.default_minimized_crashes_directory,
        os.path.join(chosen_config.output_dir, "minimized-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning(
            "Seems like minimized crashes were already categorized by signal, skipping. If you want to rerun: rm -r",
            sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info(
            "Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)

    analyze_output_and_exploitability(
        chosen_config,
        sf_minimized_crashes,
        uninteresting_signals,
        message_prefix="Interesting signals / Minimized inputs /")

    #TODO:
    #- Make (some) modules work as standalone applications with command line parsing
    #- The FeelingLuckyExplotier thing. Need to get a small test sample where I know it should work.
    #     # If you are in the mood to waste a little CPU time, run this
    #     Logger.info("Found interesting_signals (interesting interesting_signals) / Minimized inputs (interested interesting_signals) / Feeling lucky auto exploitation")
    #     #
    #     fle = FeelingLuckyExploiter(chosen_config, sf_minimized_crashes.output_dir)
    #     #os.mkdir(fle.output_dir)
    #     fle.run_forest_run()

    cleanup(chosen_config)
# --- Ejemplo n.º 5 (scraped listing separator; not Python code) ---
def main():
    """Crash triage pipeline for GraphicsMagick (auto-formatted variant of
    the older API version: directories passed to each FileDuplicateFinder
    call, hard-coded signal tuple, get_output_for_signals helper).

    NOTE(review): this file defines main() several times; as the last
    definition, this is the one that takes effect at runtime.
    """
    #Read the README before you start.

    Logger.info("Setting up configuration")

    # GDB batch script for 64-bit targets ($rip).
    # NOTE(review): unused in this variant -- gdb_script_32bit is passed below.
    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
"""
    # GDB batch script for 32-bit targets ($eip).
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
"""
    where_this_python_script_lives = os.path.dirname(
        os.path.realpath(__file__))

    gdb_command = "gdb"
    # NOTE(review): unused here; presumably the OSX substitute for gdb_command.
    gdb_command_osx = "/opt/local/bin/gdb-apple"

    # GraphicsMagick target in afl/plain/ASAN builds: gm identify <crashfile>
    config_gm = CrashAnalysisConfig(
        where_this_python_script_lives,
        target_binary_instrumented=where_this_python_script_lives +
        "/test-cases/gm/graphicsmagick-afl/utilities/gm",
        args_before="identify",
        args_after="",
        target_binary_plain=where_this_python_script_lives +
        "/test-cases/gm/graphicsmagick-plain/utilities/gm",
        target_binary_asan=where_this_python_script_lives +
        "/test-cases/gm/graphicsmagick-asan/utilities/gm",
        env={
            "ASAN_SYMBOLIZER_PATH":
            "/usr/bin/llvm-symbolizer-3.4",
            "ASAN_OPTIONS":
            "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"
        },
        crash_dir=where_this_python_script_lives + "/test-cases/gm/crashes",
        gdb_script=gdb_script_32bit,
        gdb_binary=gdb_command)

    #    config_ffmpeg = CrashAnalysisConfig(where_this_python_script_lives,
    #                        target_binary_instrumented=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-afl/ffmpeg",
    #                        args_before="-i",
    #                        args_after="-loglevel quiet",
    #                        target_binary_plain=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-plain/ffmpeg",
    ##                        target_binary_asan=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-asan/ffmpeg",
    #                        env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
    #                        crash_dir=where_this_python_script_lives+"/test-cases/ffmpeg/crashes",
    #                        gdb_script=gdb_script_32bit,
    #                        gdb_binary=gdb_command
    #                        )

    #
    Logger.info("Input crashes directory operations")
    #

    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(config_gm)
    fdf.remove_readmes(config_gm.original_crashes_directory)

    Logger.info(
        "Removing duplicates from original crashes folder (same file size + MD5)"
    )
    fdf.delete_duplicates_recursively(config_gm.original_crashes_directory)

    Logger.info(
        "Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions"
    )
    fdf.rename_same_name_files(config_gm.original_crashes_directory)

    #
    Logger.info("Finding signals for all crash files")
    #
    # Existing output directory doubles as the "already done" marker.
    sf = SignalFinder(config_gm)
    if os.path.exists(sf.output_dir):
        Logger.warning(
            "Seems like all crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:",
            sf.output_dir)
    else:
        Logger.info(
            "Dividing files to output folder according to their signal")
        os.mkdir(sf.output_dir)
        sf.divide_by_signal(0)

    #
    Logger.info(
        "Running binaries to discover stdout/stderr, gdb and ASAN output for crash files that result in interesting signals"
    )
    #
    #signals, negative on OSX, 129 and above for Linux. No harm if we go on with all of them.
    signals = (-4, -6, -11, 132, 134, 139)
    get_output_for_signals(config_gm, sf, signals)

    #
    Logger.info(
        "Minimizing input files that result in interesting signals (and removing duplicates from the results)"
    )
    #
    im = InputMinimizer(config_gm)
    if os.path.exists(im.output_dir):
        Logger.warning(
            "Seems like minimized crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:",
            im.output_dir)
    else:
        os.mkdir(im.output_dir)
        for signal in signals:
            Logger.info(
                "Processing minimized folder for crash-minimizer for signal %i"
                % signal)
            signal_folder = sf.get_folder_path_for_signal(signal)
            # NOTE(review): this rebinds the outer `im`; after the loop,
            # im.output_dir refers to the LAST per-signal minimizer. Confirm
            # that matches the intended overall minimized-output directory.
            im = InputMinimizer(config_gm, signal_folder)
            if os.path.exists(signal_folder):
                Logger.info("Minimizing inputs resulting in signal %i" %
                            signal)
                im.minimize_testcases()
            else:
                Logger.warning(
                    "Seems that none of the crashes results in a %i signal" %
                    signal)
        Logger.info("Removing duplicates from minimized tests")
        fdf.delete_duplicates_recursively(im.output_dir)

    #
    Logger.info("Finding signals for minimized crash files")
    #
    sf_minimized_crashes = SignalFinder(
        config_gm, im.output_dir,
        os.path.join(config_gm.output_dir, "minimized-inputs-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning(
            "Seems like crashes were already categorized by signal, skipping.")
        Logger.warning(
            "Remove output directory or remove this folder if you want to rerun:",
            sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info(
            "Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)

    #
    Logger.info(
        "Running binaries to discover stdout/stderr, gdb and ASAN output for minimized input files that result in interesting signals"
    )
    #
    get_output_for_signals(config_gm, sf_minimized_crashes, signals)