def analyze_output_and_exploitability(config, signal_finder, uninteresting_signals, message_prefix=""):
    """For each interesting signal folder, capture binary/gdb/ASAN output and classify exploitability.

    Iterates over the signal folders that exist for signals NOT in
    uninteresting_signals, skips any folder that already contains
    exploitability classification subfolders, otherwise runs an
    OutputFinder over it and then ExploitableGdbPlugin to divide the
    crashes by exploitability.
    """
    for signal, signal_folder in signal_finder.get_folder_paths_for_signals_if_exist(uninteresting_signals):
        already_analyzed = False
        # A classification subfolder means a previous run already did the analysis.
        for classification in ExploitableGdbPlugin.get_classifications():
            if os.path.exists(os.path.join(signal_folder, classification)):
                Logger.warning("Seems like there are already exploitability analysis results, skipping. If you want to rerun: rm -r %s" % os.path.join(signal_folder, classification))
                already_analyzed = True
        if already_analyzed:
            continue
        Logger.info(message_prefix, "Discover stdout, stderr, gdb and ASAN output (signal %s)" % signal)
        run_output_wildcard = os.path.join(signal_folder, "*" + config.run_extension)
        if glob.glob(run_output_wildcard):
            Logger.warning("Seems like there are already results from running the binaries, skipping. If you want to rerun: rm", run_output_wildcard)
        else:
            output_finder = OutputFinder(config, signal_folder)
            output_finder.do_sane_output_runs()
        Logger.info(message_prefix, "Analyzing exploitability (signal %s)" % signal)
        plugin = ExploitableGdbPlugin(config, signal_folder)
        plugin.divide_by_exploitability()
def divide_by_exploitability(self, function=shutil.move):
    """Sort crash files into per-classification subdirectories.

    Walks self.search_dir; for every crash file (anything not ending in the
    run extension) reads the matching gdb-exploitable output file and moves
    (or copies — *function* decides, default shutil.move) the crash file and
    every file sharing its path prefix into a folder named after the first
    matching exploitability classification, or "UNCATEGORIZED" if none of
    self.classifications matches.
    """
    if self.output_dir is not None and not os.path.exists(self.output_dir):
        os.mkdir(self.output_dir)
    for path, _, files in os.walk(self.search_dir):
        for filename in files:
            # Skip the run-output files themselves; we only sort the crash inputs.
            if filename.endswith(self.config.run_extension):
                continue
            filepath = os.path.join(path, filename)
            gdb_out_filepath = filepath + self.config.get_gdb_exploitable_file_extension()
            if os.path.exists(gdb_out_filepath):
                # BUGFIX: original used the Python-2-only file() builtin and
                # leaked the handle; open() in a with-block fixes both while
                # keeping identical read semantics.
                with open(gdb_out_filepath, "rb") as gdb_out_file:
                    file_content = gdb_out_file.read()
                out_dir_main = self.output_dir
                if out_dir_main is None:
                    # No global output dir configured: sort in place next to the crash.
                    out_dir_main = path
                out_dir = os.path.join(out_dir_main, "UNCATEGORIZED") + os.path.sep
                for classification in self.classifications:
                    # NOTE(review): file_content is bytes; assumes the search
                    # string helper returns a compatible type — confirm under Python 3.
                    if self._get_search_string_for_classification(classification) in file_content:
                        out_dir = os.path.join(out_dir_main, classification) + os.path.sep
                        break
                if not os.path.exists(out_dir):
                    os.mkdir(out_dir)
                Logger.debug("Moving", filepath + "* to", out_dir, debug_level=4)
                # Move/copy the crash file plus all derived files (outputs, gdb logs, ...).
                for file_all_extensions in glob.glob(filepath + "*"):
                    function(file_all_extensions, out_dir)
            else:
                Logger.warning("Seems like there is no gdb output file %s, can not find exploitability" % gdb_out_filepath)
def sanity_check(self):
    """Validate configured binaries and directories, then create output/tmp dirs and the gdb script.

    Aborts via Logger.fatal when a configured binary is unreadable or a
    required input directory is missing; creates self.output_dir and
    self.tmp_dir when absent.
    """
    # The instrumented binary is mandatory and must be readable.
    if not os.access(self.target_binary_instrumented, os.R_OK):
        Logger.fatal("AFL target binary not accessible:", self.target_binary_instrumented + ". Did you configure the CrashAnalysisConfig class?")
    # Plain and ASAN binaries are optional, but if configured must be readable.
    if self.target_binary_plain is not None and not os.access(self.target_binary_plain, os.R_OK):
        Logger.fatal("Target binary not accessible:", self.target_binary_plain + ". Did you configure the CrashAnalysisConfig class?")
    if self.target_binary_asan is not None and not os.access(self.target_binary_asan, os.R_OK):
        Logger.fatal("ASAN target binary not accessible:", self.target_binary_asan + ". Did you configure the CrashAnalysisConfig class?")
    # Input directories must already exist.
    if not os.access(self.main_dir, os.F_OK):
        Logger.fatal("Your main_dir doesn't exist:", self.main_dir)
    if not os.access(self.original_crashes_directory, os.F_OK):
        Logger.fatal("Your original_crashes_directory doesn't exist:", self.original_crashes_directory)
    # Output directory: warn when it already exists, create it otherwise.
    if os.path.exists(self.output_dir):
        Logger.warning("Your output directory already exists, did you want to move it before running?", self.output_dir)
    else:
        Logger.info("Output folder will be:", self.output_dir)
        os.mkdir(self.output_dir)
    if not os.path.exists(self.tmp_dir):
        os.mkdir(self.tmp_dir)
    self.prepare_gdb_script()
def plain_combined_stdout_stderr(self, gdb_run=False):
    """Capture combined stdout/stderr of the plain binary (optionally under gdb); warn and skip if none is configured."""
    plain_binary = self.config.target_binary_plain
    if plain_binary:
        self._combined_stdout_stderr(plain_binary, gdb_run, self.config.output_prefix_plain)
    else:
        Logger.warning("You didn't configure a plain binary (recommended: with symbols), therefore skipping run with plain binary.")
def asan_combined_stdout_stderr(self, gdb_run=False):
    """Capture combined stdout/stderr of the ASAN binary (optionally under gdb); warn and skip if none is configured."""
    asan_binary = self.config.target_binary_asan
    if asan_binary:
        self._combined_stdout_stderr(asan_binary, gdb_run, self.config.output_prefix_asan)
    else:
        Logger.warning("You didn't configure an ASAN enabled binary (recommended: with symbols), therefore skipping run with ASAN binary.")
def do_sane_output_runs(self):
    """Run the sensible set of output-capturing runs for the configured binaries.

    Prefers the plain/ASAN binaries (plus a plain gdb run); falls back to
    the instrumented binary when neither is configured.
    """
    if self.output_dir is not None and not os.path.exists(self.output_dir):
        os.mkdir(self.output_dir)
    no_uninstrumented = self.config.target_binary_plain is None and self.config.target_binary_asan is None
    if no_uninstrumented:
        Logger.warning("You didn't specify any non-instrumented binary, running tests with instrumented binaries")
        self.instrumented_combined_stdout_stderr()
        self.instrumented_combined_stdout_stderr(gdb_run=True)
    else:
        Logger.info("Plain run")
        self.plain_combined_stdout_stderr()
        Logger.info("Plain gdb run")
        self.plain_combined_stdout_stderr(gdb_run=True)
        Logger.info("ASAN run")
        self.asan_combined_stdout_stderr()
def get_output_for_signals(config, signal_finder, signals):
    """Run the configured binaries (plain/ASAN/instrumented, plus gdb) over each existing signal folder.

    Skips everything when run-output files already exist under the signal
    finder's output directory.
    """
    run_output_wildcard = signal_finder.output_dir + "/*/*" + config.run_extension
    if glob.glob(run_output_wildcard):
        Logger.warning("Seems like there are already results from running the binaries, skipping. Remove output directory or run this command if you want to rerun:")
        Logger.warning("rm ", run_output_wildcard)
        return
    Logger.info("We analyze only a couple of signals like SIGABRT, SIGSEGV, but do not care about the rest. Going for", signals)
    for signal in signals:
        Logger.info("Processing folder for output generation for signal %i" % signal)
        signal_folder = signal_finder.get_folder_path_for_signal(signal)
        if not os.path.exists(signal_folder):
            Logger.warning("Seems that none of the crashes results in a %i signal" % signal)
            continue
        Logger.info("Getting stdout and stderr of runs which result in %i. Additionally running with gdb script." % signal)
        output_finder = OutputFinder(config, signal_folder)
        if config.target_binary_plain is None and config.target_binary_asan is None:
            Logger.warning("You didn't specify any non-instrumented binary, running tests with instrumented binaries")
            output_finder.instrumented_combined_stdout_stderr()
            output_finder.instrumented_combined_stdout_stderr(gdb_run=True)
        else:
            Logger.info("Plain run for", signal_folder)
            output_finder.plain_combined_stdout_stderr()
            Logger.info("Plain gdb run for", signal_folder)
            output_finder.plain_combined_stdout_stderr(gdb_run=True)
            Logger.info("ASAN run for", signal_folder)
            output_finder.asan_combined_stdout_stderr()
            #Logger.info("ASAN gdb run for", signal_folder)
            #output_finder.asan_combined_stdout_stderr(gdb_run=True)
def do_sane_output_runs(self):
    """Execute the standard output-capture runs, preferring non-instrumented binaries."""
    # Lazily create the output directory if one was configured.
    if self.output_dir is not None and not os.path.exists(self.output_dir):
        os.mkdir(self.output_dir)
    if self.config.target_binary_plain is None and self.config.target_binary_asan is None:
        # Fallback: only the AFL-instrumented binary is available.
        Logger.warning("You didn't specify any non-instrumented binary, running tests with instrumented binaries")
        self.instrumented_combined_stdout_stderr()
        self.instrumented_combined_stdout_stderr(gdb_run=True)
        return
    Logger.info("Plain run")
    self.plain_combined_stdout_stderr()
    Logger.info("Plain gdb run")
    self.plain_combined_stdout_stderr(gdb_run=True)
    Logger.info("ASAN run")
    self.asan_combined_stdout_stderr()
def analyze_output_and_exploitability(config, signal_finder, uninteresting_signals, message_prefix=""):
    """Capture run output and run the gdb exploitability classification per interesting-signal folder.

    Folders already holding classification subdirectories are reported and
    skipped; folders with existing run-output files skip only the output
    step, not the exploitability analysis.
    """
    for signal, signal_folder in signal_finder.get_folder_paths_for_signals_if_exist(uninteresting_signals):
        # Collect classification subfolders that already exist from an earlier run.
        existing = [c for c in ExploitableGdbPlugin.get_classifications()
                    if os.path.exists(os.path.join(signal_folder, c))]
        for c in existing:
            Logger.warning("Seems like there are already exploitability analysis results, skipping. If you want to rerun: rm -r %s" % os.path.join(signal_folder, c))
        if existing:
            continue
        Logger.info(message_prefix, "Discover stdout, stderr, gdb and ASAN output (signal %s)" % signal)
        wildcard_for_run_output_files = os.path.join(signal_folder, "*" + config.run_extension)
        if glob.glob(wildcard_for_run_output_files):
            Logger.warning("Seems like there are already results from running the binaries, skipping. If you want to rerun: rm", wildcard_for_run_output_files)
        else:
            OutputFinder(config, signal_folder).do_sane_output_runs()
        Logger.info(message_prefix, "Analyzing exploitability (signal %s)" % signal)
        ExploitableGdbPlugin(config, signal_folder).divide_by_exploitability()
def sanity_check(self):
    """Check binaries/directories are usable and set up output, tmp dir and the gdb script."""
    # Mandatory instrumented binary.
    if not os.access(self.target_binary_instrumented, os.R_OK):
        Logger.fatal("AFL target binary not accessible:", self.target_binary_instrumented + ". Did you configure the CrashAnalysisConfig class?")
    # Optional binaries: checked only when configured.
    optional_binaries = (
        ("Target binary", self.target_binary_plain),
        ("ASAN target binary", self.target_binary_asan),
    )
    for description, binary in optional_binaries:
        if binary is not None and not os.access(binary, os.R_OK):
            Logger.fatal(description + " not accessible:", binary + ". Did you configure the CrashAnalysisConfig class?")
    # Required input directories.
    if not os.access(self.main_dir, os.F_OK):
        Logger.fatal("Your main_dir doesn't exist:", self.main_dir)
    if not os.access(self.original_crashes_directory, os.F_OK):
        Logger.fatal("Your original_crashes_directory doesn't exist:", self.original_crashes_directory)
    # Output/tmp directory setup.
    if os.path.exists(self.output_dir):
        Logger.warning("Your output directory already exists, did you want to move it before running?", self.output_dir)
    else:
        Logger.info("Output folder will be:", self.output_dir)
        os.mkdir(self.output_dir)
    if not os.path.exists(self.tmp_dir):
        os.mkdir(self.tmp_dir)
    self.prepare_gdb_script()
def get_output_for_signals(config, signal_finder, signals):
    """Generate stdout/stderr/gdb/ASAN output for every signal folder that exists.

    Does nothing (beyond a warning) when previous run-output files are found
    under the signal finder's output directory.
    """
    wildcard_for_run_output_files = signal_finder.output_dir + "/*/*" + config.run_extension
    if glob.glob(wildcard_for_run_output_files):
        Logger.warning("Seems like there are already results from running the binaries, skipping. Remove output directory or run this command if you want to rerun:")
        Logger.warning("rm ", wildcard_for_run_output_files)
    else:
        Logger.info("We analyze only a couple of signals like SIGABRT, SIGSEGV, but do not care about the rest. Going for", signals)
        for signal in signals:
            Logger.info("Processing folder for output generation for signal %i" % signal)
            signal_folder = signal_finder.get_folder_path_for_signal(signal)
            if os.path.exists(signal_folder):
                Logger.info("Getting stdout and stderr of runs which result in %i. Additionally running with gdb script." % signal)
                of = OutputFinder(config, signal_folder)
                no_uninstrumented = config.target_binary_plain is None and config.target_binary_asan is None
                if no_uninstrumented:
                    Logger.warning("You didn't specify any non-instrumented binary, running tests with instrumented binaries")
                    of.instrumented_combined_stdout_stderr()
                    of.instrumented_combined_stdout_stderr(gdb_run=True)
                else:
                    Logger.info("Plain run for", signal_folder)
                    of.plain_combined_stdout_stderr()
                    Logger.info("Plain gdb run for", signal_folder)
                    of.plain_combined_stdout_stderr(gdb_run=True)
                    Logger.info("ASAN run for", signal_folder)
                    of.asan_combined_stdout_stderr()
                    #Logger.info("ASAN gdb run for", signal_folder)
                    #of.asan_combined_stdout_stderr(gdb_run=True)
            else:
                Logger.warning("Seems that none of the crashes results in a %i signal" % signal)
def main():
    """Full triage pipeline for the hard-coded GraphicsMagick test case.

    Dedupe/rename input crashes, divide by signal, capture run output,
    minimize with afl-tmin, re-divide the minimized inputs by signal and
    capture their output too.
    """
    # Read the README before you start.
    Logger.info("Setting up configuration")

    # gdb batch scripts printing crash context; 'exploitable' needs the gdb exploitable plugin.
    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
"""
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
"""

    where_this_python_script_lives = os.path.dirname(os.path.realpath(__file__))

    gdb_command = "gdb"
    gdb_command_osx = "/opt/local/bin/gdb-apple"

    config_gm = CrashAnalysisConfig(where_this_python_script_lives,
                    target_binary_instrumented=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-afl/utilities/gm",
                    args_before="identify",
                    args_after="",
                    target_binary_plain=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-plain/utilities/gm",
                    target_binary_asan=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-asan/utilities/gm",
                    env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
                    crash_dir=where_this_python_script_lives+"/test-cases/gm/crashes",
                    gdb_script=gdb_script_32bit,
                    gdb_binary=gdb_command
                    )

#    config_ffmpeg = CrashAnalysisConfig(where_this_python_script_lives,
#                target_binary_instrumented=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-afl/ffmpeg",
#                args_before="-i",
#                args_after="-loglevel quiet",
#                target_binary_plain=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-plain/ffmpeg",
##                target_binary_asan=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-asan/ffmpeg",
#                env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
#                crash_dir=where_this_python_script_lives+"/test-cases/ffmpeg/crashes",
#                gdb_script=gdb_script_32bit,
#                gdb_binary=gdb_command
#                )

    #
    Logger.info("Input crashes directory operations")
    #
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(config_gm)
    fdf.remove_readmes(config_gm.original_crashes_directory)
    Logger.info("Removing duplicates from original crashes folder (same file size + MD5)")
    fdf.delete_duplicates_recursively(config_gm.original_crashes_directory)
    Logger.info("Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions")
    fdf.rename_same_name_files(config_gm.original_crashes_directory)

    #
    Logger.info("Finding signals for all crash files")
    #
    sf = SignalFinder(config_gm)
    if os.path.exists(sf.output_dir):
        Logger.warning("Seems like all crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:", sf.output_dir)
    else:
        Logger.info("Dividing files to output folder according to their signal")
        os.mkdir(sf.output_dir)
        sf.divide_by_signal(0)

    #
    Logger.info("Running binaries to discover stdout/stderr, gdb and ASAN output for crash files that result in interesting signals")
    #
    #signals, negative on OSX, 129 and above for Linux. No harm if we go on with all of them.
    signals = (-4, -6, -11, 132, 134, 139)
    get_output_for_signals(config_gm, sf, signals)

    #
    Logger.info("Minimizing input files that result in interesting signals (and removing duplicates from the results)")
    #
    im = InputMinimizer(config_gm)
    if os.path.exists(im.output_dir):
        Logger.warning("Seems like minimized crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:", im.output_dir)
    else:
        os.mkdir(im.output_dir)
        for signal in signals:
            Logger.info("Processing minimized folder for crash-minimizer for signal %i" % signal)
            signal_folder = sf.get_folder_path_for_signal(signal)
            im = InputMinimizer(config_gm, signal_folder)
            if os.path.exists(signal_folder):
                Logger.info("Minimizing inputs resulting in signal %i" % signal)
                im.minimize_testcases()
            else:
                Logger.warning("Seems that none of the crashes results in a %i signal" % signal)
        Logger.info("Removing duplicates from minimized tests")
        fdf.delete_duplicates_recursively(im.output_dir)

    #
    Logger.info("Finding signals for minimized crash files")
    #
    sf_minimized_crashes = SignalFinder(config_gm, im.output_dir, os.path.join(config_gm.output_dir, "minimized-inputs-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning("Seems like crashes were already categorized by signal, skipping.")
        Logger.warning("Remove output directory or remove this folder if you want to rerun:", sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info("Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)

    #
    Logger.info("Running binaries to discover stdout/stderr, gdb and ASAN output for minimized input files that result in interesting signals")
    #
    get_output_for_signals(config_gm, sf_minimized_crashes, signals)
def main():
    """Triage pipeline (default-directory variant) for the GraphicsMagick test case.

    Uses config defaults (default_signal_directory,
    default_minimized_crashes_directory) and the shared
    analyze_output_and_exploitability() helper for both the raw and the
    minimized crash sets, then cleans up.
    """
    # Read the README before you start.
    Logger.info("Setting up configuration")

    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
"""
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
"""

    where_this_python_script_lives = os.path.dirname(os.path.realpath(__file__))

    gdb_command = "gdb"
    gdb_command_osx = "/opt/local/bin/gdb-apple"

    config_gm = CrashAnalysisConfig(where_this_python_script_lives,
                    target_binary_instrumented=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-afl/utilities/gm",
                    args_before="identify",
                    args_after="",
                    target_binary_plain=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-plain/utilities/gm",
                    target_binary_asan=where_this_python_script_lives+"/test-cases/gm/graphicsmagick-asan/utilities/gm",
                    env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
                    crash_dir=where_this_python_script_lives+"/test-cases/gm/crashes",
                    gdb_script=gdb_script_32bit,
                    gdb_binary=gdb_command
                    )

#    config_ffmpeg = CrashAnalysisConfig(where_this_python_script_lives,
#                target_binary_instrumented=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-afl/ffmpeg",
#                args_before="-i",
#                args_after="-loglevel quiet",
#                target_binary_plain=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-plain/ffmpeg",
##                target_binary_asan=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-asan/ffmpeg",
#                env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
#                crash_dir=where_this_python_script_lives+"/test-cases/ffmpeg/crashes",
#                gdb_script=gdb_script_32bit,
#                gdb_binary=gdb_command
#                )

    #
    Logger.info("Input crashes directory operations")
    #
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(config_gm, config_gm.original_crashes_directory)
    fdf.remove_readmes()
    Logger.info("Removing duplicates from original crashes folder (same file size + MD5)")
    fdf.delete_duplicates_recursively()
    Logger.info("Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions")
    fdf.rename_same_name_files()

    #
    Logger.info("Finding interesting signals (all crashes)")
    #
    sf_all_crashes = SignalFinder(config_gm)
    if os.path.exists(config_gm.default_signal_directory):
        Logger.warning("Seems like all crashes were already categorized by signal, skipping. If you want to rerun: rm -r", config_gm.default_signal_directory)
    else:
        Logger.debug("Dividing files to output folder according to their signal")
        sf_all_crashes.divide_by_signal()

    #Interestings signals: negative on OSX, 129 and above for Linux
    #Uninteresting signals: We usually don't care about signals 0, 1, 2, etc. up to 128
    uninteresting_signals = range(0, 129)

    analyze_output_and_exploitability(config_gm, sf_all_crashes, uninteresting_signals, message_prefix="Interesting signals /")

    Logger.info("Interesting signals / Minimizing input (afl-tmin)")
    if os.path.exists(config_gm.default_minimized_crashes_directory):
        Logger.warning("Seems like crashes were already minimized, skipping. If you want to rerun: rm -r", config_gm.default_minimized_crashes_directory)
    else:
        for signal, signal_folder in sf_all_crashes.get_folder_paths_for_signals_if_exist(uninteresting_signals):
            Logger.debug("Minimizing inputs resulting in signal %i" % signal)
            im = InputMinimizer(config_gm, signal_folder)
            im.minimize_testcases()

    Logger.info("Interesting signals / Minimized inputs / Deduplication")
    fdf_minimized = FileDuplicateFinder(config_gm, config_gm.default_minimized_crashes_directory)
    fdf_minimized.delete_duplicates_recursively()

    #
    Logger.info("Interesting signals / Minimized inputs / Finding interesting signals")
    #
    sf_minimized_crashes = SignalFinder(config_gm, config_gm.default_minimized_crashes_directory, os.path.join(config_gm.output_dir, "minimized-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning("Seems like minimized crashes were already categorized by signal, skipping. If you want to rerun: rm -r", sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info("Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)

    analyze_output_and_exploitability(config_gm, sf_minimized_crashes, uninteresting_signals, message_prefix="Interesting signals / Minimized inputs /")

#    # If you are in the mood to waste a little CPU time, run this
#    Logger.info("Found interesting_signals (interesting interesting_signals) / Minimized inputs (interested interesting_signals) / Feeling lucky auto exploitation")
#    #
#    fle = FeelingLuckyExploiter(config_gm, sf_minimized_crashes.output_dir)
#    #os.mkdir(fle.output_dir)
#    fle.run_forest_run()

    #TODO: develop
    #- peruvian were rabbit?
    #- exploitable script, something along: less `grep -l 'Exploitability Classification: EXPLOITABLE' output/per-signal/*/*gdb*`

    cleanup(config_gm)
def asan_combined_stdout_stderr(self, gdb_run=False):
    """Run the ASAN binary over the crashes and capture output; no-op with a warning if unconfigured."""
    if self.config.target_binary_asan:
        self._combined_stdout_stderr(self.config.target_binary_asan, gdb_run, self.config.output_prefix_asan)
        return
    Logger.warning("You didn't configure an ASAN enabled binary (recommended: with symbols), therefore skipping run with ASAN binary.")
def plain_combined_stdout_stderr(self, gdb_run=False):
    """Run the plain binary over the crashes and capture output; no-op with a warning if unconfigured."""
    if self.config.target_binary_plain:
        self._combined_stdout_stderr(self.config.target_binary_plain, gdb_run, self.config.output_prefix_plain)
        return
    Logger.warning("You didn't configure a plain binary (recommended: with symbols), therefore skipping run with plain binary.")
def main():
    """Triage pipeline driven by a testcase Config module (ffmpeg variant).

    Builds the gdb scripts, exports the ASAN environment, imports the
    testcase-specific create_config(), sanity-checks it, then dedupes,
    divides by signal, analyzes exploitability, minimizes, and repeats the
    analysis on the minimized set before cleanup.
    """
    # Read the README before you start.
    Logger.info("Setting up configuration")

    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
printf "[+] list\n"
list
"""
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
printf "[+] list\n"
list
"""

    # TODO: Make sure gdb script doesn't abort on error
    # ignoring errors in gdb scripts: http://stackoverflow.com/questions/17923865/gdb-stops-in-a-command-file-if-there-is-an-error-how-to-continue-despite-the-er
    gdb_script_32bit_noerror = r"""python
def my_ignore_errors(arg):
    try:
        gdb.execute("print \"" + "Executing command: " + arg + "\"")
        gdb.execute (arg)
    except:
        gdb.execute("print \"" + "ERROR: " + arg + "\"")

my_ignore_errors("p p")
my_ignore_errors("p p->v1")
gdb.execute("quit")
"""

    where_this_python_script_lives = os.path.dirname(os.path.realpath(__file__))

    gdb_command = "/usr/bin/gdb"
    #gdb_command_osx = "/opt/local/bin/gdb-apple"

    #TODO: For some reason the ASAN environment variables are not correctly set when given to the subprocess module... so let's just set it in parent process already:
    os.environ['ASAN_SYMBOLIZER_PATH'] = "/usr/bin/llvm-symbolizer-3.4"
    os.environ['ASAN_OPTIONS'] = "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"
    env = {"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4",
           "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"}

    ###
    # This import decides which testcase/binary we want to run!
    ###
    from testcases.ffmpeg.Config import create_config
    #from testcases.ffmpeg.Config import create_config

    #see CrashAnalysisConfig for more options that get passed on by create_config
    chosen_config = create_config(where_this_python_script_lives, env=env, gdb_script=gdb_script_32bit, gdb_binary=gdb_command)
    chosen_config.sanity_check()

    #
    Logger.info("Input crashes directory operations")
    #
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(chosen_config, chosen_config.original_crashes_directory)
    fdf.remove_readmes()
    Logger.info("Removing duplicates from original crashes folder (same file size + MD5)")
    fdf.delete_duplicates_recursively()
    Logger.info("Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions")
    fdf.rename_same_name_files()
    #OR:
    #Logger.info("Renaming all files to numeric values, as some programs prefer no special chars in filenames and might require a specific file extension")
    #fdf.rename_all_files(".png")

    #
    Logger.info("Finding interesting signals (all crashes)")
    #
    sf_all_crashes = SignalFinder(chosen_config)
    if os.path.exists(chosen_config.default_signal_directory):
        Logger.warning("Seems like all crashes were already categorized by signal, skipping. If you want to rerun: rm -r", chosen_config.default_signal_directory)
    else:
        Logger.debug("Dividing files to output folder according to their signal")
        sf_all_crashes.divide_by_signal()

    #Interestings signals: negative on OSX, 129 and above sometimes for Linux on the shell (depending on used mechanism)
    #Uninteresting signals: We usually don't care about signals 0, 1, 2, etc. up to 128
    uninteresting_signals = range(0, 129)

    analyze_output_and_exploitability(chosen_config, sf_all_crashes, uninteresting_signals, message_prefix="Interesting signals /")

    Logger.info("Interesting signals / Minimizing input (afl-tmin)")
    if os.path.exists(chosen_config.default_minimized_crashes_directory):
        Logger.warning("Seems like crashes were already minimized, skipping. If you want to rerun: rm -r", chosen_config.default_minimized_crashes_directory)
    else:
        for signal, signal_folder in sf_all_crashes.get_folder_paths_for_signals_if_exist(uninteresting_signals):
            Logger.debug("Minimizing inputs resulting in signal %i" % signal)
            im = InputMinimizer(chosen_config, signal_folder)
            im.minimize_testcases()

    Logger.info("Interesting signals / Minimized inputs / Deduplication")
    fdf_minimized = FileDuplicateFinder(chosen_config, chosen_config.default_minimized_crashes_directory)
    fdf_minimized.delete_duplicates_recursively()

    #
    Logger.info("Interesting signals / Minimized inputs / Finding interesting signals")
    #
    sf_minimized_crashes = SignalFinder(chosen_config, chosen_config.default_minimized_crashes_directory, os.path.join(chosen_config.output_dir, "minimized-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning("Seems like minimized crashes were already categorized by signal, skipping. If you want to rerun: rm -r", sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info("Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)

    analyze_output_and_exploitability(chosen_config, sf_minimized_crashes, uninteresting_signals, message_prefix="Interesting signals / Minimized inputs /")

    #TODO:
    #- Make (some) modules work as standalone applications with command line parsing
    #- The FeelingLuckyExplotier thing. Need to get a small test sample where I know it should work.

#    # If you are in the mood to waste a little CPU time, run this
#    Logger.info("Found interesting_signals (interesting interesting_signals) / Minimized inputs (interested interesting_signals) / Feeling lucky auto exploitation")
#    #
#    fle = FeelingLuckyExploiter(chosen_config, sf_minimized_crashes.output_dir)
#    #os.mkdir(fle.output_dir)
#    fle.run_forest_run()

    cleanup(chosen_config)
def main():
    """Triage pipeline driven by a testcase Config module (GraphicsMagick variant).

    Identical flow to the ffmpeg-config variant but imports
    testcases.gm.Config; builds gdb scripts, exports ASAN env vars,
    sanity-checks the chosen config, dedupes, divides by signal, analyzes
    exploitability, minimizes, re-analyzes the minimized set, cleans up.
    """
    # Read the README before you start.
    Logger.info("Setting up configuration")

    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
printf "[+] list\n"
list
"""
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
printf "[+] list\n"
list
"""

    #TODO: Make sure gdb script doesn't abort on error
    #ignoring errors in gdb scripts: http://stackoverflow.com/questions/17923865/gdb-stops-in-a-command-file-if-there-is-an-error-how-to-continue-despite-the-er
    gdb_script_32bit_noerror = r"""python
def my_ignore_errors(arg):
    try:
        gdb.execute("print \"" + "Executing command: " + arg + "\"")
        gdb.execute (arg)
    except:
        gdb.execute("print \"" + "ERROR: " + arg + "\"")

my_ignore_errors("p p")
my_ignore_errors("p p->v1")
gdb.execute("quit")
"""

    where_this_python_script_lives = os.path.dirname(os.path.realpath(__file__))

    gdb_command = "/usr/bin/gdb"
    #gdb_command_osx = "/opt/local/bin/gdb-apple"

    #TODO: For some reason the ASAN environment variables are not correctly set when given to the subprocess module... so let's just set it in parent process already:
    os.environ['ASAN_SYMBOLIZER_PATH'] = "/usr/bin/llvm-symbolizer-3.4"
    os.environ['ASAN_OPTIONS'] = "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"
    env = {"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4",
           "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1:abort_on_error=1"}

    ###
    #This import decides which testcase/binary we want to run!
    ###
    from testcases.gm.Config import create_config
    #from testcases.ffmpeg.Config import create_config

    #see CrashAnalysisConfig for more options that get passed on by create_config
    chosen_config = create_config(where_this_python_script_lives, env=env, gdb_script=gdb_script_32bit, gdb_binary=gdb_command)
    chosen_config.sanity_check()

    #
    Logger.info("Input crashes directory operations")
    #
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(chosen_config, chosen_config.original_crashes_directory)
    fdf.remove_readmes()
    Logger.info("Removing duplicates from original crashes folder (same file size + MD5)")
    fdf.delete_duplicates_recursively()
    Logger.info("Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions")
    fdf.rename_same_name_files()
    #OR:
    #Logger.info("Renaming all files to numeric values, as some programs prefer no special chars in filenames and might require a specific file extension")
    #fdf.rename_all_files(".png")

    #
    Logger.info("Finding interesting signals (all crashes)")
    #
    sf_all_crashes = SignalFinder(chosen_config)
    if os.path.exists(chosen_config.default_signal_directory):
        Logger.warning("Seems like all crashes were already categorized by signal, skipping. If you want to rerun: rm -r", chosen_config.default_signal_directory)
    else:
        Logger.debug("Dividing files to output folder according to their signal")
        sf_all_crashes.divide_by_signal()

    #Interestings signals: negative on OSX, 129 and above sometimes for Linux on the shell (depending on used mechanism)
    #Uninteresting signals: We usually don't care about signals 0, 1, 2, etc. up to 128
    uninteresting_signals = range(0, 129)

    analyze_output_and_exploitability(chosen_config, sf_all_crashes, uninteresting_signals, message_prefix="Interesting signals /")

    Logger.info("Interesting signals / Minimizing input (afl-tmin)")
    if os.path.exists(chosen_config.default_minimized_crashes_directory):
        Logger.warning("Seems like crashes were already minimized, skipping. If you want to rerun: rm -r", chosen_config.default_minimized_crashes_directory)
    else:
        for signal, signal_folder in sf_all_crashes.get_folder_paths_for_signals_if_exist(uninteresting_signals):
            Logger.debug("Minimizing inputs resulting in signal %i" % signal)
            im = InputMinimizer(chosen_config, signal_folder)
            im.minimize_testcases()

    Logger.info("Interesting signals / Minimized inputs / Deduplication")
    fdf_minimized = FileDuplicateFinder(chosen_config, chosen_config.default_minimized_crashes_directory)
    fdf_minimized.delete_duplicates_recursively()

    #
    Logger.info("Interesting signals / Minimized inputs / Finding interesting signals")
    #
    sf_minimized_crashes = SignalFinder(chosen_config, chosen_config.default_minimized_crashes_directory, os.path.join(chosen_config.output_dir, "minimized-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning("Seems like minimized crashes were already categorized by signal, skipping. If you want to rerun: rm -r", sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info("Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)

    analyze_output_and_exploitability(chosen_config, sf_minimized_crashes, uninteresting_signals, message_prefix="Interesting signals / Minimized inputs /")

    #TODO:
    #- Make (some) modules work as standalone applications with command line parsing
    #- The FeelingLuckyExplotier thing. Need to get a small test sample where I know it should work.

#    # If you are in the mood to waste a little CPU time, run this
#    Logger.info("Found interesting_signals (interesting interesting_signals) / Minimized inputs (interested interesting_signals) / Feeling lucky auto exploitation")
#    #
#    fle = FeelingLuckyExploiter(chosen_config, sf_minimized_crashes.output_dir)
#    #os.mkdir(fle.output_dir)
#    fle.run_forest_run()

    cleanup(chosen_config)
def main():
    # Read the README before you start.
    #
    # Driver for the whole crash-triage pipeline against the GraphicsMagick
    # ("gm") test-case setup: build the configuration, clean/dedupe the raw
    # crash directory, bucket crashes by signal, collect stdout/stderr/gdb/
    # ASAN output for interesting signals, minimize the inputs (afl-tmin),
    # dedupe and re-bucket the minimized inputs, and collect output again.
    # All heavy lifting is delegated to the project classes
    # (CrashAnalysisConfig, FileDuplicateFinder, SignalFinder, InputMinimizer)
    # defined elsewhere in this project.
    Logger.info("Setting up configuration")

    # gdb batch script for 64 bit targets: backtrace, registers, the
    # 'exploitable' plugin verdict and a short disassembly at the crash site.
    # Not used by the config below; kept as a ready-made alternative.
    gdb_script_64bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $rip, $rip+16:\n"
disassemble $rip, $rip+16
"""

    # Same script for 32 bit targets ($eip instead of $rip).
    gdb_script_32bit = r"""printf "[+] Disabling verbose and complaints\n"
set verbose off
set complaints 0
printf "[+] Backtrace:\n"
bt
printf "[+] info reg:\n"
info reg
printf "[+] exploitable:\n"
exploitable
printf "[+] disassemble $eip, $eip+16:\n"
disassemble $eip, $eip+16
"""

    # All target/crash paths below are resolved relative to this script.
    where_this_python_script_lives = os.path.dirname(
        os.path.realpath(__file__))

    gdb_command = "gdb"
    # Alternative gdb binary for OSX hosts (unused by default).
    gdb_command_osx = "/opt/local/bin/gdb-apple"

    # Configuration for the GraphicsMagick test case: instrumented (afl),
    # plain and ASAN builds of `gm identify`, plus the gdb script used for
    # exploitability triage. Presumably a 32 bit build, hence
    # gdb_script_32bit -- TODO confirm.
    config_gm = CrashAnalysisConfig(
        where_this_python_script_lives,
        target_binary_instrumented=where_this_python_script_lives +
        "/test-cases/gm/graphicsmagick-afl/utilities/gm",
        args_before="identify",
        args_after="",
        target_binary_plain=where_this_python_script_lives +
        "/test-cases/gm/graphicsmagick-plain/utilities/gm",
        target_binary_asan=where_this_python_script_lives +
        "/test-cases/gm/graphicsmagick-asan/utilities/gm",
        env={
            "ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4",
            "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"
        },
        crash_dir=where_this_python_script_lives + "/test-cases/gm/crashes",
        gdb_script=gdb_script_32bit,
        gdb_binary=gdb_command)

    # Example configuration for an ffmpeg test case, kept for reference.
#    config_ffmpeg = CrashAnalysisConfig(where_this_python_script_lives,
#                target_binary_instrumented=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-afl/ffmpeg",
#                args_before="-i",
#                args_after="-loglevel quiet",
#                target_binary_plain=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-plain/ffmpeg",
##                target_binary_asan=where_this_python_script_lives+"/test-cases/ffmpeg/ffmpeg-asan/ffmpeg",
#                env={"ASAN_SYMBOLIZER_PATH": "/usr/bin/llvm-symbolizer-3.4", "ASAN_OPTIONS": "symbolize=1:redzone=512:quarantine_size=512Mb:exitcode=1"},
#                crash_dir=where_this_python_script_lives+"/test-cases/ffmpeg/crashes",
#                gdb_script=gdb_script_32bit,
#                gdb_binary=gdb_command
#                )

    #
    Logger.info("Input crashes directory operations")
    #
    Logger.info("Removing README.txt files")
    fdf = FileDuplicateFinder(config_gm)
    fdf.remove_readmes(config_gm.original_crashes_directory)
    Logger.info(
        "Removing duplicates from original crashes folder (same file size + MD5)"
    )
    fdf.delete_duplicates_recursively(config_gm.original_crashes_directory)
    Logger.info(
        "Renaming files from original crashes folder so that the filename is a unique identifier. This allows us to copy all crash files into one directory (eg. for tmin output) if necessary, without name collisions"
    )
    fdf.rename_same_name_files(config_gm.original_crashes_directory)

    #
    Logger.info("Finding signals for all crash files")
    #
    sf = SignalFinder(config_gm)
    if os.path.exists(sf.output_dir):
        # Results of a previous run are never overwritten; the user has to
        # remove the directory to force a rerun.
        Logger.warning(
            "Seems like all crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:",
            sf.output_dir)
    else:
        Logger.info(
            "Dividing files to output folder according to their signal")
        os.mkdir(sf.output_dir)
        sf.divide_by_signal(0)

    #
    Logger.info(
        "Running binaries to discover stdout/stderr, gdb and ASAN output for crash files that result in interesting signals"
    )
    #
    #signals, negative on OSX, 129 and above for Linux. No harm if we go on with all of them.
    signals = (-4, -6, -11, 132, 134, 139)
    get_output_for_signals(config_gm, sf, signals)

    #
    Logger.info(
        "Minimizing input files that result in interesting signals (and removing duplicates from the results)"
    )
    #
    # NOTE(review): this instance is seemingly only used to learn the default
    # output directory; the per-signal instances below do the actual
    # minimization -- TODO confirm against InputMinimizer.
    im = InputMinimizer(config_gm)
    if os.path.exists(im.output_dir):
        Logger.warning(
            "Seems like minimized crashes were already categorized by signal, skipping. Remove output directory or remove this folder if you want to rerun:",
            im.output_dir)
    else:
        os.mkdir(im.output_dir)
        for signal in signals:
            Logger.info(
                "Processing minimized folder for crash-minimizer for signal %i"
                % signal)
            signal_folder = sf.get_folder_path_for_signal(signal)
            # A fresh minimizer per per-signal folder; `im` is deliberately
            # rebound here and its output_dir is reused after the loop.
            im = InputMinimizer(config_gm, signal_folder)
            if os.path.exists(signal_folder):
                Logger.info("Minimizing inputs resulting in signal %i" %
                            signal)
                im.minimize_testcases()
            else:
                Logger.warning(
                    "Seems that none of the crashes results in a %i signal" %
                    signal)
        Logger.info("Removing duplicates from minimized tests")
        fdf.delete_duplicates_recursively(im.output_dir)

    #
    Logger.info("Finding signals for minimized crash files")
    #
    sf_minimized_crashes = SignalFinder(
        config_gm, im.output_dir,
        os.path.join(config_gm.output_dir, "minimized-inputs-per-signal"))
    if os.path.exists(sf_minimized_crashes.output_dir):
        Logger.warning(
            "Seems like crashes were already categorized by signal, skipping.")
        Logger.warning(
            "Remove output directory or remove this folder if you want to rerun:",
            sf_minimized_crashes.output_dir)
    else:
        os.mkdir(sf_minimized_crashes.output_dir)
        Logger.info(
            "Dividing files to output folder according to their signal")
        sf_minimized_crashes.divide_by_signal(0)

    #
    Logger.info(
        "Running binaries to discover stdout/stderr, gdb and ASAN output for minimized input files that result in interesting signals"
    )
    #
    get_output_for_signals(config_gm, sf_minimized_crashes, signals)