Example 1
    def cleanse(self, target_path, arguments, input_path, output_path,
                max_time):
        """Optional (but recommended): Cleanse a testcase.

    Args:
      target_path: Path to the target.
      arguments: Additional arguments needed for testcase cleanse.
      input_path: Path to the reproducer input.
      output_path: Path to the cleansed output.
      max_time: Maximum allowed time for the cleanse.

    Returns:
      A ReproduceResult.

    Raises:
      TimeoutError: If the cleanse exceeds max_time.
    """
        runner = libfuzzer.get_runner(target_path)
        libfuzzer.set_sanitizer_options(target_path)

        cleanse_tmp_dir = self._create_temp_corpus_dir('cleanse-workdir')
        result = runner.cleanse_crash(input_path,
                                      output_path,
                                      max_time,
                                      artifact_prefix=cleanse_tmp_dir,
                                      additional_args=arguments)

        if result.timed_out:
            logs.log_error('Cleanse timed out.', fuzzer_output=result.output)
            raise TimeoutError('Cleanse timed out.')

        return engine.ReproduceResult(result.command, result.return_code,
                                      result.time_executed, result.output)
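
As a quick orientation, here is a minimal usage sketch for the cleanse step above. The class name `Engine`, every path and flag value, and the result attribute name are assumptions made for illustration; only the calling convention and the TimeoutError behaviour come from the code and docstring.

engine_impl = Engine()  # assumed class name; the example only shows its methods
try:
    result = engine_impl.cleanse(
        target_path='/build/out/example_fuzzer',   # hypothetical target binary
        arguments=['-rss_limit_mb=2560'],          # hypothetical extra libFuzzer flag
        input_path='/testcases/crash-input',       # hypothetical reproducer input
        output_path='/testcases/cleansed-input',   # hypothetical cleansed output path
        max_time=600)                              # hypothetical time budget in seconds
except TimeoutError:
    print('Cleanse did not finish within max_time.')
else:
    # Attribute name inferred from the ReproduceResult constructor above.
    print('Cleanse finished with return code', result.return_code)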
Example 2
    def reproduce(self, target_path, input_path, arguments, max_time):
        """Reproduce a crash given an input.

    Args:
      target_path: Path to the target.
      input_path: Path to the reproducer input.
      arguments: Additional arguments needed for reproduction.
      max_time: Maximum allowed time for the reproduction.

    Returns:
      A ReproduceResult.

    Raises:
      TimeoutError: If the reproduction exceeds max_time.
    """
        runner = libfuzzer.get_runner(target_path)
        libfuzzer.set_sanitizer_options(target_path)

        # Remove fuzzing specific arguments. This is only really needed for legacy
        # testcases, and can be removed in the distant future.
        arguments = arguments[:]
        libfuzzer.remove_fuzzing_arguments(arguments)

        runs_argument = constants.RUNS_FLAG + str(constants.RUNS_TO_REPRODUCE)
        arguments.append(runs_argument)

        result = runner.run_single_testcase(input_path,
                                            timeout=max_time,
                                            additional_args=arguments)

        if result.timed_out:
            raise TimeoutError('Reproducing timed out\n' + result.output)

        return engine.ReproduceResult(result.command, result.return_code,
                                      result.time_executed, result.output)
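
The argument handling in reproduce() follows a simple pattern: copy the caller's arguments, drop flags that only matter while fuzzing, and pin the number of runs. The standalone sketch below illustrates that pattern with plain list operations; the set of flags it strips and the run count are illustrative assumptions, not the values used by libfuzzer.remove_fuzzing_arguments or constants.RUNS_TO_REPRODUCE.

# Illustrative sketch of the argument scrubbing done before reproduction.
FUZZ_ONLY_FLAG_PREFIXES = ('-fork=', '-max_len=', '-dict=')  # assumed subset

def build_reproduce_arguments(arguments, runs=100):  # run count is an assumption
    """Copy the arguments, drop fuzzing-only flags, and pin the run count."""
    scrubbed = [arg for arg in arguments
                if not arg.startswith(FUZZ_ONLY_FLAG_PREFIXES)]
    scrubbed.append('-runs=' + str(runs))  # -runs= is a standard libFuzzer flag
    return scrubbed

print(build_reproduce_arguments(['-max_len=4096', '-timeout=25', '-dict=af.dict']))
# ['-timeout=25', '-runs=100']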
Example 3
    def minimize_corpus(self, target_path, arguments, input_dirs, output_dir,
                        reproducers_dir, max_time):
        """Optional (but recommended): run corpus minimization.

    Args:
      target_path: Path to the target.
      arguments: Additional arguments needed for corpus minimization.
      input_dirs: Input corpora.
      output_dir: Output directory to place minimized corpus.
      reproducers_dir: The directory to put reproducers in when crashes are
          found.
      max_time: Maximum allowed time for the minimization.

    Returns:
      A Result object.

    Raises:
      TimeoutError: If the corpus minimization exceeds max_time.
      Error: If the merge failed in some other way.
    """
        runner = libfuzzer.get_runner(target_path)
        libfuzzer.set_sanitizer_options(target_path)
        merge_tmp_dir = self._create_temp_corpus_dir('merge-workdir')

        result = runner.merge([output_dir] + input_dirs,
                              merge_timeout=max_time,
                              tmp_dir=merge_tmp_dir,
                              additional_args=arguments,
                              artifact_prefix=reproducers_dir,
                              merge_control_file=getattr(
                                  self, '_merge_control_file', None))

        if result.timed_out:
            logs.log_error('Merging new testcases timed out.',
                           fuzzer_output=result.output)
            raise TimeoutError('Merging new testcases timed out.')

        if result.return_code != 0:
            logs.log_error('Merging new testcases failed.',
                           fuzzer_output=result.output)
            raise MergeError('Merging new testcases failed.')

        merge_output = result.output
        merge_stats = stats.parse_stats_from_merge_log(
            merge_output.splitlines())

        # TODO(ochang): Get crashes found during merge.
        return engine.FuzzResult(merge_output, result.command, [], merge_stats,
                                 result.time_executed)
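
The heavy lifting here is libFuzzer's merge mode: the first corpus directory passed after -merge=1 receives the minimized set, and the remaining directories are merged into it. The snippet below is a rough sketch of the kind of command line the runner assembles, using a hypothetical binary and corpus paths; the timeout handling, artifact prefix, and merge control file added by runner.merge() are left out.

# Rough sketch of a libFuzzer merge invocation; binary and corpus paths are
# hypothetical, and the runner adds more flags than shown here.
import subprocess

output_dir = '/corpus/minimized'                  # receives the minimized corpus
input_dirs = ['/corpus/full', '/corpus/extra']    # corpora to merge from
cmd = ['/build/out/example_fuzzer', '-merge=1', output_dir] + input_dirs
completed = subprocess.run(cmd, capture_output=True, text=True, timeout=1800)
print('merge exited with', completed.returncode)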
Example 4
    def fuzz(self, target_path, options, reproducers_dir, max_time):
        """Run a fuzz session.

    Args:
      target_path: Path to the target.
      options: The FuzzOptions object returned by prepare().
      reproducers_dir: The directory to put reproducers in when crashes
          are found.
      max_time: Maximum allowed time for the fuzzing to run.

    Returns:
      A FuzzResult object.
    """
        profiler.start_if_needed('libfuzzer_fuzz')
        runner = libfuzzer.get_runner(target_path)
        libfuzzer.set_sanitizer_options(target_path, fuzz_options=options)

        # Directory to place new units.
        if options.merge_back_new_testcases:
            new_corpus_dir = self._create_temp_corpus_dir('new')
            corpus_directories = [new_corpus_dir] + options.fuzz_corpus_dirs
        else:
            corpus_directories = options.fuzz_corpus_dirs

        fuzz_result = runner.fuzz(corpus_directories,
                                  fuzz_timeout=max_time,
                                  additional_args=options.arguments,
                                  artifact_prefix=reproducers_dir,
                                  extra_env=options.extra_env)

        project_qualified_fuzzer_name = (
            engine_common.get_project_qualified_fuzzer_name(target_path))
        dict_error_match = DICT_PARSING_FAILED_REGEX.search(fuzz_result.output)
        if dict_error_match:
            logs.log_error(
                'Dictionary parsing failed '
                f'(target={project_qualified_fuzzer_name}, '
                f'line={dict_error_match.group(1)}).',
                engine_output=fuzz_result.output)
        elif (not environment.get_value('USE_MINIJAIL') and
              fuzz_result.return_code == constants.LIBFUZZER_ERROR_EXITCODE):
            # Under Minijail, a return code of 1 only means the child exited with a
            # nonzero code. Outside Minijail, a return code of 1 means that libFuzzer
            # itself ran into an error.
            logs.log_error(ENGINE_ERROR_MESSAGE +
                           f' (target={project_qualified_fuzzer_name}).',
                           engine_output=fuzz_result.output)

        log_lines = fuzz_result.output.splitlines()
        # Output can be large, so save some memory by removing reference to the
        # original output which is no longer needed.
        fuzz_result.output = None

        # Check if we crashed, and get the crash testcase path.
        crash_testcase_file_path = runner.get_testcase_path(log_lines)

        # If we exited with a non-zero return code and libFuzzer's output contains no
        # crash file, this is most likely a startup crash. Use an empty testcase to
        # store it as a crash.
        if (not crash_testcase_file_path and fuzz_result.return_code
                not in constants.NONCRASH_RETURN_CODES):
            crash_testcase_file_path = self._create_empty_testcase_file(
                reproducers_dir)

        # Parse stats information based on libFuzzer output.
        parsed_stats = libfuzzer.parse_log_stats(log_lines)

        # Extend parsed stats by additional performance features.
        parsed_stats.update(
            stats.parse_performance_features(log_lines, options.strategies,
                                             options.arguments))

        # Set some initial stat overrides.
        timeout_limit = fuzzer_utils.extract_argument(options.arguments,
                                                      constants.TIMEOUT_FLAG,
                                                      remove=False)

        actual_duration = int(fuzz_result.time_executed)
        fuzzing_time_percent = 100 * actual_duration / float(max_time)
        parsed_stats.update({
            'timeout_limit': int(timeout_limit),
            'expected_duration': int(max_time),
            'actual_duration': actual_duration,
            'fuzzing_time_percent': fuzzing_time_percent,
        })

        # Remove fuzzing arguments before merge and dictionary analysis step.
        non_fuzz_arguments = options.arguments.copy()
        libfuzzer.remove_fuzzing_arguments(non_fuzz_arguments, is_merge=True)

        if options.merge_back_new_testcases:
            self._merge_new_units(target_path, options.corpus_dir,
                                  new_corpus_dir, options.fuzz_corpus_dirs,
                                  non_fuzz_arguments, parsed_stats)

        fuzz_logs = '\n'.join(log_lines)
        crashes = []
        if crash_testcase_file_path:
            reproduce_arguments = options.arguments[:]
            libfuzzer.remove_fuzzing_arguments(reproduce_arguments)

            # Use higher timeout for reproduction.
            libfuzzer.fix_timeout_argument_for_reproduction(
                reproduce_arguments)

            # Record the crash testcase together with the arguments needed to
            # reproduce it.
            crashes.append(
                engine.Crash(crash_testcase_file_path, fuzz_logs,
                             reproduce_arguments, actual_duration))

        if options.analyze_dictionary:
            libfuzzer.analyze_and_update_recommended_dictionary(
                runner, project_qualified_fuzzer_name, log_lines,
                options.corpus_dir, non_fuzz_arguments)

        return engine.FuzzResult(fuzz_logs, fuzz_result.command, crashes,
                                 parsed_stats, fuzz_result.time_executed)
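
Finally, a rough end-to-end sketch of how a fuzz session might be driven. The class name `Engine`, the prepare() signature, and the attribute names on the returned FuzzResult are assumptions inferred from the code above; the paths and the one-hour budget are hypothetical.

# End-to-end usage sketch; names, paths, and the prepare() signature are assumptions.
engine_impl = Engine()
target = '/build/out/example_fuzzer'

# prepare() is only referenced by the fuzz() docstring; its arguments here are assumed.
options = engine_impl.prepare('/corpus/example_fuzzer', target, '/build/out')

result = engine_impl.fuzz(target, options,
                          reproducers_dir='/reproducers',
                          max_time=3600)

# Attribute names inferred from the FuzzResult constructor call above.
print('crashes found:', len(result.crashes))
print('actual duration:', result.stats.get('actual_duration'))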