def symbolize_stacktrace(request):
    """Symbolize stacktrace."""
    symbolized_stacktrace = stack_symbolizer.symbolize_stacktrace(
        request.unsymbolized_crash_stacktrace, request.enable_inline_frames)

    return untrusted_runner_pb2.SymbolizeStacktraceResponse(
        symbolized_stacktrace=symbolized_stacktrace)
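
# Illustrative only: a minimal sketch of how the handler above might be
# exercised. The SymbolizeStacktraceRequest message name is an assumption
# (only the response type appears above); the field names match the usage in
# symbolize_stacktrace(), and untrusted_runner_pb2 is assumed to already be
# imported by the enclosing module.
def _example_symbolize_call(raw_stacktrace):
    """Hypothetical helper showing a round trip through the handler."""
    request = untrusted_runner_pb2.SymbolizeStacktraceRequest(
        unsymbolized_crash_stacktrace=raw_stacktrace,
        enable_inline_frames=True)
    response = symbolize_stacktrace(request)
    return response.symbolized_stacktrace
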
    def run(self, initial_corpus_path, minimized_corpus_path, bad_units_path):
        """Run corpus pruning. Output result to directory."""
        if not shell.get_directory_file_count(initial_corpus_path):
            # Empty corpus, nothing to do.
            return None

        # Unpack the seed corpus (if any) into the initial corpus directory.
        engine_common.unpack_seed_corpus_if_needed(self.runner.target_path,
                                                   initial_corpus_path,
                                                   force_unpack=True)

        # Set memory tool options and fuzzer arguments.
        environment.reset_current_memory_tool_options(redzone_size=MIN_REDZONE,
                                                      leaks=True)
        self.runner.process_sanitizer_options()
        additional_args = self.runner.get_libfuzzer_flags()

        # Execute fuzzer with arguments for corpus pruning.
        logs.log('Running merge...')
        try:
            result = self.runner.minimize_corpus(additional_args,
                                                 [initial_corpus_path],
                                                 minimized_corpus_path,
                                                 bad_units_path,
                                                 CORPUS_PRUNING_TIMEOUT)
        except TimeoutError as e:
            raise CorpusPruningException(
                'Corpus pruning timed out while minimizing corpus\n' + repr(e))
        except engine.Error as e:
            raise CorpusPruningException(
                'Corpus pruning failed to minimize corpus\n' + repr(e))

        symbolized_output = stack_symbolizer.symbolize_stacktrace(result.logs)

        # Sanity check that there are files in minimized corpus after merging.
        if not shell.get_directory_file_count(minimized_corpus_path):
            raise CorpusPruningException(
                'Corpus pruning failed to minimize corpus\n' +
                symbolized_output)

        logs.log('Corpus merge finished successfully.',
                 output=symbolized_output)

        return result.stats
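
# Illustrative only: preparing the directories that the pruning run() above
# expects. `pruner` stands in for an already-constructed instance of the
# enclosing class; the temporary-directory layout is purely for illustration.
def _example_run_pruning(pruner, initial_corpus_path):
    """Hypothetical driver for a single pruning run."""
    import os
    import tempfile

    work_dir = tempfile.mkdtemp()
    minimized_corpus_path = os.path.join(work_dir, 'minimized')
    bad_units_path = os.path.join(work_dir, 'bad_units')
    os.makedirs(minimized_corpus_path, exist_ok=True)
    os.makedirs(bad_units_path, exist_ok=True)

    # Returns engine stats on success, None for an empty initial corpus, and
    # raises CorpusPruningException on timeout or merge failure.
    return pruner.run(initial_corpus_path, minimized_corpus_path,
                      bad_units_path)
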
    def run(self, timeout):
        """Merge testcases from corpus from other fuzz targets."""
        if not shell.get_directory_file_count(self.context.shared_corpus_path):
            logs.log('No files found in shared corpus, skip merge.')
            return None

        # Run pruning on the shared corpus and log the result in case of error.
        logs.log('Merging shared corpus...')
        environment.reset_current_memory_tool_options(
            redzone_size=DEFAULT_REDZONE)
        self.runner.process_sanitizer_options()

        additional_args = self.runner.get_libfuzzer_flags()

        try:
            result = self.runner.minimize_corpus(
                additional_args, [self.context.shared_corpus_path],
                self.context.minimized_corpus_path,
                self.context.bad_units_path, timeout)
            symbolized_output = stack_symbolizer.symbolize_stacktrace(
                result.logs)
            logs.log('Shared corpus merge finished successfully.',
                     output=symbolized_output)
        except TimeoutError as e:
            # Corpora from other cross-pollinated fuzzers can contain unexpected
            # testcases that cause timeouts. This is expected, so bail out.
            logs.log_warn(
                'Corpus pruning timed out while merging shared corpus\n' +
                repr(e))
            return None
        except engine.Error as e:
            # Corpora from other cross-pollinated fuzzers can be large, so we may
            # run out of disk space and raise an exception. This is expected, so
            # bail out.
            logs.log_warn('Corpus pruning failed to merge shared corpus\n' +
                          repr(e))
            return None

        return result.stats
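
# Illustrative only: a hypothetical caller contrasting the error contracts of
# the two run() methods above. `pruner`, `cross_pollinator` and `record_stats`
# are placeholders, not part of this module.
def _example_prune_then_cross_pollinate(pruner, cross_pollinator, paths,
                                        timeout, record_stats):
    """Hypothetical flow: hard-fail on pruning, soft-fail on cross-pollination."""
    initial_corpus_path, minimized_corpus_path, bad_units_path = paths

    # Main pruning: failures surface as CorpusPruningException and should
    # abort the task.
    prune_stats = pruner.run(initial_corpus_path, minimized_corpus_path,
                             bad_units_path)

    # Cross-pollination merge: failures are soft; run() logs a warning and
    # returns None, and the task proceeds without shared-corpus stats.
    merge_stats = cross_pollinator.run(timeout)

    if prune_stats:
        record_stats('pruning', prune_stats)
    if merge_stats:
        record_stats('cross_pollination', merge_stats)
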
def get_crash_data(crash_data,
                   symbolize_flag=True,
                   fuzz_target=None,
                   already_symbolized=False,
                   detect_ooms_and_hangs=None) -> stacktraces.CrashInfo:
    """Get crash parameters from crash data.
  Crash parameters include crash type, address, state and stacktrace.
  If the stacktrace is not already symbolized, we will try to symbolize it
  unless |symbolize| flag is set to False. Symbolized stacktrace will contain
  inline frames, but we do exclude them for purposes of crash state generation
  (helps in testcase deduplication)."""
    # Decide whether or not to symbolize the input stacktrace.
    # Note that Fuchsia logs are always symbolized.
    if symbolize_flag:
        # Defer imports since stack_symbolizer pulls in a lot of things.
        from clusterfuzz._internal.crash_analysis.stack_parsing import \
            stack_symbolizer
        crash_stacktrace_with_inlines = stack_symbolizer.symbolize_stacktrace(
            crash_data, enable_inline_frames=True)
        crash_stacktrace_without_inlines = stack_symbolizer.symbolize_stacktrace(
            crash_data, enable_inline_frames=False)
    else:
        # Symbolization was explicitly disabled via |symbolize_flag|. There is no
        # distinction between inline and non-inline frames for an unsymbolized
        # stacktrace.
        crash_stacktrace_with_inlines = crash_data
        crash_stacktrace_without_inlines = crash_data

    # Additional stack frame ignore regexes.
    custom_stack_frame_ignore_regexes = (local_config.ProjectConfig().get(
        'stacktrace.stack_frame_ignore_regexes', []))

    if environment.get_value('TASK_NAME') == 'analyze':
        detect_v8_runtime_errors = True
    else:
        detect_v8_runtime_errors = environment.get_value(
            'DETECT_V8_RUNTIME_ERRORS', False)

    fuzz_target = fuzz_target or environment.get_value('FUZZ_TARGET')
    redzone_size = environment.get_value('REDZONE')
    if detect_ooms_and_hangs is None:
        detect_ooms_and_hangs = (
            environment.get_value('REPORT_OOMS_AND_HANGS')
            and (not redzone_size
                 or redzone_size <= MAX_REDZONE_SIZE_FOR_OOMS_AND_HANGS))

    include_ubsan = 'halt_on_error=0' not in environment.get_value(
        'UBSAN_OPTIONS', '')

    stack_parser = stacktraces.StackParser(
        symbolized=symbolize_flag or already_symbolized,
        detect_ooms_and_hangs=detect_ooms_and_hangs,
        detect_v8_runtime_errors=detect_v8_runtime_errors,
        custom_stack_frame_ignore_regexes=custom_stack_frame_ignore_regexes,
        fuzz_target=fuzz_target,
        include_ubsan=include_ubsan)

    result = stack_parser.parse(crash_stacktrace_without_inlines)

    # Use stacktrace with inlines for the result.
    if result.crash_stacktrace:
        result.crash_stacktrace = crash_stacktrace_with_inlines

    # Linkify Android Kernel or lkl stacktrace.
    linkify_kernel_or_lkl_stacktrace_if_needed(result)

    return result
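
# Illustrative only: a minimal sketch of consuming get_crash_data(). The
# CrashInfo attribute names used here (crash_type, crash_address, crash_state,
# crash_stacktrace) are assumptions for illustration.
def _example_parse_crash(raw_output):
    """Hypothetical consumer of an already-symbolized crash stacktrace."""
    crash_info = get_crash_data(
        raw_output, symbolize_flag=False, already_symbolized=True)
    return {
        'type': crash_info.crash_type,
        'address': crash_info.crash_address,
        'state': crash_info.crash_state,
        'stacktrace': crash_info.crash_stacktrace,
    }
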