Example #1
def _get_tls_cert_and_key():
  """Get the TLS cert from instance metadata."""
  # TODO(ochang): Implement a fake metadata server for testing.
  local_cert_location = environment.get_value('UNTRUSTED_TLS_CERT_FOR_TESTING')
  local_key_location = environment.get_value('UNTRUSTED_TLS_KEY_FOR_TESTING')

  if local_cert_location and local_key_location:
    with open(local_cert_location, 'rb') as f:
      cert_contents = f.read()

    with open(local_key_location, 'rb') as f:
      key_contents = f.read()

    return cert_contents, key_contents

  cert_contents = compute_metadata.get('instance/attributes/tls-cert').encode()
  key_contents = compute_metadata.get('instance/attributes/tls-key').encode()
  return cert_contents, key_contents
Example #2
def get_device_path():
  """Gets a device path to be cached and used by reset_usb."""

  def _get_usb_devices():
    """Returns a list of device objects containing a serial and USB path."""
    usb_list_cmd = 'lsusb -v'
    output = execute_command(usb_list_cmd, timeout=RECOVERY_CMD_TIMEOUT)
    if output is None:
      logs.log_error('Failed to populate usb devices using lsusb, '
                     'host restart might be needed.')
      bad_state_reached()

    devices = []
    path = None
    for line in output.splitlines():
      match = LSUSB_BUS_RE.match(line)
      if match:
        path = '/dev/bus/usb/%s/%s' % (match.group(1), match.group(2))
        continue

      match = LSUSB_SERIAL_RE.match(line)
      if path and match and match.group(1):
        serial = match.group(1)
        devices.append(DEVICE(serial, path))

    return devices

  def _get_device_path_for_serial():
    """Return device path. Assumes a simple ANDROID_SERIAL."""
    devices = _get_usb_devices()
    for device in devices:
      if device_serial == device.serial:
        return device.path

    return None

  def _get_device_path_for_usb():
    """Returns a device path.

    Assumes ANDROID_SERIAL in the form "usb:<identifier>"."""
    # Android serial may reference a usb device rather than a serial number.
    device_id = device_serial[len('usb:'):]
    bus_number = int(
        open('/sys/bus/usb/devices/%s/busnum' % device_id).read().strip())
    device_number = int(
        open('/sys/bus/usb/devices/%s/devnum' % device_id).read().strip())
    return '/dev/bus/usb/%03d/%03d' % (bus_number, device_number)

  if environment.is_android_cuttlefish():
    return None

  device_serial = environment.get_value('ANDROID_SERIAL')
  if device_serial.startswith('usb:'):
    return _get_device_path_for_usb()

  return _get_device_path_for_serial()
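
A minimal, self-contained sketch of the lsusb parsing loop above. The LSUSB_BUS_RE and LSUSB_SERIAL_RE patterns and the sample output are assumptions for illustration, not the real ClusterFuzz definitions.

import collections
import re

LSUSB_BUS_RE = re.compile(r'Bus\s+(\d+)\s+Device\s+(\d+):')
LSUSB_SERIAL_RE = re.compile(r'\s+iSerial\s+\d+\s+(\S+)')
DEVICE = collections.namedtuple('DEVICE', ['serial', 'path'])

# Hypothetical excerpt of `lsusb -v` output.
output = ('Bus 001 Device 004: ID 18d1:4ee7 Google Inc.\n'
          '  iSerial                 3 0123456789ABCDEF\n')

devices, path = [], None
for line in output.splitlines():
  match = LSUSB_BUS_RE.match(line)
  if match:
    # Remember the bus/device path until we see the matching serial line.
    path = '/dev/bus/usb/%s/%s' % (match.group(1), match.group(2))
    continue
  match = LSUSB_SERIAL_RE.match(line)
  if path and match and match.group(1):
    devices.append(DEVICE(match.group(1), path))

print(devices)  # [DEVICE(serial='0123456789ABCDEF', path='/dev/bus/usb/001/004')]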
Example #3
def get_strategy_probability(strategy_name, default):
    """Returns a strategy weight based on env variable |FUZZING_STRATEGIES|"""
    fuzzing_strategies = environment.get_value('FUZZING_STRATEGIES')
    if fuzzing_strategies is None or not isinstance(fuzzing_strategies, dict):
        return default

    if strategy_name not in fuzzing_strategies:
        return 0.0

    return fuzzing_strategies[strategy_name]
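
The lookup behaviour above can be exercised standalone; the sketch below mirrors the same logic against an in-memory dict instead of the environment value, with illustrative strategy names.

def lookup_strategy_weight(fuzzing_strategies, strategy_name, default):
  """Mirror get_strategy_probability() against a plain dict."""
  if fuzzing_strategies is None or not isinstance(fuzzing_strategies, dict):
    return default
  if strategy_name not in fuzzing_strategies:
    return 0.0
  return fuzzing_strategies[strategy_name]

print(lookup_strategy_weight(None, 'corpus_mutations', 0.5))                  # 0.5
print(lookup_strategy_weight({'value_profile': 0.3}, 'value_profile', 0.5))   # 0.3
print(lookup_strategy_weight({'value_profile': 0.3}, 'fork', 0.5))            # 0.0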
Example #4
def get_command_line_flags(testcase_path):
    """Returns command line flags to use for a testcase."""
    arguments = environment.get_value('APP_ARGS')
    additional_arguments = get_additional_command_line_flags(testcase_path)
    if arguments:
        arguments += ' ' + additional_arguments
    else:
        arguments = additional_arguments

    return arguments.strip()
Example #5
    def setUp(self):
        """Set up."""
        super().setUp()
        environment.set_value('JOB_NAME', 'libfuzzer_asan_job')

        patcher = mock.patch(
            'clusterfuzz._internal.bot.fuzzers.libFuzzer.fuzzer.LibFuzzer.fuzzer_directory',
            new_callable=mock.PropertyMock)

        mock_fuzzer_directory = patcher.start()
        self.addCleanup(patcher.stop)

        mock_fuzzer_directory.return_value = os.path.join(
            environment.get_value('ROOT_DIR'), 'src', 'clusterfuzz',
            '_internal', 'bot', 'fuzzers', 'libFuzzer')

        job = data_types.Job(
            name='libfuzzer_asan_job',
            environment_string=(
                'RELEASE_BUILD_BUCKET_PATH = '
                'gs://clusterfuzz-test-data/test_libfuzzer_builds/'
                'test-libfuzzer-build-([0-9]+).zip\n'
                'REVISION_VARS_URL = https://commondatastorage.googleapis.com/'
                'clusterfuzz-test-data/test_libfuzzer_builds/'
                'test-libfuzzer-build-%s.srcmap.json\n'))
        job.put()

        data_types.FuzzTarget(engine='libFuzzer',
                              binary='test_fuzzer',
                              project='test-project').put()
        data_types.FuzzTargetJob(fuzz_target_name='libFuzzer_test_fuzzer',
                                 engine='libFuzzer',
                                 job='libfuzzer_asan_job').put()

        environment.set_value('USE_MINIJAIL', True)
        data_types.Fuzzer(revision=1,
                          file_size='builtin',
                          source='builtin',
                          name='libFuzzer',
                          max_testcases=4,
                          builtin=True).put()
        self.temp_dir = tempfile.mkdtemp(
            dir=environment.get_value('FUZZ_INPUTS'))
Example #6
def is_crash(return_code, console_output):
    """Analyze the return code and console output to see if this was a crash."""
    if not return_code:
        return False

    crash_signature = environment.get_value('CRASH_SIGNATURE')
    if crash_signature:
        return re.search(crash_signature, console_output)

    return True
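
A quick, standalone illustration of the CRASH_SIGNATURE check: re.search() returns a match object (truthy) only when the signature regex appears in the console output. The signature and output strings here are hypothetical.

import re

crash_signature = r'Assertion failed|CHECK failed'
print(bool(re.search(crash_signature, 'FATAL: CHECK failed: ptr != NULL')))  # True
print(bool(re.search(crash_signature, 'process exited normally')))           # False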
Example #7
def is_directory_on_nfs(data_bundle_directory):
    """Return whether this directory is on NFS."""
    nfs_root = environment.get_value('NFS_ROOT')
    if not nfs_root:
        return False

    data_bundle_directory_real_path = os.path.realpath(data_bundle_directory)
    nfs_root_real_path = os.path.realpath(nfs_root)
    return data_bundle_directory_real_path.startswith(nfs_root_real_path +
                                                      os.sep)
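
A small sketch of the prefix test used above (paths are illustrative, POSIX assumed): appending os.sep before startswith() keeps a sibling directory such as '/mnt/nfs-other' from matching an NFS root of '/mnt/nfs'.

import os

nfs_root_real_path = '/mnt/nfs'
print('/mnt/nfs-other/bundle'.startswith(nfs_root_real_path + os.sep))  # False
print('/mnt/nfs/bundle'.startswith(nfs_root_real_path + os.sep))        # True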
Example #8
 def test_sanitizer_options_not_changed_unrelated_section(
         self, options_name, section_name):
     """Test that sanitizer options are not changed when provided an unrelated
 sanitizer section name."""
     environment.set_value(options_name, 'a=1:b=2:c=1')
     self.fs.create_file(self.fuzz_target_options_file,
                         contents='[{section_name}]\nc=3:d=4'.format(
                             section_name=section_name))
     engine_common.process_sanitizer_options_overrides(self.fuzz_target)
     self.assertEqual('a=1:b=2:c=1', environment.get_value(options_name))
Example #9
def get_temp_dir():
    """Define a tempdir for undercoat to store its data in.

  This tempdir needs to be of a scope that persists across invocations of the
  bot, to ensure proper cleanup of stale handles/data."""
    temp_dir = os.path.join(environment.get_value('ROOT_DIR'), 'bot',
                            'undercoat')
    os.makedirs(temp_dir, exist_ok=True)

    return temp_dir
Example #10
    def __init__(self, fuzzer, job, build_revision, timestamp):
        super(TestcaseRun, self).__init__(fuzzer, job, build_revision,
                                          timestamp)
        self._stats_data.update({
            'kind': 'TestcaseRun',
        })

        source = environment.get_value('STATS_SOURCE')
        if source:
            self._stats_data['source'] = source
Example #11
 def test_sanitizer_options_changed(self, options_name, section_name):
     """Test that sanitizer options set in .options file are added to the
 environment variable."""
     environment.set_value(options_name, 'a=1:b=2:c=1')
     self.fs.create_file(self.fuzz_target_options_file,
                         contents='[{section_name}]\nc=3:d=4'.format(
                             section_name=section_name))
     engine_common.process_sanitizer_options_overrides(self.fuzz_target)
     self.assertEqual('a=1:b=2:c=3:d=4',
                      environment.get_value(options_name))
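
The merge this test asserts can be reproduced with a few lines: ':'-separated key=value pairs from the .options file override pairs already present in the sanitizer options variable. This is an assumed re-implementation for illustration, not the engine_common code itself.

def merge_sanitizer_options(current, overrides):
  """Merge override key=value pairs into an existing options string."""
  options = dict(pair.split('=', 1) for pair in current.split(':'))
  options.update(pair.split('=', 1) for pair in overrides.split(':'))
  return ':'.join('%s=%s' % (key, value) for key, value in options.items())

print(merge_sanitizer_options('a=1:b=2:c=1', 'c=3:d=4'))  # a=1:b=2:c=3:d=4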
Example #12
def needs_update(revision_file, revision):
    """Check a revision file against the provided revision
  to see if an update is required."""
    failure_wait_interval = environment.get_value('FAIL_WAIT')
    file_exists = False
    retry_limit = environment.get_value('FAIL_RETRIES')

    for _ in range(retry_limit):
        # NFS can sometimes return a wrong result on file existence, so redo
        # this check a couple of times to be sure.
        if not os.path.exists(revision_file):
            file_exists = False
            time.sleep(15)
            continue

        # Found the file, now try to read its contents.
        file_exists = True

        try:
            with open(revision_file, 'r') as file_handle:
                current_revision = file_handle.read()
        except:
            logs.log_error('Error occurred while reading revision file %s.' %
                           revision_file)
            time.sleep(utils.random_number(1, failure_wait_interval))
            continue

        if current_revision.isdigit():
            return int(revision) > int(current_revision)

        return str(revision) != str(current_revision)

    # If there is no revision file or if we have lost track of its revision,
    # then we do need to update the data bundle.
    if not file_exists:
        return True

    # An error has occurred and we have failed to read revision file
    # despite several retries. So, don't bother updating the data
    # bundle as it will probably fail as well.
    logs.log_error('Failed to read revision file, exiting.')
    return False
Example #13
def get(path):
  """Get GCE metadata value."""
  attribute_url = (
      'http://{}/computeMetadata/v1/'.format(_METADATA_SERVER) + path)
  headers = {'Metadata-Flavor': 'Google'}
  operations_timeout = environment.get_value('URL_BLOCKING_OPERATIONS_TIMEOUT')

  response = requests.get(
      attribute_url, headers=headers, timeout=operations_timeout)
  response.raise_for_status()
  return response.text
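
Hedged, standalone usage sketch: on a GCE VM the metadata server is reachable as metadata.google.internal and requires the 'Metadata-Flavor: Google' header, which is exactly what get() above sends. 'instance/zone' is a standard metadata path used here for illustration.

import requests

response = requests.get(
    'http://metadata.google.internal/computeMetadata/v1/instance/zone',
    headers={'Metadata-Flavor': 'Google'},
    timeout=5)
response.raise_for_status()
print(response.text)  # e.g. projects/<project-number>/zones/us-central1-a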
Example #14
 def wrapper(task):
   """Wrapper."""
   environment.set_value('TASK_PAYLOAD', task.payload())
   try:
     return func(task)
   except:  # Truly catch *all* exceptions.
     e = sys.exc_info()[1]
     e.extras = {'task_payload': environment.get_value('TASK_PAYLOAD')}
     raise
   finally:
     environment.remove_key('TASK_PAYLOAD')
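
A self-contained sketch of the decorator pattern this wrapper is typically produced by, with a plain dict standing in for the environment module; the decorator name, fake task class, and payload string are illustrative assumptions.

import functools

_env = {}

def set_task_payload(func):
  """Record the task payload in _env for the duration of the call."""
  @functools.wraps(func)
  def wrapper(task):
    _env['TASK_PAYLOAD'] = task.payload()
    try:
      return func(task)
    except BaseException as e:  # Mirror the catch-all except in the original.
      e.extras = {'task_payload': _env.get('TASK_PAYLOAD')}
      raise
    finally:
      _env.pop('TASK_PAYLOAD', None)
  return wrapper

@set_task_payload
def run(task):
  return 'ran: %s' % task.payload()

class _FakeTask:
  def payload(self):
    return 'fuzz libFuzzer libfuzzer_asan_job'

print(run(_FakeTask()))  # ran: fuzz libFuzzer libfuzzer_asan_job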
Example #15
def get_testcase_run(stats, fuzzer_command):
    """Get testcase run for stats."""
    build_revision = fuzzer_utils.get_build_revision()
    job = environment.get_value('JOB_NAME')
    # fuzzer name is filled by fuzz_task.
    testcase_run = fuzzer_stats.TestcaseRun(None, job, build_revision,
                                            current_timestamp())

    testcase_run['command'] = fuzzer_command
    testcase_run.update(stats)
    return testcase_run
Example #16
def get_command_override():
    """Get command override task."""
    command_override = environment.get_value('COMMAND_OVERRIDE', '').strip()
    if not command_override:
        return None

    parts = command_override.split()
    if len(parts) != 3:
        raise ValueError('Command override should have 3 components.')

    return Task(*parts, is_command_override=True)
Example #17
def _get_nfs_data_bundle_path(data_bundle_name):
    """Get  path for a data bundle on NFS."""
    nfs_root = environment.get_value('NFS_ROOT')

    # Special naming and path for search index based bundles.
    if _is_search_index_data_bundle(data_bundle_name):
        return os.path.join(
            nfs_root, testcase_manager.SEARCH_INDEX_TESTCASES_DIRNAME,
            data_bundle_name[len(testcase_manager.SEARCH_INDEX_BUNDLE_PREFIX
                                 ):])

    return os.path.join(nfs_root, data_bundle_name)
Example #18
def get_build_parameters():
  """Return build_id, target and type from the device's fingerprint"""
  build_fingerprint = environment.get_value('BUILD_FINGERPRINT',
                                            get_build_fingerprint())
  build_fingerprint_match = BUILD_FINGERPRINT_REGEX.match(build_fingerprint)
  if not build_fingerprint_match:
    return None

  build_id = build_fingerprint_match.group('build_id')
  target = build_fingerprint_match.group('target')
  build_type = build_fingerprint_match.group('type')
  return {'build_id': build_id, 'target': target, 'type': build_type}
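
A standalone sketch of the fingerprint parse above. The regex here is an assumption modelled on the documented Android fingerprint layout (brand/product/device:release/build_id/incremental:type/tags); the real BUILD_FINGERPRINT_REGEX may differ.

import re

FINGERPRINT_RE = re.compile(
    r'(?P<brand>[^/]+)/(?P<target>[^/]+)/[^:]+:[^/]+/'
    r'(?P<build_id>[^/]+)/[^:]+:(?P<type>[^/]+)/.+')

match = FINGERPRINT_RE.match(
    'google/walleye/walleye:8.1.0/OPM1.171019.011/4448085:userdebug/dev-keys')
print(match.group('build_id'), match.group('target'), match.group('type'))
# OPM1.171019.011 walleye userdebug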
Example #19
def recreate_cuttlefish_device():
  """Recreate cuttlefish device, restoring from backup images."""
  logs.log('Reimaging cuttlefish device.')
  cvd_dir = environment.get_value('CVD_DIR')

  stop_cuttlefish_device()

  # Delete all existing images.
  rm_cmd = f'rm -rf {cvd_dir}/*'
  execute_command(rm_cmd, timeout=RECOVERY_CMD_TIMEOUT, on_cuttlefish_host=True)

  # Copy and combine the CVD host package and OTA images.
  image_directory = environment.get_value('IMAGES_DIR')
  for image_filename in os.listdir(image_directory):
    if image_filename.endswith('.zip') or image_filename.endswith('.tar.gz'):
      continue
    image_src = os.path.join(image_directory, image_filename)
    image_dest = os.path.join(cvd_dir, image_filename)
    copy_to_cuttlefish(image_src, image_dest)

  start_cuttlefish_device()
Example #20
def execute_task(full_fuzzer_name, job_type):
    """Execute corpus pruning task."""
    fuzz_target = data_handler.get_fuzz_target(full_fuzzer_name)
    task_name = 'corpus_pruning_%s_%s' % (full_fuzzer_name, job_type)
    revision = 0  # Trunk revision

    # Get status of last execution.
    last_execution_metadata = data_handler.get_task_status(task_name)
    last_execution_failed = (last_execution_metadata
                             and last_execution_metadata.status
                             == data_types.TaskState.ERROR)

    # Make sure we're the only instance running for the given fuzzer and
    # job_type.
    if not data_handler.update_task_status(task_name,
                                           data_types.TaskState.STARTED):
        logs.log('A previous corpus pruning task is still running, exiting.')
        return

    # Setup fuzzer and data bundle.
    if not setup.update_fuzzer_and_data_bundles(fuzz_target.engine):
        raise CorpusPruningException('Failed to set up fuzzer %s.' %
                                     fuzz_target.engine)

    cross_pollination_method, tag = choose_cross_pollination_strategy(
        full_fuzzer_name)

    # TODO(unassigned): Use coverage information for better selection here.
    cross_pollinate_fuzzers = _get_cross_pollinate_fuzzers(
        fuzz_target.engine, full_fuzzer_name, cross_pollination_method, tag)

    context = Context(fuzz_target, cross_pollinate_fuzzers,
                      cross_pollination_method, tag)

    # Copy global blacklist into local suppressions file if LSan is enabled.
    is_lsan_enabled = environment.get_value('LSAN')
    if is_lsan_enabled:
        # TODO(ochang): Copy this to untrusted worker.
        leak_blacklist.copy_global_to_local_blacklist()

    try:
        result = do_corpus_pruning(context, last_execution_failed, revision)
        _record_cross_pollination_stats(result.cross_pollination_stats)
        _save_coverage_information(context, result)
        _process_corpus_crashes(context, result)
    except Exception:
        logs.log_error('Corpus pruning failed.')
        data_handler.update_task_status(task_name, data_types.TaskState.ERROR)
        return
    finally:
        context.cleanup()

    data_handler.update_task_status(task_name, data_types.TaskState.FINISHED)
Example #21
 def get_server_error(cls):
     """Get server error."""
     upload_url = crash_uploader.CRASH_REPORT_UPLOAD_URL[
         environment.get_value('UPLOAD_MODE')]
     ping_url = urllib.parse.urlsplit(upload_url).netloc
     try:
         # Use a port that has been used for crash/ uploads before.
         sock = socket.create_connection((ping_url, 443), timeout=1)
         sock.close()
         return None
     except (socket.error, socket.timeout) as e:
         return 'Failed to connect to crash/: %s' % e
Example #22
def acquire_lock(key_name,
                 max_hold_seconds=DEFAULT_MAX_HOLD_SECONDS,
                 retries=None,
                 by_zone=True):
  """Acquire a lock for the given key name. Returns the expiration time if
  succeeded, otherwise None. The lock holder is responsible for making sure it
  doesn't assume the lock is still held after the expiration time."""
  logs.log('Acquiring lock for %s.' % key_name)
  failed_acquires = 0
  total_wait = 0
  wait_exponent = 1

  if by_zone:
    key_name_with_zone = _get_key_name_with_lock_zone(key_name)
    if key_name_with_zone is None:
      logs.log_error('Failed to get zone while trying to lock %s.' % key_name)
      return None

    key_name = key_name_with_zone

  bot_name = environment.get_value('BOT_NAME')
  expiration_delta = datetime.timedelta(seconds=max_hold_seconds)
  while total_wait < LOCK_CHECK_TIMEOUT:
    try:
      lock_entity = ndb.transaction(
          lambda: _try_acquire_lock(key_name,
                                    expiration_time=datetime.datetime.utcnow() +
                                    expiration_delta, holder=bot_name),
          retries=TRANSACTION_RETRIES)

      if lock_entity.holder == bot_name:
        logs.log('Got the lock.')
        return lock_entity.expiration_time
    except exceptions.Error:
      pass

    failed_acquires += 1
    if retries and failed_acquires > retries:
      logs.log('Failed to acquire lock, exceeded max retries.')
      return None

    logs.log('Failed to acquire lock, waiting...')

    # Exponential backoff.
    max_sleep = (1 << wait_exponent) * LOCK_CHECK_SLEEP_MULTIPLIER
    sleep_time = random.uniform(1.0, max_sleep)
    time.sleep(sleep_time)

    total_wait += sleep_time
    wait_exponent = min(wait_exponent + 1, MAX_WAIT_EXPONENT)

  logs.log('Timeout exceeded while trying to acquire lock, bailing.')
  return None
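
The wait loop above uses capped exponential backoff with jitter; the standalone sketch below reproduces that calculation with illustrative constants rather than the real module values.

import random

LOCK_CHECK_SLEEP_MULTIPLIER = 10
MAX_WAIT_EXPONENT = 8

def next_sleep_time(wait_exponent):
  """Return (sleep_seconds, next_exponent) for one backoff step."""
  max_sleep = (1 << wait_exponent) * LOCK_CHECK_SLEEP_MULTIPLIER
  next_exponent = min(wait_exponent + 1, MAX_WAIT_EXPONENT)
  return random.uniform(1.0, max_sleep), next_exponent

wait_exponent = 1
for _ in range(4):
  sleep_time, wait_exponent = next_sleep_time(wait_exponent)
  print('sleep for %.1fs (next cap %ds)' %
        (sleep_time, (1 << wait_exponent) * LOCK_CHECK_SLEEP_MULTIPLIER))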
Example #23
    def get_fuzz_target(self):
        """Get the associated FuzzTarget entity for this test case."""
        name = self.actual_fuzzer_name()
        if not name:
            return None

        target = ndb.Key(FuzzTarget, name).get()
        if environment.get_value('ORIGINAL_JOB_NAME'):
            # Overridden engine (e.g. for minimization).
            target.engine = environment.get_engine_for_job()

        return target
Example #24
def get_config():
    """Get arguments for a given fuzz target."""
    device_serial = environment.get_value('ANDROID_SERIAL')
    build_dir = environment.get_value('BUILD_DIR')
    temp_dir = fuzzer_utils.get_temp_dir()

    binary_path = os.path.join(build_dir, 'syzkaller')
    json_config_path = os.path.join(temp_dir, 'config.json')
    default_vmlinux_path = os.path.join('/tmp', device_serial, 'vmlinux')
    vmlinux_path = environment.get_value('VMLINUX_PATH', default_vmlinux_path)

    syzhub_address = environment.get_value('SYZHUB_ADDRESS')
    syzhub_client = environment.get_value('SYZHUB_CLIENT')
    syzhub_key = environment.get_value('SYZHUB_KEY')
    on_cuttlefish = environment.is_android_cuttlefish()

    config.generate(serial=device_serial,
                    work_dir_path=get_work_dir(),
                    binary_path=binary_path,
                    vmlinux_path=vmlinux_path,
                    config_path=json_config_path,
                    kcov=True,
                    reproduce=False,
                    syzhub_address=syzhub_address,
                    syzhub_client=syzhub_client,
                    syzhub_key=syzhub_key,
                    on_cuttlefish=on_cuttlefish)
    return ['-config', json_config_path]
Example #25
def download_system_symbols_if_needed(symbols_directory):
  """Download system libraries from |SYMBOLS_URL| and cache locally."""
  if not should_download_symbols():
    return

  # Get the build fingerprint parameters.
  build_params = settings.get_build_parameters()
  if not build_params:
    logs.log_error('Unable to determine build parameters.')
    return

  build_params_check_path = os.path.join(symbols_directory,
                                         '.cached_build_params')
  if check_symbols_cached(build_params_check_path, build_params):
    return

  build_id = build_params.get('build_id')
  target = build_params.get('target')
  build_type = build_params.get('type')
  if not build_id or not target or not build_type:
    logs.log_error('Null build parameters found, exiting.')
    return

  symbols_archive_filename = f'{target}-symbols-{build_id}.zip'
  artifact_file_name = symbols_archive_filename
  output_filename_override = None

  # Include type and sanitizer information in the target.
  tool_suffix = environment.get_value('SANITIZER_TOOL_NAME')
  target_with_type_and_san = f'{target}-{build_type}'
  if tool_suffix and tool_suffix not in target_with_type_and_san:
    target_with_type_and_san += f'_{tool_suffix}'

  targets_with_type_and_san = [target_with_type_and_san]

  symbols_archive_path = os.path.join(symbols_directory,
                                      symbols_archive_filename)
  download_artifact_if_needed(build_id, symbols_directory, symbols_archive_path,
                              targets_with_type_and_san, artifact_file_name,
                              output_filename_override)
  if not os.path.exists(symbols_archive_path):
    logs.log_error(
        'Unable to locate symbols archive %s.' % symbols_archive_path)
    return

  # Store the artifact for later use or for use by other bots.
  storage.store_file_in_cache(symbols_archive_path)

  archive.unpack(symbols_archive_path, symbols_directory, trusted=True)
  shell.remove_file(symbols_archive_path)

  utils.write_data_to_file(build_params, build_params_check_path)
Example #26
def clear_temp_directory(clear_user_profile_directories=True):
    """Clear the temporary directories."""
    temp_directory = environment.get_value('BOT_TMPDIR')
    remove_directory(temp_directory, recreate=True)

    test_temp_directory = environment.get_value('TEST_TMPDIR')
    if test_temp_directory != temp_directory:
        remove_directory(test_temp_directory, recreate=True)

    if environment.is_trusted_host():
        from clusterfuzz._internal.bot.untrusted_runner import file_host
        file_host.clear_temp_directory()

    if not clear_user_profile_directories:
        return

    user_profile_root_directory = environment.get_value(
        'USER_PROFILE_ROOT_DIR')
    if not user_profile_root_directory:
        return

    remove_directory(user_profile_root_directory, recreate=True)
Example #27
    def setup_additional_args_for_app(self):
        """Select additional args for the specified app at random."""
        trial_args = [
            trial.app_args for trial in self.trials
            if random.random() < trial.probability
        ]
        if not trial_args:
            return

        trial_app_args = ' '.join(trial_args)
        app_args = environment.get_value('APP_ARGS', '')
        environment.set_value('APP_ARGS', '%s %s' % (app_args, trial_app_args))
        environment.set_value('TRIAL_APP_ARGS', trial_app_args)
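
The probabilistic trial selection above can be demonstrated on its own; the sketch below uses a namedtuple in place of the real trial objects, and the flags and probabilities are illustrative.

import collections
import random

Trial = collections.namedtuple('Trial', ['app_args', 'probability'])
trials = [Trial('--enable-foo', 0.5), Trial('--disable-bar', 0.1)]

# Each trial is included independently with its own probability.
trial_args = [trial.app_args for trial in trials
              if random.random() < trial.probability]
print(' '.join(trial_args) or '(no trials selected this run)')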
Example #28
    def run(self, round_number: int) -> CrashResult:
        """Run the testcase once."""
        app_directory = environment.get_value('APP_DIR')
        warmup_timeout = environment.get_value('WARMUP_TIMEOUT')
        run_timeout = warmup_timeout if round_number == 1 else self._test_timeout

        if self._is_black_box:
            return_code, crash_time, output = process_handler.run_process(
                self._command,
                timeout=run_timeout,
                gestures=self._gestures,
                current_working_directory=app_directory)
        else:
            try:
                result = engine_reproduce(self._engine_impl,
                                          self._fuzz_target.binary,
                                          self._testcase_path, self._arguments,
                                          run_timeout)
            except TimeoutError:
                # Treat reproduction timeouts as not crashing.
                return CrashResult(0, run_timeout, '')

            return_code = result.return_code
            crash_time = result.time_executed

            log_header = engine_common.get_log_header(result.command,
                                                      result.time_executed)
            output = log_header + '\n' + result.output

        process_handler.terminate_stale_application_instances()

        crash_result = CrashResult(return_code, crash_time, output)
        if not crash_result.is_crash():
            logs.log(
                f'No crash occurred (round {round_number}).',
                output=output,
            )

        return crash_result
Example #29
def get_recommended_dictionary_gcs_path(fuzzer_name):
  """Generate a GCS url to a recommended dictionary.

  Returns:
    String representing GCS path for a dictionary.
  """
  bucket_name = environment.get_value('FUZZ_LOGS_BUCKET')
  bucket_subdirectory_name = 'dictionaries'
  recommended_dictionary_gcs_path = '/%s/%s/%s/%s' % (
      bucket_name, bucket_subdirectory_name, fuzzer_name,
      RECOMMENDED_DICTIONARY_FILENAME)

  return recommended_dictionary_gcs_path
Example #30
def get_crash_info_and_stacktrace(application_command_line, crash_stacktrace,
                                  gestures):
    """Return crash minidump location and updated crash stacktrace."""
    app_name_lower = environment.get_value('APP_NAME').lower()
    retry_limit = environment.get_value('FAIL_RETRIES')
    using_android = environment.is_android()
    using_chrome = 'chrome' in app_name_lower or 'chromium' in app_name_lower
    warmup_timeout = environment.get_value('WARMUP_TIMEOUT', 90)

    # Minidump generation is only applicable to the Chrome application.
    # FIXME: Support minidump generation on platforms other than Android.
    if not using_chrome or not using_android:
        return None, crash_stacktrace

    # Get the crash info from stacktrace.
    crash_info = get_crash_info(crash_stacktrace)

    # If we lost the minidump file, we need to recreate it.
    # Note that because of the way crash_info is generated now, if we have a
    # non-None crash_info, we should also have its minidump path; we insert
    # the check to safeguard against possibly constructing the crash_info in
    # other ways in the future that might potentially lose the minidump path.
    if not crash_info or not crash_info.minidump_info.path:
        for _ in range(retry_limit):
            _, _, output = (process_handler.run_process(
                application_command_line,
                timeout=warmup_timeout,
                gestures=gestures))

            crash_info = get_crash_info(output)
            if crash_info and crash_info.minidump_info.path:
                crash_stacktrace = utils.decode_to_unicode(output)
                break

        if not crash_info or not crash_info.minidump_info.path:
            # We could not regenerate a minidump for this crash.
            logs.log('Unable to regenerate a minidump for this crash.')

    return crash_info, crash_stacktrace