Exemplo n.º 1
0
def get_stacktrace(testcase, stack_attribute='crash_stacktrace'):
  """Return the stacktrace for a test case.

  If the stored value is a blobstore reference (prefixed with
  BLOBSTORE_STACK_PREFIX), the actual stacktrace is fetched from blob
  storage; this may require a blobstore read.

  Args:
    testcase: Testcase entity holding the stacktrace attribute.
    stack_attribute: Name of the attribute to read (defaults to
        'crash_stacktrace').

  Returns:
    The stacktrace string, or '' if the blob could not be read from disk.
  """
  result = getattr(testcase, stack_attribute)
  if not result or not result.startswith(data_types.BLOBSTORE_STACK_PREFIX):
    return result

  # Strip the prefix once; both branches below need the raw blob key.
  key = result[len(data_types.BLOBSTORE_STACK_PREFIX):]

  # For App Engine, we can't write to local file, so use blobs.read_key
  # instead.
  if environment.is_running_on_app_engine():
    return unicode(blobs.read_key(key), errors='replace')

  tmpdir = environment.get_value('BOT_TMPDIR')
  tmp_stacktrace_file = os.path.join(tmpdir, 'stacktrace.tmp')
  blobs.read_blob_to_disk(key, tmp_stacktrace_file)

  try:
    # Context manager guarantees the handle is closed even if read() fails;
    # the previous open/read/close sequence leaked the handle on error.
    with open(tmp_stacktrace_file) as handle:
      result = handle.read()
  except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # still propagate.
    logs.log_error(
        'Unable to read stacktrace for testcase %d.' % testcase.key.id())
    result = ''

  shell.remove_file(tmp_stacktrace_file)
  return result
Exemplo n.º 2
0
    def test_minimize(self):
        """Test minimize for an OSS-Fuzz testcase.

        Uploads a 3-byte reproducer, runs the minimize task, and verifies a
        distinct 1-byte minimized blob plus sanitized env metadata result.
        """
        helpers.patch(self, ["base.utils.is_oss_fuzz"])
        self.mock.is_oss_fuzz.return_value = True

        testcase_file_path = os.path.join(self.temp_dir, "testcase")
        # The file is opened in binary mode, so the payload must be bytes;
        # writing the str "EEE" raises TypeError on Python 3 (matches the
        # b'EEE' usage in the sibling test).
        with open(testcase_file_path, "wb") as f:
            f.write(b"EEE")

        with open(testcase_file_path) as f:
            fuzzed_keys = blobs.write_blob(f)

        testcase_path = os.path.join(self.temp_dir, "testcase")

        testcase = data_types.Testcase(
            crash_type="Null-dereference WRITE",
            crash_address="",
            crash_state="Foo\n",
            crash_stacktrace="",
            crash_revision=1337,
            fuzzed_keys=fuzzed_keys,
            fuzzer_name="libFuzzer",
            overridden_fuzzer_name="libFuzzer_test_fuzzer",
            job_type="libfuzzer_asan_job",
            original_absolute_path=testcase_path,
            absolute_path=testcase_path,
            minimized_arguments="%TESTCASE% test_fuzzer",
        )
        testcase.put()

        data_types.FuzzTarget(engine="libFuzzer", binary="test_fuzzer").put()

        fuzzers_init.run()

        self._setup_env(job_type="libfuzzer_asan_job")
        environment.set_value("APP_ARGS", testcase.minimized_arguments)
        environment.set_value("LIBFUZZER_MINIMIZATION_ROUNDS", 3)
        environment.set_value("UBSAN_OPTIONS",
                              "unneeded_option=1:silence_unsigned_overflow=1")
        minimize_task.execute_task(testcase.key.id(), "libfuzzer_asan_job")

        # A fresh minimized blob must exist and differ from the fuzzed one.
        testcase = data_handler.get_testcase_by_id(testcase.key.id())
        self.assertNotEqual("", testcase.minimized_keys)
        self.assertNotEqual("NA", testcase.minimized_keys)
        self.assertNotEqual(testcase.fuzzed_keys, testcase.minimized_keys)
        # Unneeded UBSAN option should have been stripped from the env.
        self.assertEqual(
            {
                "ASAN_OPTIONS": {},
                "UBSAN_OPTIONS": {
                    "silence_unsigned_overflow": 1
                }
            },
            testcase.get_metadata("env"),
        )

        blobs.read_blob_to_disk(testcase.minimized_keys, testcase_path)

        # Minimization should reduce the 3-byte input to a single byte.
        with open(testcase_path) as f:
            self.assertEqual(1, len(f.read()))
Exemplo n.º 3
0
    def test_minimize(self):
        """Test minimize."""
        helpers.patch(self, ['base.utils.is_oss_fuzz'])
        self.mock.is_oss_fuzz.return_value = True

        # Create a three-byte reproducer on disk and store it as the
        # fuzzed blob.
        testcase_file_path = os.path.join(self.temp_dir, 'testcase')
        with open(testcase_file_path, 'wb') as handle:
            handle.write(b'EEE')

        with open(testcase_file_path) as handle:
            fuzzed_keys = blobs.write_blob(handle)

        testcase_path = os.path.join(self.temp_dir, 'testcase')

        testcase = data_types.Testcase(
            crash_type='Null-dereference WRITE',
            crash_address='',
            crash_state='Foo\n',
            crash_stacktrace='',
            crash_revision=1337,
            fuzzed_keys=fuzzed_keys,
            fuzzer_name='libFuzzer',
            overridden_fuzzer_name='libFuzzer_test_fuzzer',
            job_type='libfuzzer_asan_job',
            original_absolute_path=testcase_path,
            absolute_path=testcase_path,
            minimized_arguments='%TESTCASE% test_fuzzer')
        testcase.put()
        testcase_id = testcase.key.id()

        data_types.FuzzTarget(engine='libFuzzer', binary='test_fuzzer').put()

        fuzzers_init.run()

        # Configure the job environment and run the minimize task.
        self._setup_env(job_type='libfuzzer_asan_job')
        environment.set_value('APP_ARGS', testcase.minimized_arguments)
        environment.set_value('LIBFUZZER_MINIMIZATION_ROUNDS', 3)
        environment.set_value('UBSAN_OPTIONS',
                              'unneeded_option=1:silence_unsigned_overflow=1')
        minimize_task.execute_task(testcase_id, 'libfuzzer_asan_job')

        # The task must have produced a fresh, distinct minimized blob.
        testcase = data_handler.get_testcase_by_id(testcase_id)
        minimized_keys = testcase.minimized_keys
        self.assertNotEqual('', minimized_keys)
        self.assertNotEqual('NA', minimized_keys)
        self.assertNotEqual(testcase.fuzzed_keys, minimized_keys)

        # The unneeded UBSAN option should be gone from the stored env.
        expected_env = {
            'ASAN_OPTIONS': {},
            'UBSAN_OPTIONS': {
                'silence_unsigned_overflow': 1
            }
        }
        self.assertEqual(expected_env, testcase.get_metadata('env'))

        blobs.read_blob_to_disk(minimized_keys, testcase_path)

        # Minimization should shrink the input down to a single byte.
        with open(testcase_path) as handle:
            self.assertEqual(1, len(handle.read()))
Exemplo n.º 4
0
  def _unpack_build(self):
    """Unpack the custom build.

    Clears the build directory, downloads the custom binary blob into it,
    and unpacks the blob in place when it is an archive.

    Returns:
      True on success, False on failure.
    """
    if not shell.remove_directory(self.build_dir, recreate=True):
      logs.log_error('Unable to clear custom binary directory.')
      _handle_unrecoverable_error_on_windows()
      return False

    build_local_archive = os.path.join(self.build_dir,
                                       self.custom_binary_filename)
    if not blobs.read_blob_to_disk(self.custom_binary_key, build_local_archive):
      return False

    # If custom binary is an archive, then unpack it.
    if archive.is_archive(self.custom_binary_filename):
      if not _make_space_for_build(build_local_archive, self.base_build_dir):
        # Remove downloaded archive to free up space and otherwise, it won't get
        # deleted until next job run.
        shell.remove_file(build_local_archive)

        logs.log_fatal_and_exit('Could not make space for build.')

      try:
        archive.unpack(build_local_archive, self.build_dir, trusted=True)
      except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed here.
        logs.log_error(
            'Unable to unpack build archive %s.' % build_local_archive)
        return False

      # Remove the archive.
      shell.remove_file(build_local_archive)

    return True
Exemplo n.º 5
0
    def test_minimize(self):
        """Test minimize.

        Uploads a 3-byte reproducer, runs the minimize task, and verifies a
        distinct 1-byte minimized blob is produced.
        """
        testcase_file_path = os.path.join(self.temp_dir, 'testcase')
        # The file is opened in binary mode, so the payload must be bytes;
        # writing the str 'EEE' raises TypeError on Python 3.
        with open(testcase_file_path, 'wb') as f:
            f.write(b'EEE')

        with open(testcase_file_path) as f:
            fuzzed_keys = blobs.write_blob(f)

        testcase_path = os.path.join(self.temp_dir, 'testcase')

        testcase = data_types.Testcase(
            crash_type='Null-dereference WRITE',
            crash_address='',
            crash_state='Foo\n',
            crash_stacktrace='',
            crash_revision=1337,
            fuzzed_keys=fuzzed_keys,
            fuzzer_name='libFuzzer',
            overridden_fuzzer_name='libFuzzer_test_fuzzer',
            job_type='libfuzzer_asan_job',
            original_absolute_path=testcase_path,
            absolute_path=testcase_path,
            minimized_arguments='%TESTCASE% test_fuzzer')
        testcase.put()

        self._setup_env(job_type='libfuzzer_asan_job')
        environment.set_value('APP_ARGS', testcase.minimized_arguments)
        environment.set_value('LIBFUZZER_MINIMIZATION_ROUNDS', 3)
        minimize_task.execute_task(testcase.key.id(), 'libfuzzer_asan_job')

        # A fresh minimized blob must exist and differ from the fuzzed one.
        testcase = data_handler.get_testcase_by_id(testcase.key.id())
        self.assertNotEqual('', testcase.minimized_keys)
        self.assertNotEqual('NA', testcase.minimized_keys)
        self.assertNotEqual(testcase.fuzzed_keys, testcase.minimized_keys)
        self.assertEqual({'ASAN_OPTIONS': {}}, testcase.get_metadata('env'))

        blobs.read_blob_to_disk(testcase.minimized_keys, testcase_path)

        # Minimization should reduce the 3-byte input to a single byte.
        with open(testcase_path) as f:
            self.assertEqual(1, len(f.read()))
Exemplo n.º 6
0
def unpack_testcase(testcase):
    """Unpack a testcase and return all files it is composed of.

    Returns a (file_list, input_directory, testcase_file_path) tuple, where
    file_list is None when the blob could not be fetched or the expected
    file is missing from the archive.
    """
    # Figure out where the testcase file should be stored.
    input_directory, testcase_file_path = _get_testcase_file_and_path(testcase)

    # Prefer the minimized blob when one exists (and is not the 'NA' marker).
    minimized = testcase.minimized_keys and testcase.minimized_keys != 'NA'
    if minimized:
        key = testcase.minimized_keys
        status_flag = data_types.ArchiveStatus.MINIMIZED
    else:
        key = testcase.fuzzed_keys
        status_flag = data_types.ArchiveStatus.FUZZED
    archived = bool(testcase.archive_state & status_flag)

    # Archived testcases are downloaded to a temporary archive file first;
    # plain testcases go straight to their final path.
    if not archived:
        temp_filename = testcase_file_path
    elif minimized:
        temp_filename = os.path.join(
            input_directory,
            str(testcase.key.id()) + _TESTCASE_ARCHIVE_EXTENSION)
    else:
        temp_filename = os.path.join(input_directory,
                                     testcase.archive_filename)

    if not blobs.read_blob_to_disk(key, temp_filename):
        return None, input_directory, testcase_file_path

    if not archived:
        return [testcase_file_path], input_directory, testcase_file_path

    archive.unpack(temp_filename, input_directory)
    file_list = archive.get_file_list(temp_filename)
    shell.remove_file(temp_filename)

    # The file we intend to run must actually be inside the archive.
    expected_basename = os.path.basename(testcase_file_path)
    file_exists = any(
        os.path.basename(file_name) == expected_basename
        for file_name in file_list)

    if not file_exists:
        logs.log_error(
            'Expected file to run %s is not in archive. Base directory is %s and '
            'files in archive are [%s].' %
            (testcase_file_path, input_directory, ','.join(file_list)))
        return None, input_directory, testcase_file_path

    return file_list, input_directory, testcase_file_path
Exemplo n.º 7
0
def update_fuzzer_and_data_bundles(fuzzer_name):
    """Update the fuzzer with a given name if necessary.

    Looks up the Fuzzer entity, exports helper environment variables,
    adjusts timeout/testcase limits from the fuzzer's settings, downloads
    and unpacks a new fuzzer archive when the stored revision is newer than
    the locally recorded one, then sets up the fuzzer's data bundles and
    launcher script path.

    Args:
        fuzzer_name: Name of the fuzzer to update.

    Returns:
        True on success; False if any update or data-bundle step failed.

    Raises:
        errors.InvalidFuzzerError: If no fuzzer with this name exists.
    """
    fuzzer = data_types.Fuzzer.query(
        data_types.Fuzzer.name == fuzzer_name).get()
    if not fuzzer:
        logs.log_error('No fuzzer exists with name %s.' % fuzzer_name)
        raise errors.InvalidFuzzerError

    # Set some helper environment variables.
    fuzzer_directory = get_fuzzer_directory(fuzzer_name)
    environment.set_value('FUZZER_DIR', fuzzer_directory)
    environment.set_value('UNTRUSTED_CONTENT', fuzzer.untrusted_content)

    # Adjust the test timeout, if user has provided one.
    if fuzzer.timeout:
        environment.set_value('TEST_TIMEOUT', fuzzer.timeout)

        # Increase fuzz test timeout if the fuzzer timeout is higher than its
        # current value.
        fuzz_test_timeout = environment.get_value('FUZZ_TEST_TIMEOUT')
        if fuzz_test_timeout and fuzz_test_timeout < fuzzer.timeout:
            environment.set_value('FUZZ_TEST_TIMEOUT', fuzzer.timeout)

    # Adjust the max testcases if this fuzzer has specified a lower limit.
    max_testcases = environment.get_value('MAX_TESTCASES')
    if fuzzer.max_testcases and fuzzer.max_testcases < max_testcases:
        environment.set_value('MAX_TESTCASES', fuzzer.max_testcases)

    # Check for updates to this fuzzer. Built-in fuzzers have no archive to
    # download, so they skip the whole update branch.
    version_file = os.path.join(fuzzer_directory, '.%s_version' % fuzzer_name)
    if (not fuzzer.builtin
            and revisions.needs_update(version_file, fuzzer.revision)):
        logs.log('Fuzzer update was found, updating.')

        # Clear the old fuzzer directory if it exists.
        if not shell.remove_directory(fuzzer_directory, recreate=True):
            logs.log_error('Failed to clear fuzzer directory.')
            return False

        # Copy the archive to local disk and unpack it.
        archive_path = os.path.join(fuzzer_directory, fuzzer.filename)
        if not blobs.read_blob_to_disk(fuzzer.blobstore_key, archive_path):
            logs.log_error('Failed to copy fuzzer archive.')
            return False

        try:
            archive.unpack(archive_path, fuzzer_directory)
        except Exception:
            # A failed unpack is fatal for this run; surface it in the
            # fuzzer's script log as well as the bot log.
            error_message = (
                'Failed to unpack fuzzer archive %s '
                '(bad archive or unsupported format).') % fuzzer.filename
            logs.log_error(error_message)
            fuzzer_logs.upload_script_log('Fatal error: ' + error_message,
                                          fuzzer_name=fuzzer_name)
            return False

        # The declared executable must exist inside the unpacked archive.
        fuzzer_path = os.path.join(fuzzer_directory, fuzzer.executable_path)
        if not os.path.exists(fuzzer_path):
            error_message = (
                'Fuzzer executable %s not found. '
                'Check fuzzer configuration.') % fuzzer.executable_path
            logs.log_error(error_message)
            fuzzer_logs.upload_script_log('Fatal error: ' + error_message,
                                          fuzzer_name=fuzzer_name)
            return False

        # Make fuzzer executable.
        os.chmod(fuzzer_path, 0o750)

        # Cleanup unneeded archive.
        shell.remove_file(archive_path)

        # Save the current revision of this fuzzer in a file for later checks.
        revisions.write_revision_to_revision_file(version_file,
                                                  fuzzer.revision)
        logs.log('Updated fuzzer to revision %d.' % fuzzer.revision)

    # Setup data bundles associated with this fuzzer.
    data_bundles = ndb_utils.get_all_from_query(
        data_types.DataBundle.query(
            data_types.DataBundle.name == fuzzer.data_bundle_name))
    for data_bundle in data_bundles:
        if not update_data_bundle(fuzzer, data_bundle):
            return False

    # Setup environment variable for launcher script path.
    if fuzzer.launcher_script:
        fuzzer_launcher_path = shell.get_execute_command(
            os.path.join(fuzzer_directory, fuzzer.launcher_script))
        environment.set_value('LAUNCHER_PATH', fuzzer_launcher_path)

    return True
Exemplo n.º 8
0
def execute_task(metadata_id, job_type):
  """Unpack a bundled testcase archive and create analyze jobs for each item.

  Args:
    metadata_id: Datastore id of the BundledArchiveMetadata entity.
    job_type: Job type name, re-queued with the task on transient failures.
  """
  metadata = ndb.Key(data_types.BundledArchiveMetadata, int(metadata_id)).get()
  if not metadata:
    logs.log_error('Invalid bundle metadata id %s.' % metadata_id)
    return

  bot_name = environment.get_value('BOT_NAME')
  upload_metadata = data_types.TestcaseUploadMetadata.query(
      data_types.TestcaseUploadMetadata.blobstore_key ==
      metadata.blobstore_key).get()
  if not upload_metadata:
    logs.log_error('Invalid upload metadata key %s.' % metadata.blobstore_key)
    return

  # Update the upload metadata with this bot name.
  upload_metadata.bot_name = bot_name
  upload_metadata.put()

  # We can't use FUZZ_INPUTS directory since it is constrained
  # by tmpfs limits.
  testcases_directory = environment.get_value('FUZZ_INPUTS_DISK')

  # Retrieve multi-testcase archive.
  archive_path = os.path.join(testcases_directory, metadata.archive_filename)
  if not blobs.read_blob_to_disk(metadata.blobstore_key, archive_path):
    logs.log_error('Could not retrieve archive for bundle %d.' % metadata_id)
    tasks.add_task('unpack', metadata_id, job_type)
    return

  try:
    archive.unpack(archive_path, testcases_directory)
  except Exception:
    # Narrowed from a bare `except:`; re-queue the task for a retry.
    logs.log_error('Could not unpack archive for bundle %d.' % metadata_id)
    tasks.add_task('unpack', metadata_id, job_type)
    return

  archive_state = data_types.ArchiveStatus.NONE
  bundled = True
  file_list = archive.get_file_list(archive_path)
  for file_path in file_list:
    absolute_file_path = os.path.join(testcases_directory, file_path)
    filename = os.path.basename(absolute_file_path)

    # Only files are actual testcases. Skip directories.
    if not os.path.isfile(absolute_file_path):
      continue

    try:
      # Context manager guarantees the handle is closed even if
      # write_blob raises; the old open/write/close leaked it on error.
      with open(absolute_file_path, 'rb') as file_handle:
        blob_key = blobs.write_blob(file_handle)
    except Exception:
      blob_key = None

    if not blob_key:
      logs.log_error(
          'Could not write testcase %s to blobstore.' % absolute_file_path)
      continue

    data_handler.create_user_uploaded_testcase(
        blob_key, metadata.blobstore_key, archive_state,
        metadata.archive_filename, filename, metadata.timeout,
        metadata.job_type, metadata.job_queue, metadata.http_flag,
        metadata.gestures, metadata.additional_arguments,
        metadata.bug_information, metadata.crash_revision,
        metadata.uploader_email, metadata.platform_id,
        metadata.app_launch_command, metadata.fuzzer_name,
        metadata.overridden_fuzzer_name, metadata.fuzzer_binary_name, bundled,
        upload_metadata.retries, upload_metadata.bug_summary_update_flag)

  # The upload metadata for the archive is not needed anymore since we created
  # one for each testcase.
  upload_metadata.key.delete()

  shell.clear_testcase_directories()
Exemplo n.º 9
0
 def test_read_blob_to_disk_legacy(self):
     """Test read_blob_to_disk for legacy files."""
     succeeded = blobs.read_blob_to_disk('legacyblobkey', '/file')
     self.assertTrue(succeeded)
     # Legacy keys must resolve to the 'legacy' object in the blobs bucket.
     expected = mock.call('/blobs-bucket/legacy', '/file')
     self.mock.copy_file_from.assert_has_calls([expected])
Exemplo n.º 10
0
 def test_read_blob_to_disk(self):
     """Test read_blob_to_disk for GCS files."""
     succeeded = blobs.read_blob_to_disk(TEST_UUID, '/file')
     self.assertTrue(succeeded)
     # UUID keys map directly to same-named objects in the blobs bucket.
     expected = mock.call('/blobs-bucket/' + TEST_UUID, '/file')
     self.mock.copy_file_from.assert_has_calls([expected])