Example no. 1
0
def DownloadFuzzerCorpus(fuzzer, dest_directory=None):
    """Downloads a corpus and returns its path.

    Downloads a corpus to a subdirectory of dest_directory if specified and
    returns path on the filesystem of the corpus. Asks users to authenticate
    if permission to read from bucket is denied.

    Args:
      fuzzer: The name of the fuzzer whose corpus we want to download.
      dest_directory: The directory to download the corpus to.

    Returns:
      The path to the downloaded corpus.

    Raises:
      gs.NoSuchKey: A corpus for the fuzzer doesn't exist.
      gs.GSCommandError: The corpus failed to download for another reason.
    """
    # ClusterFuzz internally appends "chromeos_" to chromeos targets' names,
    # so add the prefix here if the caller did not, or the corpus lookup will
    # miss.
    if not fuzzer.startswith('chromeos_'):
        fuzzer = 'chromeos_%s' % fuzzer

    if dest_directory is None:
        dest_directory = GetScriptStoragePath(CORPUS_DIRECTORY_NAME).chroot
    osutils.SafeMakedirsNonRoot(dest_directory)

    gcs_path = gs.GetGsURL('chromeos-corpus',
                           for_gsutil=True,
                           public=False,
                           suburl='libfuzzer/%s' % fuzzer)

    dest_path = os.path.join(dest_directory, fuzzer)

    try:
        logging.info('Downloading corpus to %s.', dest_path)
        gs.GSContext().Copy(gcs_path,
                            dest_directory,
                            recursive=True,
                            parallel=True,
                            debug_level=logging.DEBUG)
        logging.info('Finished downloading corpus.')
    except gs.GSNoSuchKey:
        logging.error('Corpus for fuzzer: %s does not exist.', fuzzer)
        raise
    # A command error usually means we were denied permission; point the user
    # at the authentication instructions before re-raising.
    except gs.GSCommandError:
        logging.error(
            'gsutil failed to download the corpus. You may need to log in. See:\n'
            'https://chromium.googlesource.com/chromiumos/docs/+/master/gsutil.md'
            '#setup\n'
            'for instructions on doing this.')
        raise

    return dest_path
Example no. 2
0
def GetSdkURL(for_gsutil=False, suburl=''):
  """Construct a Google Storage URL for accessing SDK related archives.

  Args:
    for_gsutil: Do you want a URL for passing to `gsutil`?
    suburl: A url fragment to tack onto the end.

  Returns:
    The fully constructed URL.
  """
  # Delegate directly to gs.GetGsURL; this wrapper only pins the SDK bucket.
  return gs.GetGsURL(constants.SDK_GS_BUCKET,
                     for_gsutil=for_gsutil,
                     suburl=suburl)
Example no. 3
0
def PrepareBinhostUploads(input_proto, output_proto, config):
    """Return a list of files to upload to the binhost.

    See BinhostService documentation in api/proto/binhost.proto.

    Args:
      input_proto (PrepareBinhostUploadsRequest): The input proto.
      output_proto (PrepareBinhostUploadsResponse): The output proto.
      config (api_config.ApiConfig): The API call config.
    """
    # Prefer the build target embedded in the sysroot message when present.
    if input_proto.sysroot.build_target.name:
        target_msg = input_proto.sysroot.build_target
    else:
        target_msg = input_proto.build_target
    sysroot_path = input_proto.sysroot.path

    if not sysroot_path and not target_msg.name:
        cros_build_lib.Die('Sysroot.path is required.')

    build_target = controller_util.ParseBuildTarget(target_msg)
    chroot = controller_util.ParseChroot(input_proto.chroot)

    # Fall back to the build target's default root when no sysroot path was
    # given.
    sysroot = sysroot_lib.Sysroot(sysroot_path or build_target.root)

    uri = input_proto.uri
    # For now, we enforce that all input URIs are Google Storage buckets.
    if not gs.PathIsGs(uri):
        raise ValueError('Upload URI %s must be Google Storage.' % uri)

    if config.validate_only:
        return controller.RETURN_CODE_VALID_INPUT

    parsed = urllib.parse.urlparse(uri)
    upload_uri = gs.GetGsURL(parsed.netloc, for_gsutil=True).rstrip('/')
    upload_path = parsed.path.lstrip('/')

    # Read all packages and update the index. The index must be uploaded to
    # the binhost for Portage to use it, so include it in upload_targets.
    uploads_dir = binhost.GetPrebuiltsRoot(chroot, sysroot, build_target)
    index_path = binhost.UpdatePackageIndex(uploads_dir,
                                            upload_uri,
                                            upload_path,
                                            sudo=True)
    upload_targets = binhost.GetPrebuiltsFiles(uploads_dir)
    assert index_path.startswith(uploads_dir), (
        'expected index_path to start with uploads_dir')
    upload_targets.append(index_path[len(uploads_dir):])

    output_proto.uploads_dir = uploads_dir
    for target in upload_targets:
        output_proto.upload_targets.add().path = target.strip('/')
Example no. 4
0
def PrepareDevInstallBinhostUploads(input_proto, output_proto, config):
    """Return a list of files to upload to the binhost.

    The files will also be copied to the uploads_dir.
    See BinhostService documentation in api/proto/binhost.proto.

    Args:
      input_proto (PrepareDevInstallBinhostUploadsRequest): The input proto.
      output_proto (PrepareDevInstallBinhostUploadsResponse): The output proto.
      config (api_config.ApiConfig): The API call config.

    Raises:
      ValueError: If the upload URI is not a Google Storage URI.
    """
    sysroot_path = input_proto.sysroot.path

    chroot = controller_util.ParseChroot(input_proto.chroot)
    sysroot = sysroot_lib.Sysroot(sysroot_path)

    uri = input_proto.uri
    # For now, we enforce that all input URIs are Google Storage buckets.
    if not gs.PathIsGs(uri):
        raise ValueError('Upload URI %s must be Google Storage.' % uri)

    if config.validate_only:
        return controller.RETURN_CODE_VALID_INPUT

    parsed_uri = urllib.parse.urlparse(uri)
    upload_uri = gs.GetGsURL(parsed_uri.netloc, for_gsutil=True).rstrip('/')
    upload_path = parsed_uri.path.lstrip('/')

    # Calculate the filename for the to-be-created Packages file, which will
    # contain only devinstall packages.
    devinstall_package_index_path = os.path.join(input_proto.uploads_dir,
                                                 'Packages')
    upload_targets_list = binhost.ReadDevInstallFilesToCreatePackageIndex(
        chroot, sysroot, devinstall_package_index_path, upload_uri,
        upload_path)

    package_dir = chroot.full_path(sysroot.path, 'packages')
    for upload_target in upload_targets_list:
        # Copy each package to uploads_dir/category/package.
        upload_target = upload_target.strip('/')
        category = upload_target.split(os.sep)[0]
        target_dir = os.path.join(input_proto.uploads_dir, category)
        # exist_ok avoids the check-then-create race the old
        # os.path.exists() guard was vulnerable to.
        os.makedirs(target_dir, exist_ok=True)
        full_src_pkg_path = os.path.join(package_dir, upload_target)
        full_target_src_path = os.path.join(input_proto.uploads_dir,
                                            upload_target)
        shutil.copyfile(full_src_pkg_path, full_target_src_path)
        output_proto.upload_targets.add().path = upload_target
    # The generated Packages index itself must also be uploaded.
    output_proto.upload_targets.add().path = 'Packages'