Example #1
# Imports as used in the chromite source tree (module paths assumed from
# chromite's layout).
import urllib.parse

from chromite.api import controller
from chromite.api.controller import controller_util
from chromite.lib import cros_build_lib
from chromite.lib import gs
from chromite.lib import sysroot_lib
from chromite.service import binhost


def PrepareBinhostUploads(input_proto, output_proto, config):
    """Return a list of files to upload to the binhost.

  See BinhostService documentation in api/proto/binhost.proto.

  Args:
    input_proto (PrepareBinhostUploadsRequest): The input proto.
    output_proto (PrepareBinhostUploadsResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
    if input_proto.sysroot.build_target.name:
        build_target_msg = input_proto.sysroot.build_target
    else:
        build_target_msg = input_proto.build_target
    sysroot_path = input_proto.sysroot.path

    if not sysroot_path and not build_target_msg.name:
        cros_build_lib.Die('A sysroot path or build target name is required.')

    build_target = controller_util.ParseBuildTarget(build_target_msg)
    chroot = controller_util.ParseChroot(input_proto.chroot)

    if not sysroot_path:
        # Fall back to the build target's conventional sysroot, /build/<name>.
        sysroot_path = build_target.root
    sysroot = sysroot_lib.Sysroot(sysroot_path)

    uri = input_proto.uri
    # For now, we enforce that all input URIs are Google Storage buckets.
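    # gs.PathIsGs simply checks for the 'gs://' prefix: it is True for
    # 'gs://bucket/obj' and False for local paths or https:// URLs.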
    if not gs.PathIsGs(uri):
        raise ValueError('Upload URI %s must be Google Storage.' % uri)

    if config.validate_only:
        return controller.RETURN_CODE_VALID_INPUT

    parsed_uri = urllib.parse.urlparse(uri)
    upload_uri = gs.GetGsURL(parsed_uri.netloc, for_gsutil=True).rstrip('/')
    upload_path = parsed_uri.path.lstrip('/')
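    # Illustration (hypothetical bucket): uri='gs://example-bucket/board/foo'
    # yields upload_uri='gs://example-bucket' and upload_path='board/foo'.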

    # Read all packages and update the index. The index must be uploaded to the
    # binhost for Portage to use it, so include it in upload_targets.
    uploads_dir = binhost.GetPrebuiltsRoot(chroot, sysroot, build_target)
    index_path = binhost.UpdatePackageIndex(uploads_dir,
                                            upload_uri,
                                            upload_path,
                                            sudo=True)
    upload_targets = binhost.GetPrebuiltsFiles(uploads_dir)
    assert index_path.startswith(uploads_dir), (
        'expected index_path to start with uploads_dir')
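    # index_path is absolute; slicing off the uploads_dir prefix leaves the
    # binhost-relative path that belongs in upload_targets.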
    upload_targets.append(index_path[len(uploads_dir):])

    output_proto.uploads_dir = uploads_dir
    for upload_target in upload_targets:
        output_proto.upload_targets.add().path = upload_target.strip('/')
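
A minimal sketch of driving this handler. The generated-proto import path and
all field values below are assumptions for illustration, not taken from the
snippet above:

    from chromite.api import api_config
    # Assumed location of the generated binhost protos in chromite.
    from chromite.api.gen.chromite.api import binhost_pb2

    request = binhost_pb2.PrepareBinhostUploadsRequest()
    request.build_target.name = 'amd64-generic'    # hypothetical board
    request.uri = 'gs://example-bucket/prebuilt'   # hypothetical bucket
    response = binhost_pb2.PrepareBinhostUploadsResponse()

    PrepareBinhostUploads(request, response, api_config.ApiConfig())
    for target in response.upload_targets:
        print(target.path)
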
Example #2
# Imports as used in the chromite source tree (module paths assumed from
# chromite's layout).
import os
import shutil
import urllib.parse

from chromite.api import controller
from chromite.api.controller import controller_util
from chromite.lib import gs
from chromite.lib import sysroot_lib
from chromite.service import binhost


def PrepareDevInstallBinhostUploads(input_proto, output_proto, config):
    """Return a list of files to upload to the binhost"

  The files will also be copied to the uploads_dir.
  See BinhostService documentation in api/proto/binhost.proto.

  Args:
    input_proto (PrepareDevInstallBinhostUploadsRequest): The input proto.
    output_proto (PrepareDevInstallBinhostUploadsResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
    sysroot_path = input_proto.sysroot.path

    chroot = controller_util.ParseChroot(input_proto.chroot)
    sysroot = sysroot_lib.Sysroot(sysroot_path)

    uri = input_proto.uri
    # For now, we enforce that all input URIs are Google Storage buckets.
    if not gs.PathIsGs(uri):
        raise ValueError('Upload URI %s must be Google Storage.' % uri)

    if config.validate_only:
        return controller.RETURN_CODE_VALID_INPUT

    parsed_uri = urllib.parse.urlparse(uri)
    upload_uri = gs.GetGsURL(parsed_uri.netloc, for_gsutil=True).rstrip('/')
    upload_path = parsed_uri.path.lstrip('/')

    # Calculate the filename for the to-be-created Packages file, which will
    # contain only devinstall packages.
    devinstall_package_index_path = os.path.join(input_proto.uploads_dir,
                                                 'Packages')
    upload_targets_list = binhost.ReadDevInstallFilesToCreatePackageIndex(
        chroot, sysroot, devinstall_package_index_path, upload_uri,
        upload_path)
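    # upload_targets_list holds category/package paths relative to the
    # sysroot's packages directory; the loop below copies each into
    # uploads_dir and records it in the response.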

    # full_path maps the chroot-relative sysroot packages dir to its
    # host-visible location.
    package_dir = chroot.full_path(sysroot.path, 'packages')
    for upload_target in upload_targets_list:
        # Copy each package to target/category/package
        upload_target = upload_target.strip('/')
        category = upload_target.split(os.sep)[0]
        target_dir = os.path.join(input_proto.uploads_dir, category)
        os.makedirs(target_dir, exist_ok=True)
        full_src_pkg_path = os.path.join(package_dir, upload_target)
        full_target_pkg_path = os.path.join(input_proto.uploads_dir,
                                            upload_target)
        shutil.copyfile(full_src_pkg_path, full_target_pkg_path)
        output_proto.upload_targets.add().path = upload_target
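    # The devinstall Packages index was already written into uploads_dir, so
    # record it as a final upload target.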
    output_proto.upload_targets.add().path = 'Packages'
Example #3
    # Excerpted method; assumes module-level imports of retry_util
    # (from chromite.lib import retry_util) and a logging module that
    # provides logging.DEBUG.
    def _Fetch(self, url, local_path):
        """Fetch a remote file."""
        # We have to nest the import because gs.GSContext uses us to cache its own
        # gsutil tarball.  We know we won't get into a recursive loop though as it
        # only fetches files via non-gs URIs.
        from chromite.lib import gs

        if gs.PathIsGs(url):
            ctx = gs.GSContext()
            ctx.Copy(url, local_path)
        else:
            # Note: unittests assume local_path is at the end.
            retry_util.RunCurl([url, '-o', local_path],
                               debug_level=logging.DEBUG)
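
As a usage illustration, a sketch of the dispatch this method performs; the
URLs are hypothetical:

    from chromite.lib import gs

    for url in ('gs://example-bucket/gsutil.tar.gz',   # handled by gs.GSContext().Copy
                'https://example.com/gsutil.tar.gz'):  # handled by curl via retry_util.RunCurl
        print(url, '->', 'gsutil' if gs.PathIsGs(url) else 'curl')
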
Example #4
# From chromite.lib.commandline; assumes gs (chromite.lib) and the module's
# VALID_TYPES mapping of type names to normalizer functions are in scope.
def NormalizeLocalOrGSPath(value):
    """Normalize a local or GS path."""
    ptype = 'gs_path' if gs.PathIsGs(value) else 'path'
    return VALID_TYPES[ptype](value)
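
For completeness, a sketch of how this helper routes values; the inputs are
hypothetical and the exact normalized outputs depend on the VALID_TYPES
normalizers in chromite.lib.commandline:

    NormalizeLocalOrGSPath('gs://my-bucket/images')  # routed to the 'gs_path' normalizer
    NormalizeLocalOrGSPath('~/downloads/image.bin')  # routed to the 'path' normalizer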