Example #1
    def Cached(cls, cache_dir, *args, **kwargs):
        """Reuses previously fetched GSUtil, performing the fetch if necessary.

    Arguments:
      cache_dir: The toplevel cache dir.
      *args, **kwargs:  Arguments that are passed through to the GSContext()
        constructor.

    Returns:
      An initialized GSContext() object.
    """
        common_path = os.path.join(cache_dir, constants.COMMON_CACHE)
        tar_cache = cache.TarballCache(common_path)
        key = (cls.GSUTIL_TAR, )

        # The common cache will not be LRU, removing the need to hold a read
        # lock on the cached gsutil.
        ref = tar_cache.Lookup(key)
        if ref.Exists():
            logging.debug('Reusing cached gsutil.')
        else:
            logging.debug('Fetching gsutil.')
            with osutils.TempDir(base_dir=tar_cache.staging_dir) as tempdir:
                gsutil_tar = os.path.join(tempdir, cls.GSUTIL_TAR)
                cros_build_lib.RunCurl([cls.GSUTIL_URL, '-o', gsutil_tar],
                                       debug_level=logging.DEBUG)
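                # Commit the downloaded tarball into the cache; the
                # TarballCache is expected to extract it, so ref.path points
                # at the unpacked gsutil directory used below.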
                ref.SetDefault(gsutil_tar)

        gsutil_bin = os.path.join(ref.path, 'gsutil', 'gsutil')
        return cls(*args, gsutil_bin=gsutil_bin, **kwargs)
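
A minimal usage sketch for the method above, assuming it is defined as a classmethod on chromite's gs.GSContext and that the chromite libraries are importable; the import path, cache directory, and GS paths below are illustrative assumptions, not part of the snippet:

from chromite.lib import gs

# Hypothetical cache location; the first call fetches and caches gsutil,
# later calls reuse the cached copy.
ctx = gs.GSContext.Cached('/var/cache/chromeos-cache')
ctx.Copy('gs://my-bucket/some/object', '/tmp/local-copy')
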
Example #2
def FetchRemoteTarballs(storage_dir, urls):
    """Fetches a tarball given by url, and place it in sdk/.

  Args:
    urls: List of URLs to try to download. Download will stop on first success.

  Returns:
    Full path to the downloaded file
  """

    # Note we track content length ourselves since certain versions of curl
    # fail if asked to resume a complete file.
    # pylint: disable=C0301,W0631
    # https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3482927&group_id=976
    for url in urls:
        # http://www.logilab.org/ticket/8766
        # pylint: disable=E1101
        parsed = urlparse.urlparse(url)
        tarball_name = os.path.basename(parsed.path)
        if parsed.scheme in ('', 'file'):
            if os.path.exists(parsed.path):
                return parsed.path
            continue
        content_length = 0
        print 'Attempting download: %s' % url
        result = cros_build_lib.RunCurl(['-I', url],
                                        redirect_stdout=True,
                                        redirect_stderr=True,
                                        print_cmd=False)
        successful = False
        for header in result.output.splitlines():
            # We must walk the output to find the 200 code for use cases where
            # a proxy is involved and may have pushed down the actual header.
            if (header.startswith("HTTP/1.0 200")
                    or header.startswith("HTTP/1.1 200")
                    or header.startswith("HTTP/2.0 200")
                    or header.startswith("HTTP/2 200")):
                successful = True
            elif header.lower().startswith("content-length:"):
                content_length = int(header.split(":", 1)[-1].strip())
                if successful:
                    break
        if successful:
            break
    else:
        raise Exception('No valid URLs found!')

    tarball_dest = os.path.join(storage_dir, tarball_name)
    current_size = 0
    if os.path.exists(tarball_dest):
        current_size = os.path.getsize(tarball_dest)
        if current_size > content_length:
            osutils.SafeUnlink(tarball_dest)
            current_size = 0

    if current_size < content_length:
        cros_build_lib.RunCurl(
            ['-f', '-L', '-y', '30', '-C', '-', '--output', tarball_dest, url],
            print_cmd=False)

    # Clean up old tarballs now that we've successfully fetched; only clean up
    # the tarballs for our prefix, or unknown ones.
    ignored_prefix = ('stage3-'
                      if tarball_name.startswith('cros-sdk-') else 'cros-sdk-')
    for filename in os.listdir(storage_dir):
        if filename == tarball_name or filename.startswith(ignored_prefix):
            continue

        print 'Cleaning up old tarball: %s' % (filename, )
        osutils.SafeUnlink(os.path.join(storage_dir, filename))

    return tarball_dest
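
A minimal usage sketch for FetchRemoteTarballs; the storage directory and URLs are hypothetical, and the function returns the path of the first tarball it can locate locally or download:

tarball_path = FetchRemoteTarballs(
    '/var/lib/sdk-tarballs',
    ['https://example.com/mirror/cros-sdk-2013.01.01.tar.xz',
     'file:///opt/local-mirror/cros-sdk-2013.01.01.tar.xz'])
print 'SDK tarball available at: %s' % tarball_path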