Code Example #1
    def _CheckPayloads(self, payload_name):
        """Check that |payload_name| is staged at the expected URL."""
        payload_url = self._GetStagedUrl(staged_filename=payload_name,
                                         build_id=self._payload_dir)
        try:
            # A curl HEAD request (-I) with --fail surfaces HTTP errors as a
            # DownloadError once RunCurl's retries are exhausted.
            retry_util.RunCurl(['-I', payload_url, '--fail'])
        except retry_util.DownloadError as e:
            raise ChromiumOSTransferError(
                'Payload %s does not exist at %s: %s' %
                (payload_name, payload_url, e))
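
A minimal standalone sketch of the same existence check, assuming chromite.lib.retry_util is importable; check_payload_exists and its argument are hypothetical names, not part of the original class:

from chromite.lib import retry_util

def check_payload_exists(payload_url):
    """Return True if a curl HEAD request (-I) for |payload_url| succeeds."""
    try:
        # --fail turns HTTP errors into a non-zero curl exit status, which
        # RunCurl raises as DownloadError after its retries are exhausted.
        retry_util.RunCurl(['-I', payload_url, '--fail'])
    except retry_util.DownloadError:
        return False
    return True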
Code Example #2
    def _Fetch(self, url, local_path):
        """Fetch a remote file."""
        # We have to nest the import because gs.GSContext uses us to cache its own
        # gsutil tarball.  We know we won't get into a recursive loop though as it
        # only fetches files via non-gs URIs.
        from chromite.lib import gs

        if gs.PathIsGs(url):
            ctx = gs.GSContext()
            ctx.Copy(url, local_path)
        else:
            # Note: unittests assume local_path is at the end.
            retry_util.RunCurl([url, '-o', local_path],
                               debug_level=logging.DEBUG)
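
The curl fallback in the else-branch can also be exercised on its own; a rough sketch, assuming chromite.lib.retry_util is available (fetch_via_curl is a hypothetical name):

import logging

from chromite.lib import retry_util

def fetch_via_curl(url, local_path):
    """Download |url| to |local_path| with curl, as in the else-branch above."""
    # -o writes the response body to |local_path|; keeping the output path as
    # the last argument mirrors the ordering the original unittests rely on.
    retry_util.RunCurl([url, '-o', local_path], debug_level=logging.DEBUG)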
Code Example #3
    def GetPayloadPropsFile(self):
        """Downloads the PayloadProperties file onto the drone.

    The payload properties file may be required to be updated in
    auto_updater.ResolveAppIsMismatchIfAny(). Download the file from where it
    has been staged on the staging server into the tempdir of the drone, so that
    the file is available locally for any updates.
    """
        if self._local_payload_props_path is None:
            payload_props_filename = GetPayloadPropertiesFileName(
                self._payload_name)
            cmd = self._GetCurlCmdForPayloadDownload(
                payload_dir=self._tempdir,
                build_id=self._payload_dir,
                payload_filename=payload_props_filename)
            try:
                retry_util.RunCurl(cmd[1:])
            except retry_util.DownloadError as e:
                raise ChromiumOSTransferError('Unable to download %s: %s' %
                                              (payload_props_filename, e))
            else:
                self._local_payload_props_path = os.path.join(
                    self._tempdir, payload_props_filename)
        return self._local_payload_props_path
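
_GetCurlCmdForPayloadDownload is not shown here; the cmd[1:] slice suggests it returns a full command line whose first element is the curl binary itself, which RunCurl supplies on its own. A rough standalone sketch of the same download-into-tempdir flow, with hypothetical names for the staging URL and helper:

import os

from chromite.lib import retry_util

def download_payload_props(staging_url, payload_props_filename, tempdir):
    """Fetch a payload properties file into |tempdir| and return its path."""
    local_path = os.path.join(tempdir, payload_props_filename)
    try:
        # Rough equivalent of the arguments built by
        # _GetCurlCmdForPayloadDownload(), minus the leading 'curl' binary;
        # the URL layout here is assumed.
        retry_util.RunCurl(['%s/%s' % (staging_url, payload_props_filename),
                            '-o', local_path])
    except retry_util.DownloadError as e:
        # The original raises ChromiumOSTransferError; a plain error keeps
        # this sketch self-contained.
        raise RuntimeError('Unable to download %s: %s' %
                           (payload_props_filename, e))
    return local_path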
Code Example #4
File: cros_sdk.py  Project: zhaozhangpeng/chromium.bb
def FetchRemoteTarballs(storage_dir, urls, desc, allow_none=False):
  """Fetches a tarball given by url, and place it in |storage_dir|.

  Args:
    storage_dir: Path where to save the tarball.
    urls: List of URLs to try to download. Download will stop on first success.
    desc: A string describing what tarball we're downloading (for logging).
    allow_none: Don't fail if none of the URLs worked.

  Returns:
    Full path to the downloaded file, or None if |allow_none| and no URL worked.

  Raises:
    ValueError: If |allow_none| is False and none of the URLs worked.
  """

  # Note we track content length ourselves since certain versions of curl
  # fail if asked to resume a complete file.
  # pylint: disable=C0301,W0631
  # https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3482927&group_id=976
  logging.notice('Downloading %s tarball...', desc)
  status_re = re.compile(r'^HTTP/[0-9]+(\.[0-9]+)? 200')
  for url in urls:
    # http://www.logilab.org/ticket/8766
    # pylint: disable=E1101
    parsed = urlparse.urlparse(url)
    tarball_name = os.path.basename(parsed.path)
    if parsed.scheme in ('', 'file'):
      if os.path.exists(parsed.path):
        return parsed.path
      continue
    content_length = 0
    logging.debug('Attempting download from %s', url)
    result = retry_util.RunCurl(
        ['-I', url], fail=False, capture_output=False, redirect_stdout=True,
        redirect_stderr=True, print_cmd=False, debug_level=logging.NOTICE)
    successful = False
    for header in result.output.splitlines():
      # We must walk the output to find the 200 code for use cases where
      # a proxy is involved and may have pushed down the actual header.
      if status_re.match(header):
        successful = True
      elif header.lower().startswith('content-length:'):
        content_length = int(header.split(':', 1)[-1].strip())
        if successful:
          break
    if successful:
      break
  else:
    if allow_none:
      return None
    raise ValueError('No valid URLs found!')

  tarball_dest = os.path.join(storage_dir, tarball_name)
  current_size = 0
  if os.path.exists(tarball_dest):
    current_size = os.path.getsize(tarball_dest)
    if current_size > content_length:
      osutils.SafeUnlink(tarball_dest)
      current_size = 0

  if current_size < content_length:
    retry_util.RunCurl(
        ['-L', '-y', '30', '-C', '-', '--output', tarball_dest, url],
        print_cmd=False, capture_output=False, debug_level=logging.NOTICE)

  # Clean up old tarballs now since we've successfully fetched; only clean up
  # the tarballs for our prefix, or unknown ones. This gets a bit tricky
  # because we might have partial overlap between known prefixes.
  my_prefix = tarball_name.rsplit('-', 1)[0] + '-'
  all_prefixes = ('stage3-amd64-', 'cros-sdk-', 'cros-sdk-overlay-')
  ignored_prefixes = [prefix for prefix in all_prefixes if prefix != my_prefix]
  for filename in os.listdir(storage_dir):
    if (filename == tarball_name or
        any([(filename.startswith(p) and
              not (len(my_prefix) > len(p) and filename.startswith(my_prefix)))
             for p in ignored_prefixes])):
      continue
    logging.info('Cleaning up old tarball: %s', filename)
    osutils.SafeUnlink(os.path.join(storage_dir, filename))

  return tarball_dest
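
The probe-then-resume pattern above can be reduced to a small sketch: a HEAD request reads Content-Length, and the actual transfer passes -C - so an interrupted download is resumed rather than restarted. The function name and the use of logging.DEBUG are assumptions; the RunCurl keyword arguments mirror the calls above:

import logging
import os

from chromite.lib import retry_util

def resumable_fetch(url, dest):
    """Download |url| to |dest|, resuming a partial file if one exists."""
    # Probe the server for the expected size; only the Content-Length header
    # matters here, so any proxy-injected status lines are simply skipped.
    result = retry_util.RunCurl(
        ['-I', url], fail=False, capture_output=False, redirect_stdout=True,
        redirect_stderr=True, print_cmd=False, debug_level=logging.DEBUG)
    content_length = 0
    for header in result.output.splitlines():
        if header.lower().startswith('content-length:'):
            content_length = int(header.split(':', 1)[-1].strip())

    # Skip the transfer if a previous run already completed it.
    if (os.path.exists(dest) and content_length
            and os.path.getsize(dest) >= content_length):
        return dest
    # '-C -' resumes from the current size of |dest|; '-y 30' aborts a
    # transfer that stalls for 30 seconds so RunCurl's retries can kick in.
    retry_util.RunCurl(['-L', '-y', '30', '-C', '-', '--output', dest, url],
                       print_cmd=False, debug_level=logging.DEBUG)
    return dest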