Example #1
def DownloadFiles(files, check_hashes=True, parallel=False):
  """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False, assume local files have the correct
      hash; otherwise always check that the hashes match the ones
      in the FileInfo objects.

  Returns:
    List of (filename, url) tuples.
  """
  files_to_download = []
  filenames = []
  download_dir = naclports.package_index.PREBUILT_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    basename = os.path.basename(file_info.url)
    file_info.name = os.path.join(download_dir, basename)
    filenames.append((file_info.name, file_info.url))
    if os.path.exists(file_info.name):
      if not check_hashes or CheckHash(file_info.name, file_info.md5):
        Log('Up-to-date: %s' % file_info.name)
        continue
    files_to_download.append(file_info)

  def Check(file_info):
    if check_hashes and not CheckHash(file_info.name, file_info.md5):
      raise naclports.Error(
          'Checksum failed: %s\nExpected=%s\nActual=%s' %
          (file_info.name, file_info.md5, GetHash(file_info.name)))

  if not files_to_download:
    Log('All files up-to-date')
  else:
    total_size = sum(f.size for f in files_to_download)
    Log('Need to download %d/%d files [%s]' %
        (len(files_to_download), len(files), FormatSize(total_size)))

    gsutil = FindGsutil()
    if parallel:
      remaining_files = files_to_download
      num_files = 20
      while remaining_files:
        batch = remaining_files[:num_files]
        remaining_files = remaining_files[num_files:]
        cmd = gsutil + ['-m', 'cp'] + [f.gsurl for f in batch] + [download_dir]
        LogVerbose(cmd)
        subprocess.check_call(cmd)
        for file_info in batch:
          Check(file_info)
    else:
      for file_info in files_to_download:
        naclports.DownloadFile(file_info.name, file_info.url)
        Check(file_info)

  return filenames
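
A minimal sketch of how DownloadFiles might be invoked. FileInfo is not defined in the example; here it is assumed to be a small mutable record exposing the url, md5, size and gsurl fields the function reads (the real naclports definition may differ), and all of the values below are placeholders.

class FileInfo(object):
  """Hypothetical stand-in for the naclports FileInfo record."""
  def __init__(self, url, md5, size, gsurl):
    self.url = url
    self.md5 = md5
    self.size = size
    self.gsurl = gsurl
    self.name = None  # Filled in by DownloadFiles.

# All values below are placeholders for illustration.
files = [
    FileInfo(url='https://example.com/foo_0.1.tar.bz2',
             md5='d41d8cd98f00b204e9800998ecf8427e',
             size=1024,
             gsurl='gs://example-bucket/foo_0.1.tar.bz2'),
]
for local_path, url in DownloadFiles(files, check_hashes=True):
  print('%s <- %s' % (local_path, url))
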
Example #2
    def Download(self, mirror=True):
        filename = self.DownloadLocation()
        if not filename or os.path.exists(filename):
            return
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))

        if mirror:
            # First try downloading from the mirror URL and silently fall
            # back to the original if this fails.
            mirror_url = self.GetMirrorURL()
            try:
                naclports.DownloadFile(filename, mirror_url)
                return
            except Error:
                pass

        naclports.DownloadFile(filename, self.URL)
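
The mirror-then-fallback logic above can also be written as a small standalone helper. A sketch under the assumption that the downloader is any callable with the (filename, url) signature of naclports.DownloadFile and that it raises the same Error caught in the example; DownloadWithFallback itself is illustrative, not part of naclports.

def DownloadWithFallback(filename, urls, download_file=naclports.DownloadFile):
    """Try each URL in order, re-raising the last error if all of them fail."""
    last_error = None
    for url in urls:
        try:
            download_file(filename, url)
            return
        except Error as error:
            last_error = error
    raise last_error
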
Example #3
def DownloadAndInstallSDK(url):
    bz2_dir = OUT_DIR
    if not os.path.exists(bz2_dir):
        os.makedirs(bz2_dir)
    bz2_filename = os.path.join(bz2_dir, url.split('/')[-1])

    if sys.platform in ['win32', 'cygwin']:
        cygbin = os.path.join(FindCygwin(), 'bin')

    print('Downloading "%s" to "%s"...' % (url, bz2_filename))
    sys.stdout.flush()

    # Download it.
    naclports.DownloadFile(bz2_filename, url)

    # Extract toolchain.
    old_cwd = os.getcwd()
    os.chdir(bz2_dir)
    Untar(bz2_filename)
    os.chdir(old_cwd)

    # Calculate pepper_dir by taking common prefix of tar
    # file contents
    tar = tarfile.open(bz2_filename)
    names = tar.getnames()
    tar.close()
    pepper_dir = os.path.commonprefix(names)

    actual_dir = os.path.join(bz2_dir, pepper_dir)

    # Drop old versions.
    if os.path.exists(TARGET_DIR):
        print('Cleaning up old SDK...')
        if sys.platform in ['win32', 'cygwin']:
            cmd = [os.path.join(cygbin, 'rm.exe'), '-rf']
        else:
            cmd = ['rm', '-rf']
        cmd.append(TARGET_DIR)
        returncode = subprocess.call(cmd)
        assert returncode == 0

    print('Renaming toolchain "%s" -> "%s"' % (actual_dir, TARGET_DIR))
    os.rename(actual_dir, TARGET_DIR)

    if sys.platform in ['win32', 'cygwin']:
        time.sleep(2)  # Wait for windows.

    # Clean up: remove the sdk bz2.
    os.remove(bz2_filename)

    print('Install complete.')
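
The pepper_dir computation above recovers the archive's single top-level directory by taking os.path.commonprefix over the tar member names. A standalone illustration of that technique using only the standard library (the file name is hypothetical):

import os
import tarfile

def TopLevelDir(tar_path):
    """Return the path prefix shared by every member of a tarball."""
    with tarfile.open(tar_path) as tar:
        names = tar.getnames()
    # For an archive laid out as pepper_XX/..., this yields 'pepper_XX'.
    # commonprefix compares strings, not path components, so archives whose
    # top-level entries merely share leading characters give a partial name.
    return os.path.commonprefix(names).rstrip('/')

# print(TopLevelDir('naclsdk_linux.tar.bz2'))  # hypothetical file name
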
Example #4
def DownloadToCache(url, sha1):
    filename = os.path.basename(url)
    full_name = os.path.join(naclports.paths.CACHE_ROOT, filename)
    if os.path.exists(full_name):
        try:
            naclports.util.VerifyHash(full_name, sha1)
            naclports.Log("Verified cached file: %s" % filename)
            return full_name
        except naclports.util.HashVerificationError:
            naclports.Log("Hash mistmatch on cached download: %s" % filename)

    naclports.DownloadFile(full_name, url)
    naclports.util.VerifyHash(full_name, sha1)
    return full_name
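
A possible call to DownloadToCache; the URL and SHA-1 below are placeholders, and the sketch assumes naclports.paths.CACHE_ROOT points at an existing cache directory.

url = 'https://storage.googleapis.com/naclports/mirror/foo-1.0.tar.gz'  # placeholder
sha1 = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'  # placeholder SHA-1
cached_path = DownloadToCache(url, sha1)
naclports.Log('Cached at: %s' % cached_path)
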
Example #5
  def Download(self, package_name, config):
    PREBUILT_ROOT = os.path.join(package.PACKAGES_ROOT, 'prebuilt')
    if not os.path.exists(PREBUILT_ROOT):
      os.makedirs(PREBUILT_ROOT)
    info = self.packages[(package_name, config)]
    filename = os.path.join(PREBUILT_ROOT, os.path.basename(info['BIN_URL']))
    if os.path.exists(filename):
      if VerifyHash(filename, info['BIN_SHA1']):
        return filename
    naclports.Log('Downloading prebuilt binary ...')
    naclports.DownloadFile(filename, info['BIN_URL'])
    if not VerifyHash(filename, info['BIN_SHA1']):
      raise naclports.Error('Unexpected SHA1: %s' % filename)
    return filename
Example #6
def DownloadFiles(files, check_hashes=True):
  """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False, assume local files have the correct
      hash; otherwise always check that the hashes match the ones
      in the FileInfo objects.

  Returns:
    List of (filename, url) tuples.
  """
  files_to_download = []
  filenames = []
  download_dir = naclports.package_index.PREBUILT_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    basename = os.path.basename(file_info.url)
    fullname = os.path.join(download_dir, basename)
    filenames.append((fullname, file_info.url))
    if os.path.exists(fullname):
      if not check_hashes or CheckHash(fullname, file_info.etag):
        Log('Up-to-date: %s' % file_info.name)
        continue
    files_to_download.append(
        FileInfo(fullname, file_info.size, file_info.url, file_info.etag))

  if not files_to_download:
    Log('All files up-to-date')
  else:
    total_size = sum(f.size for f in files_to_download)
    Log('Need to download %d/%d files [%s]' %
        (len(files_to_download), len(files), FormatSize(total_size)))

    for file_info in files_to_download:
      naclports.DownloadFile(file_info.name, file_info.url)
      if check_hashes and not CheckHash(file_info.name, file_info.etag):
        raise naclports.Error('Checksum failed: %s' % file_info.name)

  return filenames