Example #1
def download_files(files, check_hashes=True, parallel=False):
  """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
    hash otherwise always check the hashes match the onces in the
    FileInfo ojects.

  Returns:
    List of (filename, url) tuples.
  """
  files_to_download = []
  filenames = []
  download_dir = webports.package_index.PREBUILT_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    basename = os.path.basename(file_info.url)
    file_info.name = os.path.join(download_dir, basename)
    filenames.append((file_info.name, file_info.url))
    if os.path.exists(file_info.name):
      if not check_hashes or check_hash(file_info.name, file_info.md5):
        log('Up-to-date: %s' % file_info.name)
        continue
    files_to_download.append(file_info)

  def check(file_info):
    if check_hashes and not check_hash(file_info.name, file_info.md5):
      raise webports.Error(
          'Checksum failed: %s\nExpected=%s\nActual=%s' %
          (file_info.name, file_info.md5, get_hash(file_info.name)))

  if not files_to_download:
    log('All files up-to-date')
  else:
    total_size = sum(f.size for f in files_to_download)
    log('Need to download %d/%d files [%s]' %
        (len(files_to_download), len(files), format_size(total_size)))

    gsutil = find_gsutil()
    if parallel:
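      # Invoke gsutil on batches of at most num_files files per run.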
      remaining_files = files_to_download
      num_files = 20
      while remaining_files:
        batch = remaining_files[:num_files]
        remaining_files = remaining_files[num_files:]
        cmd = gsutil + ['-m', 'cp'] + [f.gsurl for f in batch] + [download_dir]
        log_verbose(cmd)
        subprocess.check_call(cmd)
        for file_info in batch:
          check(file_info)
    else:
      for file_info in files_to_download:
        webports.download_file(file_info.name, file_info.url)
        check(file_info)

  return filenames
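A minimal usage sketch for the function above, assuming the module's own imports and helpers are in scope. The real FileInfo class comes from webports.package_index; the stand-in class and all values below are hypothetical, with field names inferred from how download_files() uses them.

# Hypothetical stand-in for webports' FileInfo; field names inferred
# from the function body (url, md5, size, gsurl, plus a writable name).
class FileInfo(object):

  def __init__(self, url, md5, size, gsurl):
    self.url = url
    self.md5 = md5
    self.size = size
    self.gsurl = gsurl
    self.name = None  # filled in by download_files()

files = [
    FileInfo(url='https://example.com/pkg-0.1.tar.bz2',
             md5='d41d8cd98f00b204e9800998ecf8427e',  # placeholder digest
             size=1024,
             gsurl='gs://example-bucket/pkg-0.1.tar.bz2'),
]

# Serial download with hash verification (the defaults).
filenames = download_files(files)

# Parallel download via `gsutil -m cp`, batched 20 files at a time.
filenames = download_files(files, parallel=True)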
Example #2
def download_and_install_sdk(url, target_dir):
  """Download the SDK tarball at url and extract it into target_dir.

  Any existing installation at target_dir is removed first.
  """
  bz2_dir = OUT_DIR
  if not os.path.exists(bz2_dir):
    os.makedirs(bz2_dir)
  bz2_filename = os.path.join(bz2_dir, url.split('/')[-1])

  if sys.platform in ['win32', 'cygwin']:
    cygbin = os.path.join(find_cygwin(), 'bin')

  # Download it.
  webports.download_file(bz2_filename, url)

  # Extract toolchain.
  old_cwd = os.getcwd()
  os.chdir(bz2_dir)
  untar(bz2_filename)
  os.chdir(old_cwd)

  # Calculate pepper_dir by taking the common prefix of the tar
  # file contents (assumes all entries share one top-level directory).
  tar = tarfile.open(bz2_filename)
  names = tar.getnames()
  tar.close()
  pepper_dir = os.path.commonprefix(names)

  actual_dir = os.path.join(bz2_dir, pepper_dir)

  # Drop old versions.
  if os.path.exists(target_dir):
    webports.log('Cleaning up old SDK...')
    if sys.platform in ['win32', 'cygwin']:
      cmd = [os.path.join(cygbin, 'rm.exe'), '-rf']
    else:
      cmd = ['rm', '-rf']
    cmd.append(target_dir)
    returncode = subprocess.call(cmd)
    assert returncode == 0

  webports.log('Renaming toolchain "%s" -> "%s"' % (actual_dir, target_dir))
  os.rename(actual_dir, target_dir)

  if sys.platform in ['win32', 'cygwin']:
    time.sleep(2)  # Wait for windows.

  # Clean up: remove the sdk bz2.
  os.remove(bz2_filename)

  webports.log('Install complete.')
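A hypothetical invocation of the function above; the URL and target directory are placeholders rather than a real SDK bundle location.

# Hypothetical example; URL and target path are placeholders.
download_and_install_sdk('https://example.com/sdk/pepper_sdk.tar.bz2',
                         os.path.join(OUT_DIR, 'pepper_sdk'))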
Example #3
def download_to_cache(url, sha1):
  """Download url into the cache directory, verifying its sha1 digest.

  A cached copy whose hash matches is reused; otherwise the file is
  (re-)downloaded. Returns the full path of the cached file.
  """
  filename = os.path.basename(url)
  download_dir = webports.paths.CACHE_ROOT
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)
  full_name = os.path.join(download_dir, filename)
  if os.path.exists(full_name):
    try:
      webports.util.verify_hash(full_name, sha1)
      webports.log("Verified cached file: %s" % filename)
      return full_name
    except webports.util.HashVerificationError:
      webports.log("Hash mistmatch on cached download: %s" % filename)

  webports.download_file(full_name, url)
  webports.util.verify_hash(full_name, sha1)
  return full_name
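A short usage sketch; the URL and sha1 digest below are placeholders, and the digest must match the actual file for verification to pass.

# Hypothetical usage; URL and digest are placeholders.
tarball = download_to_cache(
    'https://example.com/downloads/foo-1.0.tar.gz',
    'da39a3ee5e6b4b0d3255bfef95601890afd80709')
print('Cached at: %s' % tarball)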