Example #1
def test_check_hash(self):  # pylint: disable=no-self-use
    file_mock = mock_file_object('1234\n')
    md5 = Mock()
    md5.hexdigest.return_value = '4321'
    with patch('__builtin__.open',
               Mock(return_value=file_mock),
               create=True):
        scan_packages.check_hash('foo', '1234')
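
The test above depends on two pieces that are not shown: the mock_file_object helper and the scan_packages.check_hash function it exercises. Below is a minimal sketch of what such a helper might look like; the name matches the test, but the body is an assumption, not the project's actual implementation.

from mock import MagicMock

def mock_file_object(contents):
    # Hypothetical helper (assumed, not taken from the project's test
    # utilities): a file-like MagicMock whose read() returns the given
    # contents and which works as a context manager, so it can stand in
    # for the return value of the patched open().
    file_mock = MagicMock()
    file_mock.read.return_value = contents
    file_mock.__enter__.return_value = file_mock
    return file_mock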
Example #2
def test_check_hash(self):  # pylint: disable=no-self-use
    file_mock = mock_file_object("1234\n")
    md5 = Mock()
    md5.hexdigest.return_value = "4321"
    with patch("__builtin__.open", Mock(return_value=file_mock), create=True):
        scan_packages.check_hash("foo", "1234")
Example #3
def check(file_info):
  if check_hashes and not check_hash(file_info.name, file_info.md5):
    raise webports.Error(
        'Checksum failed: %s\nExpected=%s\nActual=%s' %
        (file_info.rel_name, file_info.md5, get_hash(file_info.name)))
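
This check function leans on check_hash and get_hash from the surrounding module. Here is a minimal sketch of those helpers, assuming an MD5 digest over the whole file (suggested by the FileInfo field being named md5; the real scan_packages code may read in chunks or use another digest):

import hashlib

def get_hash(filename):
    # Hex digest of the file's contents. MD5 is an assumption here,
    # inferred from the FileInfo field name md5.
    with open(filename, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()

def check_hash(filename, expected):
    # True when the on-disk file matches the expected checksum.
    return get_hash(filename) == expected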
Example #4
def download_files(pkg_dir, files, check_hashes=True, parallel=False):
  """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
    hash otherwise always check the hashes match the onces in the
    FileInfo ojects.

  Returns:
    List of (filename, url) tuples.
  """
  files_to_download = []
  filenames = []
  download_dir = os.path.join(webports.package_index.PREBUILT_ROOT, 'pkg',
                              pkg_dir)
  if not os.path.exists(download_dir):
    os.makedirs(download_dir)

  for file_info in files:
    basename = os.path.basename(file_info.url)
    # TODO(bradnelson): The pnacl package is >2GB, which confuses pkg.
    #                   Figure out why and re-enable this.
    if basename == 'pnacl-0.0.1.tbz':
      continue
    file_info.name = os.path.join(download_dir, basename)
    file_info.rel_name = file_info.name[len(webports.paths.NACLPORTS_ROOT) + 1:]
    filenames.append((file_info.name, file_info.url))
    if os.path.exists(file_info.name):
      if not check_hashes or check_hash(file_info.name, file_info.md5):
        log('Up-to-date: %s' % file_info.rel_name)
        continue
    files_to_download.append(file_info)

  def check(file_info):
    if check_hashes and not check_hash(file_info.name, file_info.md5):
      raise webports.Error(
          'Checksum failed: %s\nExpected=%s\nActual=%s' %
          (file_info.rel_name, file_info.md5, get_hash(file_info.name)))

  if not files_to_download:
    log('All files up-to-date')
  else:
    total_size = sum(f.size for f in files_to_download)
    log('Need to download %d/%d files [%s]' %
        (len(files_to_download), len(files), format_size(total_size)))

    gsutil = find_gsutil()
    if parallel:
      remaining_files = files_to_download
      num_files = 20
      while remaining_files:
        files = remaining_files[:num_files]
        remaining_files = remaining_files[num_files:]
        cmd = gsutil + ['-m', 'cp'] + [f.gsurl for f in files] + [download_dir]
        log_verbose(cmd)
        subprocess.check_call(cmd)
        for file_info in files:
          check(file_info)
    else:
      for file_info in files_to_download:
        webports.download_file(file_info.name, file_info.url)
        check(file_info)

  return filenames
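
A hedged usage sketch for download_files follows. The FileInfo attributes (url, gsurl, md5, size) are inferred from how the function reads them; the SimpleFileInfo class and all sample values are invented for illustration and are not part of webports.

class SimpleFileInfo(object):
    # Stand-in for the project's FileInfo type; only the attributes
    # that download_files touches are modeled.
    def __init__(self, url, gsurl, md5, size):
        self.url = url
        self.gsurl = gsurl
        self.md5 = md5
        self.size = size
        self.name = None      # filled in by download_files
        self.rel_name = None  # filled in by download_files

files = [SimpleFileInfo(
    url='https://storage.googleapis.com/example/foo-1.0.tbz',
    gsurl='gs://example/foo-1.0.tbz',
    md5='d41d8cd98f00b204e9800998ecf8427e',
    size=1024)]

# Fetches any missing or stale files (in batches of 20 via gsutil -m cp
# when parallel=True), verifies each checksum, and returns a
# (filename, url) tuple for each file.
for filename, url in download_files('foo', files, parallel=True):
    print('%s <- %s' % (filename, url))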
Example #5
def check(file_info):
    if check_hashes and not check_hash(file_info.name, file_info.md5):
        raise webports.Error(
            'Checksum failed: %s\nExpected=%s\nActual=%s' %
            (file_info.rel_name, file_info.md5, get_hash(file_info.name)))
Example #6
def download_files(pkg_dir, files, check_hashes=True, parallel=False):
    """Download one of more files to the local disk.

  Args:
    files: List of FileInfo objects to download.
    check_hashes: When False assume local files have the correct
    hash otherwise always check the hashes match the onces in the
    FileInfo ojects.

  Returns:
    List of (filename, url) tuples.
  """
    files_to_download = []
    filenames = []
    download_dir = os.path.join(webports.package_index.PREBUILT_ROOT, 'pkg',
                                pkg_dir)
    if not os.path.exists(download_dir):
        os.makedirs(download_dir)

    for file_info in files:
        basename = os.path.basename(file_info.url)
        # TODO(bradnelson): The pnacl package is >2GB, which confuses pkg.
        #                   Figure out why and re-enable this.
        if basename == 'pnacl-0.0.1.tbz':
            continue
        file_info.name = os.path.join(download_dir, basename)
        file_info.rel_name = file_info.name[
            len(webports.paths.NACLPORTS_ROOT) + 1:]
        filenames.append((file_info.name, file_info.url))
        if os.path.exists(file_info.name):
            if not check_hashes or check_hash(file_info.name, file_info.md5):
                log('Up-to-date: %s' % file_info.rel_name)
                continue
        files_to_download.append(file_info)

    def check(file_info):
        if check_hashes and not check_hash(file_info.name, file_info.md5):
            raise webports.Error(
                'Checksum failed: %s\nExpected=%s\nActual=%s' %
                (file_info.rel_name, file_info.md5, get_hash(file_info.name)))

    if not files_to_download:
        log('All files up-to-date')
    else:
        total_size = sum(f.size for f in files_to_download)
        log('Need to download %d/%d files [%s]' %
            (len(files_to_download), len(files), format_size(total_size)))

        gsutil = find_gsutil()
        if parallel:
            remaining_files = files_to_download
            num_files = 20
            while remaining_files:
                files = remaining_files[:num_files]
                remaining_files = remaining_files[num_files:]
                cmd = (gsutil + ['-m', 'cp'] + [f.gsurl for f in files] +
                       [download_dir])
                log_verbose(cmd)
                subprocess.check_call(cmd)
                for file_info in files:
                    check(file_info)
        else:
            for file_info in files_to_download:
                webports.download_file(file_info.name, file_info.url)
                check(file_info)

    return filenames