Esempio n. 1
0
    def test_DownloadArchive(self):
        """Verify a single package archive is downloaded intact."""
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            source_tar = self.GenerateMockFile(work_dir)

            # Serve the mock tarball from a fake URL.
            archive_url = 'http://www.fake.com/archive.tar'
            self._fake_downloader.StoreURL(archive_url, source_tar)

            desc = self.GeneratePackageInfo(
                [source_tar], url_dict={source_tar: archive_url})

            destination = os.path.join(work_dir, 'tar_dir')
            package_version.DownloadPackageArchives(
                destination,
                'archive_target',
                'archive_name',
                desc,
                downloader=self._fake_downloader.Download)

            self.assertEqual(
                self._fake_downloader.GetDownloadCount(), 1,
                "Expected a single archive to have been downloaded.")

            # The downloaded copy must hash identically to the source.
            local_copy = package_locations.GetLocalPackageArchiveFile(
                destination, 'archive_target', 'archive_name',
                os.path.basename(source_tar))
            self.assertEqual(archive_info.GetArchiveHash(local_copy),
                             archive_info.GetArchiveHash(source_tar))
    def test_DownloadArchive(self):
        """Download an archive that has a log URL, honoring include_logs.

        First downloads with include_logs=False (only the archive is
        fetched), then again with include_logs=True (only the log is
        fetched, since the archive is already present on disk).
        """
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_tar = self.GenerateMockFile(work_dir)
            mock_hash = archive_info.GetArchiveHash(mock_tar)

            fake_url = 'http://www.fake.com/archive.tar'
            self._fake_downloader.StoreURL(fake_url, mock_tar)

            # A second mock file stands in for the archive's log.
            mock_log = self.GenerateMockFile(work_dir)
            fake_log_url = 'http://www.fake.com/archive_log.txt'
            self._fake_downloader.StoreURL(fake_log_url, mock_log)

            package_desc = self.GeneratePackageInfo(
                [mock_tar],
                url_dict={mock_tar: fake_url},
                log_url_dict={mock_tar: fake_log_url},
            )

            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'archive_target'
            package_name = 'archive_name'
            package_version.DownloadPackageArchives(
                tar_dir,
                package_target,
                package_name,
                package_desc,
                downloader=self._fake_downloader.Download,
                include_logs=False,
            )
            self.assertEqual(
                self._fake_downloader.GetDownloadCount(), 1,
                "Expected a single archive to have been downloaded.")

            mock_name = os.path.basename(mock_tar)
            local_archive_file = package_locations.GetLocalPackageArchiveFile(
                tar_dir, mock_name, mock_hash)

            self.assertEqual(archive_info.GetArchiveHash(local_archive_file),
                             mock_hash)

            # Check log is not downloaded when include_logs is False.
            local_archive_log = package_locations.GetLocalPackageArchiveLogFile(
                local_archive_file)
            self.assertFalse(os.path.isfile(local_archive_log))

            # Check log is downloaded when include_logs is True; the archive
            # is already on disk, so the count should rise by exactly one.
            package_version.DownloadPackageArchives(
                tar_dir,
                package_target,
                package_name,
                package_desc,
                downloader=self._fake_downloader.Download,
                include_logs=True,
            )
            self.assertEqual(self._fake_downloader.GetDownloadCount(), 2,
                             "Expected only log to have been downloaded.")

            self.assertEqual(archive_info.GetArchiveHash(local_archive_log),
                             archive_info.GetArchiveHash(mock_log))
Esempio n. 3
0
    def test_ArchiveHashStable(self):
        """Identical file contents must yield identical archive hashes."""
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            first = os.path.join(work_dir, 'temp1.txt')
            second = os.path.join(work_dir, 'temp2.txt')

            # Write the same bytes to two distinct paths.
            for path in (first, second):
                with open(path, 'wt') as out:
                    out.write('this is a test')

            self.assertEqual(archive_info.GetArchiveHash(first),
                             archive_info.GetArchiveHash(second))
    def CopyToLocalArchiveFile(self, archive_file, tar_dir):
        """Copies an archive into its canonical location under tar_dir."""
        destination = package_locations.GetLocalPackageArchiveFile(
            tar_dir,
            os.path.basename(archive_file),
            archive_info.GetArchiveHash(archive_file))
        # Metadata-preserving copy into a freshly ensured parent directory.
        pynacl.file_tools.MakeParentDirectoryIfAbsent(destination)
        shutil.copy2(archive_file, destination)
Esempio n. 5
0
def _DoFillEmptyTarsCmd(arguments):
  """Creates empty tar archives for package entries that are missing.

  For each selected package, every archive recorded without a hash (meaning
  the archive file was never produced) is replaced by a freshly created empty
  tar of the matching compression type, and the package file is rewritten to
  reference the new empty archives.

  Args:
    arguments: Parsed command-line namespace; uses fillemptytars_package,
      package_target_packages and tar_dir.
  Raises:
    NameError: If no package matches, or an archive name has an unknown
      extension.
  """
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package,
      arguments.package_target_packages
  )
  if not package_target_packages:
    raise NameError('Unknown package: %s.' % arguments.fillemptytars_package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
                                                         package_target,
                                                         package_name)

    # skip_missing=True lets missing archives surface with an empty hash.
    package_desc = package_info.PackageInfo(package_path, skip_missing=True)
    output_package_desc = package_info.PackageInfo()
    for archive in package_desc.GetArchiveList():
      # If archive does not exist, fill it with an empty one.
      archive_data = archive.GetArchiveData()
      if archive_data.hash:
        output_package_desc.AppendArchive(archive)
      else:
        logging.info('Filling missing archive: %s.', archive_data.name)
        # Choose the tar write mode based on the archive's extension.
        if (archive_data.name.endswith('.tar.gz') or
            archive_data.name.endswith('.tgz')):
          mode = 'w:gz'
        elif archive_data.name.endswith('.bz2'):
          mode = 'w:bz2'
        elif archive_data.name.endswith('.tar'):
          mode = 'w:'
        else:
          raise NameError('Unknown archive type: %s.' % archive_data.name)

        archive_file = package_locations.GetLocalPackageArchiveFile(
            arguments.tar_dir,
            package_target,
            package_name,
            archive_data.name
            )

        # Create the empty tar, then record its hash in the output package.
        tar_file = cygtar.CygTar(archive_file, mode)
        tar_file.Close()
        tar_hash = archive_info.GetArchiveHash(archive_file)

        empty_archive = archive_info.ArchiveInfo(name=archive_data.name,
                                                 archive_hash=tar_hash)
        output_package_desc.AppendArchive(empty_archive)

    output_package_desc.SavePackageFile(package_path)
    def test_CleanupExtraFiles(self):
        """CleanupTarDirectory removes stray files but keeps package data."""
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_tar = self.GenerateMockFile(work_dir)

            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'custom_package_target'
            package_name = 'custom_package'

            # Lay down a valid package file plus its archive.
            package_desc = self.GeneratePackageInfo([mock_tar])
            package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            package_desc.SavePackageFile(package_file)
            self.CopyToLocalArchiveFile(mock_tar, tar_dir)

            package_dir = os.path.dirname(package_file)
            archive_file = package_locations.GetLocalPackageArchiveFile(
                tar_dir, os.path.basename(mock_tar),
                archive_info.GetArchiveHash(mock_tar))

            # Scatter files and a directory that belong to no package.
            extra_dir = os.path.join(tar_dir, 'extra_dir')
            os.makedirs(extra_dir)
            strays = [self.GenerateMockFile(tar_dir),
                      self.GenerateMockFile(package_dir),
                      self.GenerateMockFile(extra_dir)]

            package_version.CleanupTarDirectory(tar_dir)

            # The key files survive the cleanup.
            self.assertTrue(os.path.isfile(package_file))
            self.assertTrue(os.path.isfile(archive_file))

            # The package file still loads, so nothing vital was deleted.
            package_info.PackageInfo(package_file)

            # Every stray file and the stray directory are gone.
            for stray in strays:
                self.assertFalse(os.path.isfile(stray))
            self.assertFalse(os.path.isdir(extra_dir))
    def GeneratePackageInfo(self,
                            archive_list,
                            url_dict=None,
                            src_dir_dict=None,
                            dir_dict=None,
                            log_url_dict=None):
        """Generates a package_info.PackageInfo object for a list of archives.

        Args:
          archive_list: List of file paths where package archives sit.
          url_dict: dict of archive file path to URL, if a URL exists.
          src_dir_dict: dict of archive file path to source tar dir, if set.
          dir_dict: dict of archive file path to extraction root dir, if set.
          log_url_dict: dict of archive file path to log URL, if one exists.
        Returns:
          A package_info.PackageInfo describing each archive in archive_list.
        """
        # Mutable default arguments ({}) are shared across calls; normalize
        # the optional dicts here instead of defaulting them in the signature.
        if url_dict is None:
            url_dict = {}
        if src_dir_dict is None:
            src_dir_dict = {}
        if dir_dict is None:
            dir_dict = {}
        if log_url_dict is None:
            log_url_dict = {}

        package_desc = package_info.PackageInfo()
        for archive_file in archive_list:
            archive_name = os.path.basename(archive_file)

            # Use a placeholder hash for archives that do not exist on disk,
            # so tests can describe intentionally-missing archives.
            if os.path.isfile(archive_file):
                archive_hash = archive_info.GetArchiveHash(archive_file)
            else:
                archive_hash = 'invalid'

            archive_url = url_dict.get(archive_file, None)
            archive_src_tar_dir = src_dir_dict.get(archive_file, '')
            archive_dir = dir_dict.get(archive_file, '')
            archive_log_url = log_url_dict.get(archive_file, None)
            archive_desc = archive_info.ArchiveInfo(
                name=archive_name,
                hash=archive_hash,
                url=archive_url,
                tar_src_dir=archive_src_tar_dir,
                extract_dir=archive_dir,
                log_url=archive_log_url)
            package_desc.AppendArchive(archive_desc)

        return package_desc
    def test_DownloadMismatchArchiveUponExtraction(self):
        """Missing or corrupt archives are re-downloaded during extraction.

        Sets up two archives: one absent from the tar directory and one
        present but with contents that do not match the recorded hash.
        ExtractPackageTargets must download both before extracting.
        """
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_file1 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile1.txt')
            mock_file2 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile2.txt')

            tar_dir = os.path.join(work_dir, 'tar_dir')
            dest_dir = os.path.join(work_dir, 'dest_dir')
            mock_tars_dir = os.path.join(work_dir, 'mock_tars')
            package_target = 'custom_package_target'
            package_name = 'custom_package'
            # NOTE(review): package_revision is unused in this test body.
            package_revision = 10

            # Create mock tars and mock URLS where the tars can be downloaded from.
            os.makedirs(mock_tars_dir)
            mock_tar1 = os.path.join(mock_tars_dir, 'mock1.tar')
            mock_url1 = 'https://www.mock.com/tar1.tar'
            with tarfile.TarFile(mock_tar1, 'w') as f:
                f.add(mock_file1, arcname=os.path.basename(mock_file1))
            self._fake_downloader.StoreURL(mock_url1, mock_tar1)

            mock_tar2 = os.path.join(mock_tars_dir, 'mock2.tar')
            mock_url2 = 'https://www.mock.com/tar2.tar'
            with tarfile.TarFile(mock_tar2, 'w') as f:
                f.add(mock_file2, arcname=os.path.basename(mock_file2))
            self._fake_downloader.StoreURL(mock_url2, mock_tar2)

            # Have tar1 be missing, have tar2 be a file with invalid data.
            mismatch_tar2 = package_locations.GetLocalPackageArchiveFile(
                tar_dir, os.path.basename(mock_tar2),
                archive_info.GetArchiveHash(mock_tar2))
            os.makedirs(os.path.dirname(mismatch_tar2))
            # NOTE(review): writes a str to a file opened 'wb' — fine under
            # Python 2, raises under Python 3; confirm the target runtime.
            with open(mismatch_tar2, 'wb') as f:
                f.write('mismatch tar')

            package_desc = self.GeneratePackageInfo([mock_tar1, mock_tar2],
                                                    url_dict={
                                                        mock_tar1: mock_url1,
                                                        mock_tar2: mock_url2
                                                    })
            package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            package_desc.SavePackageFile(package_file)

            package_version.ExtractPackageTargets(
                [(package_target, package_name)],
                tar_dir,
                dest_dir,
                downloader=self._fake_downloader.Download)
            self.assertEqual(
                self._fake_downloader.GetDownloadCount(), 2,
                "Expected to download exactly 2 mismatched archives.")

            full_dest_dir = package_locations.GetFullDestDir(
                dest_dir, package_target, package_name)

            dest_mock_file2 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file2))
            dest_mock_file1 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file1))

            # Compare extracted contents byte-for-byte against the sources.
            with open(mock_file1, 'rb') as f:
                mock_contents1 = f.read()
            with open(mock_file2, 'rb') as f:
                mock_contents2 = f.read()
            with open(dest_mock_file1, 'rb') as f:
                dest_mock_contents1 = f.read()
            with open(dest_mock_file2, 'rb') as f:
                dest_mock_contents2 = f.read()

            self.assertEqual(mock_contents1, dest_mock_contents1)
            self.assertEqual(mock_contents2, dest_mock_contents2)
Esempio n. 9
0
 def test_HashEmptyForMissingFiles(self):
     """GetArchiveHash returns None when the file does not exist."""
     # Many scripts rely on the archive hash returning None for missing files.
     with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
         self.assertEqual(None,
                          archive_info.GetArchiveHash('missingfile.tgz'))
Esempio n. 10
0
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None, skip_missing=False, quiet=False):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being
  extracted.

  Args:
    package_target_packages: List of tuples of package target and package names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: function which takes a url and a file path for downloading.
    skip_missing: If True, archives that are missing and have no URL are
      skipped instead of raising IOError.
    quiet: If True, the tar extraction is run with verbose output disabled.
  Raises:
    IOError: If an archive is invalid with no URL, or a downloaded archive
      does not match its recorded hash.
  """
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                         package_target,
                                                         package_name)
    package_desc = package_info.PackageInfo(package_file,
                                            skip_missing=skip_missing)
    dest_package_dir = package_locations.GetFullDestDir(dest_dir,
                                                        package_target,
                                                        package_name)
    dest_package_file = package_locations.GetDestPackageFile(dest_dir,
                                                             package_target,
                                                             package_name)

    # Only do the extraction if the extract packages do not match.
    if os.path.isfile(dest_package_file):
      try:
        dest_package_desc = package_info.PackageInfo(dest_package_file)
        if dest_package_desc == package_desc:
          logging.debug('Skipping extraction for package (%s)', package_name)
          continue
      # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit;
      # 'except Exception' would be safer here.
      except:
        # Destination package file cannot be trusted, if invalid re-extract.
        pass

      # Delete the old package file before we extract.
      os.unlink(dest_package_file)

    if os.path.isdir(dest_package_dir):
      logging.debug('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    archive_list = package_desc.GetArchiveList()
    num_archives = len(archive_list)
    for index, archive_obj in enumerate(archive_list):
      archive_desc = archive_obj.GetArchiveData()
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )

      # Upon extraction, some files may not be downloaded (or have stale files),
      # we need to check the hash of each file and attempt to download it if
      # they do not match.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        if archive_desc.url is None:
          if skip_missing:
            logging.info('Skipping extraction of missing archive: %s' %
                         archive_file)
            continue
          raise IOError('Invalid archive file and URL: %s' % archive_file)

        logging.warn('Expected archive missing, downloading: %s',
                     archive_desc.name)

        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        # Re-hash to verify the download actually matches the package entry.
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise IOError('Downloaded archive file does not match hash.'
                        ' [%s] Expected %s, received %s.' %
                        (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
      logging.info('Extracting %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))

      # Extract into a temp dir first and merge afterwards, so a failed
      # extraction does not leave a half-populated destination tree.
      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      tar_output = not quiet
      tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
      curdir = os.getcwd()
      os.chdir(temp_dir)
      try:
        tar.Extract()
        tar.Close()
      finally:
        # Always restore the working directory, even if extraction fails.
        os.chdir(curdir)

      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    # Record the extracted package description so future runs can skip work.
    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    package_desc.SavePackageFile(dest_package_file)
Esempio n. 11
0
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, skip_missing=False,
                  custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default local package files are expected to be found in the standardized
  location within the tar directory, however a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automaticaly have the archives uploaded so that
  someone accessing the package file from the cloud storage will also have
  access to the package archives.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Print annotations for build bots?
    skip_missing: Skip missing package archive files?
    custom_package_file: File location for a custom package file.
  Returns:
    Returns remote download key for the uploaded package file.
  Raises:
    IOError: If a local archive is missing or does not match its recorded
      hash.
  """
  if custom_package_file is not None:
    local_package_file = custom_package_file
  else:
    local_package_file = package_locations.GetLocalPackageFile(
        tar_dir,
        package_target,
        package_name)

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file,
                                          skip_missing=skip_missing)
  upload_package_desc = package_info.PackageInfo()

  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    url = archive_desc.url
    # Only archives that exist locally (hash set) but have never been
    # uploaded (no URL) need to be pushed to storage.
    if archive_desc.hash and url is None:
      if annotate:
        print '@@@BUILD_STEP Archive:%s (upload)@@@' % archive_desc.name

      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name)
      # Verify the local archive matches the package entry before uploading.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise IOError('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
        raise IOError(
            'Archive hash does not match package hash: %s' % archive_file
            + '\n  Archive Hash: %s' % archive_hash
            + '\n  Package Hash: %s' % archive_desc.hash)

      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publically available...')
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
          archive_desc.name,
          archive_desc.hash)
      url = storage.PutFile(archive_file, remote_archive_key, clobber=True)
      if annotate:
        print '@@@STEP_LINK@download@%s@@@' % url

    # Rebuild the archive entry so any newly assigned URL is recorded.
    archive_desc = archive_info.ArchiveInfo(
        archive_desc.name,
        archive_desc.hash,
        url=url,
        tar_src_dir=archive_desc.tar_src_dir,
        extract_dir=archive_desc.extract_dir)
    upload_package_desc.AppendArchive(archive_desc)

  # Write the URL-annotated package description to a sibling '.upload' file
  # rather than clobbering the local package file.
  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
      is_shared_package,
      revision,
      package_target,
      package_name)
  package_info.UploadPackageInfoFiles(storage, package_target, package_name,
                                      remote_package_key, upload_package_file,
                                      skip_missing=skip_missing,
                                      annotate=annotate)

  return remote_package_key
Esempio n. 12
0
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
                           extra_archives=None):
  """Archives local package archives to the tar directory.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live. Each
      entry may carry an optional '@URL' suffix and an optional
      ',tar_src_dir[:extract_dir]' extraction specifier.
    extra_archives: Extra archives that are expected to be built elsewhere;
      missing files in this list are skipped rather than treated as errors.
  Returns:
    The local package file that was archived, or None when the existing
    local package file already matches and nothing needed to be re-archived.
  Raises:
    IOError: If a required archive file is missing or invalid.
  """
  # Avoid the shared mutable-default-argument pitfall: normalize the optional
  # list here instead of defaulting it to [] in the signature.
  if extra_archives is None:
    extra_archives = []

  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)

  valid_archive_files = set()
  archive_list = []

  package_desc = package_info.PackageInfo()
  package_archives = ([(archive, False) for archive in archives] +
                      [(archive, True) for archive in extra_archives])
  for archive, skip_missing in package_archives:
    # An archive spec may embed its download URL after an '@'.
    archive_url = None
    if '@' in archive:
      archive, archive_url = archive.split('@', 1)

    # A ',' introduces 'tar_src_dir[:extract_dir]' extraction parameters.
    extract_param = ''
    tar_src_dir = ''
    extract_dir = ''
    if ',' in archive:
      archive, extract_param = archive.split(',', 1)
      if ':' in extract_param:
        tar_src_dir, extract_dir = extract_param.split(':', 1)
      else:
        tar_src_dir = extract_param

    archive_hash = archive_info.GetArchiveHash(archive)
    archive_name = os.path.basename(archive)
    archive_desc = archive_info.ArchiveInfo(archive_name,
                                            archive_hash,
                                            url=archive_url,
                                            tar_src_dir=tar_src_dir,
                                            extract_dir=extract_dir)
    package_desc.AppendArchive(archive_desc)

    # GetArchiveHash returns None for files that do not exist.
    if archive_hash is None:
      if skip_missing:
        logging.info('Skipping archival of missing file: %s', archive)
        continue
      raise IOError('Invalid package: %s.' % archive)
    archive_list.append(archive)

    archive_basename = os.path.basename(archive)
    archive_json = archive_basename + '.json'
    valid_archive_files.update([archive_basename, archive_json])

  # Delete any stale archive files
  local_archive_dir = package_locations.GetLocalPackageArchiveDir(
      tar_dir,
      package_target,
      package_name)

  if os.path.isdir(local_archive_dir):
    for dir_item in os.listdir(local_archive_dir):
      if dir_item in valid_archive_files:
        continue

      item_path = os.path.join(local_archive_dir, dir_item)
      if os.path.isdir(item_path):
        pynacl.file_tools.RemoveDir(item_path)
      else:
        pynacl.file_tools.RemoveFile(item_path)

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  # the json file.
  if os.path.isfile(local_package_file):
    try:
      current_package_desc = package_info.PackageInfo(local_package_file,
                                                      skip_missing=True)
      if current_package_desc == package_desc:
        # Already up to date; this path returns None, not the file path.
        return
    except ValueError:
      pass

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        archive_name)

    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
Esempio n. 13
0
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
                            downloader=None, revision_num=None):
  """Downloads package archives from the cloud to the tar directory.

  Archives already present in the tar directory with a matching hash are not
  re-downloaded; archives no longer referenced by the package are deleted
  once the download pass completes.

  Args:
    tar_dir: Root tar directory where archives will be downloaded to.
    package_target: Package target of the package to download.
    package_name: Package name of the package to download.
    package_desc: package_info object of the package to download.
    downloader: function which takes a url and a file path for downloading.
    revision_num: Optional revision identifier, used only in log output.
  Returns:
    The list of files that were downloaded.
  Raises:
    IOError: If an archive has no URL, fails to download, or fails its hash
      check after downloading.
  """
  downloaded_files = []
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload
  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)
  # To ensure that we do not redownload extra archives that we already have,
  # create a dictionary of old package archives that contains the hash of each
  # package archive.
  old_archives = {}
  if os.path.isfile(local_package_file):
    try:
      old_package_desc = package_info.PackageInfo(local_package_file)
      old_archives_list = old_package_desc.GetArchiveList()
      old_archive_names = [archive.GetArchiveData().name
                           for archive
                           in old_archives_list]
      for archive_name in old_archive_names:
        archive_file = package_locations.GetLocalPackageArchiveFile(
            tar_dir,
            package_target,
            package_name,
            archive_name
            )

        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash is not None:
          old_archives[archive_name] = archive_hash
    # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit;
    # 'except Exception' would be safer.
    except:
      # Nothing can be trusted here anymore, delete all package archives.
      archive_directory = package_locations.GetLocalPackageArchiveDir(
          tar_dir,
          package_target,
          package_name
          )
      os.unlink(local_package_file)
      pynacl.file_tools.RemoveDir(archive_directory)

  # Download packages information file along with each of the package
  # archives described in the information file. Also keep track of what
  # new package names matches old package names. We will have to delete
  # stale package names after we are finished.
  update_archives = []
  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    old_hash = old_archives.get(archive_desc.name, None)
    if old_hash is not None:
      old_archives.pop(archive_desc.name)
      if archive_desc.hash == old_hash:
        logging.debug('Skipping matching archive: %s', archive_desc.name)
        continue
    update_archives.append(archive_obj)

  if update_archives:
    logging.info('--Syncing %s to revision %s--' % (package_name, revision_num))
    num_archives = len(update_archives)
    for index, archive_obj in enumerate(update_archives):
      archive_desc = archive_obj.GetArchiveData()
      local_archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )
      pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)

      if archive_desc.url is None:
        raise IOError('Error, no URL for archive: %s' % archive_desc.name)

      logging.info('Downloading package archive: %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))
      try:
        downloader(archive_desc.url, local_archive_file)
      except Exception as e:
        raise IOError('Could not download URL (%s): %s' %
                      (archive_desc.url, e))

      # Verify the freshly downloaded archive against the expected hash.
      verified_hash = archive_info.GetArchiveHash(local_archive_file)
      if verified_hash != archive_desc.hash:
        raise IOError('Package hash check failed: %s != %s' %
                      (verified_hash, archive_desc.hash))

      downloaded_files.append(local_archive_file)

  # Delete any stale left over packages.
  for old_archive in old_archives:
    archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        old_archive)
    os.unlink(archive_file)

  # Save the package file so we know what we currently have.
  package_desc.SavePackageFile(local_package_file)

  return downloaded_files