    def test_DownloadSharedTarsDownloadsOnce(self):
        # Test that tars with the same name and hash are downloaded only once.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            tar_dir = os.path.join(work_dir, 'tar_dir')
            overlay_dir = os.path.join(work_dir, 'overlay_dir')
            dest_dir = os.path.join(work_dir, 'dest_dir')
            package_target_1 = 'custom_package_target_1'
            package_name_1 = 'custom_package_1'
            package_target_2 = 'custom_package_target_2'
            package_name_2 = 'custom_package_2'
            package_revision = 10

            mock_file = self.GenerateMockFile(work_dir)
            mock_tar = os.path.join(work_dir, 'shared_archive.tar')
            with tarfile.TarFile(mock_tar, 'w') as f:
                f.add(mock_file, arcname=os.path.basename(mock_file))

            fake_url = 'http://www.fake.com/archive.tar'
            self._fake_downloader.StoreURL(fake_url, mock_tar)

            # Generate two package files that both reference the same tar file.
            package_desc_1 = self.GeneratePackageInfo(
                [mock_tar], url_dict={mock_tar: fake_url})
            package_file_1 = package_locations.GetLocalPackageFile(
                tar_dir, package_target_1, package_name_1)
            package_desc_1.SavePackageFile(package_file_1)

            package_desc_2 = self.GeneratePackageInfo(
                [mock_tar], url_dict={mock_tar: fake_url})
            package_file_2 = package_locations.GetLocalPackageFile(
                tar_dir, package_target_2, package_name_2)
            package_desc_2.SavePackageFile(package_file_2)

            package_version.DownloadPackageArchives(
                tar_dir,
                package_target_1,
                package_name_1,
                package_desc_1,
                downloader=self._fake_downloader.Download,
                include_logs=False,
            )
            package_version.DownloadPackageArchives(
                tar_dir,
                package_target_2,
                package_name_2,
                package_desc_2,
                downloader=self._fake_downloader.Download,
                include_logs=False,
            )
            self.assertEqual(
                self._fake_downloader.GetDownloadCount(), 1,
                "Expected a single archive to have been downloaded.")

    def test_PackageUpload(self):
        # Check if we can properly upload a package file from the tar directory.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'test_package_archives'
            package_name = 'package_archives'
            package_revision = 10
            package_version.ArchivePackageArchives(tar_dir, package_target,
                                                   package_name, [])

            package_version.UploadPackage(self._fake_storage, package_revision,
                                          tar_dir, package_target,
                                          package_name, False)
            self.assertEqual(self._fake_storage.WriteCount(), 1,
                             "Package did not get properly uploaded")

            remote_package_key = package_locations.GetRemotePackageKey(
                False, package_revision, package_target, package_name)
            downloaded_package = os.path.join(work_dir,
                                              'download_package.json')
            package_info.DownloadPackageInfoFiles(
                downloaded_package,
                remote_package_key,
                downloader=self._fake_storage.GetFile)
            downloaded_package_desc = package_info.PackageInfo(
                downloaded_package)

            original_package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            original_package_desc = package_info.PackageInfo(
                original_package_file)

            self.assertEqual(downloaded_package_desc, original_package_desc)
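Similarly, self._fake_storage stands in for the cloud storage object. A minimal in-memory sketch of the PutFile/GetFile/WriteCount interface this test assumes (hypothetical, not the real fake):

class FakeStorage(object):
    """Hypothetical in-memory stand-in for the cloud storage object."""

    def __init__(self):
        self._files = {}
        self._write_count = 0

    def PutFile(self, local_file, key, clobber=True):
        # Store the file contents under |key| and return a fake URL.
        with open(local_file, 'rb') as f:
            self._files[key] = f.read()
        self._write_count += 1
        return 'fake://storage/%s' % key

    def GetFile(self, key, file_path):
        # Write the stored contents for |key| out to |file_path|.
        with open(file_path, 'wb') as f:
            f.write(self._files[key])

    def WriteCount(self):
        return self._write_count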
Example #3
def _DoFillEmptyTarsCmd(arguments):
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package,
      arguments.package_target_packages
  )
  if not package_target_packages:
    raise NameError('Unknown package: %s. Did you forget to add'
                    ' "$PACKAGE_TARGET/"?' % arguments.fillemptytars_package)

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
                                                         package_target,
                                                         package_name)

    package_desc = package_info.PackageInfo(package_path, skip_missing=True)
    output_package_desc = package_info.PackageInfo()
    for archive in package_desc.GetArchiveList():
      # If archive does not exist, fill it with an empty one.
      archive_data = archive.GetArchiveData()
      if archive_data.hash:
        output_package_desc.AppendArchive(archive)
      else:
        logging.info('Filling missing archive: %s.', archive_data.name)
        if (archive_data.name.endswith('.tar.gz') or
            archive_data.name.endswith('.tgz')):
          mode = 'w:gz'
        elif archive_data.name.endswith('.bz2'):
          mode = 'w:bz2'
        elif archive_data.name.endswith('.tar'):
          mode = 'w:'
        else:
          raise NameError('Unknown archive type: %s.' % archive_data.name)

        archive_file = package_locations.GetLocalPackageArchiveFile(
            arguments.tar_dir,
            package_target,
            package_name,
            archive_data.name
            )

        tar_file = cygtar.CygTar(archive_file, mode)
        tar_file.Close()
        tar_hash = archive_info.GetArchiveHash(archive_file)

        empty_archive = archive_info.ArchiveInfo(name=archive_data.name,
                                                 archive_hash=tar_hash)
        output_package_desc.AppendArchive(empty_archive)

    output_package_desc.SavePackageFile(package_path)
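For reference, filling a missing archive boils down to writing an empty tar in the compression mode implied by the extension and hashing the result. A self-contained sketch using only the standard library (the real code goes through cygtar and archive_info, and SHA-1 here is an assumption about the digest used):

import hashlib
import tarfile

def FillEmptyArchive(path):
  # Pick the write mode from the file extension, mirroring the logic above.
  if path.endswith('.tar.gz') or path.endswith('.tgz'):
    mode = 'w:gz'
  elif path.endswith('.bz2'):
    mode = 'w:bz2'
  elif path.endswith('.tar'):
    mode = 'w:'
  else:
    raise NameError('Unknown archive type: %s.' % path)

  # Opening and closing a tar file with no members yields a valid, empty
  # archive on disk.
  tarfile.open(path, mode).close()

  # Hash the resulting bytes, standing in for archive_info.GetArchiveHash.
  with open(path, 'rb') as f:
    return hashlib.sha1(f.read()).hexdigest()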

    def test_CleanupExtraFiles(self):
        # Test that the cleanup function properly cleans up extra files
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_tar = self.GenerateMockFile(work_dir)

            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'custom_package_target'
            package_name = 'custom_package'

            package_desc = self.GeneratePackageInfo([mock_tar])
            package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            package_desc.SavePackageFile(package_file)
            self.CopyToLocalArchiveFile(mock_tar, tar_dir)

            package_dir = os.path.dirname(package_file)
            archive_file = package_locations.GetLocalPackageArchiveFile(
                tar_dir, os.path.basename(mock_tar),
                archive_info.GetArchiveHash(mock_tar))

            extra_file = self.GenerateMockFile(tar_dir)
            extra_file2 = self.GenerateMockFile(package_dir)
            extra_dir = os.path.join(tar_dir, 'extra_dir')
            os.makedirs(extra_dir)
            extra_file3 = self.GenerateMockFile(extra_dir)

            package_version.CleanupTarDirectory(tar_dir)

            # Make sure none of the key files were deleted.
            self.assertTrue(os.path.isfile(package_file))
            self.assertTrue(os.path.isfile(archive_file))

            # Make sure package file can be loaded and nothing vital was deleted.
            new_package_desc = package_info.PackageInfo(package_file)

            # Make sure all the extra directories and files were deleted
            self.assertFalse(os.path.isfile(extra_file))
            self.assertFalse(os.path.isfile(extra_file2))
            self.assertFalse(os.path.isfile(extra_file3))
            self.assertFalse(os.path.isdir(extra_dir))

    def test_ArchivePackageArchives(self):
        # Check if we can archive a list of archives to the tar directory.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_tar1 = self.GenerateMockFile(work_dir,
                                              mock_file='file1.tar',
                                              contents='mock contents 1')
            mock_tar2 = self.GenerateMockFile(work_dir,
                                              mock_file='file2.tar',
                                              contents='mock contents 2')

            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'test_package_archives'
            package_name = 'package_archives'
            package_version.ArchivePackageArchives(tar_dir, package_target,
                                                   package_name,
                                                   [mock_tar1, mock_tar2])

            package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            expected_package_desc = self.GeneratePackageInfo(
                [mock_tar1, mock_tar2])
            package_desc = package_info.PackageInfo(package_file)

            self.assertEqual(expected_package_desc, package_desc)

    def test_DownloadMismatchArchiveUponExtraction(self):
        # Test that mismatching archive files are downloaded upon extraction.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_file1 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile1.txt')
            mock_file2 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile2.txt')

            tar_dir = os.path.join(work_dir, 'tar_dir')
            dest_dir = os.path.join(work_dir, 'dest_dir')
            mock_tars_dir = os.path.join(work_dir, 'mock_tars')
            package_target = 'custom_package_target'
            package_name = 'custom_package'
            package_revision = 10

            # Create mock tars and mock URLs from which the tars can be downloaded.
            os.makedirs(mock_tars_dir)
            mock_tar1 = os.path.join(mock_tars_dir, 'mock1.tar')
            mock_url1 = 'https://www.mock.com/tar1.tar'
            with tarfile.TarFile(mock_tar1, 'w') as f:
                f.add(mock_file1, arcname=os.path.basename(mock_file1))
            self._fake_downloader.StoreURL(mock_url1, mock_tar1)

            mock_tar2 = os.path.join(mock_tars_dir, 'mock2.tar')
            mock_url2 = 'https://www.mock.com/tar2.tar'
            with tarfile.TarFile(mock_tar2, 'w') as f:
                f.add(mock_file2, arcname=os.path.basename(mock_file2))
            self._fake_downloader.StoreURL(mock_url2, mock_tar2)

            # Have tar1 be missing, have tar2 be a file with invalid data.
            mismatch_tar2 = package_locations.GetLocalPackageArchiveFile(
                tar_dir, os.path.basename(mock_tar2),
                archive_info.GetArchiveHash(mock_tar2))
            os.makedirs(os.path.dirname(mismatch_tar2))
            with open(mismatch_tar2, 'wb') as f:
                f.write('mismatch tar')

            package_desc = self.GeneratePackageInfo([mock_tar1, mock_tar2],
                                                    url_dict={
                                                        mock_tar1: mock_url1,
                                                        mock_tar2: mock_url2
                                                    })
            package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            package_desc.SavePackageFile(package_file)

            package_version.ExtractPackageTargets(
                [(package_target, package_name)],
                tar_dir,
                dest_dir,
                downloader=self._fake_downloader.Download)
            self.assertEqual(
                self._fake_downloader.GetDownloadCount(), 2,
                "Expected to download exactly 2 mismatched archives.")

            full_dest_dir = package_locations.GetFullDestDir(
                dest_dir, package_target, package_name)

            dest_mock_file2 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file2))
            dest_mock_file1 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file1))

            with open(mock_file1, 'rb') as f:
                mock_contents1 = f.read()
            with open(mock_file2, 'rb') as f:
                mock_contents2 = f.read()
            with open(dest_mock_file1, 'rb') as f:
                dest_mock_contents1 = f.read()
            with open(dest_mock_file2, 'rb') as f:
                dest_mock_contents2 = f.read()

            self.assertEqual(mock_contents1, dest_mock_contents1)
            self.assertEqual(mock_contents2, dest_mock_contents2)

    def test_OverlayPackageTargets(self):
        # Tests that we can extract package targets with an overlay directory
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_file1 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile1.txt')
            mock_file2 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile2.txt')
            mock_file3 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile3.txt')

            tar_dir = os.path.join(work_dir, 'tar_dir')
            overlay_dir = os.path.join(work_dir, 'overlay_dir')
            dest_dir = os.path.join(work_dir, 'dest_dir')
            package_target = 'custom_package_target'
            package_name = 'custom_package'
            package_revision = 10

            os.makedirs(tar_dir)
            os.makedirs(overlay_dir)

            # Tar1 (mockfile1) will be a regular archive within the tar directory,
            # while tar2 (mockfile2) will be overlaid and replaced by
            # overlay_tar2 (mockfile3).
            mock_tar1 = os.path.join(tar_dir, 'archive_name1.tar')
            with tarfile.TarFile(mock_tar1, 'w') as f:
                f.add(mock_file1, arcname=os.path.basename(mock_file1))

            mock_tar2 = os.path.join(tar_dir, 'archive_name2.tar')
            with tarfile.TarFile(mock_tar2, 'w') as f:
                f.add(mock_file2, arcname=os.path.basename(mock_file2))

            overlay_tar2 = os.path.join(overlay_dir, 'archive_name2.tar')
            with tarfile.TarFile(overlay_tar2, 'w') as f:
                f.add(mock_file3, arcname=os.path.basename(mock_file3))

            self.CopyToLocalArchiveFile(mock_tar1, tar_dir)
            self.CopyToLocalArchiveFile(mock_tar2, tar_dir)
            self.CopyToLocalArchiveFile(overlay_tar2, overlay_dir)

            # Generate the regular package file, along with the overlay package file.
            package_desc = self.GeneratePackageInfo([mock_tar1, mock_tar2])
            package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            package_desc.SavePackageFile(package_file)

            overlay_package_desc = self.GeneratePackageInfo([overlay_tar2])
            overlay_package_file = package_locations.GetLocalPackageFile(
                overlay_dir, package_target, package_name)
            overlay_package_desc.SavePackageFile(overlay_package_file)

            package_version.ExtractPackageTargets(
                [(package_target, package_name)],
                tar_dir,
                dest_dir,
                downloader=self._fake_downloader.Download,
                overlay_tar_dir=overlay_dir,
            )

            full_dest_dir = package_locations.GetFullDestDir(
                dest_dir, package_target, package_name)

            dest_mock_file1 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file1))
            dest_mock_file2 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file2))
            dest_mock_file3 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file3))

            # mock_file2 should not exist in the destination since it was replaced.
            self.assertFalse(os.path.isfile(dest_mock_file2))

            with open(mock_file1, 'rb') as f:
                mock_contents1 = f.read()
            with open(mock_file3, 'rb') as f:
                mock_contents3 = f.read()
            with open(dest_mock_file1, 'rb') as f:
                dest_mock_contents1 = f.read()
            with open(dest_mock_file3, 'rb') as f:
                dest_mock_contents3 = f.read()

            self.assertEqual(mock_contents1, dest_mock_contents1)
            self.assertEqual(mock_contents3, dest_mock_contents3)

    def test_ExtractPackageTargets(self):
        # Tests that we can extract package targets from the tar directory properly.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_file1 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile1.txt')
            mock_file2 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile2.txt')
            mock_file3 = self.GenerateMockFile(work_dir,
                                               mock_file='mockfile3.txt')

            tar_dir = os.path.join(work_dir, 'tar_dir')
            dest_dir = os.path.join(work_dir, 'dest_dir')
            package_target = 'custom_package_target'
            package_name = 'custom_package'
            package_revision = 10

            mock_tar1 = os.path.join(work_dir, 'archive_name1.tar')
            with tarfile.TarFile(mock_tar1, 'w') as f:
                f.add(mock_file1, arcname=os.path.basename(mock_file1))

            mock_tar2 = os.path.join(work_dir, 'archive_name2.tar')
            with tarfile.TarFile(mock_tar2, 'w') as f:
                f.add(mock_file2, arcname=os.path.basename(mock_file2))

            mock_tar3 = os.path.join(work_dir, 'archive_name3.tar')
            with tarfile.TarFile(mock_tar3, 'w') as f:
                arcname = os.path.join('rel_dir', os.path.basename(mock_file3))
                f.add(mock_file3, arcname=arcname)

            self.CopyToLocalArchiveFile(mock_tar1, tar_dir)
            self.CopyToLocalArchiveFile(mock_tar2, tar_dir)
            self.CopyToLocalArchiveFile(mock_tar3, tar_dir)

            package_desc = self.GeneratePackageInfo(
                [mock_tar1, mock_tar2, mock_tar3],
                dir_dict={mock_tar2: 'tar2_dir'},
                src_dir_dict={mock_tar3: 'rel_dir'},
            )
            package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            package_desc.SavePackageFile(package_file)

            package_version.ExtractPackageTargets(
                [(package_target, package_name)],
                tar_dir,
                dest_dir,
                downloader=self._fake_downloader.Download)
            self.assertEqual(
                self._fake_downloader.GetDownloadCount(), 0,
                "Extracting a package should not download anything.")

            full_dest_dir = package_locations.GetFullDestDir(
                dest_dir, package_target, package_name)
            dest_mock_file1 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file1))
            dest_mock_file2 = os.path.join(full_dest_dir, 'tar2_dir',
                                           os.path.basename(mock_file2))
            dest_mock_file3 = os.path.join(full_dest_dir,
                                           os.path.basename(mock_file3))

            with open(mock_file1, 'rb') as f:
                mock_contents1 = f.read()
            with open(mock_file2, 'rb') as f:
                mock_contents2 = f.read()
            with open(mock_file3, 'rb') as f:
                mock_contents3 = f.read()
            with open(dest_mock_file1, 'rb') as f:
                dest_mock_contents1 = f.read()
            with open(dest_mock_file2, 'rb') as f:
                dest_mock_contents2 = f.read()
            with open(dest_mock_file3, 'rb') as f:
                dest_mock_contents3 = f.read()

            self.assertEqual(mock_contents1, dest_mock_contents1)
            self.assertEqual(mock_contents2, dest_mock_contents2)
            self.assertEqual(mock_contents3, dest_mock_contents3)
Example #9
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being
  extracted.

  Args:
    package_target_packages: List of tuples of package target and package names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: Function which takes a URL and a file path for downloading.
  """
  if downloader is None:
    downloader = gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(
        tar_dir,
        package_target,
        package_name
    )
    package_desc = package_info.PackageInfo(package_file)
    dest_package_dir = package_locations.GetFullDestDir(
        dest_dir,
        package_target,
        package_name
    )
    dest_package_file = package_locations.GetDestPackageFile(
        dest_dir,
        package_target,
        package_name
    )

    # Only do the extraction if the extracted package does not match.
    if os.path.isfile(dest_package_file):
      dest_package_desc = package_info.PackageInfo(dest_package_file)
      if dest_package_desc == package_desc:
        logging.debug('Skipping extraction for package (%s)', package_name)
        continue

    if os.path.isdir(dest_package_dir):
      logging.info('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    for archive_desc in package_desc.GetArchiveList():
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )

      # Some archives may not have been downloaded yet, or may be stale, so
      # check the hash of each one and re-download it if the hash does not
      # match.
      archive_hash = package_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        logging.warn('Expected archive missing, downloading: %s',
                     archive_desc.name)
        if archive_desc.url is None:
          raise IOError('Invalid archive file and URL: %s' % archive_file)

        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        archive_hash = package_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise IOError('Downloaded archive file does not match hash.'
                        ' [%s] Expected %s, received %s.' %
                        (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
      logging.info('Extracting %s to %s...', archive_desc.name, destination_dir)

      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      with tarfile.TarFile(archive_file, 'r') as f:
        f.extractall(temp_dir)

      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    shutil.copy(package_file, dest_package_file)
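A typical call, with placeholder directory paths and (package_target, package_name) pairs:

tar_dir = 'toolchain/.tars'   # hypothetical paths
dest_dir = 'toolchain'
ExtractPackageTargets(
    [('linux_x86', 'example_package')],  # placeholder target/name pair
    tar_dir,
    dest_dir)
# downloader defaults to gsd_storage.HttpDownload when omitted.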
Example #10
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default local package files are expected to be found in the standardized
  location within the tar directory, however a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automatically have the archives uploaded so that
  someone accessing the package file from the cloud storage will also have
  access to the package archives.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Whether to print annotator build step and link markers.
    custom_package_file: File location for a custom package file.
  Returns:
    Returns remote download key for the uploaded package file.
  """
  if annotate:
    print '@@@BUILD_STEP upload_package@@@'
  if custom_package_file is not None:
    local_package_file = custom_package_file
  else:
    local_package_file = package_locations.GetLocalPackageFile(
        tar_dir,
        package_target,
        package_name
    )

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file)
  upload_package_desc = package_info.PackageInfo()

  for archive_desc in package_desc.GetArchiveList():
    url = archive_desc.url
    if url is None:
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name)
      archive_hash = package_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise IOError('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
        raise IOError(
            'Archive hash does not match package hash: %s' % archive_file
            + '\n  Archive Hash: %s' % archive_hash
            + '\n  Package Hash: %s' % archive_desc.hash
        )

      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publicly available...')
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
          archive_desc.name,
          archive_desc.hash
      )
      url = storage.PutFile(
          archive_file,
          remote_archive_key,
          clobber=True
      )
      if annotate:
        print '@@@STEP_LINK@download@%s@@@' % url

    upload_package_desc.AppendArchive(
        archive_desc.name,
        archive_desc.hash,
        url=url,
        tar_src_dir=archive_desc.tar_src_dir,
        extract_dir=archive_desc.extract_dir
    )

  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
      is_shared_package,
      revision,
      package_target,
      package_name
  )
  url = storage.PutFile(upload_package_file, remote_package_key)
  if annotate:
    print '@@@STEP_LINK@download@%s@@@' % url

  return remote_package_key
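Hypothetical usage, with any storage object that exposes PutFile and GetFile:

remote_key = UploadPackage(
    storage,             # e.g. a gsd_storage instance (assumed)
    12345,               # revision
    'toolchain/.tars',   # hypothetical tar directory
    'linux_x86',
    'example_package',
    is_shared_package=False,
    annotate=True)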
Example #11
def ArchivePackageArchives(tar_dir, package_target, package_name, archives):
  """Archives local package archives to the tar directory.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live.
  Returns:
    Returns the local package file that was archived.
  """
  local_package_file = package_locations.GetLocalPackageFile(
      tar_dir,
      package_target,
      package_name
  )

  archive_list = []

  package_desc = package_info.PackageInfo()
  for archive in archives:
    archive_url = None
    if '@' in archive:
      archive, archive_url = archive.split('@', 1)

    extract_param = ''
    tar_src_dir = ''
    extract_dir = ''
    if ',' in archive:
      archive, extract_param = archive.split(',', 1)
      if ':' in extract_param:
        tar_src_dir, extract_dir = extract_param.split(':', 1)
      else:
        tar_src_dir = extract_param

    archive_hash = package_info.GetArchiveHash(archive)
    if archive_hash is None:
      raise IOError('Invalid package: %s.' % archive)

    archive_name = os.path.basename(archive)

    archive_list.append(archive)
    package_desc.AppendArchive(
        archive_name,
        archive_hash,
        url=archive_url,
        tar_src_dir=tar_src_dir,
        extract_dir=extract_dir
    )

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  # the json file.
  if os.path.isfile(local_package_file):
    try:
      current_package_desc = package_info.PackageInfo(local_package_file)
      if current_package_desc == package_desc:
        return local_package_file
    except ValueError:
      pass

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        archive_name
    )

    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
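Each archive string packs optional extraction info and a URL into one value: the URL (if any) follows '@' and is split off first, then ',' introduces the extract parameters. A hypothetical entry showing the full <path>[,<tar_src_dir>[:<extract_dir>]][@<url>] form:

ArchivePackageArchives(
    'toolchain/.tars', 'linux_x86', 'example_package',
    ['out/core.tar,payload:lib@https://example.com/core.tar'])
# Archives core.tar, recording that its contents live under payload/ inside
# the tar and should be extracted into lib/ at the destination.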
Example #12
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
                            downloader=None):
  """Downloads package archives from the cloud to the tar directory.

  Args:
    tar_dir: Root tar directory where archives will be downloaded to.
    package_target: Package target of the package to download.
    package_name: Package name of the package to download.
    package_desc: package_info object of the package to download.
    downloader: Function which takes a URL and a file path for downloading.
  Returns:
    The list of files that were downloaded.
  """
  downloaded_files = []
  if downloader is None:
    downloader = gsd_storage.HttpDownload
  local_package_file = package_locations.GetLocalPackageFile(
      tar_dir,
      package_target,
      package_name
  )
  # To ensure that we do not redownload extra archives that we already have,
  # create a dictionary of old package archives that contains the hash of each
  # package archive.
  old_archives = {}
  if os.path.isfile(local_package_file):
    old_package_desc = package_info.PackageInfo(local_package_file)
    old_archives_list = old_package_desc.GetArchiveList()
    old_archive_names = [archive.name for archive in old_archives_list]
    for archive_name in old_archive_names:
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_name
      )

      archive_hash = package_info.GetArchiveHash(archive_file)
      if archive_hash is not None:
        old_archives[archive_name] = archive_hash

  # Download each of the package archives described in the package
  # information file. Also keep track of which new archive names match old
  # archive names, since stale archives will need to be deleted once we are
  # finished.
  for archive_info in package_desc.GetArchiveList():
    old_hash = old_archives.get(archive_info.name, None)
    if old_hash is not None:
      old_archives.pop(archive_info.name)
      if archive_info.hash == old_hash:
        logging.debug('Skipping matching archive: %s', archive_info.name)
        continue

    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        archive_info.name
    )
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)

    if archive_info.url is None:
      raise IOError('Error, no URL for archive: %s' % archive_info.name)

    logging.info('Downloading package archive: %s', archive_info.name)
    downloader(archive_info.url, local_archive_file)
    verified_hash = package_info.GetArchiveHash(local_archive_file)
    if verified_hash != archive_info.hash:
      raise IOError('Package hash check failed: %s != %s' %
                    (verified_hash, archive_info.hash))

    downloaded_files.append(local_archive_file)

  # Delete any stale left over packages.
  for old_archive in old_archives:
    archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        old_archive
    )
    os.unlink(archive_file)

  return downloaded_files
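Hypothetical usage with a package description previously fetched from storage:

package_desc = package_info.PackageInfo('downloaded_package.json')
downloaded = DownloadPackageArchives(
    'toolchain/.tars', 'linux_x86', 'example_package', package_desc)
for path in downloaded:
  logging.info('Downloaded: %s', path)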
Example #13
def ExtractPackageTargets(package_target_packages, tar_dir, dest_dir,
                          downloader=None, skip_missing=False, quiet=False):
  """Extracts package targets from the tar directory to the destination.

  Each package archive within a package will be verified before being
  extracted. If a package archive does not exist or does not match the hash
  stored within the package file, it will be re-downloaded before being
  extracted.

  Args:
    package_target_packages: List of tuples of package target and package names.
    tar_dir: Source tar directory where package archives live.
    dest_dir: Root destination directory where packages will be extracted to.
    downloader: Function which takes a URL and a file path for downloading.
    skip_missing: If True, skip archives that are missing locally and have
        no download URL instead of raising an error.
    quiet: If True, suppress per-file tar output during extraction.
  """
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload

  for package_target, package_name in package_target_packages:
    package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                         package_target,
                                                         package_name)
    package_desc = package_info.PackageInfo(package_file,
                                            skip_missing=skip_missing)
    dest_package_dir = package_locations.GetFullDestDir(dest_dir,
                                                        package_target,
                                                        package_name)
    dest_package_file = package_locations.GetDestPackageFile(dest_dir,
                                                             package_target,
                                                             package_name)

    # Only do the extraction if the extracted package does not match.
    if os.path.isfile(dest_package_file):
      try:
        dest_package_desc = package_info.PackageInfo(dest_package_file)
        if dest_package_desc == package_desc:
          logging.debug('Skipping extraction for package (%s)', package_name)
          continue
      except Exception:
        # Destination package file cannot be trusted; if invalid, re-extract.
        pass

      # Delete the old package file before we extract.
      os.unlink(dest_package_file)

    if os.path.isdir(dest_package_dir):
      logging.debug('Deleting old package directory: %s', dest_package_dir)
      pynacl.file_tools.RemoveDir(dest_package_dir)

    logging.info('Extracting package (%s) to directory: %s',
                 package_name, dest_package_dir)
    archive_list = package_desc.GetArchiveList()
    num_archives = len(archive_list)
    for index, archive_obj in enumerate(archive_list):
      archive_desc = archive_obj.GetArchiveData()
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )

      # Some archives may not have been downloaded yet, or may be stale, so
      # check the hash of each one and re-download it if the hash does not
      # match.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash != archive_desc.hash:
        if archive_desc.url is None:
          if skip_missing:
            logging.info('Skipping extraction of missing archive: %s' %
                         archive_file)
            continue
          raise IOError('Invalid archive file and URL: %s' % archive_file)

        logging.warn('Expected archive missing, downloading: %s',
                     archive_desc.name)

        pynacl.file_tools.MakeParentDirectoryIfAbsent(archive_file)
        downloader(archive_desc.url, archive_file)
        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash != archive_desc.hash:
          raise IOError('Downloaded archive file does not match hash.'
                        ' [%s] Expected %s, received %s.' %
                        (archive_file, archive_desc.hash, archive_hash))

      destination_dir = os.path.join(dest_package_dir, archive_desc.extract_dir)
      logging.info('Extracting %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))

      temp_dir = os.path.join(destination_dir, '.tmp')
      pynacl.file_tools.RemoveDir(temp_dir)
      os.makedirs(temp_dir)
      tar_output = not quiet
      tar = cygtar.CygTar(archive_file, 'r:*', verbose=tar_output)
      curdir = os.getcwd()
      os.chdir(temp_dir)
      try:
        tar.Extract()
        tar.Close()
      finally:
        os.chdir(curdir)

      temp_src_dir = os.path.join(temp_dir, archive_desc.tar_src_dir)
      pynacl.file_tools.MoveAndMergeDirTree(temp_src_dir, destination_dir)
      pynacl.file_tools.RemoveDir(temp_dir)

    pynacl.file_tools.MakeParentDirectoryIfAbsent(dest_package_file)
    package_desc.SavePackageFile(dest_package_file)
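The extra flags in this variant combine naturally for a tolerant, quiet sync (names below are placeholders):

ExtractPackageTargets(
    [('linux_x86', 'example_package')],
    'toolchain/.tars',
    'toolchain',
    skip_missing=True,  # tolerate archives with no local copy and no URL
    quiet=True)         # suppress per-file tar output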
Example #14
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
                           extra_archives=[]):
  """Archives local package archives to the tar directory.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live.
    extra_archives: Extra archives that are expected to be built elsewhere.
  Returns:
    Returns the local package file that was archived.
  """
  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)

  valid_archive_files = set()
  archive_list = []

  package_desc = package_info.PackageInfo()
  package_archives = ([(archive, False) for archive in archives] +
                      [(archive, True) for archive in extra_archives])
  for archive, skip_missing in package_archives:
    archive_url = None
    if '@' in archive:
      archive, archive_url = archive.split('@', 1)

    extract_param = ''
    tar_src_dir = ''
    extract_dir = ''
    if ',' in archive:
      archive, extract_param = archive.split(',', 1)
      if ':' in extract_param:
        tar_src_dir, extract_dir = extract_param.split(':', 1)
      else:
        tar_src_dir = extract_param

    archive_hash = archive_info.GetArchiveHash(archive)
    archive_name = os.path.basename(archive)
    archive_desc = archive_info.ArchiveInfo(archive_name,
                                            archive_hash,
                                            url=archive_url,
                                            tar_src_dir=tar_src_dir,
                                            extract_dir=extract_dir)
    package_desc.AppendArchive(archive_desc)

    if archive_hash is None:
      if skip_missing:
        logging.info('Skipping archival of missing file: %s', archive)
        continue
      raise IOError('Invalid package: %s.' % archive)
    archive_list.append(archive)

    archive_basename = os.path.basename(archive)
    archive_json = archive_basename + '.json'
    valid_archive_files.update([archive_basename, archive_json])

  # Delete any stale archive files
  local_archive_dir = package_locations.GetLocalPackageArchiveDir(
      tar_dir,
      package_target,
      package_name)

  if os.path.isdir(local_archive_dir):
    for dir_item in os.listdir(local_archive_dir):
      if dir_item in valid_archive_files:
        continue

      item_path = os.path.join(local_archive_dir, dir_item)
      if os.path.isdir(item_path):
        pynacl.file_tools.RemoveDir(item_path)
      else:
        pynacl.file_tools.RemoveFile(item_path)

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  # the json file.
  if os.path.isfile(local_package_file):
    try:
      current_package_desc = package_info.PackageInfo(local_package_file,
                                                      skip_missing=True)
      if current_package_desc == package_desc:
        return local_package_file
    except ValueError:
      pass

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        archive_name)

    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
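Hypothetical usage of extra_archives for archives produced by another builder; entries listed there may be absent locally without aborting the archival:

ArchivePackageArchives(
    'toolchain/.tars', 'linux_x86', 'example_package',
    ['out/core.tar'],
    extra_archives=['out/optional_debug.tar'])  # may not exist locally yet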
Example #15
def DownloadPackageArchives(tar_dir, package_target, package_name, package_desc,
                            downloader=None, revision_num=None):
  """Downloads package archives from the cloud to the tar directory.

  Args:
    tar_dir: Root tar directory where archives will be downloaded to.
    package_target: Package target of the package to download.
    package_name: Package name of the package to download.
    package_desc: package_info object of the package to download.
    downloader: Function which takes a URL and a file path for downloading.
    revision_num: Revision number of the package, used only for logging
        sync progress.
  Returns:
    The list of files that were downloaded.
  """
  downloaded_files = []
  if downloader is None:
    downloader = pynacl.gsd_storage.HttpDownload
  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)
  # To ensure that we do not redownload extra archives that we already have,
  # create a dictionary of old package archives that contains the hash of each
  # package archive.
  old_archives = {}
  if os.path.isfile(local_package_file):
    try:
      old_package_desc = package_info.PackageInfo(local_package_file)
      old_archives_list = old_package_desc.GetArchiveList()
      old_archive_names = [archive.GetArchiveData().name
                           for archive
                           in old_archives_list]
      for archive_name in old_archive_names:
        archive_file = package_locations.GetLocalPackageArchiveFile(
            tar_dir,
            package_target,
            package_name,
            archive_name
            )

        archive_hash = archive_info.GetArchiveHash(archive_file)
        if archive_hash is not None:
          old_archives[archive_name] = archive_hash
    except Exception:
      # Nothing can be trusted here anymore; delete all the package archives.
      archive_directory = package_locations.GetLocalPackageArchiveDir(
          tar_dir,
          package_target,
          package_name
          )
      os.unlink(local_package_file)
      pynacl.file_tools.RemoveDir(archive_directory)

  # Download each of the package archives described in the package
  # information file. Also keep track of which new archive names match old
  # archive names, since stale archives will need to be deleted once we are
  # finished.
  update_archives = []
  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    old_hash = old_archives.get(archive_desc.name, None)
    if old_hash is not None:
      old_archives.pop(archive_desc.name)
      if archive_desc.hash == old_hash:
        logging.debug('Skipping matching archive: %s', archive_desc.name)
        continue
    update_archives.append(archive_obj)

  if update_archives:
    logging.info('--Syncing %s to revision %s--' % (package_name, revision_num))
    num_archives = len(update_archives)
    for index, archive_obj in enumerate(update_archives):
      archive_desc = archive_obj.GetArchiveData()
      local_archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name
      )
      pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)

      if archive_desc.url is None:
        raise IOError('Error, no URL for archive: %s' % archive_desc.name)

      logging.info('Downloading package archive: %s (%d/%d)' %
                   (archive_desc.name, index+1, num_archives))
      try:
        downloader(archive_desc.url, local_archive_file)
      except Exception as e:
        raise IOError('Could not download URL (%s): %s' %
                      (archive_desc.url, e))

      verified_hash = archive_info.GetArchiveHash(local_archive_file)
      if verified_hash != archive_desc.hash:
        raise IOError('Package hash check failed: %s != %s' %
                      (verified_hash, archive_desc.hash))

      downloaded_files.append(local_archive_file)

  # Delete any stale left over packages.
  for old_archive in old_archives:
    archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        old_archive)
    os.unlink(archive_file)

  # Save the package file so we know what we currently have.
  package_desc.SavePackageFile(local_package_file)

  return downloaded_files
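Since this variant surfaces downloader failures as IOError, a caller that wants retries can wrap the downloader it passes in. A sketch, not part of the original module:

import time

def MakeRetryDownloader(downloader, attempts=3, delay_secs=2):
  """Wraps a (url, file_path) downloader with simple retry logic."""
  def RetryDownload(url, file_path):
    for attempt in range(attempts):
      try:
        return downloader(url, file_path)
      except Exception:
        if attempt + 1 == attempts:
          raise
        time.sleep(delay_secs)
  return RetryDownload

# Usage:
#   DownloadPackageArchives(tar_dir, package_target, package_name,
#                           package_desc,
#                           downloader=MakeRetryDownloader(
#                               pynacl.gsd_storage.HttpDownload))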