Example No. 1
    def test_PackageUpload(self):
        # Check if we can properly upload a package file from the tar directory.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'test_package_archives'
            package_name = 'package_archives'
            package_revision = 10
            package_version.ArchivePackageArchives(tar_dir, package_target,
                                                   package_name, [])

            package_version.UploadPackage(self._fake_storage, package_revision,
                                          tar_dir, package_target,
                                          package_name, False)
            self.assertEqual(self._fake_storage.WriteCount(), 1,
                             "Package did not get properly uploaded")

            remote_package_key = package_locations.GetRemotePackageKey(
                False, package_revision, package_target, package_name)
            downloaded_package = os.path.join(work_dir,
                                              'download_package.json')
            package_info.DownloadPackageInfoFiles(
                downloaded_package,
                remote_package_key,
                downloader=self._fake_storage.GetFile)
            downloaded_package_desc = package_info.PackageInfo(
                downloaded_package)

            original_package_file = package_locations.GetLocalPackageFile(
                tar_dir, package_target, package_name)
            original_package_desc = package_info.PackageInfo(
                original_package_file)

            self.assertEqual(downloaded_package_desc, original_package_desc)

    def test_CustomPackageUpload(self):
        # Check if we can upload a package file from a custom location.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            custom_package_file = os.path.join(work_dir, 'custom_package.json')
            package_desc = self.GeneratePackageInfo([])
            package_desc.SavePackageFile(custom_package_file)

            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'custom_package_target'
            package_name = 'custom_package'
            package_revision = 10

            package_version.UploadPackage(
                self._fake_storage,
                package_revision,
                tar_dir,
                package_target,
                package_name,
                False,
                custom_package_file=custom_package_file)
            self.assertEqual(self._fake_storage.WriteCount(), 1,
                             "Package did not get properly uploaded")

            remote_package_key = package_locations.GetRemotePackageKey(
                False, package_revision, package_target, package_name)
            downloaded_package = os.path.join(work_dir,
                                              'download_package.json')
            self._fake_storage.GetFile(remote_package_key, downloaded_package)
            downloaded_package_desc = package_info.PackageInfo(
                downloaded_package)

            original_package_desc = package_info.PackageInfo(
                custom_package_file)

            self.assertEqual(downloaded_package_desc, original_package_desc)
Example No. 3
def _DoSyncCmd(arguments):
  # TODO(dyen): remove this section eventually.
  # Before syncing, remove any old toolchain files temporarily.
  RemoveOldToolchainFiles(arguments.dest_dir)

  for package_target, package_name in arguments.package_target_packages:
    if arguments.sync__revision is None:
      # When the sync revision number is not specified, use the revision
      # number that was previously set in the revisions directory.
      revision_file = package_locations.GetRevisionFile(
          arguments.revisions_dir,
          package_name)
      revision_desc = revision_info.RevisionInfo(
          arguments.packages_desc,
          revision_file)
      package_desc = revision_desc.GetPackageInfo(package_target)
      revision_num = revision_desc.GetRevisionNumber()
    else:
      # When the sync revision number is specified, find the package to
      # download remotely using the revision.
      revision_num = arguments.sync__revision
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          arguments.sync__revision,
          package_target,
          package_name)
      with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
        temp_package_file = os.path.join(
            work_dir,
            os.path.basename(remote_package_key) + TEMP_SUFFIX)

        package_info.DownloadPackageInfoFiles(
            temp_package_file,
            remote_package_key,
            downloader=arguments.gsd_store.GetFile)

        package_desc = package_info.PackageInfo(temp_package_file)

    DownloadPackageArchives(
        arguments.tar_dir,
        package_target,
        package_name,
        package_desc,
        revision_num=revision_num)

  CleanTempFiles(arguments.tar_dir)

  if arguments.sync__extract:
    ExtractPackageTargets(
        arguments.package_target_packages,
        arguments.tar_dir,
        arguments.dest_dir,
        quiet=arguments.quiet)
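
The sync command above is driven entirely by a single `arguments` object. Below is a minimal sketch (not part of the original source) of the fields that object needs, built with an argparse-style namespace; every concrete value shown is a placeholder, and in the real tool the namespace is produced by the package_version command-line parser.

import argparse

# Hypothetical namespace mirroring the attributes _DoSyncCmd reads above.
# All values are illustrative placeholders, not real configuration.
arguments = argparse.Namespace(
    tar_dir='toolchain/.tars',            # where downloaded archives are stored
    dest_dir='toolchain',                 # extraction destination
    revisions_dir='toolchain_revisions',  # directory holding revision files
    packages_desc=None,                   # packages description object (placeholder)
    gsd_store=None,                       # storage object exposing GetFile (placeholder)
    package_target_packages=[('linux_x86', 'example_package')],  # (target, package) pairs
    sync__revision=None,                  # None => use the checked-in revision files
    sync__extract=False,                  # set True to extract after syncing
    quiet=True)

# _DoSyncCmd(arguments)  # would then download archives for each pair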
Example No. 4
def _DoSyncCmd(arguments):
  for package_target, package_name in arguments.package_target_packages:
    if arguments.sync__revision is None:
      # When the sync revision number is not specified, use the revision
      # number that was previously set in the revisions directory.
      revision_file = package_locations.GetRevisionFile(
          arguments.revisions_dir,
          package_name
      )
      revision_desc = revision_info.RevisionInfo(
          arguments.packages_desc,
          revision_file
      )
      package_desc = revision_desc.GetPackageInfo(package_target)
    else:
      # When the sync revision number is specified, find the package to
      # download remotely using the revision.
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          arguments.sync__revision,
          package_target,
          package_name
      )
      temp_package_file = os.path.join(
          arguments.tar_dir,
          os.path.basename(remote_package_key) + TEMP_SUFFIX
      )
      pynacl.file_tools.MakeParentDirectoryIfAbsent(temp_package_file)
      url = arguments.gsd_store.GetFile(remote_package_key, temp_package_file)
      if url is None:
        raise IOError('Could not sync file: %s' % remote_package_key)

      package_desc = package_info.PackageInfo(temp_package_file)

    DownloadPackageArchives(
        arguments.gsd_store,
        arguments.tar_dir,
        package_target,
        package_name,
        package_desc
    )

  CleanTempFiles(arguments.tar_dir)

  if arguments.sync__extract:
    ExtractPackageTargets(
        arguments.package_target_packages,
        arguments.tar_dir,
        arguments.dest_dir
    )
Example No. 5
    def test_NoArchiveURLDoesUpload(self):
        # Checks that when a package is uploaded with no archive URL, the
        # archive itself gets uploaded.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'custom_package_target'
            package_name = 'custom_package'
            package_revision = 10

            mock_file = self.GenerateMockFile(work_dir)
            mock_tar = package_locations.GetLocalPackageArchiveFile(
                tar_dir, package_target, package_name, 'archive_name.tar')
            os.makedirs(os.path.dirname(mock_tar))
            with tarfile.TarFile(mock_tar, 'w') as f:
                f.add(mock_file)

            package_desc = self.GeneratePackageInfo([mock_tar])

            package_file = os.path.join(work_dir, 'package_file.json')
            package_desc.SavePackageFile(package_file)

            package_version.UploadPackage(self._fake_storage,
                                          package_revision,
                                          tar_dir,
                                          package_target,
                                          package_name,
                                          False,
                                          custom_package_file=package_file)
            self.assertEqual(
                self._fake_storage.WriteCount(), 3,
                "3 files (package, archive_info, archive) should have been uploaded."
            )

            remote_package_key = package_locations.GetRemotePackageKey(
                False, package_revision, package_target, package_name)
            downloaded_package = os.path.join(work_dir,
                                              'download_package.json')
            package_info.DownloadPackageInfoFiles(
                downloaded_package,
                remote_package_key,
                downloader=self._fake_storage.GetFile)
            downloaded_package_desc = package_info.PackageInfo(
                downloaded_package)

            archive_list = downloaded_package_desc.GetArchiveList()
            self.assertEqual(
                len(archive_list), 1,
                "The downloaded package does not have 1 archive.")
            self.assertTrue(
                archive_list[0].GetArchiveData().url,
                "The downloaded archive still does not have a proper URL")
Example No. 6
def _DoSetRevisionCmd(arguments):
  package_name = arguments.setrevision__package
  revision_num = arguments.setrevision__revision

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
  revision_desc.SetRevisionNumber(revision_num)

  custom_package_targets = GetPackageTargetPackages(package_name, [])
  if not custom_package_targets:
    package_targets = arguments.packages_desc.GetPackageTargetsForPackage(
        package_name)
  else:
    package_targets = [target[0] for target in custom_package_targets]
    first_target = custom_package_targets[0]
    package_name = first_target[1]

  with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
    for package_target in package_targets:
      remote_package_key = package_locations.GetRemotePackageKey(
          arguments.packages_desc.IsSharedPackage(package_name),
          revision_num,
          package_target,
          package_name)

      temp_package_file = os.path.join(
          work_dir,
          os.path.basename(remote_package_key) + TEMP_SUFFIX)

      package_info.DownloadPackageInfoFiles(
          temp_package_file,
          remote_package_key,
          downloader=arguments.gsd_store.GetFile)

      package_desc = package_info.PackageInfo(temp_package_file)

      logging.info('Setting %s:%s to revision %s',
                   package_target, package_name, revision_num)
      revision_desc.SetTargetRevision(
          package_name,
          package_target,
          package_desc)

  revision_file = package_locations.GetRevisionFile(
      arguments.revisions_dir,
      package_name)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(revision_file)
  revision_desc.SaveRevisionFile(revision_file)

  CleanTempFiles(arguments.revisions_dir)
Example No. 7
def _DoSetRevisionCmd(arguments):
  package_name = arguments.setrevision__package
  revision_num = arguments.setrevision__revision

  revision_desc = revision_info.RevisionInfo(arguments.packages_desc)
  package_targets = arguments.packages_desc.GetPackageTargetsForPackage(
      package_name
  )

  for package_target in package_targets:
    remote_package_key = package_locations.GetRemotePackageKey(
        arguments.packages_desc.IsSharedPackage(package_name),
        revision_num,
        package_target,
        package_name
    )
    temp_package_file = os.path.join(
        arguments.revisions_dir,
        os.path.basename(remote_package_key) + TEMP_SUFFIX)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(temp_package_file)
    url = arguments.gsd_store.GetFile(remote_package_key, temp_package_file)
    if url is None:
      raise IOError('Could not download package file: %s' % remote_package_key)

    package_desc = package_info.PackageInfo(temp_package_file)
    logging.info('Setting %s:%s to revision %s',
                 package_target, package_name, revision_num)
    revision_desc.SetTargetRevision(
        package_name,
        package_target,
        package_desc
    )

  revision_file = package_locations.GetRevisionFile(
      arguments.revisions_dir,
      package_name
  )
  pynacl.file_tools.MakeParentDirectoryIfAbsent(revision_file)
  revision_desc.SaveRevisionFile(revision_file)

  CleanTempFiles(arguments.revisions_dir)
Example No. 8
    def test_UploadKeepsArchiveURL(self):
        # Checks if the archive URL is kept after a package upload.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_tar = self.GenerateMockFile(work_dir)
            mock_url = 'http://www.mock.com/mock.tar'
            package_desc = self.GeneratePackageInfo(
                [mock_tar], url_dict={mock_tar: mock_url})

            package_file = os.path.join(work_dir, 'package_file.json')
            package_desc.SavePackageFile(package_file)

            tar_dir = os.path.join(work_dir, 'tar_dir')
            package_target = 'custom_package_target'
            package_name = 'custom_package'
            package_revision = 10

            package_version.UploadPackage(self._fake_storage,
                                          package_revision,
                                          tar_dir,
                                          package_target,
                                          package_name,
                                          False,
                                          custom_package_file=package_file)
            self.assertEqual(self._fake_storage.WriteCount(), 2,
                             "Package did not get properly uploaded")

            remote_package_key = package_locations.GetRemotePackageKey(
                False, package_revision, package_target, package_name)
            downloaded_package = os.path.join(work_dir,
                                              'download_package.json')
            package_info.DownloadPackageInfoFiles(
                downloaded_package,
                remote_package_key,
                downloader=self._fake_storage.GetFile)
            downloaded_package_desc = package_info.PackageInfo(
                downloaded_package)

            # Verify everything (including URL) still matches.
            self.assertEqual(downloaded_package_desc, package_desc)
Example No. 9
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default, local package files are expected to be found in the standardized
  location within the tar directory; however, a custom package file may be
  specified to upload from a different location. Archives whose URL field is
  not set are automatically uploaded as well, so that anyone accessing the
  package file from cloud storage also has access to the package archives.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Whether to print buildbot annotation markers (build step and
      download links) during the upload.
    custom_package_file: File location for a custom package file.
  Returns:
    The remote download key for the uploaded package file.
  """
  if annotate:
    print '@@@BUILD_STEP upload_package@@@'
  if custom_package_file is not None:
    local_package_file = custom_package_file
  else:
    local_package_file = package_locations.GetLocalPackageFile(
        tar_dir,
        package_target,
        package_name
    )

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file)
  upload_package_desc = package_info.PackageInfo()

  for archive_desc in package_desc.GetArchiveList():
    url = archive_desc.url
    if url is None:
      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name)
      archive_hash = package_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise IOError('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
        raise IOError(
            'Archive hash does not match package hash: %s' % archive_file
            + '\n  Archive Hash: %s' % archive_hash
            + '\n  Package Hash: %s' % archive_desc.hash
        )

      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publicly available...')
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
          archive_desc.name,
          archive_desc.hash
      )
      url = storage.PutFile(
          archive_file,
          remote_archive_key,
          clobber=True
      )
      if annotate:
        print '@@@STEP_LINK@download@%s@@@' % url

    upload_package_desc.AppendArchive(
        archive_desc.name,
        archive_desc.hash,
        url=url,
        tar_src_dir=archive_desc.tar_src_dir,
        extract_dir=archive_desc.extract_dir
    )

  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
      is_shared_package,
      revision,
      package_target,
      package_name
  )
  url = storage.PutFile(upload_package_file, remote_package_key)
  if annotate:
    print '@@@STEP_LINK@download@%s@@@' % url

  return remote_package_key
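
As a rough illustration of the API described in the docstring, the sketch below shows how UploadPackage might be called. `InMemoryStorage` is a hypothetical stand-in for any storage object that exposes PutFile and GetFile; the revision, directory, and package names are illustrative, and the call assumes a package file already exists at the standard location under tar_dir.

# Hypothetical storage object; any object with PutFile/GetFile works, per the
# docstring above. Real callers pass a Google Storage wrapper (gsd_store).
class InMemoryStorage(object):
  def __init__(self):
    self.files = {}

  def PutFile(self, local_path, remote_key, clobber=False):
    # Store the file contents in memory and return a fake download URL.
    with open(local_path, 'rb') as f:
      self.files[remote_key] = f.read()
    return 'gs://example-bucket/' + remote_key

  def GetFile(self, remote_key, local_path):
    if remote_key not in self.files:
      return None
    with open(local_path, 'wb') as f:
      f.write(self.files[remote_key])
    return local_path

remote_key = UploadPackage(
    InMemoryStorage(),
    12345,                # illustrative revision number
    'toolchain/.tars',    # tar directory holding the local package file
    'linux_x86',          # illustrative package target
    'example_package',    # illustrative package name
    False)                # not a shared package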