Пример #1
0
    def LoadPackageFile(self, package_file, skip_missing=False):
        """Loads a package file into this object.

    Args:
      package_file: Filename or JSON dictionary.
      skip_missing: When loading from a filename, tolerate archive JSON
        files that are missing on disk by recording a name-only archive
        entry instead of raising error.Error.
    """
        archive_names = None
        self._archive_list = []

        # TODO(dyen): Support old format temporarily when it was a list of archives.
        if isinstance(package_file, (list, dict)):
            if isinstance(package_file, list):
                # Old format: a bare list of archives implies version 0.
                self._package_version = 0
                archive_list = package_file
            else:
                self._package_version = package_file[PACKAGE_KEY_VERSION]
                archive_list = package_file[PACKAGE_KEY_ARCHIVES]

            if archive_list:
                if isinstance(archive_list[0], archive_info.ArchiveInfo):
                    # Setting a list of ArchiveInfo objects, no need to interpret JSON.
                    self._archive_list = archive_list
                else:
                    # Assume to be JSON.
                    for archive_json in archive_list:
                        archive = archive_info.ArchiveInfo(
                            archive_info_file=archive_json)
                        self._archive_list.append(archive)

        elif isinstance(package_file, (str, unicode)):
            package_data = ReadPackageFile(package_file)
            self._package_version = package_data[PACKAGE_KEY_VERSION]
            archive_names = package_data[PACKAGE_KEY_ARCHIVES]

            # Per-archive JSON files live in "<package_dir>/<package_name>/".
            package_name = GetLocalPackageName(package_file)
            archive_dir = os.path.join(os.path.dirname(package_file),
                                       package_name)
            for archive in archive_names:
                arch_file = archive + '.json'
                arch_path = os.path.join(archive_dir, arch_file)
                if not os.path.isfile(arch_path):
                    if not skip_missing:
                        raise error.Error(
                            'Package (%s) points to invalid archive file (%s).'
                            % (package_file, arch_path))
                    archive_desc = archive_info.ArchiveInfo(name=archive)
                else:
                    archive_desc = archive_info.ArchiveInfo(
                        archive_info_file=arch_path)
                self._archive_list.append(archive_desc)
        else:
            raise error.Error('Invalid load package file type (%s): %s.' %
                              (type(package_file), package_file))
Пример #2
0
  def test_PackageJsonDumpLoad(self):
    # Verify that JSON emitted by DumpPackageJson() round-trips back through
    # the PackageInfo constructor into an equal package.
    archives = [('archive_json1.tar', 'archive_json_hash1'),
                ('archive_json2.tar', 'archive_json_hash2')]

    original = package_info.PackageInfo()
    for name, digest in archives:
      original.AppendArchive(archive_info.ArchiveInfo(name, digest))

    rebuilt = package_info.PackageInfo(original.DumpPackageJson())
    self.assertEqual(original, rebuilt)
Пример #3
0
    def OutputPackagesInformation(self):
        """Outputs packages information for the built data.

        Writes a JSON package file per (package target, output package) pair
        under <output>/packages, and optionally writes the list of produced
        package files to self._options.packages_file.
        """
        packages_dir = os.path.join(self._options.output, 'packages')
        pynacl.file_tools.RemoveDirectoryIfPresent(packages_dir)
        os.makedirs(packages_dir)

        built_packages = []
        for target, target_dict in self._package_targets.iteritems():
            target_dir = os.path.join(packages_dir, target)
            pynacl.file_tools.MakeDirectoryIfAbsent(target_dir)
            for output_package, components in target_dict.iteritems():
                package_desc = package_info.PackageInfo()

                include_package = False
                for component in components:
                    # Components without an extension default to .tgz archives.
                    if '.' in component:
                        archive_name = component
                    else:
                        archive_name = component + '.tgz'
                    cache_item = self._build_once.GetCachedCloudItemForPackage(
                        component)
                    if cache_item is None:
                        archive_desc = archive_info.ArchiveInfo(
                            name=archive_name)
                    elif cache_item.dir_item:
                        include_package = True
                        archive_desc = archive_info.ArchiveInfo(
                            name=archive_name,
                            hash=cache_item.dir_item.hash,
                            url=cache_item.dir_item.url,
                            log_url=cache_item.log_url)
                    else:
                        # Bug fix: previously archive_desc was left unbound (or
                        # stale from the prior iteration) when the cache item
                        # had no dir_item; record a name-only archive instead.
                        archive_desc = archive_info.ArchiveInfo(
                            name=archive_name)

                    package_desc.AppendArchive(archive_desc)

                # Only output package file if an archive was actually included.
                if include_package:
                    package_file = os.path.join(target_dir,
                                                output_package + '.json')
                    package_desc.SavePackageFile(package_file)

                    built_packages.append(package_file)

        if self._options.packages_file:
            packages_file = pynacl.platform.CygPath(
                self._options.packages_file)
            pynacl.file_tools.MakeParentDirectoryIfAbsent(packages_file)
            with open(packages_file, 'wt') as f:
                f.write('\n'.join(built_packages))
Пример #4
0
    def test_AddArchive(self):
        # An archive appended to a package must retain every field it was
        # constructed with.
        expected = {
            'name': 'test_archive',
            'hash': 'test_archive_hash',
            'url': 'test_archive_url',
            'tar_src_dir': 'test_archive_dir',
            'extract_dir': 'test_extraction_dir',
        }

        package = package_info.PackageInfo()
        package.AppendArchive(archive_info.ArchiveInfo(**expected))

        archives = package.GetArchiveList()
        self.assertEqual(len(archives), 1)

        data = archives[0].GetArchiveData()
        self.assertEqual(data.name, expected['name'])
        self.assertEqual(data.hash, expected['hash'])
        self.assertEqual(data.url, expected['url'])
        self.assertEqual(data.tar_src_dir, expected['tar_src_dir'])
        self.assertEqual(data.extract_dir, expected['extract_dir'])
Пример #5
0
    def test_RevisionFileSavesForMultiTargets(self):
        # A revision covering every target of a multi-target package should
        # save to disk and load back as an equal object.
        package = package_info.PackageInfo()
        package.AppendArchive(
            archive_info.ArchiveInfo(name='test_name', hash='hash_value'))

        targets = self._packages.GetPackageTargetsForPackage(
            TEST_MULTI_PACKAGE_PACKAGE_TARGET)
        self.assertTrue(
            len(targets) > 0,
            'Invalid test data, multiple package targets expected')

        revision = revision_info.RevisionInfo(self._packages)
        revision.SetRevisionNumber('123abc')
        for target in targets:
            revision.SetTargetRevision(TEST_MULTI_PACKAGE_PACKAGE_TARGET,
                                       target, package)

        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            revision_path = os.path.join(work_dir, 'complete_revision.json')
            revision.SaveRevisionFile(revision_path)
            reloaded = revision_info.RevisionInfo(self._packages,
                                                  revision_path)

        self.assertEqual(revision, reloaded)
Пример #6
0
    def test_AlteredRevisionFileFails(self):
        # A revision file whose contents were tampered with must fail to load.
        package = package_info.PackageInfo()
        package.AppendArchive(
            archive_info.ArchiveInfo(name='test_name', hash='hash_value'))

        revision = revision_info.RevisionInfo(self._packages)
        revision.SetRevisionNumber('123abc')
        for target in self._packages.GetPackageTargetsForPackage(
                TEST_SINGLE_PACKAGE_PACKAGE_TARGET):
            revision.SetTargetRevision(TEST_SINGLE_PACKAGE_PACKAGE_TARGET,
                                       target, package)

        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            altered_file = os.path.join(work_dir, 'altered_revision.json')
            revision.SaveRevisionFile(altered_file)

            # Corrupt the revision field directly on disk and rewrite it.
            with open(altered_file, 'rt') as f:
                contents = json.load(f)
            contents[revision_info.FIELD_REVISION] = 'noise'
            with open(altered_file, 'wt') as f:
                json.dump(contents, f)

            fresh = revision_info.RevisionInfo(self._packages)
            self.assertRaises(error.Error, fresh.LoadRevisionFile,
                              altered_file)
Пример #7
0
    def test_RevisionFileSaveLoad(self):
        # A saved revision file should load back into an equal object.
        package = package_info.PackageInfo()
        package.AppendArchive(
            archive_info.ArchiveInfo(name='test_name', hash='hash_value'))

        revision = revision_info.RevisionInfo(self._packages)
        revision.SetRevisionNumber('123abc')
        targets = self._packages.GetPackageTargetsForPackage(
            TEST_SINGLE_PACKAGE_PACKAGE_TARGET)
        self.assertEqual(
            1, len(targets),
            "Invalid test data, single package package target requires 1 target"
        )

        revision.SetTargetRevision(TEST_SINGLE_PACKAGE_PACKAGE_TARGET,
                                   targets[0], package)

        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            revision_path = os.path.join(work_dir, 'test_revision.json')
            revision.SaveRevisionFile(revision_path)
            reloaded = revision_info.RevisionInfo(self._packages,
                                                  revision_path)

        self.assertEqual(revision, reloaded)
Пример #8
0
 def test_ArchiveFileSaveLoad(self):
     # Saving an archive to a JSON file and loading it back preserves it.
     with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
         original = self.CreateTemporaryArchive()
         json_path = os.path.join(work_dir, 'archive.json')
         original.SaveArchiveInfoFile(json_path)
         reloaded = archive_info.ArchiveInfo(archive_info_file=json_path)
         self.assertEqual(original, reloaded)
Пример #9
0
def ArchiveAndUpload(version, zipname, zippath, packages_file):
  """Tars up zippath, uploads the tarball plus its hash file to GS.

  Also writes a package info file describing the nacl_arm_bionic package
  and, if requested, a packages file listing that single package.

  Args:
    version: Toolchain version used to build the GS bucket path.
    zipname: Base name of the tarball to create under TOOLCHAIN_BUILD_OUT.
    zippath: Path (relative to TOOLCHAIN_BUILD_OUT) to archive.
    packages_file: Optional file path to write the packages list to.
  Raises:
    RuntimeError: If creating the tarball fails.
  """
  sys.stdout.flush()
  print >>sys.stderr, '@@@BUILD_STEP archive_and_upload@@@'

  bucket_path = 'nativeclient-archive2/toolchain/%s' % version
  gsd_store = pynacl.gsd_storage.GSDStorage(bucket_path, [bucket_path])

  zipname = os.path.join(TOOLCHAIN_BUILD_OUT, zipname)
  # Remove any stale tarball; a missing file is fine, but don't let a bare
  # except hide unrelated failures (the original swallowed everything).
  try:
    os.remove(zipname)
  except OSError:
    pass

  # Archive the zippath to the zipname.
  if process.Run(['tar', '-czf', zipname, zippath],
                 cwd=TOOLCHAIN_BUILD_OUT,
                 outfile=sys.stdout):
      raise RuntimeError('Failed to zip %s from %s.\n' % (zipname, zippath))

  # Create Zip Hash file using the hash of the zip file.
  hashzipname = zipname + '.sha1hash'
  hashval = pynacl.hashing_tools.HashFileContents(zipname)
  with open(hashzipname, 'w') as f:
    f.write(hashval)

  # Upload the Zip file.
  zipurl = gsd_store.PutFile(zipname, os.path.basename(zipname))
  sys.stdout.flush()
  print >>sys.stderr, ('@@@STEP_LINK@download (%s)@%s@@@' %
                       (os.path.basename(zipname), zipurl))

  # Upload the Zip Hash file.
  hashurl = gsd_store.PutFile(hashzipname, os.path.basename(hashzipname))
  sys.stdout.flush()
  print >>sys.stderr, ('@@@STEP_LINK@download (%s)@%s@@@' %
                       (os.path.basename(hashzipname), hashurl))

  # Create a package info file for the nacl_arm_bionic package.
  archive_desc = archive_info.ArchiveInfo(name=os.path.basename(zipname),
                                          archive_hash=hashval,
                                          tar_src_dir='linux_arm_bionic',
                                          url=zipurl)
  package_desc = package_info.PackageInfo()
  package_desc.AppendArchive(archive_desc)

  os_name = pynacl.platform.GetOS()
  arch_name = pynacl.platform.GetArch()
  package_info_file = os.path.join(TOOLCHAIN_BUILD_OUT,
                                   'packages',
                                   '%s_%s' % (os_name, arch_name),
                                   'nacl_arm_bionic.json')
  package_desc.SavePackageFile(package_info_file)

  # If packages_file is specified, write out our packages file of 1 package.
  if packages_file:
    with open(packages_file, 'wt') as f:
      f.write(package_info_file)
Пример #10
0
  def test_OrderIndependentEquality(self):
    # Packages holding the same archives compare equal regardless of the
    # order in which the archives were appended.
    archives = [('archive1.tar', 'archive_hash1'),
                ('archive2.tar', 'archive_hash2')]

    forward = package_info.PackageInfo()
    for name, digest in archives:
      forward.AppendArchive(archive_info.ArchiveInfo(name, digest))

    backward = package_info.PackageInfo()
    for name, digest in reversed(archives):
      backward.AppendArchive(archive_info.ArchiveInfo(name, digest))

    self.assertEqual(len(forward.GetArchiveList()), 2)
    self.assertEqual(len(backward.GetArchiveList()), 2)
    self.assertEqual(forward, backward)
Пример #11
0
  def test_PackageLoadJsonList(self):
    # A package dumped to JSON can be restored both through the constructor
    # and through LoadPackageFile().
    source = package_info.PackageInfo()
    source.AppendArchive(
        archive_info.ArchiveInfo('archive_item1.tar', 'archive_item_hash1'))
    source.AppendArchive(
        archive_info.ArchiveInfo('archive_item2.tar', 'archive_item_hash2'))
    json_data = source.DumpPackageJson()

    from_ctor = package_info.PackageInfo(json_data)
    from_load = package_info.PackageInfo()
    from_load.LoadPackageFile(json_data)

    self.assertEqual(source, from_ctor)
    self.assertEqual(source, from_load)
Пример #12
0
    def test_RevisionTargetSamePackage(self):
        # Setting target revisions for two different packages must raise.
        package = package_info.PackageInfo()
        package.AppendArchive(
            archive_info.ArchiveInfo(name='test_name', hash='hash_value'))

        revision = revision_info.RevisionInfo(self._packages)
        revision.SetTargetRevision('test1', 'package_target', package)

        self.assertRaises(error.Error, revision.SetTargetRevision,
                          'test2', 'package_target', package)
Пример #13
0
    def test_RevTargetSets(self):
        # A target revision that was set can be read back unchanged.
        package = package_info.PackageInfo()
        package.AppendArchive(
            archive_info.ArchiveInfo(name='test_name', hash='hash_value'))

        revision = revision_info.RevisionInfo(self._packages)
        revision.SetTargetRevision('test_package', 'package_target', package)

        self.assertEqual(package,
                         revision.GetPackageInfo('package_target'))
Пример #14
0
    def CreateTemporaryArchive(self):
        """Builds an ArchiveInfo with unique random values in every field."""
        def unique(prefix):
            # Random suffix so repeated calls never produce colliding values.
            return prefix + str(random.random())

        return archive_info.ArchiveInfo(unique('test_name'),
                                        unique('test_hash'),
                                        unique('test_url'),
                                        unique('test_src'),
                                        unique('test_extr'))
Пример #15
0
  def test_PackageSaveLoadFile(self):
    # Check that saving and reloading a package file retains its values.
    source = package_info.PackageInfo()
    source.AppendArchive(
        archive_info.ArchiveInfo('archive_saveload1.tar',
                                 'archive_saveload_hash1'))
    source.AppendArchive(
        archive_info.ArchiveInfo('archive_saveload2.tar',
                                 'archive_saveload_hash2'))

    with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
      package_file = os.path.join(work_dir, 'test_package.json')
      source.SavePackageFile(package_file)

      from_ctor = package_info.PackageInfo(package_file)
      from_load = package_info.PackageInfo()
      from_load.LoadPackageFile(package_file)

      self.assertEqual(source, from_ctor)
      self.assertEqual(source, from_load)
Пример #16
0
  def test_ClearArchiveListClearsEverything(self):
    # After ClearArchiveList() a package must be indistinguishable from a
    # freshly constructed empty one.
    populated = package_info.PackageInfo()
    populated.AppendArchive(
        archive_info.ArchiveInfo('name', 'hash', 'url', 'tar_src_dir',
                                 'extract_dir'))
    populated.ClearArchiveList()

    # The archive list itself is empty...
    self.assertEqual(len(populated.GetArchiveList()), 0)

    # ...and no other state survived the clear.
    self.assertEqual(populated, package_info.PackageInfo())
Пример #17
0
def _DoFillEmptyTarsCmd(arguments):
  """Replaces missing package archives with freshly created empty tarballs."""
  package_target_packages = GetPackageTargetPackages(
      arguments.fillemptytars_package,
      arguments.package_target_packages
  )
  if not package_target_packages:
    raise NameError('Unknown package: %s.' % arguments.fillemptytars_package
                    + ' Did you forget to add "$PACKAGE_TARGET/"?')

  for package_target, package_name in package_target_packages:
    package_path = package_locations.GetLocalPackageFile(arguments.tar_dir,
                                                         package_target,
                                                         package_name)

    source_desc = package_info.PackageInfo(package_path, skip_missing=True)
    filled_desc = package_info.PackageInfo()
    for archive in source_desc.GetArchiveList():
      data = archive.GetArchiveData()
      if data.hash:
        # Archive already exists on disk; carry it over untouched.
        filled_desc.AppendArchive(archive)
        continue

      # Missing archive: create an empty tarball of the matching type.
      logging.info('Filling missing archive: %s.', data.name)
      if data.name.endswith(('.tar.gz', '.tgz')):
        tar_mode = 'w:gz'
      elif data.name.endswith('.bz2'):
        tar_mode = 'w:bz2'
      elif data.name.endswith('.tar'):
        tar_mode = 'w:'
      else:
        raise NameError('Unknown archive type: %s.' % data.name)

      empty_path = package_locations.GetLocalPackageArchiveFile(
          arguments.tar_dir,
          package_target,
          package_name,
          data.name
          )

      cygtar.CygTar(empty_path, tar_mode).Close()
      empty_hash = archive_info.GetArchiveHash(empty_path)

      filled_desc.AppendArchive(
          archive_info.ArchiveInfo(name=data.name, archive_hash=empty_hash))

    filled_desc.SavePackageFile(package_path)
Пример #18
0
    def test_ArchiveConstructor(self):
        # The positional constructor must populate every archive field.
        fields = ('test_archive', 'test_archive_hash', 'test_archive_url',
                  'test_archive_dir', 'test_extraction_dir')
        name, digest, url, tar_src_dir, extract_dir = fields

        archive = archive_info.ArchiveInfo(*fields)
        data = archive.GetArchiveData()

        self.assertEqual(data.name, name)
        self.assertEqual(data.hash, digest)
        self.assertEqual(data.url, url)
        self.assertEqual(data.tar_src_dir, tar_src_dir)
        self.assertEqual(data.extract_dir, extract_dir)
Пример #19
0
    def test_DownloadArchiveMissingURLFails(self):
        # Downloading must fail for an archive whose URL field is unset.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            package_desc = package_info.PackageInfo()
            package_desc.AppendArchive(
                archive_info.ArchiveInfo(name='missing_name.tar',
                                         hash='missing_hash',
                                         url=None))

            tar_dir = os.path.join(work_dir, 'tar_dir')
            self.assertRaises(error.Error,
                              package_version.DownloadPackageArchives,
                              tar_dir,
                              'missing_target',
                              'missing_name',
                              package_desc,
                              downloader=self._fake_downloader.Download)
Пример #20
0
    def test_RevisionFileRequiresRevisionNumber(self):
        # Saving a revision file without a revision number set must raise.
        package = package_info.PackageInfo()
        package.AppendArchive(
            archive_info.ArchiveInfo(name='test_name', hash='hash_value'))

        # Note: SetRevisionNumber() is deliberately never called here.
        revision = revision_info.RevisionInfo(self._packages)
        for target in self._packages.GetPackageTargetsForPackage(
                TEST_SINGLE_PACKAGE_PACKAGE_TARGET):
            revision.SetTargetRevision(TEST_SINGLE_PACKAGE_PACKAGE_TARGET,
                                       target, package)

        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            revision_file = os.path.join(work_dir, 'test_revision.json')

            self.assertRaises(error.Error, revision.SaveRevisionFile,
                              revision_file)
Пример #21
0
    def test_RevisionFileMustSetAllTargets(self):
        # Saving must fail when only some of the package targets have a
        # revision set.
        package = package_info.PackageInfo()
        package.AppendArchive(
            archive_info.ArchiveInfo(name='test_name', hash='hash_value'))

        targets = self._packages.GetPackageTargetsForPackage(
            TEST_MULTI_PACKAGE_PACKAGE_TARGET)
        self.assertTrue(
            len(targets) > 0,
            'Invalid test data, multiple package targets expected')

        revision = revision_info.RevisionInfo(self._packages)
        revision.SetRevisionNumber('123abc')
        # Deliberately set a revision for only the first target.
        revision.SetTargetRevision(TEST_MULTI_PACKAGE_PACKAGE_TARGET,
                                   targets[0], package)

        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            revision_file = os.path.join(work_dir, 'incomplete_revision.json')
            self.assertRaises(error.Error, revision.SaveRevisionFile,
                              revision_file)
Пример #22
0
    def test_DownloadArchiveMismatchFails(self):
        # Downloading must fail when the downloaded archive's hash differs
        # from the hash recorded in the package.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            mock_tar = self.GenerateMockFile(work_dir)
            fake_url = 'http://www.fake.com/archive.tar'
            self._fake_downloader.StoreURL(fake_url, mock_tar)

            package_desc = package_info.PackageInfo()
            package_desc.AppendArchive(
                archive_info.ArchiveInfo(name='invalid_name.tar',
                                         hash='invalid_hash',
                                         url=fake_url))

            tar_dir = os.path.join(work_dir, 'tar_dir')
            self.assertRaises(error.Error,
                              package_version.DownloadPackageArchives,
                              tar_dir,
                              'mismatch_target',
                              'mismatch_name',
                              package_desc,
                              downloader=self._fake_downloader.Download)
Пример #23
0
    def GeneratePackageInfo(self,
                            archive_list,
                            url_dict=None,
                            src_dir_dict=None,
                            dir_dict=None,
                            log_url_dict=None):
        """Generates a package_info.PackageInfo object for a list of archives.

    Args:
      archive_list: List of file paths where package archives sit.
      url_dict: dict of archive file path to URL if url exists.
      src_dir_dict: dict of archive file path to source tar dir if exists.
      dir_dict: dict of archive file path to root dir if exists.
      log_url_dict: dict of archive file path to log URL if exists.
    Returns:
      A package_info.PackageInfo describing every archive in archive_list.
    """
        # Avoid the mutable-default-argument pitfall: treat None as empty.
        url_dict = url_dict or {}
        src_dir_dict = src_dir_dict or {}
        dir_dict = dir_dict or {}
        log_url_dict = log_url_dict or {}

        package_desc = package_info.PackageInfo()
        for archive_file in archive_list:
            archive_name = os.path.basename(archive_file)

            # Missing files still get an entry, but with an 'invalid' hash.
            if os.path.isfile(archive_file):
                archive_hash = archive_info.GetArchiveHash(archive_file)
            else:
                archive_hash = 'invalid'

            archive_url = url_dict.get(archive_file, None)
            archive_src_tar_dir = src_dir_dict.get(archive_file, '')
            archive_dir = dir_dict.get(archive_file, '')
            archive_log_url = log_url_dict.get(archive_file, None)
            archive_desc = archive_info.ArchiveInfo(
                name=archive_name,
                hash=archive_hash,
                url=archive_url,
                tar_src_dir=archive_src_tar_dir,
                extract_dir=archive_dir,
                log_url=archive_log_url)
            package_desc.AppendArchive(archive_desc)

        return package_desc
Пример #24
0
def ArchivePackageArchives(tar_dir, package_target, package_name, archives,
                           extra_archives=None):
  """Archives local package archives to the tar directory.

  Args:
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    archives: List of archive file paths where archives currently live.
      Each entry may carry annotations: "path@url" supplies a download URL;
      "path,srcdir" or "path,srcdir:extractdir" supply tar directories.
    extra_archives: Extra archives that are expected to be built elsewhere.
  Returns:
    Returns the local package file that was archived, or None when the
    existing local package file already matches and nothing was rewritten.
  """
  # Avoid the mutable-default-argument pitfall.
  if extra_archives is None:
    extra_archives = []

  local_package_file = package_locations.GetLocalPackageFile(tar_dir,
                                                             package_target,
                                                             package_name)

  valid_archive_files = set()
  archive_list = []

  package_desc = package_info.PackageInfo()
  # Regular archives must exist; extra archives may be missing (skipped).
  package_archives = ([(archive, False) for archive in archives] +
                      [(archive, True) for archive in extra_archives])
  for archive, skip_missing in package_archives:
    # Split off an optional "@url" annotation.
    archive_url = None
    if '@' in archive:
      archive, archive_url = archive.split('@', 1)

    # Split off optional ",tar_src_dir[:extract_dir]" annotations.
    extract_param = ''
    tar_src_dir = ''
    extract_dir = ''
    if ',' in archive:
      archive, extract_param = archive.split(',', 1)
      if ':' in extract_param:
        tar_src_dir, extract_dir = extract_param.split(':', 1)
      else:
        tar_src_dir = extract_param

    archive_hash = archive_info.GetArchiveHash(archive)
    archive_name = os.path.basename(archive)
    archive_desc = archive_info.ArchiveInfo(archive_name,
                                            archive_hash,
                                            url=archive_url,
                                            tar_src_dir=tar_src_dir,
                                            extract_dir=extract_dir)
    package_desc.AppendArchive(archive_desc)

    # A None hash means the file does not exist on disk.
    if archive_hash is None:
      if skip_missing:
        logging.info('Skipping archival of missing file: %s', archive)
        continue
      raise IOError('Invalid package: %s.' % archive)
    archive_list.append(archive)

    archive_basename = os.path.basename(archive)
    archive_json = archive_basename + '.json'
    valid_archive_files.update([archive_basename, archive_json])

  # Delete any stale archive files
  local_archive_dir = package_locations.GetLocalPackageArchiveDir(
      tar_dir,
      package_target,
      package_name)

  if os.path.isdir(local_archive_dir):
    for dir_item in os.listdir(local_archive_dir):
      if dir_item in valid_archive_files:
        continue

      item_path = os.path.join(local_archive_dir, dir_item)
      if os.path.isdir(item_path):
        pynacl.file_tools.RemoveDir(item_path)
      else:
        pynacl.file_tools.RemoveFile(item_path)

  # We do not need to archive the package if it already matches. But if the
  # local package file is invalid or does not match, then we should recreate
  # the json file.
  if os.path.isfile(local_package_file):
    try:
      current_package_desc = package_info.PackageInfo(local_package_file,
                                                      skip_missing=True)
      if current_package_desc == package_desc:
        # Nothing changed; signal "no work done" with an implicit None.
        return
    except ValueError:
      pass

  # Copy each of the packages over to the tar directory first.
  for archive_file in archive_list:
    archive_name = os.path.basename(archive_file)
    local_archive_file = package_locations.GetLocalPackageArchiveFile(
        tar_dir,
        package_target,
        package_name,
        archive_name)

    logging.info('Archiving file: %s', archive_file)
    pynacl.file_tools.MakeParentDirectoryIfAbsent(local_archive_file)
    shutil.copyfile(archive_file, local_archive_file)

  # Once all the copying is completed, update the local packages file.
  logging.info('Package "%s" archived: %s', package_name, local_package_file)
  pynacl.file_tools.MakeParentDirectoryIfAbsent(local_package_file)
  package_desc.SavePackageFile(local_package_file)

  return local_package_file
Пример #25
0
 def test_DumpArchiveJson(self):
     # Dumping an archive to JSON and reloading it yields an equal archive.
     original = self.CreateTemporaryArchive()
     reloaded = archive_info.ArchiveInfo(
         archive_info_file=original.DumpArchiveJson())
     self.assertEqual(original, reloaded)
Пример #26
0
def UploadPackage(storage, revision, tar_dir, package_target, package_name,
                  is_shared_package, annotate=False, skip_missing=False,
                  custom_package_file=None):
  """Uploads a local package file to the supplied cloud storage object.

  By default local package files are expected to be found in the standardized
  location within the tar directory, however a custom package file may be
  specified to upload from a different location. Package archives that do not
  have their URL field set will automaticaly have the archives uploaded so that
  someone accessing the package file from the cloud storage will also have
  access to the package archives.

  Args:
    storage: Cloud storage object which supports PutFile and GetFile.
    revision: SVN Revision number the package should be associated with.
    tar_dir: Root tar directory where archives live.
    package_target: Package target of the package to archive.
    package_name: Package name of the package to archive.
    is_shared_package: Is this package shared among all package targets?
    annotate: Print annotations for build bots?
    skip_missing: Skip missing package archive files?
    custom_package_file: File location for a custom package file.
  Returns:
    Returns remote download key for the uploaded package file.
  Raises:
    IOError: If a URL-less archive is missing on disk or its on-disk hash
      does not match the hash recorded in the package file.
  """
  if custom_package_file is not None:
    local_package_file = custom_package_file
  else:
    local_package_file = package_locations.GetLocalPackageFile(
        tar_dir,
        package_target,
        package_name)

  # Upload the package file and also upload any local package archives so
  # that they are downloadable.
  package_desc = package_info.PackageInfo(local_package_file,
                                          skip_missing=skip_missing)
  # A parallel package is built whose archives all carry URLs; that copy is
  # what actually gets uploaded.
  upload_package_desc = package_info.PackageInfo()

  for archive_obj in package_desc.GetArchiveList():
    archive_desc = archive_obj.GetArchiveData()
    url = archive_desc.url
    # Only archives that have contents (a hash) but no public URL need to be
    # uploaded; everything else is passed through with its existing URL.
    if archive_desc.hash and url is None:
      if annotate:
        print '@@@BUILD_STEP Archive:%s (upload)@@@' % archive_desc.name

      archive_file = package_locations.GetLocalPackageArchiveFile(
          tar_dir,
          package_target,
          package_name,
          archive_desc.name)
      # Verify the local file matches the package before uploading it.
      archive_hash = archive_info.GetArchiveHash(archive_file)
      if archive_hash is None:
        raise IOError('Missing Archive File: %s' % archive_file)
      elif archive_hash != archive_desc.hash:
        raise IOError(
            'Archive hash does not match package hash: %s' % archive_file
            + '\n  Archive Hash: %s' % archive_hash
            + '\n  Package Hash: %s' % archive_desc.hash)

      logging.warn('Missing archive URL: %s', archive_desc.name)
      logging.warn('Uploading archive to be publically available...')
      # Archives are keyed by (name, hash) so identical content is shared.
      remote_archive_key = package_locations.GetRemotePackageArchiveKey(
          archive_desc.name,
          archive_desc.hash)
      url = storage.PutFile(archive_file, remote_archive_key, clobber=True)
      if annotate:
        print '@@@STEP_LINK@download@%s@@@' % url

    # Rebuild the archive entry with the (possibly freshly uploaded) URL.
    archive_desc = archive_info.ArchiveInfo(
        archive_desc.name,
        archive_desc.hash,
        url=url,
        tar_src_dir=archive_desc.tar_src_dir,
        extract_dir=archive_desc.extract_dir)
    upload_package_desc.AppendArchive(archive_desc)

  # The URL-bearing copy is written next to the original as "<file>.upload".
  upload_package_file = local_package_file + '.upload'
  pynacl.file_tools.MakeParentDirectoryIfAbsent(upload_package_file)
  upload_package_desc.SavePackageFile(upload_package_file)

  logging.info('Uploading package information: %s', package_name)
  remote_package_key = package_locations.GetRemotePackageKey(
      is_shared_package,
      revision,
      package_target,
      package_name)
  package_info.UploadPackageInfoFiles(storage, package_target, package_name,
                                      remote_package_key, upload_package_file,
                                      skip_missing=skip_missing,
                                      annotate=annotate)

  return remote_package_key