Code example #1
def update_files_privacy(pub_record):
    """Update file privacy according to the publishing destination

    :param pub_record: One of a SourcePackagePublishingHistory or
        BinaryPackagePublishingHistory record.

    :return: a list of changed `LibraryFileAlias` objects.
    """
    package_files = []
    archive = None
    if ISourcePackagePublishingHistory.providedBy(pub_record):
        archive = pub_record.archive
        # Unrestrict the package files if necessary.
        sourcepackagerelease = pub_record.sourcepackagerelease
        package_files.extend([(source_file, 'libraryfile')
                              for source_file in sourcepackagerelease.files])
        # Unrestrict the package diff files if necessary.
        package_files.extend([(diff, 'diff_content')
                              for diff in sourcepackagerelease.package_diffs])
        # Unrestrict the source upload changesfile if necessary.
        package_upload = sourcepackagerelease.package_upload
        package_files.append((package_upload, 'changesfile'))
        package_files.append((sourcepackagerelease, 'changelog'))
    elif IBinaryPackagePublishingHistory.providedBy(pub_record):
        archive = pub_record.archive
        # Unrestrict the binary files if necessary.
        binarypackagerelease = pub_record.binarypackagerelease
        package_files.extend([(binary_file, 'libraryfile')
                              for binary_file in binarypackagerelease.files])
        # Unrestrict the upload changesfile if necessary.
        build = binarypackagerelease.build
        package_upload = build.package_upload
        package_files.append((package_upload, 'changesfile'))
        # Unrestrict the buildlog file as necessary.
        package_files.append((build, 'log'))
    elif IPackageUploadCustom.providedBy(pub_record):
        # Unrestrict the included custom files if necessary.
        package_files.append((pub_record, 'libraryfilealias'))
        # For PackageUploadCustom records the archive is reached via the
        # related PackageUpload.
        archive = pub_record.packageupload.archive
    else:
        raise AssertionError(
            "pub_record is not one of SourcePackagePublishingHistory, "
            "BinaryPackagePublishingHistory or PackageUploadCustom.")

    changed_files = []
    for obj, attr_name in package_files:
        lfa = getattr(obj, attr_name, None)
        # Only unrestrict restricted files published in public archives,
        # not the opposite. We don't have a use-case for privatizing
        # files yet.
        if (lfa is None or lfa.restricted == archive.private
                or not lfa.restricted):
            continue
        # LibraryFileAlias.restricted is normally read-only, but we have a
        # good excuse here.
        removeSecurityProxy(lfa).restricted = archive.private
        changed_files.append(lfa)

    return changed_files
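
The loop at the end of this example resolves each (object, attribute name)
pair lazily with getattr(), so a missing attribute simply yields None and the
entry is skipped. Below is a minimal, self-contained sketch of that pattern;
FileAlias and Release are illustrative stand-ins, not Launchpad classes.

class FileAlias:
    """Stand-in for a library file carrying a ``restricted`` flag."""
    def __init__(self, restricted):
        self.restricted = restricted


class Release:
    """Stand-in for a record that may or may not carry a changelog file."""
    def __init__(self, changelog=None):
        self.changelog = changelog


# Collect (object, attribute name) pairs first, resolve them afterwards.
package_files = [
    (Release(changelog=FileAlias(restricted=True)), 'changelog'),
    (Release(), 'changelog'),   # resolves to None and is skipped below
]

changed_files = []
for obj, attr_name in package_files:
    lfa = getattr(obj, attr_name, None)
    if lfa is None or not lfa.restricted:
        continue                # nothing to do for missing or public files
    lfa.restricted = False      # mirror the unrestrict-for-public-archive case
    changed_files.append(lfa)

print(len(changed_files))       # -> 1
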
Code example #2
    def canRemove(self, publication_class, filename, file_md5):
        """Check if given (filename, MD5) can be removed from the pool.

        Check the archive reference-counter implemented in:
        `SourcePackagePublishingHistory` or
        `BinaryPackagePublishingHistory`.

        Only allow removal of unnecessary files.
        """
        clauses = []
        clauseTables = []

        if ISourcePackagePublishingHistory.implementedBy(
            publication_class):
            clauses.append("""
                SourcePackagePublishingHistory.archive = %s AND
                SourcePackagePublishingHistory.dateremoved is NULL AND
                SourcePackagePublishingHistory.sourcepackagerelease =
                    SourcePackageReleaseFile.sourcepackagerelease AND
                SourcePackageReleaseFile.libraryfile = LibraryFileAlias.id
            """ % sqlvalues(self.archive))
            clauseTables.append('SourcePackageReleaseFile')
        elif IBinaryPackagePublishingHistory.implementedBy(
            publication_class):
            clauses.append("""
                BinaryPackagePublishingHistory.archive = %s AND
                BinaryPackagePublishingHistory.dateremoved is NULL AND
                BinaryPackagePublishingHistory.binarypackagerelease =
                    BinaryPackageFile.binarypackagerelease AND
                BinaryPackageFile.libraryfile = LibraryFileAlias.id
            """ % sqlvalues(self.archive))
            clauseTables.append('BinaryPackageFile')
        else:
            raise AssertionError("%r is not supported." % publication_class)

        clauses.append("""
           LibraryFileAlias.content = LibraryFileContent.id AND
           LibraryFileAlias.filename = %s AND
           LibraryFileContent.md5 = %s
        """ % sqlvalues(filename, file_md5))
        clauseTables.extend(
            ['LibraryFileAlias', 'LibraryFileContent'])

        all_publications = publication_class.select(
            " AND ".join(clauses), clauseTables=clauseTables)

        right_now = datetime.datetime.now(pytz.timezone('UTC'))
        for pub in all_publications:
            # Deny removal if any reference is still active.
            if pub.status not in inactive_publishing_status:
                return False
            # Deny removal if any reference wasn't dominated yet.
            if pub.scheduleddeletiondate is None:
                return False
            # Deny removal if any reference is still in 'quarantine'.
            if pub.scheduleddeletiondate > right_now:
                return False

        return True
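
Note the two different interface checks used so far: example #1 calls
providedBy() on a record (an instance), while canRemove() above calls
implementedBy() on publication_class (a class). A minimal sketch of the
distinction follows, assuming zope.interface, which supplies both methods;
IThing and Thing are illustrative names.

from zope.interface import Interface, implementer


class IThing(Interface):
    """Marker interface for the sketch."""


@implementer(IThing)
class Thing:
    pass


print(IThing.implementedBy(Thing))  # True: class-level check, as in canRemove()
print(IThing.providedBy(Thing()))   # True: instance-level check, as in example #1
print(IThing.providedBy(Thing))     # False: the class implements IThing but does not itself provide it
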
Code example #3
    def canRemove(self, publication_class, filename, file_md5):
        """Check if given (filename, MD5) can be removed from the pool.

        Check the archive reference-counter implemented in:
        `SourcePackagePublishingHistory` or
        `BinaryPackagePublishingHistory`.

        Only allow removal of unnecessary files.
        """
        clauses = []
        clauseTables = []

        if ISourcePackagePublishingHistory.implementedBy(publication_class):
            clauses.append("""
                SourcePackagePublishingHistory.archive = %s AND
                SourcePackagePublishingHistory.dateremoved is NULL AND
                SourcePackagePublishingHistory.sourcepackagerelease =
                    SourcePackageReleaseFile.sourcepackagerelease AND
                SourcePackageReleaseFile.libraryfile = LibraryFileAlias.id
            """ % sqlvalues(self.archive))
            clauseTables.append('SourcePackageReleaseFile')
        elif IBinaryPackagePublishingHistory.implementedBy(publication_class):
            clauses.append("""
                BinaryPackagePublishingHistory.archive = %s AND
                BinaryPackagePublishingHistory.dateremoved is NULL AND
                BinaryPackagePublishingHistory.binarypackagerelease =
                    BinaryPackageFile.binarypackagerelease AND
                BinaryPackageFile.libraryfile = LibraryFileAlias.id
            """ % sqlvalues(self.archive))
            clauseTables.append('BinaryPackageFile')
        else:
            raise AssertionError("%r is not supported." % publication_class)

        clauses.append("""
           LibraryFileAlias.content = LibraryFileContent.id AND
           LibraryFileAlias.filename = %s AND
           LibraryFileContent.md5 = %s
        """ % sqlvalues(filename, file_md5))
        clauseTables.extend(['LibraryFileAlias', 'LibraryFileContent'])

        all_publications = publication_class.select(" AND ".join(clauses),
                                                    clauseTables=clauseTables)

        right_now = datetime.datetime.now(pytz.timezone('UTC'))
        for pub in all_publications:
            # Deny removal if any reference is still active.
            if pub.status not in inactive_publishing_status:
                return False
            # Deny removal if any reference wasn't dominated yet.
            if pub.scheduleddeletiondate is None:
                return False
            # Deny removal if any reference is still in 'quarantine'.
            if pub.scheduleddeletiondate > right_now:
                return False

        return True
Code example #4
 def is_source(self):
     return ISourcePackagePublishingHistory.providedBy(self.context)
Code example #5
def update_files_privacy(pub_record):
    """Update file privacy according to the publishing destination

    :param pub_record: One of a SourcePackagePublishingHistory or
        BinaryPackagePublishingHistory record.

    :return: a list of changed `LibraryFileAlias` objects.
    """
    package_files = []
    archive = None
    if ISourcePackagePublishingHistory.providedBy(pub_record):
        archive = pub_record.archive
        # Unrestrict the package files if necessary.
        sourcepackagerelease = pub_record.sourcepackagerelease
        package_files.extend(
            [(source_file, 'libraryfile')
             for source_file in sourcepackagerelease.files])
        # Unrestrict the package diff files if necessary.
        package_files.extend(
            [(diff, 'diff_content')
             for diff in sourcepackagerelease.package_diffs])
        # Unrestrict the source upload changesfile if necessary.
        package_upload = sourcepackagerelease.package_upload
        package_files.append((package_upload, 'changesfile'))
        package_files.append((sourcepackagerelease, 'changelog'))
    elif IBinaryPackagePublishingHistory.providedBy(pub_record):
        archive = pub_record.archive
        # Unrestrict the binary files if necessary.
        binarypackagerelease = pub_record.binarypackagerelease
        package_files.extend(
            [(binary_file, 'libraryfile')
             for binary_file in binarypackagerelease.files])
        # Unrestrict the upload changesfile if necessary.
        build = binarypackagerelease.build
        package_upload = build.package_upload
        package_files.append((package_upload, 'changesfile'))
        # Unrestrict the buildlog file as necessary.
        package_files.append((build, 'log'))
    elif IPackageUploadCustom.providedBy(pub_record):
        # Unrestrict the included custom files if necessary.
        package_files.append((pub_record, 'libraryfilealias'))
        # For PackageUploadCustom records the archive is reached via the
        # related PackageUpload.
        archive = pub_record.packageupload.archive
    else:
        raise AssertionError(
            "pub_record is not one of SourcePackagePublishingHistory, "
            "BinaryPackagePublishingHistory or PackageUploadCustom.")

    changed_files = []
    for obj, attr_name in package_files:
        lfa = getattr(obj, attr_name, None)
        # Only unrestrict restricted files published in public archives,
        # not the opposite. We don't have a use-case for privatizing
        # files yet.
        if (lfa is None or
            lfa.restricted == archive.private or
            not lfa.restricted):
            continue
        # LibraryFileAlias.restricted is normally read-only, but we have a
        # good excuse here.
        removeSecurityProxy(lfa).restricted = archive.private
        changed_files.append(lfa)

    return changed_files
Code example #6
    def attemptCopy(self):
        """Attempt to perform the copy.

        :raise CannotCopy: If the copy fails for a reason that the user
            can deal with.
        """
        reason = self.target_archive.checkUploadToPocket(
            self.target_distroseries,
            self.target_pocket,
            person=self.requester)
        if reason:
            # Wrap any forbidden-pocket error in CannotCopy.
            raise CannotCopy(unicode(reason))

        source_package = self.findSourcePublication()

        # If there's a PackageUpload associated with this job then this
        # job has just been released by an archive admin from the queue.
        # We don't need to check any policies, but the admin may have
        # set overrides which we will get from the job's metadata.
        pu = getUtility(IPackageUploadSet).getByPackageCopyJobIDs(
            [self.context.id]).any()
        if pu is None:
            source_name = getUtility(ISourcePackageNameSet)[self.package_name]
            self._checkPolicies(source_name,
                                source_package.sourcepackagerelease.component,
                                self.auto_approve)

        # The package is free to go right in, so just copy it now.
        ancestry = self.target_archive.getPublishedSources(
            name=self.package_name,
            distroseries=self.target_distroseries,
            pocket=self.target_pocket,
            exact_match=True)
        override = self.getSourceOverride()
        copy_policy = self.getPolicyImplementation()
        send_email = copy_policy.send_email(self.target_archive)
        copied_publications = do_copy(
            sources=[source_package],
            archive=self.target_archive,
            series=self.target_distroseries,
            pocket=self.target_pocket,
            include_binaries=self.include_binaries,
            check_permissions=True,
            person=self.requester,
            overrides=[override],
            send_email=send_email,
            announce_from_person=self.requester,
            sponsored=self.sponsored,
            packageupload=pu,
            unembargo=self.unembargo,
            phased_update_percentage=self.phased_update_percentage)

        # Add a PackageDiff for this new upload if it has ancestry.
        if copied_publications and not ancestry.is_empty():
            from_spr = None
            for publication in copied_publications:
                if ISourcePackagePublishingHistory.providedBy(publication):
                    from_spr = publication.sourcepackagerelease
                    break
            if from_spr:
                for ancestor in ancestry:
                    to_spr = ancestor.sourcepackagerelease
                    if from_spr != to_spr:
                        try:
                            to_spr.requestDiffTo(self.requester, from_spr)
                        except PackageDiffAlreadyRequested:
                            pass
                        break

        if pu is not None:
            # A PackageUpload will only exist if the copy job had to be
            # held in the queue because of policy/ancestry checks.  If one
            # does exist we need to make sure it gets moved to DONE.
            pu.setDone()

        if copied_publications:
            self.logger.debug("Packages copied to %s:" %
                              self.target_archive.displayname)
            for copy in copied_publications:
                self.logger.debug(copy.displayname)
Code example #7
    def attemptCopy(self):
        """Attempt to perform the copy.

        :raise CannotCopy: If the copy fails for a reason that the user
            can deal with.
        """
        reason = self.target_archive.checkUploadToPocket(
            self.target_distroseries, self.target_pocket, person=self.requester
        )
        if reason:
            # Wrap any forbidden-pocket error in CannotCopy.
            raise CannotCopy(unicode(reason))

        source_package = self.findSourcePublication()

        # If there's a PackageUpload associated with this job then this
        # job has just been released by an archive admin from the queue.
        # We don't need to check any policies, but the admin may have
        # set overrides which we will get from the job's metadata.
        pu = getUtility(IPackageUploadSet).getByPackageCopyJobIDs([self.context.id]).any()
        if pu is None:
            source_name = getUtility(ISourcePackageNameSet)[self.package_name]
            self._checkPolicies(source_name, source_package.sourcepackagerelease.component, self.auto_approve)

        # The package is free to go right in, so just copy it now.
        ancestry = self.target_archive.getPublishedSources(
            name=self.package_name, distroseries=self.target_distroseries, pocket=self.target_pocket, exact_match=True
        )
        override = self.getSourceOverride()
        copy_policy = self.getPolicyImplementation()
        send_email = copy_policy.send_email(self.target_archive)
        copied_publications = do_copy(
            sources=[source_package],
            archive=self.target_archive,
            series=self.target_distroseries,
            pocket=self.target_pocket,
            include_binaries=self.include_binaries,
            check_permissions=True,
            person=self.requester,
            overrides=[override],
            send_email=send_email,
            announce_from_person=self.requester,
            sponsored=self.sponsored,
            packageupload=pu,
            unembargo=self.unembargo,
            phased_update_percentage=self.phased_update_percentage,
        )

        # Add a PackageDiff for this new upload if it has ancestry.
        if copied_publications and not ancestry.is_empty():
            from_spr = None
            for publication in copied_publications:
                if ISourcePackagePublishingHistory.providedBy(publication):
                    from_spr = publication.sourcepackagerelease
                    break
            if from_spr:
                for ancestor in ancestry:
                    to_spr = ancestor.sourcepackagerelease
                    if from_spr != to_spr:
                        try:
                            to_spr.requestDiffTo(self.requester, from_spr)
                        except PackageDiffAlreadyRequested:
                            pass
                        break

        if pu is not None:
            # A PackageUpload will only exist if the copy job had to be
            # held in the queue because of policy/ancestry checks.  If one
            # does exist we need to make sure it gets moved to DONE.
            pu.setDone()

        if copied_publications:
            self.logger.debug("Packages copied to %s:" % self.target_archive.displayname)
            for copy in copied_publications:
                self.logger.debug(copy.displayname)