def testObsoleteDistroseriesWorks(self):
    """Make sure the required publications are obsoleted."""
    obsoleter = self.getObsoleter()
    self.warty.status = SeriesStatus.OBSOLETE
    # Get all the published sources in warty.
    published_sources, published_binaries = (
        self.getPublicationsForDistroseries())
    # Assert that none of them is obsolete yet.  assertEqual /
    # assertIsNone (rather than assertTrue on a comparison) make the
    # failure messages show the actual offending value.
    self.assertFalse(published_sources.is_empty())
    self.assertFalse(published_binaries.is_empty())
    for source in published_sources:
        self.assertEqual(source.status, PackagePublishingStatus.PUBLISHED)
        self.assertIsNone(source.scheduleddeletiondate)
    for binary in published_binaries:
        self.assertEqual(binary.status, PackagePublishingStatus.PUBLISHED)
        self.assertIsNone(binary.scheduleddeletiondate)
    # Keep their DB IDs for later.
    source_ids = [source.id for source in published_sources]
    binary_ids = [binary.id for binary in published_binaries]
    # Make them obsolete.
    obsoleter.mainTask()
    self.layer.txn.commit()
    # Now see if the modified publications have been correctly obsoleted.
    # We need to re-fetch the published_sources and published_binaries
    # because the existing objects are not valid through a transaction.
    for id in source_ids:
        source = SourcePackagePublishingHistory.get(id)
        self.assertEqual(source.status, PackagePublishingStatus.OBSOLETE)
        self.assertIsNotNone(source.scheduleddeletiondate)
    for id in binary_ids:
        binary = BinaryPackagePublishingHistory.get(id)
        self.assertEqual(binary.status, PackagePublishingStatus.OBSOLETE)
        self.assertIsNotNone(binary.scheduleddeletiondate)
    # Make sure nothing else was obsoleted.  Subtract the set of
    # known OBSOLETE IDs from the set of all the IDs and assert that
    # the remainder are not OBSOLETE.
    all_sources = SourcePackagePublishingHistory.select(True)
    all_binaries = BinaryPackagePublishingHistory.select(True)
    all_source_ids = [source.id for source in all_sources]
    all_binary_ids = [binary.id for binary in all_binaries]
    remaining_source_ids = set(all_source_ids) - set(source_ids)
    remaining_binary_ids = set(all_binary_ids) - set(binary_ids)
    for id in remaining_source_ids:
        source = SourcePackagePublishingHistory.get(id)
        self.assertNotEqual(source.status, PackagePublishingStatus.OBSOLETE)
    for id in remaining_binary_ids:
        binary = BinaryPackagePublishingHistory.get(id)
        self.assertNotEqual(binary.status, PackagePublishingStatus.OBSOLETE)
def getPendingPublications(self, archive, pocket, is_careful):
    """See `ICanPublishPackages`."""
    from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
    # Base restriction: publications for this architecture in the
    # requested archive.
    conditions = [
        "distroarchseries = %s AND archive = %s" % sqlvalues(self, archive),
        ]
    # PENDING records are always candidates; careful mode also
    # republishes records already marked PUBLISHED.
    wanted_status = [PackagePublishingStatus.PENDING]
    if is_careful:
        wanted_status.append(PackagePublishingStatus.PUBLISHED)
    conditions.append("status IN %s" % sqlvalues(wanted_status))
    # Restrict to a specific pocket.
    conditions.append('pocket = %s' % sqlvalues(pocket))
    # Exclude RELEASE pocket if the distroseries was already released,
    # since it should not change, unless the archive allows it.
    if (not self.distroseries.isUnstable() and
            not archive.allowUpdatesToReleasePocket()):
        conditions.append(
            'pocket != %s' % sqlvalues(PackagePublishingPocket.RELEASE))
    return BinaryPackagePublishingHistory.select(
        " AND ".join(conditions), orderBy=["-id"])
def getLatestPublishingEntry(self, time_interval, deb_only=True):
    """Return the BinaryPackagePublishingHistory record with the most
    recent datepublished.

    :deb_only: If True, return only publishing records whose
        binarypackagerelease's binarypackagefile.filetype is
        BinaryPackageFileType.DEB.
    """
    # Build the WHERE clause as a list of fragments joined by AND,
    # instead of successive string concatenation.
    fragments = ["""
        BinaryPackagePublishingHistory.pocket = %s AND
        BinaryPackagePublishingHistory.component = %s AND
        BinaryPackagePublishingHistory.distroarchseries = %s AND
        BinaryPackagePublishingHistory.archive = %s AND
        BinaryPackagePublishingHistory.status = %s
        """ % sqlvalues(
            self.pocket, self.component, self.distro_arch_series,
            self.distro_arch_series.main_archive,
            PackagePublishingStatus.PUBLISHED)]
    if deb_only:
        # Join through BinaryPackageFile so only .deb files qualify.
        fragments.append("""
            BinaryPackagePublishingHistory.binarypackagerelease =
                BinaryPackageFile.binarypackagerelease AND
            BinaryPackageFile.filetype = %s
            """ % sqlvalues(BinaryPackageFileType.DEB))
    if time_interval is not None:
        start, end = time_interval
        assert end > start, '%s is not more recent than %s' % (end, start)
        fragments.append(
            "datepublished >= %s AND datepublished < %s"
            % sqlvalues(start, end))
    return BinaryPackagePublishingHistory.selectFirst(
        " AND ".join(fragments), clauseTables=['BinaryPackageFile'],
        orderBy='-datepublished')
def judge(self, distroseries, pocket):
    """Judge superseded sources and binaries."""
    # Candidates are inactive publications that have neither been
    # scheduled for deletion nor already removed.
    source_clause = """
        sourcepackagepublishinghistory.distroseries = %s AND
        sourcepackagepublishinghistory.archive = %s AND
        sourcepackagepublishinghistory.pocket = %s AND
        sourcepackagepublishinghistory.status IN %s AND
        sourcepackagepublishinghistory.scheduleddeletiondate is NULL AND
        sourcepackagepublishinghistory.dateremoved is NULL
        """ % sqlvalues(
            distroseries, self.archive, pocket, inactive_publishing_status)
    sources = SourcePackagePublishingHistory.select(source_clause)

    # Binaries join through DistroArchSeries to reach the distroseries.
    binary_clause = """
        binarypackagepublishinghistory.distroarchseries =
            distroarchseries.id AND
        distroarchseries.distroseries = %s AND
        binarypackagepublishinghistory.archive = %s AND
        binarypackagepublishinghistory.pocket = %s AND
        binarypackagepublishinghistory.status IN %s AND
        binarypackagepublishinghistory.scheduleddeletiondate is NULL AND
        binarypackagepublishinghistory.dateremoved is NULL
        """ % sqlvalues(
            distroseries, self.archive, pocket, inactive_publishing_status)
    binaries = BinaryPackagePublishingHistory.select(
        binary_clause, clauseTables=['DistroArchSeries'])

    self._judgeSuperseded(sources, binaries)
def getPendingPublications(self, archive, pocket, is_careful):
    """See `ICanPublishPackages`."""
    from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
    # PENDING publications are always candidates; careful mode also
    # re-processes records already marked PUBLISHED.
    statuses = [PackagePublishingStatus.PENDING]
    if is_careful:
        statuses.append(PackagePublishingStatus.PUBLISHED)
    clauses = [
        "distroarchseries = %s AND archive = %s" % sqlvalues(self, archive),
        "status IN %s" % sqlvalues(statuses),
        # Restrict to a specific pocket.
        "pocket = %s" % sqlvalues(pocket),
        ]
    # Exclude RELEASE pocket if the distroseries was already released,
    # since it should not change, unless the archive allows it.
    if (not self.distroseries.isUnstable() and
            not archive.allowUpdatesToReleasePocket()):
        clauses.append(
            "pocket != %s" % sqlvalues(PackagePublishingPocket.RELEASE))
    return BinaryPackagePublishingHistory.select(
        " AND ".join(clauses), orderBy=["-id"])
def judge(self, distroseries, pocket):
    """Judge superseded sources and binaries."""
    # Both queries share the same substitution parameters.
    params = sqlvalues(
        distroseries, self.archive, pocket, inactive_publishing_status)
    # Inactive source publications not yet scheduled or removed.
    sources = SourcePackagePublishingHistory.select("""
        sourcepackagepublishinghistory.distroseries = %s AND
        sourcepackagepublishinghistory.archive = %s AND
        sourcepackagepublishinghistory.pocket = %s AND
        sourcepackagepublishinghistory.status IN %s AND
        sourcepackagepublishinghistory.scheduleddeletiondate is NULL AND
        sourcepackagepublishinghistory.dateremoved is NULL
        """ % params)
    # Inactive binary publications, joined through DistroArchSeries.
    binaries = BinaryPackagePublishingHistory.select("""
        binarypackagepublishinghistory.distroarchseries =
            distroarchseries.id AND
        distroarchseries.distroseries = %s AND
        binarypackagepublishinghistory.archive = %s AND
        binarypackagepublishinghistory.pocket = %s AND
        binarypackagepublishinghistory.status IN %s AND
        binarypackagepublishinghistory.scheduleddeletiondate is NULL AND
        binarypackagepublishinghistory.dateremoved is NULL
        """ % params, clauseTables=['DistroArchSeries'])
    self._judgeSuperseded(sources, binaries)
def A2_markPocketsWithDeletionsDirty(self):
    """An intermediate step in publishing to detect deleted packages.

    Mark pockets containing deleted packages (status DELETED or
    OBSOLETE), scheduleddeletiondate NULL and dateremoved NULL as
    dirty, to ensure that they are processed in death row.
    """
    self.log.debug("* Step A2: Mark pockets with deletions as dirty")

    # Query part that is common to both queries below.
    base_query = """
        archive = %s AND
        status = %s AND
        scheduleddeletiondate IS NULL AND
        dateremoved is NULL
        """ % sqlvalues(self.archive, PackagePublishingStatus.DELETED)

    # Walk every (distroseries, pocket) pair and dirty those that
    # have publications waiting to be deleted.
    for distroseries in self.distro.series:
        for pocket in self.archive.getPockets():
            if (self.cannotModifySuite(distroseries, pocket)
                    or not self.isAllowed(distroseries, pocket)):
                # Never dirty a release pocket of a stable
                # distroseries, no matter what earlier bugs did.
                continue

            # Source publications pending deletion in this suite.
            source_query = " AND ".join([
                base_query,
                "pocket = %s" % sqlvalues(pocket),
                "distroseries = %s" % sqlvalues(distroseries),
                ])
            sources = SourcePackagePublishingHistory.select(source_query)
            if not sources.is_empty():
                self.markPocketDirty(distroseries, pocket)
                # No need to check binaries if the pocket is already
                # dirtied from a source.
                continue

            # Binary publications pending deletion in this suite,
            # joined through DistroArchSeries.
            binary_query = " AND ".join([
                base_query,
                "pocket = %s" % sqlvalues(pocket),
                "DistroArchSeries = DistroArchSeries.id",
                "DistroArchSeries.distroseries = %s"
                    % sqlvalues(distroseries),
                ])
            binaries = BinaryPackagePublishingHistory.select(
                binary_query, clauseTables=['DistroArchSeries'])
            if not binaries.is_empty():
                self.markPocketDirty(distroseries, pocket)
def publishing_history(self):
    """See `IDistroArchSeriesBinaryPackage`."""
    # All publications of this release on this architecture across
    # the distribution's main archives, newest first.
    clause = """
        distroarchseries = %s AND
        archive IN %s AND
        binarypackagerelease = %s
        """ % sqlvalues(
            self.distroarchseries,
            self.distribution.all_distro_archive_ids,
            self.binarypackagerelease)
    return BinaryPackagePublishingHistory.select(
        clause, orderBy=["-datecreated", "-id"])
def publishing_history(self):
    """See `IDistroArchSeriesBinaryPackage`."""
    where = (
        "distroarchseries = %s AND "
        "archive IN %s AND "
        "binarypackagerelease = %s" % sqlvalues(
            self.distroarchseries,
            self.distribution.all_distro_archive_ids,
            self.binarypackagerelease))
    # Newest publications first; id breaks datecreated ties.
    return BinaryPackagePublishingHistory.select(
        where, orderBy=['-datecreated', '-id'])
def publish(self, binarypackage, bpdata):
    """Create the publishing entry on db if does not exist."""
    # Overrides are pulled from the binary package data (this
    # specific distroseries), not from the binary package release,
    # which reflects data from when the package was first built.
    if self.component_override is not None:
        component = self.distro_handler.getComponentByName(
            self.component_override)
        log.info('Overriding binary %s component'
                 % binarypackage.binarypackagename.name)
    else:
        component = self.distro_handler.getComponentByName(
            bpdata.component)
    distribution = self.distroarchseries.distroseries.distribution
    archive = distribution.getArchiveByComponent(component.name)
    section = self.distro_handler.ensureSection(bpdata.section)
    priority = prioritymap[bpdata.priority]

    # If the binarypackage is already published, just report it.
    existing = self._checkPublishing(binarypackage)
    if existing:
        current_overrides = (
            existing.section, existing.priority, existing.component)
        if current_overrides == (section, priority, component):
            # Nothing changed in terms of publication (overrides),
            # so we are free to let this one go.
            log.info('BinaryPackageRelease already published with no '
                     'changes as %s' % existing.status.title)
            return

    # Instantiating the record publishes it (row is created on
    # construction); the object itself is not needed afterwards.
    BinaryPackagePublishingHistory(
        binarypackagerelease=binarypackage.id,
        binarypackagename=binarypackage.binarypackagename,
        component=component.id,
        section=section.id,
        priority=priority,
        distroarchseries=self.distroarchseries.id,
        status=PackagePublishingStatus.PUBLISHED,
        datecreated=UTC_NOW,
        datepublished=UTC_NOW,
        pocket=self.pocket,
        datesuperseded=None,
        supersededby=None,
        datemadepending=None,
        dateremoved=None,
        archive=archive)

    log.info('BinaryPackage %s-%s published into %s.' % (
        binarypackage.binarypackagename.name, binarypackage.version,
        self.distroarchseries.architecturetag))
def updatePackageCount(self):
    """See `IDistroArchSeries`."""
    from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
    # Count binaries PUBLISHED in the RELEASE pocket of the
    # distribution's main archives for this architecture.
    conditions = """
        BinaryPackagePublishingHistory.distroarchseries = %s AND
        BinaryPackagePublishingHistory.archive IN %s AND
        BinaryPackagePublishingHistory.status = %s AND
        BinaryPackagePublishingHistory.pocket = %s
        """ % sqlvalues(
            self,
            self.distroseries.distribution.all_distro_archive_ids,
            PackagePublishingStatus.PUBLISHED,
            PackagePublishingPocket.RELEASE)
    self.package_count = BinaryPackagePublishingHistory.select(
        conditions).count()
def _checkPublishing(self, binarypackage):
    """Return the current active publishing entry, if any.

    :param binarypackage: the `BinaryPackageRelease` to look up.
    :return: the newest (by datecreated) active
        `BinaryPackagePublishingHistory` record for this architecture
        in its main archive, or None when the package is not actively
        published.
    """
    # selectFirst with a descending sort fetches only the newest row
    # (and returns None when there is none), instead of materializing
    # the whole result set with list() just to take element zero.
    return BinaryPackagePublishingHistory.selectFirst("""
        binarypackagerelease = %s AND
        distroarchseries = %s AND
        archive = %s AND
        status in %s""" % sqlvalues(
            binarypackage, self.distroarchseries,
            self.distroarchseries.main_archive, active_publishing_status),
        orderBy=["-datecreated"])
def _checkPublishing(self, binarypackage):
    """Query for the publishing entry"""
    results = BinaryPackagePublishingHistory.select("""
        binarypackagerelease = %s AND
        distroarchseries = %s AND
        archive = %s AND
        status in %s""" % sqlvalues(
            binarypackage, self.distroarchseries,
            self.distroarchseries.main_archive, active_publishing_status),
        orderBy=["-datecreated"])
    # The descending sort puts the newest publication first.
    entries = list(results)
    if not entries:
        return None
    return entries[0]
def _latest_publishing_record(self, status=None):
    # Base restriction: this release on this architecture in any of
    # the distribution's main archives.
    clause = """
        binarypackagerelease = %s AND
        distroarchseries = %s AND
        archive IN %s
        """ % sqlvalues(
            self.binarypackagerelease, self.distroarchseries,
            self.distribution.all_distro_archive_ids)
    if status is not None:
        # Accept either a single status or a sequence of them.
        if not isinstance(status, (tuple, list)):
            status = [status]
        clause += " AND status IN %s" % sqlvalues(status)
    return BinaryPackagePublishingHistory.selectFirst(
        clause, orderBy=["-datecreated", "-id"])
def _latest_publishing_record(self, status=None):
    # Restrict to this release/architecture in the main archives.
    conditions = """
        binarypackagerelease = %s AND
        distroarchseries = %s AND
        archive IN %s
        """ % sqlvalues(
            self.binarypackagerelease, self.distroarchseries,
            self.distribution.all_distro_archive_ids)
    if status is not None:
        # Normalize a lone status into a one-element list.
        statuses = status if isinstance(status, (tuple, list)) else [status]
        conditions += " AND status IN %s" % sqlvalues(statuses)
    # Newest record wins; id breaks datecreated ties.
    return BinaryPackagePublishingHistory.selectFirst(
        conditions, orderBy=['-datecreated', '-id'])
def updatePackageCount(self):
    """See `IDistroArchSeries`."""
    from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
    # Cache the number of binaries PUBLISHED in the RELEASE pocket
    # for this architecture across the main archives.
    where = """
        BinaryPackagePublishingHistory.distroarchseries = %s AND
        BinaryPackagePublishingHistory.archive IN %s AND
        BinaryPackagePublishingHistory.status = %s AND
        BinaryPackagePublishingHistory.pocket = %s
        """ % sqlvalues(
            self,
            self.distroseries.distribution.all_distro_archive_ids,
            PackagePublishingStatus.PUBLISHED,
            PackagePublishingPocket.RELEASE)
    publications = BinaryPackagePublishingHistory.select(where)
    self.package_count = publications.count()
def _collectCondemned(self):
    """Return the condemned source and binary publications as a tuple.

    Return all the `SourcePackagePublishingHistory` and
    `BinaryPackagePublishingHistory` records that are eligible for
    removal ('condemned') where the source/binary package that they
    refer to is not published somewhere else.

    Both sources and binaries are lists.
    """
    # Sources past their scheduled deletion date, not yet removed,
    # with no remaining non-inactive publication of the same
    # sourcepackagerelease in this archive.
    source_where = """
        SourcePackagePublishingHistory.archive = %s AND
        SourcePackagePublishingHistory.scheduleddeletiondate < %s AND
        SourcePackagePublishingHistory.dateremoved IS NULL AND
        NOT EXISTS (
            SELECT 1 FROM sourcepackagepublishinghistory as spph
            WHERE SourcePackagePublishingHistory.sourcepackagerelease =
                spph.sourcepackagerelease AND
                spph.archive = %s AND
                spph.status NOT IN %s)
        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
                        inactive_publishing_status)
    sources = SourcePackagePublishingHistory.select(
        source_where, orderBy="id")
    self.logger.debug("%d Sources" % sources.count())

    # Same criteria for binaries, keyed on binarypackagerelease.
    binary_where = """
        BinaryPackagePublishingHistory.archive = %s AND
        BinaryPackagePublishingHistory.scheduleddeletiondate < %s AND
        BinaryPackagePublishingHistory.dateremoved IS NULL AND
        NOT EXISTS (
            SELECT 1 FROM binarypackagepublishinghistory as bpph
            WHERE BinaryPackagePublishingHistory.binarypackagerelease =
                bpph.binarypackagerelease AND
                bpph.archive = %s AND
                bpph.status NOT IN %s)
        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
                        inactive_publishing_status)
    binaries = BinaryPackagePublishingHistory.select(
        binary_where, orderBy="id")
    self.logger.debug("%d Binaries" % binaries.count())

    return (sources, binaries)
def getPublicationsForDistroseries(self, distroseries=None):
    """Return a tuple of sources, binaries published in distroseries."""
    if distroseries is None:
        distroseries = self.warty
    # Sources are keyed directly on the distroseries.
    source_clause = """
        distroseries = %s AND
        status = %s AND
        archive IN %s
        """ % sqlvalues(distroseries, PackagePublishingStatus.PUBLISHED,
                        self.main_archive_ids)
    sources = SourcePackagePublishingHistory.select(source_clause)
    # Binaries reach the distroseries via DistroArchSeries.
    binary_clause = """
        BinaryPackagePublishingHistory.distroarchseries =
            DistroArchSeries.id AND
        DistroArchSeries.DistroSeries = DistroSeries.id AND
        DistroSeries.id = %s AND
        BinaryPackagePublishingHistory.status = %s AND
        BinaryPackagePublishingHistory.archive IN %s
        """ % sqlvalues(distroseries, PackagePublishingStatus.PUBLISHED,
                        self.main_archive_ids)
    binaries = BinaryPackagePublishingHistory.select(
        binary_clause, clauseTables=["DistroArchSeries", "DistroSeries"])
    return (sources, binaries)
def getReleasedPackages(self, binary_name, pocket=None,
                        include_pending=False, archive=None):
    """See IDistroArchSeries."""
    from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
    # Accept either a name string or an IBinaryPackageName.
    if not IBinaryPackageName.providedBy(binary_name):
        binary_name = BinaryPackageName.byName(binary_name)
    clauses = ["""
        binarypackagerelease=binarypackagerelease.id AND
        binarypackagerelease.binarypackagename=%s AND
        distroarchseries = %s
        """ % sqlvalues(binary_name, self)]
    if pocket is not None:
        clauses.append("pocket=%s" % sqlvalues(pocket.value))
    # PENDING records count as released only when explicitly asked.
    if include_pending:
        clauses.append("status in (%s, %s)" % sqlvalues(
            PackagePublishingStatus.PUBLISHED,
            PackagePublishingStatus.PENDING))
    else:
        clauses.append("status=%s" % sqlvalues(
            PackagePublishingStatus.PUBLISHED))
    archives = self.distroseries.distribution.getArchiveIDList(archive)
    clauses.append("archive IN %s" % sqlvalues(archives))
    result = BinaryPackagePublishingHistory.select(
        " AND ".join(clauses), clauseTables=['BinaryPackageRelease'],
        orderBy=['-id'])
    return shortlist(result)
def getPublicationsForDistroseries(self, distroseries=None):
    """Return a tuple of sources, binaries published in distroseries."""
    # Default to the warty fixture series.
    series = self.warty if distroseries is None else distroseries
    published_sources = SourcePackagePublishingHistory.select("""
        distroseries = %s AND
        status = %s AND
        archive IN %s
        """ % sqlvalues(series, PackagePublishingStatus.PUBLISHED,
                        self.main_archive_ids))
    # Binaries are joined through DistroArchSeries and DistroSeries.
    published_binaries = BinaryPackagePublishingHistory.select("""
        BinaryPackagePublishingHistory.distroarchseries =
            DistroArchSeries.id AND
        DistroArchSeries.DistroSeries = DistroSeries.id AND
        DistroSeries.id = %s AND
        BinaryPackagePublishingHistory.status = %s AND
        BinaryPackagePublishingHistory.archive IN %s
        """ % sqlvalues(series, PackagePublishingStatus.PUBLISHED,
                        self.main_archive_ids),
        clauseTables=["DistroArchSeries", "DistroSeries"])
    return (published_sources, published_binaries)
def getReleasedPackages(self, binary_name, pocket=None,
                        include_pending=False, archive=None):
    """See IDistroArchSeries."""
    from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
    # Resolve a bare name string into a BinaryPackageName row.
    if not IBinaryPackageName.providedBy(binary_name):
        binary_name = BinaryPackageName.byName(binary_name)
    # Which statuses count as "released"?
    if include_pending:
        status_clause = "status in (%s, %s)" % sqlvalues(
            PackagePublishingStatus.PUBLISHED,
            PackagePublishingStatus.PENDING)
    else:
        status_clause = "status=%s" % sqlvalues(
            PackagePublishingStatus.PUBLISHED)
    conditions = ["""
        binarypackagerelease=binarypackagerelease.id AND
        binarypackagerelease.binarypackagename=%s AND
        distroarchseries = %s
        """ % sqlvalues(binary_name, self)]
    if pocket is not None:
        conditions.append("pocket=%s" % sqlvalues(pocket.value))
    conditions.append(status_clause)
    archives = self.distroseries.distribution.getArchiveIDList(archive)
    conditions.append("archive IN %s" % sqlvalues(archives))
    published = BinaryPackagePublishingHistory.select(
        " AND ".join(conditions), clauseTables=['BinaryPackageRelease'],
        orderBy=['-id'])
    return shortlist(published)
def createPublishingForDistroArchSeries(self, binarypackagerelease,
                                        distroarchseries):
    """Return a list of `BinaryPackagePublishingHistory`.

    The publishing records are created according to the given
    `BinaryPackageRelease` and `DistroArchSeries` for all
    (status, archive, pocket) returned from `sample_publishing_data`.
    """
    records = []
    for status, archive, pocket in self.sample_publishing_data:
        # Records are created one by one so the callsites can rely on
        # a stable, ascending database ID order.
        records.append(BinaryPackagePublishingHistory(
            binarypackagerelease=binarypackagerelease,
            binarypackagename=binarypackagerelease.binarypackagename,
            distroarchseries=distroarchseries,
            component=binarypackagerelease.component,
            section=binarypackagerelease.section,
            priority=binarypackagerelease.priority,
            status=status,
            archive=archive,
            pocket=pocket))
    return records
def _judgeSuperseded(self, source_records, binary_records):
    """Determine whether the superseded packages supplied should
    be moved to death row or not.

    Currently this is done by assuming that any superseded binary
    package should be removed.  In the future this should attempt
    to supersede binaries in build-sized chunks only, bug 55030.

    Superseded source packages are considered removable when they
    have no binaries in this distroseries which are published or
    superseded.

    When a package is considered for death row it is given a
    'scheduled deletion date' of now plus the defined 'stay of
    execution' time provided in the configuration parameter.
    """
    self.logger.debug("Beginning superseded processing...")

    # Every superseded binary is condemned unconditionally.
    for pub_record in binary_records:
        binpkg_release = pub_record.binarypackagerelease
        self.logger.debug(
            "%s/%s (%s) has been judged eligible for removal",
            binpkg_release.binarypackagename.name, binpkg_release.version,
            pub_record.distroarchseries.architecturetag)
        self._setScheduledDeletionDate(pub_record)
        # XXX cprov 20070820: 'datemadepending' is useless, since it's
        # always equals to "scheduleddeletiondate - quarantine".
        pub_record.datemadepending = UTC_NOW

    for pub_record in source_records:
        srcpkg_release = pub_record.sourcepackagerelease
        # Find every binary built from this SourcePackageRelease that
        # is or has been in this distroseries and is not yet removed
        # or scheduled for removal.
        considered_binaries = BinaryPackagePublishingHistory.select("""
            binarypackagepublishinghistory.distroarchseries =
                distroarchseries.id AND
            binarypackagepublishinghistory.scheduleddeletiondate
                IS NULL AND
            binarypackagepublishinghistory.dateremoved IS NULL AND
            binarypackagepublishinghistory.archive = %s AND
            binarypackagebuild.source_package_release = %s AND
            distroarchseries.distroseries = %s AND
            binarypackagepublishinghistory.binarypackagerelease =
                binarypackagerelease.id AND
            binarypackagerelease.build = binarypackagebuild.id AND
            binarypackagepublishinghistory.pocket = %s
            """ % sqlvalues(self.archive, srcpkg_release,
                            pub_record.distroseries, pub_record.pocket),
            clauseTables=['DistroArchSeries', 'BinaryPackageRelease',
                          'BinaryPackageBuild'])

        if not considered_binaries.is_empty():
            # At least one non-removed binary remains.  We may still
            # remove *this* record if another PUBLISHED record exists
            # for the same spr (e.g. a move between components).
            published = SourcePackagePublishingHistory.selectBy(
                distroseries=pub_record.distroseries,
                pocket=pub_record.pocket,
                status=PackagePublishingStatus.PUBLISHED,
                archive=self.archive,
                sourcepackagereleaseID=srcpkg_release.id)
            if published.is_empty():
                # Nothing has taken over for this spr; leave it for
                # consideration next time.
                continue

        # No unremoved binaries stand in the way: condemn the source.
        self.logger.debug(
            "%s/%s (%s) source has been judged eligible for removal",
            srcpkg_release.sourcepackagename.name, srcpkg_release.version,
            pub_record.id)
        self._setScheduledDeletionDate(pub_record)
        # XXX cprov 20070820: 'datemadepending' is pointless, since it's
        # always equals to "scheduleddeletiondate - quarantine".
        pub_record.datemadepending = UTC_NOW
def _judgeSuperseded(self, source_records, binary_records):
    """Determine whether the superseded packages supplied should
    be moved to death row or not.

    Currently this is done by assuming that any superseded binary
    package should be removed.  In the future this should attempt
    to supersede binaries in build-sized chunks only, bug 55030.

    Superseded source packages are considered removable when they
    have no binaries in this distroseries which are published or
    superseded.

    When a package is considered for death row it is given a
    'scheduled deletion date' of now plus the defined 'stay of
    execution' time provided in the configuration parameter.
    """
    self.logger.debug("Beginning superseded processing...")

    # Binaries: condemned unconditionally.
    for bin_pub in binary_records:
        release = bin_pub.binarypackagerelease
        self.logger.debug(
            "%s/%s (%s) has been judged eligible for removal",
            release.binarypackagename.name, release.version,
            bin_pub.distroarchseries.architecturetag)
        self._setScheduledDeletionDate(bin_pub)
        # XXX cprov 20070820: 'datemadepending' is useless, since it's
        # always equals to "scheduleddeletiondate - quarantine".
        bin_pub.datemadepending = UTC_NOW

    # Sources: only condemned when no live binaries remain, or when
    # another PUBLISHED record exists for the same release.
    for src_pub in source_records:
        release = src_pub.sourcepackagerelease
        # Binaries of this SourcePackageRelease still present in this
        # distroseries/pocket and not removed or pending removal.
        live_binaries = BinaryPackagePublishingHistory.select("""
            binarypackagepublishinghistory.distroarchseries =
                distroarchseries.id AND
            binarypackagepublishinghistory.scheduleddeletiondate
                IS NULL AND
            binarypackagepublishinghistory.dateremoved IS NULL AND
            binarypackagepublishinghistory.archive = %s AND
            binarypackagebuild.source_package_release = %s AND
            distroarchseries.distroseries = %s AND
            binarypackagepublishinghistory.binarypackagerelease =
                binarypackagerelease.id AND
            binarypackagerelease.build = binarypackagebuild.id AND
            binarypackagepublishinghistory.pocket = %s
            """ % sqlvalues(self.archive, release,
                            src_pub.distroseries, src_pub.pocket),
            clauseTables=['DistroArchSeries', 'BinaryPackageRelease',
                          'BinaryPackageBuild'])

        if not live_binaries.is_empty():
            # Some binary is still alive; removal of this record is
            # only safe when another PUBLISHED record for the spr has
            # taken over (as happens on a component move).
            published = SourcePackagePublishingHistory.selectBy(
                distroseries=src_pub.distroseries,
                pocket=src_pub.pocket,
                status=PackagePublishingStatus.PUBLISHED,
                archive=self.archive,
                sourcepackagereleaseID=release.id)
            if published.is_empty():
                # No take-over yet; revisit on the next run.
                continue

        self.logger.debug(
            "%s/%s (%s) source has been judged eligible for removal",
            release.sourcepackagename.name, release.version,
            src_pub.id)
        self._setScheduledDeletionDate(src_pub)
        # XXX cprov 20070820: 'datemadepending' is pointless, since it's
        # always equals to "scheduleddeletiondate - quarantine".
        src_pub.datemadepending = UTC_NOW
def testObsoleteDistroseriesWorks(self):
    """Make sure the required publications are obsoleted."""
    obsoleter = self.getObsoleter()
    self.warty.status = SeriesStatus.OBSOLETE
    # Get all the published sources in warty.
    published_sources, published_binaries = (
        self.getPublicationsForDistroseries())
    # Assert that none of them is obsolete yet.  assertEqual /
    # assertIsNone (rather than assertTrue on a comparison) make the
    # failure messages show the actual offending value.
    self.assertFalse(published_sources.is_empty())
    self.assertFalse(published_binaries.is_empty())
    for source in published_sources:
        self.assertEqual(source.status, PackagePublishingStatus.PUBLISHED)
        self.assertIsNone(source.scheduleddeletiondate)
    for binary in published_binaries:
        self.assertEqual(binary.status, PackagePublishingStatus.PUBLISHED)
        self.assertIsNone(binary.scheduleddeletiondate)
    # Keep their DB IDs for later.
    source_ids = [source.id for source in published_sources]
    binary_ids = [binary.id for binary in published_binaries]
    # Make them obsolete.
    obsoleter.mainTask()
    self.layer.txn.commit()
    # Now see if the modified publications have been correctly obsoleted.
    # We need to re-fetch the published_sources and published_binaries
    # because the existing objects are not valid through a transaction.
    for id in source_ids:
        source = SourcePackagePublishingHistory.get(id)
        self.assertEqual(source.status, PackagePublishingStatus.OBSOLETE)
        self.assertIsNotNone(source.scheduleddeletiondate)
    for id in binary_ids:
        binary = BinaryPackagePublishingHistory.get(id)
        self.assertEqual(binary.status, PackagePublishingStatus.OBSOLETE)
        self.assertIsNotNone(binary.scheduleddeletiondate)
    # Make sure nothing else was obsoleted.  Subtract the set of
    # known OBSOLETE IDs from the set of all the IDs and assert that
    # the remainder are not OBSOLETE.
    all_sources = SourcePackagePublishingHistory.select(True)
    all_binaries = BinaryPackagePublishingHistory.select(True)
    all_source_ids = [source.id for source in all_sources]
    all_binary_ids = [binary.id for binary in all_binaries]
    remaining_source_ids = set(all_source_ids) - set(source_ids)
    remaining_binary_ids = set(all_binary_ids) - set(binary_ids)
    for id in remaining_source_ids:
        source = SourcePackagePublishingHistory.get(id)
        self.assertNotEqual(source.status, PackagePublishingStatus.OBSOLETE)
    for id in remaining_binary_ids:
        binary = BinaryPackagePublishingHistory.get(id)
        self.assertNotEqual(binary.status, PackagePublishingStatus.OBSOLETE)