    def testObsoleteDistroseriesWorks(self):
        """Make sure the required publications are obsoleted."""
        obsoleter = self.getObsoleter()
        self.warty.status = SeriesStatus.OBSOLETE

        # Get all the published sources in warty.
        published_sources, published_binaries = (
            self.getPublicationsForDistroseries())

        # Assert that none of them is obsolete yet:
        self.assertFalse(published_sources.is_empty())
        self.assertFalse(published_binaries.is_empty())
        for source in published_sources:
            self.assertTrue(source.status == PackagePublishingStatus.PUBLISHED)
            self.assertTrue(source.scheduleddeletiondate is None)
        for binary in published_binaries:
            self.assertTrue(binary.status == PackagePublishingStatus.PUBLISHED)
            self.assertTrue(binary.scheduleddeletiondate is None)

        # Keep their DB IDs for later.
        source_ids = [source.id for source in published_sources]
        binary_ids = [binary.id for binary in published_binaries]

        # Make them obsolete.
        obsoleter.mainTask()
        self.layer.txn.commit()

        # Now see if the modified publications have been correctly obsoleted.
        # We need to re-fetch the published_sources and published_binaries
        # because the existing objects are not valid across the
        # transaction boundary.
        for id in source_ids:
            source = SourcePackagePublishingHistory.get(id)
            self.assertTrue(source.status == PackagePublishingStatus.OBSOLETE)
            self.assertTrue(source.scheduleddeletiondate is not None)
        for id in binary_ids:
            binary = BinaryPackagePublishingHistory.get(id)
            self.assertTrue(binary.status == PackagePublishingStatus.OBSOLETE)
            self.assertTrue(binary.scheduleddeletiondate is not None)

        # Make sure nothing else was obsoleted.  Subtract the set of
        # known OBSOLETE IDs from the set of all the IDs and assert that
        # the remainder are not OBSOLETE.
        all_sources = SourcePackagePublishingHistory.select(True)
        all_binaries = BinaryPackagePublishingHistory.select(True)
        all_source_ids = [source.id for source in all_sources]
        all_binary_ids = [binary.id for binary in all_binaries]

        remaining_source_ids = set(all_source_ids) - set(source_ids)
        remaining_binary_ids = set(all_binary_ids) - set(binary_ids)

        for id in remaining_source_ids:
            source = SourcePackagePublishingHistory.get(id)
            self.assertTrue(source.status != PackagePublishingStatus.OBSOLETE)
        for id in remaining_binary_ids:
            binary = BinaryPackagePublishingHistory.get(id)
            self.assertTrue(binary.status != PackagePublishingStatus.OBSOLETE)
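
A small refactoring sketch for the assertions above: after the commit, every check re-fetches a publication by its ID and compares the status, so the four loops could share a single helper. This assumes the same test-case context as testObsoleteDistroseriesWorks; the helper name assertPublicationsHaveStatus is hypothetical.

    def assertPublicationsHaveStatus(self, klass, ids, status):
        """Re-fetch each publication by ID and assert it carries `status`."""
        # klass is SourcePackagePublishingHistory or
        # BinaryPackagePublishingHistory; .get() is the same lookup used in
        # the test above.
        for id in ids:
            publication = klass.get(id)
            self.assertEqual(status, publication.status)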
Example No. 2
    def judge(self, distroseries, pocket):
        """Judge superseded sources and binaries."""
        sources = SourcePackagePublishingHistory.select("""
            sourcepackagepublishinghistory.distroseries = %s AND
            sourcepackagepublishinghistory.archive = %s AND
            sourcepackagepublishinghistory.pocket = %s AND
            sourcepackagepublishinghistory.status IN %s AND
            sourcepackagepublishinghistory.scheduleddeletiondate is NULL AND
            sourcepackagepublishinghistory.dateremoved is NULL
            """ % sqlvalues(
                distroseries, self.archive, pocket,
                inactive_publishing_status))

        binaries = BinaryPackagePublishingHistory.select("""
            binarypackagepublishinghistory.distroarchseries =
                distroarchseries.id AND
            distroarchseries.distroseries = %s AND
            binarypackagepublishinghistory.archive = %s AND
            binarypackagepublishinghistory.pocket = %s AND
            binarypackagepublishinghistory.status IN %s AND
            binarypackagepublishinghistory.scheduleddeletiondate is NULL AND
            binarypackagepublishinghistory.dateremoved is NULL
            """ % sqlvalues(
                distroseries, self.archive, pocket,
                inactive_publishing_status),
            clauseTables=['DistroArchSeries'])

        self._judgeSuperseded(sources, binaries)
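
A hedged sketch of how a caller might drive judge() across every pocket of a series before collecting death-row candidates. The loop shape mirrors the pocket iteration used in A2_markPocketsWithDeletionsDirty further down; judgeSupersededInSeries is a hypothetical wrapper name.

    def judgeSupersededInSeries(self, distroseries):
        """Hypothetical driver: judge superseded publications per pocket."""
        # getPockets() is the same archive call used elsewhere in this
        # listing; the real pipeline may iterate pockets differently.
        for pocket in self.archive.getPockets():
            self.judge(distroseries, pocket)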
 def latest_overall_publication(self):
     """See `IDistributionSourcePackage`."""
     # XXX kiko 2008-06-03: This is magical code that finds the
     # latest relevant publication. It relies on ordering of status
     # and pocket enum values, which is arguably evil but much faster
     # than CASE sorting; at any rate this can be fixed when
     # https://bugs.launchpad.net/soyuz/+bug/236922 is fixed.
     spph = SourcePackagePublishingHistory.selectFirst("""
         SourcePackagePublishingHistory.distroseries = DistroSeries.id AND
         DistroSeries.distribution = %s AND
         SourcePackagePublishingHistory.sourcepackagename = %s AND
         SourcePackagePublishingHistory.archive IN %s AND
         pocket NOT IN (%s, %s) AND
         status in (%s, %s)""" %
             sqlvalues(self.distribution,
                       self.sourcepackagename,
                       self.distribution.all_distro_archive_ids,
                       PackagePublishingPocket.PROPOSED,
                       PackagePublishingPocket.BACKPORTS,
                       PackagePublishingStatus.PUBLISHED,
                       PackagePublishingStatus.OBSOLETE),
         clauseTables=["SourcePackagePublishingHistory",
                       "DistroSeries"],
         orderBy=["status",
                  SQLConstant(
                     "to_number(DistroSeries.version, '99.99') DESC"),
                  "-pocket"])
     return spph
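
A standalone toy illustration of the ordering trick described in the comment above: if a "better" status has a smaller enum value and a later pocket has a larger one, a single composite sort key picks the preferred publication without CASE expressions. The numeric values below are invented for the example and are not Launchpad's real enum values.

# Hypothetical (status, series_version, pocket) rows; lower status means
# more preferred (e.g. PUBLISHED before OBSOLETE), higher pocket means a
# later pocket (e.g. UPDATES after RELEASE).
rows = [
    (2, 9.10, 0),   # OBSOLETE in an old series
    (1, 10.04, 0),  # PUBLISHED in a newer series, RELEASE pocket
    (1, 10.04, 3),  # PUBLISHED in the same series, a later pocket
]

# Mirror of orderBy=["status", "version DESC", "-pocket"]:
# status ascending, series version descending, pocket descending.
rows.sort(key=lambda row: (row[0], -row[1], -row[2]))
print(rows[0])  # -> (1, 10.04, 3)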
Example No. 4
 def probeRemoved(self, pubrec_ids):
     """Check if all source publishing records were removed."""
     right_now = datetime.datetime.now(pytz.timezone('UTC'))
     for pubrec_id in pubrec_ids:
         spph = SourcePackagePublishingHistory.get(pubrec_id)
         self.assertTrue(spph.dateremoved < right_now,
                         "ID %s -> not removed" % (spph.id))
 def probeNotRemoved(self, pubrec_ids):
     """Check if all source publishing records were not removed."""
     for pubrec_id in pubrec_ids:
         spph = SourcePackagePublishingHistory.get(pubrec_id)
         self.assertTrue(
             spph.dateremoved is None,
             "ID %s -> removed" % (spph.id))
Example No. 6
    def judge(self, distroseries, pocket):
        """Judge superseded sources and binaries."""
        sources = SourcePackagePublishingHistory.select("""
            sourcepackagepublishinghistory.distroseries = %s AND
            sourcepackagepublishinghistory.archive = %s AND
            sourcepackagepublishinghistory.pocket = %s AND
            sourcepackagepublishinghistory.status IN %s AND
            sourcepackagepublishinghistory.scheduleddeletiondate is NULL AND
            sourcepackagepublishinghistory.dateremoved is NULL
            """ % sqlvalues(distroseries, self.archive, pocket,
                            inactive_publishing_status))

        binaries = BinaryPackagePublishingHistory.select(
            """
            binarypackagepublishinghistory.distroarchseries =
                distroarchseries.id AND
            distroarchseries.distroseries = %s AND
            binarypackagepublishinghistory.archive = %s AND
            binarypackagepublishinghistory.pocket = %s AND
            binarypackagepublishinghistory.status IN %s AND
            binarypackagepublishinghistory.scheduleddeletiondate is NULL AND
            binarypackagepublishinghistory.dateremoved is NULL
            """ % sqlvalues(distroseries, self.archive, pocket,
                            inactive_publishing_status),
            clauseTables=['DistroArchSeries'])

        self._judgeSuperseded(sources, binaries)
 def latest_overall_publication(self):
     """See `IDistributionSourcePackage`."""
     # XXX kiko 2008-06-03: This is magical code that finds the
     # latest relevant publication. It relies on ordering of status
     # and pocket enum values, which is arguably evil but much faster
     # than CASE sorting; at any rate this can be fixed when
     # https://bugs.launchpad.net/soyuz/+bug/236922 is fixed.
     spph = SourcePackagePublishingHistory.selectFirst(
         """
         SourcePackagePublishingHistory.distroseries = DistroSeries.id AND
         DistroSeries.distribution = %s AND
         SourcePackagePublishingHistory.sourcepackagename = %s AND
         SourcePackagePublishingHistory.archive IN %s AND
         pocket NOT IN (%s, %s) AND
         status in (%s, %s)""" %
         sqlvalues(self.distribution, self.sourcepackagename,
                   self.distribution.all_distro_archive_ids,
                   PackagePublishingPocket.PROPOSED,
                   PackagePublishingPocket.BACKPORTS,
                   PackagePublishingStatus.PUBLISHED,
                   PackagePublishingStatus.OBSOLETE),
         clauseTables=["SourcePackagePublishingHistory", "DistroSeries"],
         orderBy=[
             "status",
             SQLConstant("to_number(DistroSeries.version, '99.99') DESC"),
             "-pocket"
         ])
     return spph
 def probePublishingStatus(self, pubrec_ids, status):
     """Check if all source publishing records match the given status."""
     for pubrec_id in pubrec_ids:
         spph = SourcePackagePublishingHistory.get(pubrec_id)
         self.assertEqual(
             spph.status, status, "ID %s -> %s (expected %s)" % (
             spph.id, spph.status.title, status.title))
Example No. 9
 def probePublishingStatus(self, pubrec_ids, status):
     """Check if all source publishing records match the given status."""
     for pubrec_id in pubrec_ids:
         spph = SourcePackagePublishingHistory.get(pubrec_id)
         self.assertEqual(
             spph.status, status, "ID %s -> %s (expected %s)" %
             (spph.id, spph.status.title, status.title))
 def probeRemoved(self, pubrec_ids):
     """Check if all source publishing records were removed."""
     right_now = datetime.datetime.now(pytz.timezone('UTC'))
     for pubrec_id in pubrec_ids:
         spph = SourcePackagePublishingHistory.get(pubrec_id)
         self.assertTrue(
             spph.dateremoved < right_now,
             "ID %s -> not removed" % (spph.id))
Example No. 11
    def A2_markPocketsWithDeletionsDirty(self):
        """An intermediate step in publishing to detect deleted packages.

        Mark pockets containing deleted packages (status DELETED or
        OBSOLETE), with scheduleddeletiondate NULL and dateremoved NULL,
        as dirty, to ensure that they are processed in death row.
        """
        self.log.debug("* Step A2: Mark pockets with deletions as dirty")

        # Query part that is common to both queries below.
        base_query = """
            archive = %s AND
            status = %s AND
            scheduleddeletiondate IS NULL AND
            dateremoved is NULL
            """ % sqlvalues(self.archive,
                            PackagePublishingStatus.DELETED)

        # We need to get a set of (distroseries, pocket) tuples that have
        # publications that are waiting to be deleted.  Each tuple is
        # added to the dirty_pockets set.

        # Loop for each pocket in each distroseries:
        for distroseries in self.distro.series:
            for pocket in self.archive.getPockets():
                if (self.cannotModifySuite(distroseries, pocket)
                    or not self.isAllowed(distroseries, pocket)):
                    # We don't want to mark release pockets dirty in a
                    # stable distroseries, no matter what earlier bugs
                    # may have dirtied it.
                    continue
                clauses = [base_query]
                clauses.append("pocket = %s" % sqlvalues(pocket))
                clauses.append("distroseries = %s" % sqlvalues(distroseries))

                # Make the source publications query.
                source_query = " AND ".join(clauses)
                sources = SourcePackagePublishingHistory.select(source_query)
                if not sources.is_empty():
                    self.markPocketDirty(distroseries, pocket)
                    # No need to check binaries if the pocket is already
                    # dirtied from a source.
                    continue

                # Make the binary publications query.
                clauses = [base_query]
                clauses.append("pocket = %s" % sqlvalues(pocket))
                clauses.append("DistroArchSeries = DistroArchSeries.id")
                clauses.append("DistroArchSeries.distroseries = %s" %
                    sqlvalues(distroseries))
                binary_query = " AND ".join(clauses)
                binaries = BinaryPackagePublishingHistory.select(binary_query,
                    clauseTables=['DistroArchSeries'])
                if not binaries.is_empty():
                    self.markPocketDirty(distroseries, pocket)
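
A minimal sketch of the markPocketDirty side of this step, assuming it simply records (distroseries, pocket) pairs in a set, as the "dirty_pockets set" comment above describes; the real Publisher may track dirty pockets differently.

    def markPocketDirty(self, distroseries, pocket):
        """Record that this (distroseries, pocket) needs to be processed."""
        # Hypothetical representation: a plain set of tuples, matching the
        # wording of the comment in A2_markPocketsWithDeletionsDirty.
        self.dirty_pockets.add((distroseries.name, pocket))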
Example No. 12
 def _checkPublishing(self, sourcepackagerelease):
     """Query for the publishing entry"""
     ret = SourcePackagePublishingHistory.select("""
         sourcepackagerelease = %s AND
         distroseries = %s AND
         archive = %s AND
         status in %s""" % sqlvalues(
             sourcepackagerelease, self.distroseries,
             self.distroseries.main_archive, active_publishing_status),
         orderBy=["-datecreated"])
     ret = list(ret)
     if ret:
         return ret[0]
     return None
Example No. 13
 def _checkPublishing(self, sourcepackagerelease):
     """Query for the publishing entry"""
     ret = SourcePackagePublishingHistory.select("""
         sourcepackagerelease = %s AND
         distroseries = %s AND
         archive = %s AND
         status in %s""" % sqlvalues(
         sourcepackagerelease, self.distroseries,
         self.distroseries.main_archive, active_publishing_status),
                                                 orderBy=["-datecreated"])
     ret = list(ret)
     if ret:
         return ret[0]
     return None
Example No. 14
    def _getPublishingHistory(self,
                              version=None,
                              include_status=None,
                              exclude_status=None,
                              order_by=None):
        """Build a query and return a list of SourcePackagePublishingHistory.

        This is mainly a helper function for this class so that code is
        not duplicated. include_status and exclude_status must be a sequence.
        """
        clauses = []
        clauses.append("""SourcePackagePublishingHistory.sourcepackagerelease =
                   SourcePackageRelease.id AND
                   SourcePackagePublishingHistory.sourcepackagename = %s AND
                   SourcePackagePublishingHistory.distroseries = %s AND
                   SourcePackagePublishingHistory.archive IN %s
                """ % sqlvalues(self.sourcepackagename, self.distroseries,
                                self.distribution.all_distro_archive_ids))
        if version:
            clauses.append("SourcePackageRelease.version = %s" %
                           sqlvalues(version))

        if include_status:
            if not isinstance(include_status, list):
                include_status = list(include_status)
            clauses.append("SourcePackagePublishingHistory.status IN %s" %
                           sqlvalues(include_status))

        if exclude_status:
            if not isinstance(exclude_status, list):
                exclude_status = list(exclude_status)
            clauses.append("SourcePackagePublishingHistory.status NOT IN %s" %
                           sqlvalues(exclude_status))

        query = " AND ".join(clauses)

        if not order_by:
            order_by = '-datepublished'

        return SourcePackagePublishingHistory.select(
            query,
            orderBy=order_by,
            clauseTables=['SourcePackageRelease'],
            prejoinClauseTables=['SourcePackageRelease'])
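
A hedged sketch of the kind of thin wrappers the docstring implies this helper exists for; the wrapper names and the exact status sets are illustrative assumptions, not the class's real API.

    def getCurrentPublishings(self):
        """Hypothetical wrapper: only active publications, newest first."""
        return self._getPublishingHistory(
            include_status=[PackagePublishingStatus.PUBLISHED])

    def getPublishingHistoryForVersion(self, version):
        """Hypothetical wrapper: every publication of a given version."""
        return self._getPublishingHistory(version=version)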
Example No. 15
    def getLatestPublishingEntry(self, time_interval):
        query = """
            SourcePackagePublishingHistory.pocket = %s
            AND SourcePackagePublishingHistory.component = %s
            AND SourcePackagePublishingHistory.distroseries = %s
            AND SourcePackagePublishingHistory.archive = %s
            AND SourcePackagePublishingHistory.status = %s
            """ % sqlvalues(self.pocket, self.component, self.distroseries,
                            self.distroseries.main_archive,
                            PackagePublishingStatus.PUBLISHED)

        if time_interval is not None:
            start, end = time_interval
            assert end > start
            query = (query +
                     " AND datepublished >= %s AND datepublished < %s" %
                     sqlvalues(start, end))
        return SourcePackagePublishingHistory.selectFirst(
            query, orderBy='-datepublished')
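
A hedged usage sketch showing the shape of the time_interval argument: a (start, end) pair of datetimes with end exclusive, as the datepublished >= start AND datepublished < end clause above implies. The fragment assumes a hypothetical caller method and the datetime/pytz imports already used in this listing; the variable names are illustrative.

        # Most recent PUBLISHED entry whose datepublished falls within one day.
        start = datetime.datetime(2008, 6, 1, tzinfo=pytz.timezone('UTC'))
        end = start + datetime.timedelta(days=1)
        latest = self.getLatestPublishingEntry(time_interval=(start, end))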
Example No. 16
    def _collectCondemned(self):
        """Return the condemned source and binary publications as a tuple.

        Return all the `SourcePackagePublishingHistory` and
        `BinaryPackagePublishingHistory` records that are eligible for
        removal ('condemned') where the source/binary package that they
        refer to is not published somewhere else.

        Both sources and binaries are lists.
        """
        sources = SourcePackagePublishingHistory.select("""
            SourcePackagePublishingHistory.archive = %s AND
            SourcePackagePublishingHistory.scheduleddeletiondate < %s AND
            SourcePackagePublishingHistory.dateremoved IS NULL AND
            NOT EXISTS (
              SELECT 1 FROM sourcepackagepublishinghistory as spph
              WHERE
                  SourcePackagePublishingHistory.sourcepackagerelease =
                      spph.sourcepackagerelease AND
                  spph.archive = %s AND
                  spph.status NOT IN %s)
        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
                        inactive_publishing_status), orderBy="id")
        self.logger.debug("%d Sources" % sources.count())

        binaries = BinaryPackagePublishingHistory.select("""
            BinaryPackagePublishingHistory.archive = %s AND
            BinaryPackagePublishingHistory.scheduleddeletiondate < %s AND
            BinaryPackagePublishingHistory.dateremoved IS NULL AND
            NOT EXISTS (
              SELECT 1 FROM binarypackagepublishinghistory as bpph
              WHERE
                  BinaryPackagePublishingHistory.binarypackagerelease =
                      bpph.binarypackagerelease AND
                  bpph.archive = %s AND
                  bpph.status NOT IN %s)
        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
                        inactive_publishing_status), orderBy="id")
        self.logger.debug("%d Binaries" % binaries.count())

        return (sources, binaries)
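
A hedged sketch of how the condemned lists might then be consumed: iterate both result sets and stamp each record as removed. This is a simplification of what a death-row reaper step would do (removing files from disk is omitted), and the method name is hypothetical.

    def _markRemoved(self, condemned_records):
        """Hypothetical helper: flag condemned publications as removed."""
        for pub_record in condemned_records:
            # UTC_NOW is the same database-side timestamp constant used in
            # the query above.
            pub_record.dateremoved = UTC_NOW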
    def getLatestPublishingEntry(self, time_interval):
        query = """
            SourcePackagePublishingHistory.pocket = %s
            AND SourcePackagePublishingHistory.component = %s
            AND SourcePackagePublishingHistory.distroseries = %s
            AND SourcePackagePublishingHistory.archive = %s
            AND SourcePackagePublishingHistory.status = %s
            """ % sqlvalues(self.pocket, self.component,
                            self.distroseries,
                            self.distroseries.main_archive,
                            PackagePublishingStatus.PUBLISHED)

        if time_interval is not None:
            start, end = time_interval
            assert end > start
            query = (query + " AND datepublished >= %s AND datepublished < %s"
                     % sqlvalues(start, end))
        return SourcePackagePublishingHistory.selectFirst(
            query, orderBy='-datepublished')
Example No. 18
    def _collectCondemned(self):
        """Return the condemned source and binary publications as a tuple.

        Return all the `SourcePackagePublishingHistory` and
        `BinaryPackagePublishingHistory` records that are eligible for
        removal ('condemned') where the source/binary package that they
        refer to is not published somewhere else.

        Both sources and binaries are lists.
        """
        sources = SourcePackagePublishingHistory.select("""
            SourcePackagePublishingHistory.archive = %s AND
            SourcePackagePublishingHistory.scheduleddeletiondate < %s AND
            SourcePackagePublishingHistory.dateremoved IS NULL AND
            NOT EXISTS (
              SELECT 1 FROM sourcepackagepublishinghistory as spph
              WHERE
                  SourcePackagePublishingHistory.sourcepackagerelease =
                      spph.sourcepackagerelease AND
                  spph.archive = %s AND
                  spph.status NOT IN %s)
        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
                        inactive_publishing_status), orderBy="id")
        self.logger.debug("%d Sources" % sources.count())

        binaries = BinaryPackagePublishingHistory.select("""
            BinaryPackagePublishingHistory.archive = %s AND
            BinaryPackagePublishingHistory.scheduleddeletiondate < %s AND
            BinaryPackagePublishingHistory.dateremoved IS NULL AND
            NOT EXISTS (
              SELECT 1 FROM binarypackagepublishinghistory as bpph
              WHERE
                  BinaryPackagePublishingHistory.binarypackagerelease =
                      bpph.binarypackagerelease AND
                  bpph.archive = %s AND
                  bpph.status NOT IN %s)
        """ % sqlvalues(self.archive, UTC_NOW, self.archive,
                        inactive_publishing_status), orderBy="id")
        self.logger.debug("%d Binaries" % binaries.count())

        return (sources, binaries)
 def getPublicationsForDistroseries(self, distroseries=None):
     """Return a tuple of sources, binaries published in distroseries."""
     if distroseries is None:
         distroseries = self.warty
     published_sources = SourcePackagePublishingHistory.select("""
         distroseries = %s AND
         status = %s AND
         archive IN %s
         """ % sqlvalues(distroseries, PackagePublishingStatus.PUBLISHED,
                         self.main_archive_ids))
     published_binaries = BinaryPackagePublishingHistory.select("""
         BinaryPackagePublishingHistory.distroarchseries =
             DistroArchSeries.id AND
         DistroArchSeries.DistroSeries = DistroSeries.id AND
         DistroSeries.id = %s AND
         BinaryPackagePublishingHistory.status = %s AND
         BinaryPackagePublishingHistory.archive IN %s
         """ % sqlvalues(distroseries, PackagePublishingStatus.PUBLISHED,
                         self.main_archive_ids),
         clauseTables=["DistroArchSeries", "DistroSeries"])
     return (published_sources, published_binaries)
Example No. 21
    def createPublishingForDistroSeries(self, sourcepackagerelease,
                                        distroseries):
        """Return a list of `SourcePackagePublishingHistory`.

        The publishing records are created according to the given
        `SourcePackageRelease` and `DistroSeries` for all
        (status, archive, pocket) returned from `sample_publishing_data`.
        """
        sample_pub = []
        for status, archive, pocket in self.sample_publishing_data:
            pub = SourcePackagePublishingHistory(
                sourcepackagerelease=sourcepackagerelease,
                sourcepackagename=sourcepackagerelease.sourcepackagename,
                distroseries=distroseries,
                component=sourcepackagerelease.component,
                section=sourcepackagerelease.section,
                status=status,
                archive=archive,
                pocket=pocket)
            # Flush the object changes into the DB to guarantee a stable
            # database ID order, as expected at the call sites.
            sample_pub.append(pub)
        return sample_pub
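
A hedged sketch of what sample_publishing_data could contain for the loop above to work: (status, archive, pocket) triples. The concrete values and attribute names (self.main_archive, self.ppa) are illustrative assumptions about the test fixture, not its real contents.

        # Hypothetical fixture data consumed by createPublishingForDistroSeries.
        self.sample_publishing_data = [
            (PackagePublishingStatus.PUBLISHED, self.main_archive,
             PackagePublishingPocket.RELEASE),
            (PackagePublishingStatus.PENDING, self.main_archive,
             PackagePublishingPocket.UPDATES),
            (PackagePublishingStatus.DELETED, self.ppa,
             PackagePublishingPocket.RELEASE),
        ]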
 def getVersion(self, version):
     """See `IDistributionSourcePackage`."""
     spph = SourcePackagePublishingHistory.select(
         """
         SourcePackagePublishingHistory.distroseries =
             DistroSeries.id AND
         DistroSeries.distribution = %s AND
         SourcePackagePublishingHistory.archive IN %s AND
         SourcePackagePublishingHistory.sourcepackagerelease =
             SourcePackageRelease.id AND
         SourcePackageRelease.sourcepackagename = %s AND
         SourcePackageRelease.version = %s
         """ % sqlvalues(self.distribution,
                         self.distribution.all_distro_archive_ids,
                         self.sourcepackagename, version),
         orderBy='-datecreated',
         prejoinClauseTables=['SourcePackageRelease'],
         clauseTables=['DistroSeries', 'SourcePackageRelease'])
     if spph.is_empty():
         return None
     return DistributionSourcePackageRelease(
         distribution=self.distribution,
         sourcepackagerelease=spph[0].sourcepackagerelease)
Example No. 24
    def _judgeSuperseded(self, source_records, binary_records):
        """Determine whether the superseded packages supplied should
        be moved to death row or not.

        Currently this is done by assuming that any superseded binary
        package should be removed. In the future this should attempt
        to supersede binaries in build-sized chunks only, bug 55030.

        Superseded source packages are considered removable when they
        have no binaries in this distroseries which are published or
        superseded.

        When a package is considered for death row it is given a
        'scheduled deletion date' of now plus the 'stay of execution'
        period defined in the configuration.
        """
        self.logger.debug("Beginning superseded processing...")

        for pub_record in binary_records:
            binpkg_release = pub_record.binarypackagerelease
            self.logger.debug(
                "%s/%s (%s) has been judged eligible for removal",
                binpkg_release.binarypackagename.name, binpkg_release.version,
                pub_record.distroarchseries.architecturetag)
            self._setScheduledDeletionDate(pub_record)
            # XXX cprov 20070820: 'datemadepending' is useless, since it
            # always equals "scheduleddeletiondate - quarantine".
            pub_record.datemadepending = UTC_NOW

        for pub_record in source_records:
            srcpkg_release = pub_record.sourcepackagerelease
            # Attempt to find all binaries of this
            # SourcePackageRelease which are/have been in this
            # distroseries...
            considered_binaries = BinaryPackagePublishingHistory.select(
                """
            binarypackagepublishinghistory.distroarchseries =
                distroarchseries.id AND
            binarypackagepublishinghistory.scheduleddeletiondate IS NULL AND
            binarypackagepublishinghistory.dateremoved IS NULL AND
            binarypackagepublishinghistory.archive = %s AND
            binarypackagebuild.source_package_release = %s AND
            distroarchseries.distroseries = %s AND
            binarypackagepublishinghistory.binarypackagerelease =
            binarypackagerelease.id AND
            binarypackagerelease.build = binarypackagebuild.id AND
            binarypackagepublishinghistory.pocket = %s
            """ % sqlvalues(self.archive, srcpkg_release,
                            pub_record.distroseries, pub_record.pocket),
                clauseTables=[
                    'DistroArchSeries', 'BinaryPackageRelease',
                    'BinaryPackageBuild'
                ])

            # There is at least one non-removed binary to consider
            if not considered_binaries.is_empty():
                # However we can still remove *this* record if there's
                # at least one other PUBLISHED for the spr. This happens
                # when a package is moved between components.
                published = SourcePackagePublishingHistory.selectBy(
                    distroseries=pub_record.distroseries,
                    pocket=pub_record.pocket,
                    status=PackagePublishingStatus.PUBLISHED,
                    archive=self.archive,
                    sourcepackagereleaseID=srcpkg_release.id)
                # Zero PUBLISHED for this spr, so nothing to take over
                # for us, so leave it for consideration next time.
                if published.is_empty():
                    continue

            # Okay, so there are no unremoved binaries; let's go for it...
            self.logger.debug(
                "%s/%s (%s) source has been judged eligible for removal",
                srcpkg_release.sourcepackagename.name, srcpkg_release.version,
                pub_record.id)
            self._setScheduledDeletionDate(pub_record)
            # XXX cprov 20070820: 'datemadepending' is pointless, since it
            # always equals "scheduleddeletiondate - quarantine".
            pub_record.datemadepending = UTC_NOW
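
A standalone restatement of the decision rule above as a tiny pure function, which may help when reading the nested conditions: a superseded source is removable if it has no live binaries left, or if another PUBLISHED publication of the same release exists to take over. This is a paraphrase for clarity, not the real implementation.

def source_is_removable(has_unremoved_binaries, has_other_published_source):
    """Simplified form of the source branch of _judgeSuperseded."""
    if has_unremoved_binaries and not has_other_published_source:
        # Leave it for consideration on a later run.
        return False
    return True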
    def testObsoleteDistroseriesWorks(self):
        """Make sure the required publications are obsoleted."""
        obsoleter = self.getObsoleter()
        self.warty.status = SeriesStatus.OBSOLETE

        # Get all the published sources in warty.
        published_sources, published_binaries = (
            self.getPublicationsForDistroseries())

        # Assert that none of them is obsolete yet:
        self.assertFalse(published_sources.is_empty())
        self.assertFalse(published_binaries.is_empty())
        for source in published_sources:
            self.assertTrue(
                source.status == PackagePublishingStatus.PUBLISHED)
            self.assertTrue(source.scheduleddeletiondate is None)
        for binary in published_binaries:
            self.assertTrue(
                binary.status == PackagePublishingStatus.PUBLISHED)
            self.assertTrue(binary.scheduleddeletiondate is None)

        # Keep their DB IDs for later.
        source_ids = [source.id for source in published_sources]
        binary_ids = [binary.id for binary in published_binaries]

        # Make them obsolete.
        obsoleter.mainTask()
        self.layer.txn.commit()

        # Now see if the modified publications have been correctly obsoleted.
        # We need to re-fetch the published_sources and published_binaries
        # because the existing objects are not valid across the
        # transaction boundary.
        for id in source_ids:
            source = SourcePackagePublishingHistory.get(id)
            self.assertTrue(
                source.status == PackagePublishingStatus.OBSOLETE)
            self.assertTrue(source.scheduleddeletiondate is not None)
        for id in binary_ids:
            binary = BinaryPackagePublishingHistory.get(id)
            self.assertTrue(
                binary.status == PackagePublishingStatus.OBSOLETE)
            self.assertTrue(binary.scheduleddeletiondate is not None)

        # Make sure nothing else was obsoleted.  Subtract the set of
        # known OBSOLETE IDs from the set of all the IDs and assert that
        # the remainder are not OBSOLETE.
        all_sources = SourcePackagePublishingHistory.select(True)
        all_binaries = BinaryPackagePublishingHistory.select(True)
        all_source_ids = [source.id for source in all_sources]
        all_binary_ids = [binary.id for binary in all_binaries]

        remaining_source_ids = set(all_source_ids) - set(source_ids)
        remaining_binary_ids = set(all_binary_ids) - set(binary_ids)

        for id in remaining_source_ids:
            source = SourcePackagePublishingHistory.get(id)
            self.assertTrue(
                source.status != PackagePublishingStatus.OBSOLETE)
        for id in remaining_binary_ids:
            binary = BinaryPackagePublishingHistory.get(id)
            self.assertTrue(
                binary.status != PackagePublishingStatus.OBSOLETE)
Example No. 26
    def _judgeSuperseded(self, source_records, binary_records):
        """Determine whether the superseded packages supplied should
        be moved to death row or not.

        Currently this is done by assuming that any superseded binary
        package should be removed. In the future this should attempt
        to supersede binaries in build-sized chunks only, bug 55030.

        Superseded source packages are considered removable when they
        have no binaries in this distroseries which are published or
        superseded.

        When a package is considered for death row it is given a
        'scheduled deletion date' of now plus the 'stay of execution'
        period defined in the configuration.
        """
        self.logger.debug("Beginning superseded processing...")

        for pub_record in binary_records:
            binpkg_release = pub_record.binarypackagerelease
            self.logger.debug(
                "%s/%s (%s) has been judged eligible for removal",
                binpkg_release.binarypackagename.name, binpkg_release.version,
                pub_record.distroarchseries.architecturetag)
            self._setScheduledDeletionDate(pub_record)
            # XXX cprov 20070820: 'datemadepending' is useless, since it
            # always equals "scheduleddeletiondate - quarantine".
            pub_record.datemadepending = UTC_NOW

        for pub_record in source_records:
            srcpkg_release = pub_record.sourcepackagerelease
            # Attempt to find all binaries of this
            # SourcePackageRelease which are/have been in this
            # distroseries...
            considered_binaries = BinaryPackagePublishingHistory.select("""
            binarypackagepublishinghistory.distroarchseries =
                distroarchseries.id AND
            binarypackagepublishinghistory.scheduleddeletiondate IS NULL AND
            binarypackagepublishinghistory.dateremoved IS NULL AND
            binarypackagepublishinghistory.archive = %s AND
            binarypackagebuild.source_package_release = %s AND
            distroarchseries.distroseries = %s AND
            binarypackagepublishinghistory.binarypackagerelease =
            binarypackagerelease.id AND
            binarypackagerelease.build = binarypackagebuild.id AND
            binarypackagepublishinghistory.pocket = %s
            """ % sqlvalues(self.archive, srcpkg_release,
                            pub_record.distroseries, pub_record.pocket),
            clauseTables=['DistroArchSeries', 'BinaryPackageRelease',
                          'BinaryPackageBuild'])

            # There is at least one non-removed binary to consider
            if not considered_binaries.is_empty():
                # However we can still remove *this* record if there's
                # at least one other PUBLISHED for the spr. This happens
                # when a package is moved between components.
                published = SourcePackagePublishingHistory.selectBy(
                    distroseries=pub_record.distroseries,
                    pocket=pub_record.pocket,
                    status=PackagePublishingStatus.PUBLISHED,
                    archive=self.archive,
                    sourcepackagereleaseID=srcpkg_release.id)
                # Zero PUBLISHED for this spr, so nothing to take over
                # for us, so leave it for consideration next time.
                if published.is_empty():
                    continue

            # Okay, so there are no unremoved binaries; let's go for it...
            self.logger.debug(
                "%s/%s (%s) source has been judged eligible for removal",
                srcpkg_release.sourcepackagename.name, srcpkg_release.version,
                pub_record.id)
            self._setScheduledDeletionDate(pub_record)
            # XXX cprov 20070820: 'datemadepending' is pointless, since it
            # always equals "scheduleddeletiondate - quarantine".
            pub_record.datemadepending = UTC_NOW
Example No. 27
 def probeNotRemoved(self, pubrec_ids):
     """Check if all source publishing records were not removed."""
     for pubrec_id in pubrec_ids:
         spph = SourcePackagePublishingHistory.get(pubrec_id)
         self.assertTrue(spph.dateremoved is None,
                         "ID %s -> removed" % (spph.id))