def setUp(self):
     super(TestBuildPrivacy, self).setUp()
     # Add everything we need to create builds.
     self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
     processor = self.factory.makeProcessor(supports_virtualized=True)
     distroseries = self.factory.makeDistroSeries()
     das = self.factory.makeDistroArchSeries(
         distroseries=distroseries, processor=processor)
     with person_logged_in(self.admin):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         distroseries.nominatedarchindep = das
         publisher.addFakeChroots(distroseries=distroseries)
         self.factory.makeBuilder(processors=[processor])
     self.public_archive = self.factory.makeArchive()
     self.private_archive = self.factory.makeArchive(private=True)
     # Create one public and one private build.
     public_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.public_archive)
     [public_build] = public_spph.createMissingBuilds()
     private_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.private_archive)
     with person_logged_in(self.admin):
         [private_build] = private_spph.createMissingBuilds()
     self.expected_title = '%s build of %s %s in %s %s RELEASE' % (
         das.architecturetag, private_spph.source_package_name,
         private_spph.source_package_version,
         distroseries.distribution.name, distroseries.name)
 def test_copy_archive_without_leak(self):
     """Builds in a .COPY archive must not leak into getBuildRecords().

     If source publications are copied to a .COPY archive, their builds
     don't "leak" into SourcePackage.getBuildRecords().
     """
     admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
     # Set up a distroseries and related bits, so we can create builds.
     source_name = self.factory.getUniqueString()
     spn = self.factory.makeSourcePackageName(name=source_name)
     processor = self.factory.makeProcessor()
     distroseries = self.factory.makeDistroSeries()
     das = self.factory.makeDistroArchSeries(
         distroseries=distroseries, processor=processor,
         supports_virtualized=True)
     with person_logged_in(admin):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         publisher.addFakeChroots(distroseries=distroseries)
         distroseries.nominatedarchindep = das
         self.factory.makeBuilder(processor=processor)
     spph = self.factory.makeSourcePackagePublishingHistory(
         sourcepackagename=spn, distroseries=distroseries)
     spph.createMissingBuilds()
     # Create a copy archive.
     copy = self.factory.makeArchive(
         purpose=ArchivePurpose.COPY,
         distribution=distroseries.distribution)
     # And copy the publication into it.
     copy_spph = spph.copyTo(
         distroseries, PackagePublishingPocket.RELEASE, copy)
     [copy_build] = copy_spph.createMissingBuilds()
     builds = copy.getBuildRecords()
     # assertEqual, not the deprecated assertEquals alias, for
     # consistency with the rest of the file.
     self.assertEqual([copy_build], list(builds))
     source = SourcePackage(spn, spph.distroseries)
     # SourcePackage.getBuildRecords() doesn't have two build records.
     builds = source.getBuildRecords().count()
     self.assertEqual(1, builds)
class TestDistributionHasBuildRecords(TestCaseWithFactory):
    """Populate a distroseries with builds"""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create two arch series and builders, then ten test builds."""
        super(TestDistributionHasBuildRecords, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create the machinery we need to create builds, such as
        # DistroArchSeries and builders.
        self.processor_one = self.factory.makeProcessor()
        self.processor_two = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries,
            processor=self.processor_one,
            supports_virtualized=True)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries,
            processor=self.processor_two,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.arch_ids = [arch.id for arch in self.distroseries.architectures]
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder_one = self.factory.makeBuilder(
                processor=self.processor_one)
            self.builder_two = self.factory.makeBuilder(
                processor=self.processor_two)
        self.builds = []
        self.createBuilds()

    def createBuilds(self):
        """Create five sources; each gets a build per architecture."""
        for i in range(5):
            # Create some test builds.
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries,
                architecturehintlist='any')
            builds = spph.createMissingBuilds()
            for b in builds:
                b.updateStatus(BuildStatus.BUILDING)
                # The last iteration's builds fail; the rest succeed.
                if i == 4:
                    b.updateStatus(BuildStatus.FAILEDTOBUILD)
                else:
                    b.updateStatus(BuildStatus.FULLYBUILT)
                b.buildqueue_record.destroySelf()
            self.builds += builds

    def test_get_build_records(self):
        # A Distribution also implements IHasBuildRecords.
        builds = self.distribution.getBuildRecords().count()
        # assertEqual replaces the deprecated assertEquals alias used
        # inconsistently across this file.
        self.assertEqual(10, builds)
# Beispiel #4 (scraped-example separator; original score: 0)
    def test_dispatchBuildToSlave(self):
        """dispatchBuildToSlave logs the dispatch and drives the slave.

        The slave must receive an ``ensurepresent`` (chroot upload)
        followed by a ``build`` call carrying the build cookie, the build
        type and the extra build arguments.
        """
        job = self.makeJob()
        test_publisher = SoyuzTestPublisher()
        test_publisher.addFakeChroots(job.build.distroseries)
        slave = OkSlave()
        builder = MockBuilder("bob-de-bouwer")
        builder.processor = getUtility(IProcessorSet).getByName('386')
        job.setBuilder(builder, slave)
        logger = BufferLogger()
        d = defer.maybeDeferred(job.dispatchBuildToSlave, "someid", logger)

        def check_dispatch(ignored):
            self.assertThat(
                logger.getLogBuffer(),
                StartsWith(
                    dedent("""\
                  INFO Sending chroot file for recipe build to bob-de-bouwer
                  INFO Initiating build 1-someid on http://fake:0000
                  """)))
            # Use assertEqual consistently rather than mixing in the
            # deprecated assertEquals alias.
            self.assertEqual(["ensurepresent", "build"],
                             [call[0] for call in slave.call_log])
            build_args = slave.call_log[1][1:]
            self.assertEqual(build_args[0], job.getBuildCookie())
            self.assertEqual(build_args[1], "sourcepackagerecipe")
            self.assertEqual(build_args[3], [])
            distroarchseries = job.build.distroseries.architectures[0]
            self.assertEqual(build_args[4],
                             job._extraBuildArgs(distroarchseries))

        return d.addCallback(check_dispatch)
    def test_dispatchBuildToSlave(self):
        """dispatchBuildToSlave logs the dispatch and drives the slave.

        The slave must receive an ``ensurepresent`` (chroot upload)
        followed by a ``build`` call carrying the build cookie, the build
        type and the extra build arguments.
        """
        job = self.makeJob()
        test_publisher = SoyuzTestPublisher()
        test_publisher.addFakeChroots(job.build.distroseries)
        slave = OkSlave()
        builder = MockBuilder("bob-de-bouwer")
        builder.processor = getUtility(IProcessorSet).getByName("386")
        job.setBuilder(builder, slave)
        logger = BufferLogger()
        d = defer.maybeDeferred(job.dispatchBuildToSlave, "someid", logger)

        def check_dispatch(ignored):
            self.assertThat(
                logger.getLogBuffer(),
                StartsWith(
                    dedent(
                        """\
                  INFO Sending chroot file for recipe build to bob-de-bouwer
                  INFO Initiating build 1-someid on http://fake:0000
                  """
                    )
                ),
            )
            # Use assertEqual consistently rather than mixing in the
            # deprecated assertEquals alias.
            self.assertEqual(["ensurepresent", "build"], [call[0] for call in slave.call_log])
            build_args = slave.call_log[1][1:]
            self.assertEqual(build_args[0], job.getBuildCookie())
            self.assertEqual(build_args[1], "sourcepackagerecipe")
            self.assertEqual(build_args[3], [])
            distroarchseries = job.build.distroseries.architectures[0]
            self.assertEqual(build_args[4], job._extraBuildArgs(distroarchseries))

        return d.addCallback(check_dispatch)
# Beispiel #6 (scraped-example separator; original score: 0)
class TestBuildStartEstimation(TestCaseWithFactory):
    """Exercise estimated dispatch times for queued builds."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildStartEstimation, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            # Flag every sampledata builder as OK so jobs can dispatch.
            for builder in getUtility(IBuilderSet):
                builder.builderok = True
        self.distroseries = self.factory.makeDistroSeries()
        self.bob = getUtility(IBuilderSet).getByName(BOB_THE_BUILDER_NAME)
        arch_series = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries,
            processor=self.bob.processor,
            architecturetag='i386')
        with person_logged_in(self.admin):
            self.distroseries.nominatedarchindep = arch_series
        self.publisher.addFakeChroots(distroseries=self.distroseries)

    def job_start_estimate(self, build):
        """Shortcut: estimated start time of the build's queue job."""
        return build.buildqueue_record.getEstimatedJobStartTime()

    def test_estimation(self):
        # A freshly created build is estimated to start in the future.
        source_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        builds = source_pub.createMissingBuilds()
        build = builds[0]
        before = datetime.now(pytz.UTC)
        estimate = self.job_start_estimate(build)
        self.assertTrue(estimate > before)

    def test_disabled_archives(self):
        # Two queued builds with different scores: the high-score one
        # dispatches first unless its archive gets disabled.
        first_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build1] = first_pub.createMissingBuilds()
        build1.buildqueue_record.lastscore = 1000
        # No user-serviceable parts inside
        queue_record = removeSecurityProxy(build1.buildqueue_record)
        queue_record.estimated_duration = timedelta(minutes=10)
        second_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build2] = second_pub.createMissingBuilds()
        build2.buildqueue_record.lastscore = 100
        now = datetime.now(pytz.UTC)
        # build1 wins on priority, so it is estimated to start right away.
        self.assertEqual(5, (self.job_start_estimate(build1) - now).seconds)
        # build2 queues behind build1's ten-minute estimated duration.
        self.assertEqual(600, (self.job_start_estimate(build2) - now).seconds)
        # Disabling build1's archive promotes build2 to the front.
        with person_logged_in(self.admin):
            build1.archive.disable()
        self.assertEqual(5, (self.job_start_estimate(build2) - now).seconds)
 def setUp(self):
     """Create one public and one private build plus their machinery.

     Also records the expected build title for the private build in
     ``self.expected_title``.
     """
     super(TestBuildPrivacy, self).setUp()
     # Add everything we need to create builds.
     self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
     processor = self.factory.makeProcessor()
     distroseries = self.factory.makeDistroSeries()
     das = self.factory.makeDistroArchSeries(
         distroseries=distroseries, processor=processor,
         supports_virtualized=True)
     # Setting up a publisher, the nominated arch-indep series and a
     # builder requires admin privileges.
     with person_logged_in(self.admin):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         distroseries.nominatedarchindep = das
         publisher.addFakeChroots(distroseries=distroseries)
         self.factory.makeBuilder(processor=processor)
     self.public_archive = self.factory.makeArchive()
     self.private_archive = self.factory.makeArchive(private=True)
     # Create one public and one private build.
     public_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.public_archive)
     [public_build] = public_spph.createMissingBuilds()
     private_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.private_archive)
     # Creating builds in a private archive needs admin rights.
     with person_logged_in(self.admin):
         [private_build] = private_spph.createMissingBuilds()
     self.expected_title = '%s build of %s %s in %s %s RELEASE' % (
         das.architecturetag, private_spph.source_package_name,
         private_spph.source_package_version,
         distroseries.distribution.name, distroseries.name)
# Beispiel #8 (scraped-example separator; original score: 0)
    def _prepareMergeCopy(self):
        """Add a fresher and a new package to ubuntu/hoary.

        This is used to test merge copy functionality."""
        publisher = SoyuzTestPublisher()
        ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
        hoary = ubuntu.getSeries('hoary')
        publisher.addFakeChroots(hoary)
        publisher.setUpDefaultDistroSeries(hoary)
        # One package that is entirely new to the series...
        publisher.getPubSource(
            sourcename="new-in-second-round", version="1.0",
            distroseries=hoary, archive=ubuntu.main_archive)
        # ...and a newer version of an existing package.
        publisher.getPubSource(
            sourcename="alsa-utils", version="2.0",
            distroseries=hoary, archive=ubuntu.main_archive)
        # Supersede every alsa-utils publication except the fresh 2.0.
        pending = ubuntu.main_archive.getPublishedSources(
            distroseries=hoary, status=self.pending_statuses,
            name=u'alsa-utils')
        for publication in pending:
            if publication.source_package_version != '2.0':
                publication.supersede()
        LaunchpadZopelessLayer.txn.commit()
 def test_copy_archive_without_leak(self):
     """Builds in a .COPY archive must not leak into getBuildRecords().

     If source publications are copied to a .COPY archive, their builds
     don't "leak" into SourcePackage.getBuildRecords().
     """
     admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
     # Set up a distroseries and related bits, so we can create builds.
     source_name = self.factory.getUniqueString()
     spn = self.factory.makeSourcePackageName(name=source_name)
     processor = self.factory.makeProcessor()
     distroseries = self.factory.makeDistroSeries()
     das = self.factory.makeDistroArchSeries(distroseries=distroseries,
                                             processor=processor,
                                             supports_virtualized=True)
     with person_logged_in(admin):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         publisher.addFakeChroots(distroseries=distroseries)
         distroseries.nominatedarchindep = das
         self.factory.makeBuilder(processor=processor)
     spph = self.factory.makeSourcePackagePublishingHistory(
         sourcepackagename=spn, distroseries=distroseries)
     spph.createMissingBuilds()
     # Create a copy archive.
     copy = self.factory.makeArchive(purpose=ArchivePurpose.COPY,
                                     distribution=distroseries.distribution)
     # And copy the publication into it.
     copy_spph = spph.copyTo(distroseries, PackagePublishingPocket.RELEASE,
                             copy)
     [copy_build] = copy_spph.createMissingBuilds()
     builds = copy.getBuildRecords()
     # assertEqual, not the deprecated assertEquals alias, for
     # consistency with the rest of the file.
     self.assertEqual([copy_build], list(builds))
     source = SourcePackage(spn, spph.distroseries)
     # SourcePackage.getBuildRecords() doesn't have two build records.
     builds = source.getBuildRecords().count()
     self.assertEqual(1, builds)
class TestBuildStartEstimation(TestCaseWithFactory):
    """Exercise estimated dispatch times for queued builds."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildStartEstimation, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            # Enable all sampledata builders so jobs can be dispatched.
            for buildd in getUtility(IBuilderSet):
                buildd.builderok = True
        self.distroseries = self.factory.makeDistroSeries()
        self.bob = getUtility(IBuilderSet).getByName(BOB_THE_BUILDER_NAME)
        das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.bob.processor,
            architecturetag='i386', supports_virtualized=True)
        with person_logged_in(self.admin):
            self.distroseries.nominatedarchindep = das
        self.publisher.addFakeChroots(distroseries=self.distroseries)

    def job_start_estimate(self, build):
        """Return the estimated start time of the build's queue job."""
        return build.buildqueue_record.getEstimatedJobStartTime()

    def test_estimation(self):
        # A freshly created build gets an estimate in the future.
        pkg = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        build = pkg.createMissingBuilds()[0]
        now = datetime.now(pytz.UTC)
        estimate = self.job_start_estimate(build)
        self.assertTrue(estimate > now)

    def test_disabled_archives(self):
        pkg1 = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build1] = pkg1.createMissingBuilds()
        build1.buildqueue_record.lastscore = 1000
        # No user-serviceable parts inside
        removeSecurityProxy(build1.buildqueue_record).estimated_duration = (
            timedelta(minutes=10))
        pkg2 = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build2] = pkg2.createMissingBuilds()
        build2.buildqueue_record.lastscore = 100
        now = datetime.now(pytz.UTC)
        # Since build1 is higher priority, its estimated dispatch time is now.
        # assertEqual replaces the deprecated assertEquals alias.
        estimate = self.job_start_estimate(build1)
        self.assertEqual(5, (estimate - now).seconds)
        # And build2 is next, so must take build1's duration into account
        estimate = self.job_start_estimate(build2)
        self.assertEqual(600, (estimate - now).seconds)
        # If we disable build1's archive, build2 is next
        with person_logged_in(self.admin):
            build1.archive.disable()
        estimate = self.job_start_estimate(build2)
        self.assertEqual(5, (estimate - now).seconds)
class TestDistributionHasBuildRecords(TestCaseWithFactory):
    """Populate a distroseries with builds"""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create two arch series and builders, then ten test builds."""
        super(TestDistributionHasBuildRecords, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create the machinery we need to create builds, such as
        # DistroArchSeries and builders.
        self.processor_one = self.factory.makeProcessor()
        self.processor_two = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_one,
            supports_virtualized=True)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_two,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.arch_ids = [arch.id for arch in self.distroseries.architectures]
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder_one = self.factory.makeBuilder(
                processor=self.processor_one)
            self.builder_two = self.factory.makeBuilder(
                processor=self.processor_two)
        self.builds = []
        self.createBuilds()

    def createBuilds(self):
        """Create five sources; each gets a build per architecture."""
        for i in range(5):
            # Create some test builds.
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries, architecturehintlist='any')
            builds = spph.createMissingBuilds()
            for b in builds:
                b.updateStatus(BuildStatus.BUILDING)
                # The last iteration's builds fail; the rest succeed.
                if i == 4:
                    b.updateStatus(BuildStatus.FAILEDTOBUILD)
                else:
                    b.updateStatus(BuildStatus.FULLYBUILT)
                b.buildqueue_record.destroySelf()
            self.builds += builds

    def test_get_build_records(self):
        # A Distribution also implements IHasBuildRecords.
        builds = self.distribution.getBuildRecords().count()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(10, builds)
 def test_composeBuildRequest(self):
     """composeBuildRequest returns the full build-request tuple.

     NOTE(review): this test body uses bare ``yield``, so it is a
     generator — presumably decorated with ``defer.inlineCallbacks``
     (the decorator is not visible in this chunk; confirm upstream).
     """
     job = self.makeJob(with_builder=True)
     test_publisher = SoyuzTestPublisher()
     test_publisher.addFakeChroots(job.build.distroseries)
     das = job.build.distroseries.nominatedarchindep
     # Wait for the composed request and the extra args it should embed.
     build_request = yield job.composeBuildRequest(None)
     extra_args = yield job.extraBuildArgs()
     self.assertEqual(
         ('sourcepackagerecipe', das, job.build.pocket, {}, extra_args),
         build_request)
    def _prepareMergeCopy(self):
        """Add a fresher and a new package to ubuntu/hoary.

        This is used to test merge copy functionality."""
        # Publish straight into ubuntu/hoary's main archive.
        test_publisher = SoyuzTestPublisher()
        ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
        hoary = ubuntu.getSeries('hoary')
        test_publisher.addFakeChroots(hoary)
        test_publisher.setUpDefaultDistroSeries(hoary)
        # A package that is entirely new to the series...
        test_publisher.getPubSource(
            sourcename="new-in-second-round", version="1.0",
            distroseries=hoary, archive=ubuntu.main_archive)
        # ...and a fresher version of an existing package.
        test_publisher.getPubSource(
            sourcename="alsa-utils", version="2.0", distroseries=hoary,
            archive=ubuntu.main_archive)
        # Supersede every alsa-utils publication except the fresh 2.0.
        sources = ubuntu.main_archive.getPublishedSources(
            distroseries=hoary, status=self.pending_statuses,
            name=u'alsa-utils')
        for src in sources:
            if src.source_package_version != '2.0':
                src.supersede()
        # Make the publications visible to other transactions.
        LaunchpadZopelessLayer.txn.commit()
# Beispiel #14 (scraped-example separator; original score: 0)
class TestBuild(TestCaseWithFactory):
    """Tests for binary package builds: titles, links, retries, logs."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create the distroseries, arch series, builder and publisher
        shared by all the tests in this class."""
        super(TestBuild, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor = self.factory.makeProcessor(supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor)
        # Publisher setup and builder creation require admin privileges.
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(
                processors=[self.processor])
        # Reference point for time-based assertions.
        self.now = datetime.now(pytz.UTC)

    def test_title(self):
        # The title of a build spells out the source version and the
        # series/architecture/pocket it targets.
        source_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = source_pub.createMissingBuilds()
        series = self.distroseries
        expected = '%s build of %s %s in %s %s RELEASE' % (
            self.das.architecturetag,
            source_pub.source_package_name,
            source_pub.source_package_version,
            series.distribution.name,
            series.name)
        self.assertEqual(expected, build.title)

    def test_linking(self):
        # A build links back to the archive, distribution, distroseries,
        # distroarchseries and pocket of its context, and to the source
        # release that generated it.
        pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = pub.createMissingBuilds()
        series = self.distroseries
        self.assertEqual(series.main_archive, build.archive)
        self.assertEqual(series.distribution, build.distribution)
        self.assertEqual(series, build.distro_series)
        self.assertEqual(self.das, build.distro_arch_series)
        self.assertEqual(PackagePublishingPocket.RELEASE, build.pocket)
        self.assertEqual(self.das.architecturetag, build.arch_tag)
        self.assertTrue(build.virtualized)
        expected_release_title = '%s - %s' % (
            pub.source_package_name, pub.source_package_version)
        self.assertEqual(
            expected_release_title, build.source_package_release.title)

    def test_processed_builds(self):
        # Builds which were already processed also offer additional
        # information about its process such as the time it was started and
        # finished and its 'log' and 'upload_changesfile' as librarian files.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn,
            version=version,
            distroseries=self.distroseries,
            status=PackagePublishingStatus.PUBLISHED)
        # Publishing the binaries (as admin) runs the build through the
        # test builder, producing a processed build to inspect.
        with person_logged_in(self.admin):
            binary = self.publisher.getPubBinaries(
                binaryname=spn,
                distroseries=self.distroseries,
                pub_source=spph,
                version=version,
                builder=self.builder)
        build = binary[0].binarypackagerelease.build
        self.assertTrue(build.was_built)
        self.assertEqual(PackageUploadStatus.DONE, build.package_upload.status)
        # Start/finish times come from the test publisher's fixed clock.
        self.assertEqual(datetime(2008, 1, 1, 0, 0, 0, tzinfo=pytz.UTC),
                         build.date_started)
        self.assertEqual(datetime(2008, 1, 1, 0, 5, 0, tzinfo=pytz.UTC),
                         build.date_finished)
        self.assertEqual(timedelta(minutes=5), build.duration)
        # The build log and changes file live in the librarian under
        # predictable names and URLs.
        expected_buildlog = 'buildlog_%s-%s-%s.%s_%s_FULLYBUILT.txt.gz' % (
            self.distroseries.distribution.name, self.distroseries.name,
            self.das.architecturetag, spn, version)
        self.assertEqual(expected_buildlog, build.log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' %
            (self.distroseries.distribution.name, spn, version, build.id))
        expected_buildlog_url = '%s/%s' % (url_start, expected_buildlog)
        self.assertEqual(expected_buildlog_url, build.log_url)
        expected_changesfile = '%s_%s_%s.changes' % (spn, version,
                                                     self.das.architecturetag)
        self.assertEqual(expected_changesfile,
                         build.upload_changesfile.filename)
        expected_changesfile_url = '%s/%s' % (url_start, expected_changesfile)
        self.assertEqual(expected_changesfile_url, build.changesfile_url)
        # Since this build was successful, it can not be retried
        self.assertFalse(build.can_be_retried)

    def test_current_component(self):
        # 'current_component' reflects the component of the source's
        # current publication, found by scanning the publishing records.
        pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = pub.createMissingBuilds()
        self.assertEqual('main', build.current_component.name)
        # It may differ from the component recorded on the release itself.
        self.assertEqual('main', build.source_package_release.component.name)
        # With no uploads for the package, package_upload is None.
        self.assertIsNone(build.package_upload)

    def test_current_component_when_unpublished(self):
        # Production has buggy builds that lack a source publication;
        # current_component returns None for those instead of crashing.
        pub = self.publisher.getPubSource()
        unrelated_das = self.factory.makeDistroArchSeries()
        build = getUtility(IBinaryPackageBuildSet).new(
            pub.sourcepackagerelease, pub.archive, unrelated_das,
            PackagePublishingPocket.RELEASE)
        self.assertIs(None, build.current_component)

    def test_retry_for_released_series(self):
        # Once a distroseries has been released, its builds can no
        # longer be retried.
        series = self.factory.makeDistroSeries()
        arch_series = self.factory.makeDistroArchSeries(
            distroseries=series, processor=self.processor)
        with person_logged_in(self.admin):
            series.nominatedarchindep = arch_series
            series.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=series)
        pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=series)
        [build] = pub.createMissingBuilds()
        self.assertFalse(build.can_be_retried)

    def test_partner_retry_for_released_series(self):
        # Builds for PARTNER can be retried -- even if the distroseries is
        # released.
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(distroseries=distroseries,
                                                processor=self.processor)
        # A PARTNER archive in the same distribution as the series.
        archive = self.factory.makeArchive(
            purpose=ArchivePurpose.PARTNER,
            distribution=distroseries.distribution)
        # Mark the series as released (OBSOLETE) before publishing.
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries,
            archive=archive)
        [build] = spph.createMissingBuilds()
        # A failed PARTNER build stays retryable despite the release.
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry(self):
        # A failed build may be retried.
        version = "%s.1" % self.factory.getUniqueInteger()
        pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(), version=version,
            distroseries=self.distroseries)
        [build] = pub.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry_cancelled(self):
        # A cancelled build may be retried.
        publication = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [cancelled_build] = publication.createMissingBuilds()
        cancelled_build.updateStatus(BuildStatus.CANCELLED)
        self.assertTrue(cancelled_build.can_be_retried)

    def test_retry_superseded(self):
        # A superseded build may be retried.
        publication = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [superseded_build] = publication.createMissingBuilds()
        superseded_build.updateStatus(BuildStatus.SUPERSEDED)
        self.assertTrue(superseded_build.can_be_retried)

    def test_uploadlog(self):
        # An upload log can be attached to a build; its filename and URL
        # are derived from the build's id and publication details.
        pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = pub.createMissingBuilds()
        # No log is attached initially.
        self.assertIsNone(build.upload_log)
        self.assertIsNone(build.upload_log_url)
        build.storeUploadLog('sample upload log')
        expected_filename = 'upload_%s_log.txt' % build.id
        self.assertEqual(expected_filename, build.upload_log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' %
            (self.distroseries.distribution.name, pub.source_package_name,
             pub.source_package_version, build.id))
        self.assertEqual(
            '%s/%s' % (url_start, expected_filename), build.upload_log_url)

    def test_retry_resets_state(self):
        # Retrying a build clears its status, logs and failure count,
        # but leaves the first dispatch time untouched.
        failed_build = self.factory.makeBinaryPackageBuild()
        failed_build.updateStatus(
            BuildStatus.BUILDING, date_started=self.now)
        failed_build.updateStatus(BuildStatus.FAILEDTOBUILD)
        failed_build.gotFailure()
        with person_logged_in(self.admin):
            failed_build.retry()
        self.assertEqual(BuildStatus.NEEDSBUILD, failed_build.status)
        self.assertEqual(self.now, failed_build.date_first_dispatched)
        self.assertIsNone(failed_build.log)
        self.assertIsNone(failed_build.upload_log)
        self.assertEqual(0, failed_build.failure_count)

    def test_retry_resets_virtualized(self):
        # Retrying a build recomputes its virtualization flag.
        nonvirt_archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution, virtualized=False)
        build = self.factory.makeBinaryPackageBuild(
            distroarchseries=self.das, archive=nonvirt_archive,
            processor=self.processor)
        self.assertFalse(build.virtualized)
        build.updateStatus(BuildStatus.BUILDING)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        # Once the processor no longer supports non-virtualized builds,
        # a retry must flip the build to virtualized.
        self.processor.supports_nonvirtualized = False
        with person_logged_in(self.admin):
            build.retry()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertTrue(build.virtualized)

    def test_create_bpr(self):
        # A binary package release can be created from a build.
        name = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        bpn = self.factory.makeBinaryPackageName(name=name)
        spph = self.publisher.getPubSource(
            sourcename=name, version=version,
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        release = spph.sourcepackagerelease
        binary = build.createBinaryPackageRelease(
            binarypackagename=bpn,
            version=version,
            summary='',
            description='',
            binpackageformat=BinaryPackageFormat.DEB,
            component=release.component.id,
            section=release.section.id,
            priority=PackagePublishingPriority.STANDARD,
            installedsize=0,
            architecturespecific=False)
        self.assertEqual(1, build.binarypackages.count())
        self.assertEqual([binary], list(build.binarypackages))

    def test_multiple_create_bpr(self):
        # Several binary package releases can hang off a single build,
        # and .binarypackages returns them sorted by name.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version,
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        release = spph.sourcepackagerelease
        expected_names = []
        for i in range(15):
            name = '%s-%s' % (spn, i)
            expected_names.append(name)
            build.createBinaryPackageRelease(
                binarypackagename=self.factory.makeBinaryPackageName(name),
                version=str(i),
                summary='',
                description='',
                binpackageformat=BinaryPackageFormat.DEB,
                component=release.component.id,
                section=release.section.id,
                priority=PackagePublishingPriority.STANDARD,
                installedsize=0,
                architecturespecific=False)
        self.assertEqual(15, build.binarypackages.count())
        self.assertEqual(
            sorted(expected_names),
            [bpr.name for bpr in build.binarypackages])

    def test_cannot_rescore_non_needsbuilds_builds(self):
        # A build that is no longer in NEEDSBUILD cannot be rescored,
        # even by an admin.
        with person_logged_in(self.admin):
            [bpph] = self.publisher.getPubBinaries(
                binaryname=self.factory.getUniqueString(),
                version="%s.1" % self.factory.getUniqueInteger(),
                distroseries=self.distroseries)
            completed_build = bpph.binarypackagerelease.build
            self.assertRaises(
                CannotBeRescored, completed_build.rescore, 20)

    def test_rescore_builds(self):
        # A user with build-admin privileges may rescore a pending build.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [pending_build] = spph.createMissingBuilds()
        self.assertEqual(BuildStatus.NEEDSBUILD, pending_build.status)
        self.assertEqual(2505, pending_build.buildqueue_record.lastscore)
        with person_logged_in(self.admin):
            pending_build.rescore(5000)
            transaction.commit()
        self.assertEqual(5000, pending_build.buildqueue_record.lastscore)

    def test_source_publication_override(self):
        # After a component override, the build's current source
        # publication tracks the overridden publication.
        original_spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = original_spph.createMissingBuilds()
        self.assertEqual(original_spph, build.current_source_publication)
        universe = getUtility(IComponentSet)['universe']
        overridden_spph = original_spph.changeOverride(
            new_component=universe)
        self.assertNotEqual(
            original_spph, build.current_source_publication)
        self.assertEqual(
            overridden_spph, build.current_source_publication)

    def test_estimated_duration(self):
        # A build's estimated duration is taken from the previous build
        # of the same source; with no history it falls back to an
        # estimate based on package size.
        spn = self.factory.getUniqueString()
        first_spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [first_build] = first_spph.createMissingBuilds()
        self.assertEqual(
            timedelta(0, 60),
            first_build.buildqueue_record.estimated_duration)
        # Complete the first build with a 72-minute duration.
        first_build.updateStatus(
            BuildStatus.BUILDING,
            date_started=self.now - timedelta(minutes=72))
        first_build.updateStatus(
            BuildStatus.FULLYBUILT, date_finished=self.now)
        first_build.buildqueue_record.destroySelf()
        second_spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [second_build] = second_spph.createMissingBuilds()
        # The second build inherits the 72-minute estimate.
        self.assertEqual(
            timedelta(0, 72 * 60),
            second_build.buildqueue_record.estimated_duration)

    def test_store_uploadlog_refuses_to_overwrite(self):
        # A second attempt to store an upload log on the same build is
        # rejected.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        build.storeUploadLog('foo')
        self.assertRaises(AssertionError, build.storeUploadLog, 'bar')
 def getTestPublisher(self, distroseries):
     """Return a `SoyuzTestPublisher` configured for `distroseries`.

     The publisher gets fake chroots added and uses the given series
     as its default distroseries.
     """
     stp = SoyuzTestPublisher()
     stp.addFakeChroots(distroseries)
     stp.setUpDefaultDistroSeries(distroseries)
     return stp
class TestSourcesList(TestCaseWithFactory):
    """Test sources.list contents for building, and related mechanisms."""

    layer = LaunchpadZopelessLayer
    run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=10)

    # Components selected for the test series so dependency lines can
    # reference any of them.
    ubuntu_components = [
        "main", "restricted", "universe", "multiverse", "partner"]

    # Test signing key name -> full GPG fingerprint, used to check the
    # trusted keys returned alongside the sources.list entries.
    fingerprints = {
        "*****@*****.**": "0D57E99656BEFB0897606EE9A022DD1F5001B46D",
        "*****@*****.**": (
            "B7B1966662BA8D3F5A6ED89BD640F4A593B2CF67"),
        }

    def setUp(self):
        super(TestSourcesList, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
        self.hoary = self.ubuntu.getSeries("hoary")
        self.publisher.addFakeChroots(self.hoary)
        self.publisher.setUpDefaultDistroSeries(self.hoary)
        # Ensure every component in ubuntu_components is selected for
        # hoary, adding any that are missing.
        for component_name in self.ubuntu_components:
            component = getUtility(IComponentSet)[component_name]
            if component not in self.hoary.components:
                self.factory.makeComponentSelection(self.hoary, component)

    def test_defaults(self):
        # Non-primary archives by default use the Release, Security and
        # Updates pockets from the primary archive, and all its available
        # components.
        self.assertEqual(
            PackagePublishingPocket.UPDATES, default_pocket_dependency)
        self.assertEqual("multiverse", default_component_dependency_name)
        self.assertEqual(
            (PackagePublishingPocket.RELEASE,
             PackagePublishingPocket.SECURITY,
             PackagePublishingPocket.UPDATES),
            pocket_dependencies[default_pocket_dependency])

    @defer.inlineCallbacks
    def makeArchive(self, signing_key_name="*****@*****.**",
                    publish_binary=False, **kwargs):
        """Create an Ubuntu archive, optionally signed and populated.

        :param signing_key_name: name of the test key to set as the
            archive's signing key, or None for an unsigned archive.
        :param publish_binary: if True, publish one binary package in
            the new archive.
        """
        # NOTE(review): attribute names cannot contain hyphens, so this
        # getattr presumably always raises AttributeError and the key
        # server fixture is always started -- confirm intent.
        try:
            getattr(config, "in-process-key-server-fixture")
        except AttributeError:
            yield self.useFixture(InProcessKeyServerFixture()).start()
        archive = self.factory.makeArchive(distribution=self.ubuntu, **kwargs)
        if signing_key_name is not None:
            key_path = os.path.join(gpgkeysdir, "%s.sec" % signing_key_name)
            yield IArchiveSigningKey(archive).setSigningKey(
                key_path, async_keyserver=True)
        if publish_binary:
            self.publisher.getPubBinaries(
                archive=archive, status=PackagePublishingStatus.PUBLISHED)
        defer.returnValue(archive)

    def makeBuild(self, **kwargs):
        """Publish a source via the test publisher and return its build."""
        pub_source = self.publisher.getPubSource(**kwargs)
        [build] = pub_source.createMissingBuilds()
        return build

    def assertPrimaryCurrentComponent(self, expected, build):
        """Assert the component name the build's source currently has in
        the primary archive."""
        self.assertEqual(
            expected,
            get_primary_current_component(
                build.archive, build.distro_series,
                build.source_package_release.name).name)

    @defer.inlineCallbacks
    def assertSourcesListAndKeys(self, expected_sources_list,
                                 expected_key_names, build, **kwargs):
        """Assert the sources.list entries and trusted keys for a build.

        :param expected_sources_list: list of (archive-or-prefix,
            suffixes) pairs; each pair expands to one "deb" line per
            suffix.
        :param expected_key_names: names of signing keys whose
            fingerprints must match the returned trusted keys.
        """
        expected_lines = []
        for archive_or_prefix, suffixes in expected_sources_list:
            if IArchive.providedBy(archive_or_prefix):
                prefix = "deb %s " % archive_or_prefix.archive_url
            else:
                prefix = archive_or_prefix + " "
            expected_lines.extend([prefix + suffix for suffix in suffixes])
        sources_list, trusted_keys = yield get_sources_list_for_building(
            build, build.distro_arch_series, build.source_package_release.name,
            **kwargs)
        self.assertEqual(expected_lines, sources_list)
        key_matchers = [
            Base64KeyMatches(self.fingerprints[key_name])
            for key_name in expected_key_names]
        self.assertThat(trusted_keys, MatchesSetwise(*key_matchers))

    @defer.inlineCallbacks
    def test_ppa_with_no_binaries(self):
        # If there are no published binaries in a PPA, only its primary
        # archive dependencies need to be considered.
        ppa = yield self.makeArchive()
        build = self.makeBuild(archive=ppa)
        self.assertEqual(
            0, ppa.getAllPublishedBinaries(
                distroarchseries=build.distro_arch_series,
                status=PackagePublishingStatus.PUBLISHED).count())
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_ppa_with_binaries(self):
        # If there are binaries published in a PPA, then the PPA is
        # considered as well as its primary dependencies.
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_dependent_ppa_with_no_binaries(self):
        # A depended-upon PPA is not considered if it has no published
        # binaries.
        lower_ppa = yield self.makeArchive(
            signing_key_name="*****@*****.**")
        upper_ppa = yield self.makeArchive(publish_binary=True)
        upper_ppa.addArchiveDependency(
            lower_ppa, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["main"])
        build = self.makeBuild(archive=upper_ppa)
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_dependent_ppa_with_binaries(self):
        # A depended-upon PPA is considered if it has published binaries.
        lower_ppa = yield self.makeArchive(
            signing_key_name="*****@*****.**",
            publish_binary=True)
        upper_ppa = yield self.makeArchive(publish_binary=True)
        upper_ppa.addArchiveDependency(
            lower_ppa, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["main"])
        build = self.makeBuild(archive=upper_ppa)
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (lower_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**", "*****@*****.**"],
            build)

    @defer.inlineCallbacks
    def test_lax_supported_component_dependencies(self):
        # Dependencies for series with
        # strict_supported_component_dependencies=False are reasonable.
        # PPAs only have the "main" component.
        lower_ppa = yield self.makeArchive(
            signing_key_name="*****@*****.**",
            publish_binary=True)
        upper_ppa = yield self.makeArchive(publish_binary=True)
        upper_ppa.addArchiveDependency(
            lower_ppa, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["main"])
        upper_ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.UPDATES,
            getUtility(IComponentSet)["restricted"])
        build = self.makeBuild(archive=upper_ppa)
        # With strict component dependencies (the default), the primary
        # dependency is limited to main+restricted.
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (lower_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted",
                 "hoary-security main restricted",
                 "hoary-updates main restricted",
                 ]),
             ],
            ["*****@*****.**", "*****@*****.**"],
            build)
        # Relaxing the series' strictness widens the primary dependency
        # to all components.
        self.hoary.strict_supported_component_dependencies = False
        transaction.commit()
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (lower_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**", "*****@*****.**"],
            build)

    @defer.inlineCallbacks
    def test_no_op_primary_archive_dependency(self):
        # Overriding the default primary archive dependencies with exactly
        # the same values has no effect.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.UPDATES,
            getUtility(IComponentSet)["multiverse"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_security(self):
        # The primary archive dependency can be modified to behave as an
        # embargoed archive that builds security updates.  This is done by
        # setting the SECURITY pocket dependencies (RELEASE and SECURITY)
        # and following the component dependencies of the component where
        # the source was last published in the primary archive.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.SECURITY)
        build = self.makeBuild(archive=ppa)
        self.assertPrimaryCurrentComponent("universe", build)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main universe",
                 "hoary-security main universe",
                 ]),
             ], [], build)
        # With an ancestor already published in "main", only main's
        # component dependencies are followed.
        self.publisher.getPubSource(
            sourcename="with-ancestry", version="1.0",
            archive=self.ubuntu.main_archive)
        [build_with_ancestry] = self.publisher.getPubSource(
            sourcename="with-ancestry", version="1.1",
            archive=ppa).createMissingBuilds()
        self.assertPrimaryCurrentComponent("main", build_with_ancestry)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main",
                 "hoary-security main",
                 ]),
             ], [], build_with_ancestry)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_release(self):
        # The primary archive dependency can be modified to behave as a
        # pristine build environment based only on what was included in the
        # original release of the corresponding series.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["restricted"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, ["hoary main restricted"])], [], build)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_proposed(self):
        # The primary archive dependency can be modified to extend the build
        # environment for PROPOSED.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.PROPOSED,
            getUtility(IComponentSet)["multiverse"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 "hoary-proposed main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_backports(self):
        # The primary archive dependency can be modified to extend the build
        # environment for BACKPORTS.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.BACKPORTS,
            getUtility(IComponentSet)["multiverse"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 "hoary-backports main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_partner(self):
        # Similarly to what happens with PPA builds, partner builds may
        # depend on any component in the primary archive.  This behaviour
        # allows scenarios where partner packages may use other
        # restricted/non-free applications from multiverse, and also other
        # partner applications.
        primary, partner = self.ubuntu.all_distro_archives
        self.publisher.getPubBinaries(
            archive=partner, component="partner",
            status=PackagePublishingStatus.PUBLISHED)
        build = self.makeBuild(archive=partner, component="partner")
        yield self.assertSourcesListAndKeys(
            [(partner, ["hoary partner"]),
             (primary, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_partner_proposed(self):
        # The partner archive's PROPOSED pocket builds against itself, but
        # still uses the default UPDATES dependency for the primary archive
        # unless overridden by ArchiveDependency.
        primary, partner = self.ubuntu.all_distro_archives
        self.publisher.getPubBinaries(
            archive=partner, component="partner",
            status=PackagePublishingStatus.PUBLISHED)
        self.publisher.getPubBinaries(
            archive=partner, component="partner",
            status=PackagePublishingStatus.PUBLISHED,
            pocket=PackagePublishingPocket.PROPOSED)
        build = self.makeBuild(
            archive=partner, component="partner",
            pocket=PackagePublishingPocket.PROPOSED)
        yield self.assertSourcesListAndKeys(
            [(partner, [
                 "hoary partner",
                 "hoary-proposed partner",
                 ]),
             (primary, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_archive_external_dependencies(self):
        # An archive can be manually given additional external dependencies.
        # If present, "%(series)s" is replaced with the series name for the
        # build being dispatched.
        ppa = yield self.makeArchive(publish_binary=True)
        ppa.external_dependencies = (
            "deb http://user:pass@repository zoing everything\n"
            "deb http://user:pass@repository %(series)s public private\n"
            "deb http://user:pass@repository %(series)s-extra public")
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             ("deb http://user:pass@repository", [
                 "zoing everything",
                 "hoary public private",
                 "hoary-extra public",
                 ]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_build_external_dependencies(self):
        # A single build can be manually given additional external
        # dependencies.
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)
        build.api_external_dependencies = (
            "deb http://user:pass@repository foo bar")
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             ("deb http://user:pass@repository", ["foo bar"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_build_tools(self):
        # We can force an extra build tools line to be added to
        # sources.list, which is useful for specialised build types.
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)

        # Upload the tools archive key to the keyserver.
        tools_key_name = "*****@*****.**"
        tools_key_path = os.path.join(gpgkeysdir, "%s.sec" % tools_key_name)
        with open(tools_key_path) as tools_key_file:
            secret_key_export = tools_key_file.read()
        # Remove security proxy to avoid problems with running in a thread.
        gpghandler = removeSecurityProxy(getUtility(IGPGHandler))
        gpghandler.importSecretKey(secret_key_export)
        yield deferToThread(
            gpghandler.uploadPublicKey, self.fingerprints[tools_key_name])

        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             ("deb http://example.org", ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**", tools_key_name], build,
            tools_source="deb http://example.org %(series)s main",
            tools_fingerprint=self.fingerprints[tools_key_name])

    @defer.inlineCallbacks
    def test_build_tools_bad_formatting(self):
        # If tools_source is badly formatted, we log the error but don't
        # blow up.  (Note the missing "s" at the end of "%(series)".)
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)
        logger = BufferLogger()
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**"], build,
            tools_source="deb http://example.org %(series) main",
            logger=logger)
        self.assertThat(logger.getLogBuffer(), StartsWith(
            "ERROR Exception processing build tools sources.list entry:\n"))

    @defer.inlineCallbacks
    def test_overlay(self):
        # An overlay distroseries is a derived distribution which works like
        # a PPA.  This means that the parent's details gets added to the
        # sources.list passed to the builders.
        depdistro = self.factory.makeDistribution(
            "depdistro", publish_base_url="http://archive.launchpad.dev/")
        depseries = self.factory.makeDistroSeries(
            distribution=depdistro, name="depseries")
        self.factory.makeDistroArchSeries(
            distroseries=depseries, architecturetag="i386")
        self.publisher.addFakeChroots(depseries)
        for component_name in self.ubuntu_components:
            component = getUtility(IComponentSet)[component_name]
            self.factory.makeComponentSelection(depseries, component)
        self.factory.makeDistroSeriesParent(
            derived_series=self.hoary, parent_series=depseries,
            initialized=True, is_overlay=True,
            pocket=PackagePublishingPocket.SECURITY,
            component=getUtility(IComponentSet)["universe"])
        build = self.makeBuild()
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, ["hoary main"]),
             (depdistro.main_archive, [
                 "depseries main universe",
                 "depseries-security main universe",
                 ]),
             ], [], build)
# Beispiel #17 (paste artifact from a code-example aggregator; commented
# out so the undefined name "Beispiel" is never evaluated)
class TestBuildSet(TestCaseWithFactory):
    """Tests for the `IBinaryPackageBuildSet` utility."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create two architectures with builders and a primary archive."""
        super(TestBuildSet, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor_one = self.factory.makeProcessor(
            supports_virtualized=True)
        self.processor_two = self.factory.makeProcessor(
            supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_one)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_two)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.factory.makeBuilder(processors=[self.processor_one])
            self.factory.makeBuilder(processors=[self.processor_two])
        self.builds = []
        self.spphs = []

    def setUpBuilds(self):
        """Create five sources, each with a build on both architectures.

        The first four sources end up FULLYBUILT; the fifth FAILEDTOBUILD.
        """
        for i in range(5):
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries,
                architecturehintlist='any')
            self.spphs.append(spph)
            builds = removeSecurityProxy(
                getUtility(IBinaryPackageBuildSet).createForSource(
                    spph.sourcepackagerelease, spph.archive, spph.distroseries,
                    spph.pocket))
            with person_logged_in(self.admin):
                for build in builds:
                    build.updateStatus(BuildStatus.BUILDING)
                    if i == 4:
                        build.updateStatus(BuildStatus.FAILEDTOBUILD)
                    else:
                        build.updateStatus(BuildStatus.FULLYBUILT)
                    # Drop the queue entry so the build counts as finished.
                    build.buildqueue_record.destroySelf()
            self.builds += builds

    def test_new_virtualization(self):
        # Builds are virtualized unless Processor.supports_nonvirtualized
        # and not Archive.require_virtualized.

        def make(proc_virt, proc_nonvirt, archive_virt):
            # Create a build whose processor and archive have the given
            # virtualization capabilities.
            proc = self.factory.makeProcessor(
                supports_nonvirtualized=proc_nonvirt,
                supports_virtualized=proc_virt)
            das = self.factory.makeDistroArchSeries(processor=proc)
            archive = self.factory.makeArchive(
                distribution=das.distroseries.distribution,
                virtualized=archive_virt)
            bpb = getUtility(IBinaryPackageBuildSet).new(
                self.factory.makeSourcePackageRelease(), archive, das,
                PackagePublishingPocket.RELEASE)
            self.assertEqual(proc, bpb.processor)
            return bpb

        # Names encode (proc_virt, proc_nonvirt, archive_virt) as v/n.
        vvvbpb = make(proc_virt=True, proc_nonvirt=True, archive_virt=True)
        self.assertTrue(vvvbpb.virtualized)

        vvnbpb = make(proc_virt=True, proc_nonvirt=True, archive_virt=False)
        self.assertFalse(vvnbpb.virtualized)

        vnvbpb = make(proc_virt=True, proc_nonvirt=False, archive_virt=True)
        self.assertTrue(vnvbpb.virtualized)

        # Previously misnamed vnvbpb, clobbering the case above.
        vnnbpb = make(proc_virt=True, proc_nonvirt=False, archive_virt=False)
        self.assertTrue(vnnbpb.virtualized)

    def test_get_for_distro_distribution(self):
        # Test fetching builds for a distro's main archives.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution)
        self.assertEqual(result.count(), 10)

    def test_get_for_distro_distroseries(self):
        # Test fetching builds for a distroseries' main archives.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distroseries)
        self.assertEqual(result.count(), 10)

    def test_get_for_distro_distroarchseries(self):
        # Test fetching builds for a distroarchseries' main archives.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.das_one)
        self.assertEqual(result.count(), 5)

    def test_get_for_distro_filter_build_status(self):
        # The result can be filtered based on the build status.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, status=BuildStatus.FULLYBUILT)
        self.assertEqual(result.count(), 8)

    def test_get_for_distro_filter_name(self):
        # The result can be filtered based on the name.
        self.setUpBuilds()
        spn = self.builds[2].source_package_release.sourcepackagename.name
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, name=spn)
        self.assertEqual(result.count(), 2)

    def test_get_for_distro_filter_pocket(self):
        # The result can be filtered based on the pocket of the build.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.RELEASE)
        self.assertEqual(result.count(), 10)
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.UPDATES)
        self.assertEqual(result.count(), 0)

    def test_get_for_distro_filter_arch_tag(self):
        # The result can be filtered based on the archtag of the build.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, arch_tag=self.das_one.architecturetag)
        self.assertEqual(result.count(), 5)

    def test_get_status_summary_for_builds(self):
        # We can query for the status summary of a number of builds.
        self.setUpBuilds()
        relevant_builds = [self.builds[0], self.builds[2], self.builds[-2]]
        summary = getUtility(IBinaryPackageBuildSet).getStatusSummaryForBuilds(
            relevant_builds)
        self.assertEqual(summary['status'], BuildSetStatus.FAILEDTOBUILD)
        self.assertEqual(summary['builds'], [self.builds[-2]])

    def test_preload_data(self):
        # The BuildSet class allows data to be preloaded.
        # Note, it is an internal method, so we have to push past the
        # security proxy.
        self.setUpBuilds()
        builds = self.builds[:4]
        rset = removeSecurityProxy(
            getUtility(IBinaryPackageBuildSet))._prefetchBuildData(builds)
        self.assertEqual(len(rset), 4)

    def test_get_builds_by_source_package_release(self):
        # We are able to return all of the builds for the source package
        # release ids passed in.
        self.setUpBuilds()
        spphs = self.spphs[:2]
        ids = [spph.sourcepackagerelease.id for spph in spphs]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(ids)
        expected_titles = []
        for spph in spphs:
            for das in (self.das_one, self.das_two):
                expected_titles.append(
                    '%s build of %s %s in %s %s RELEASE' %
                    (das.architecturetag, spph.source_package_name,
                     spph.source_package_version,
                     self.distroseries.distribution.name,
                     self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))

    def test_get_builds_by_source_package_release_filtering(self):
        # The lookup can additionally be filtered by build state.
        self.setUpBuilds()
        ids = [self.spphs[-1].sourcepackagerelease.id]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.FAILEDTOBUILD)
        expected_titles = []
        for das in (self.das_one, self.das_two):
            expected_titles.append(
                '%s build of %s %s in %s %s RELEASE' %
                (das.architecturetag, self.spphs[-1].source_package_name,
                 self.spphs[-1].source_package_version,
                 self.distroseries.distribution.name, self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.CHROOTWAIT)
        self.assertEqual([], list(builds))

    def test_no_get_builds_by_source_package_release(self):
        # If no ids or None are passed into .getBuildsBySourcePackageRelease,
        # an empty list is returned.
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(None)
        self.assertEqual([], builds)
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease([])
        self.assertEqual([], builds)

    def test_getBySourceAndLocation(self):
        # getBySourceAndLocation finds a build by its source, archive and
        # architecture; no match returns None.
        self.setUpBuilds()
        self.assertEqual(
            self.builds[0],
            getUtility(IBinaryPackageBuildSet).getBySourceAndLocation(
                self.builds[0].source_package_release, self.builds[0].archive,
                self.builds[0].distro_arch_series))
        self.assertEqual(
            self.builds[1],
            getUtility(IBinaryPackageBuildSet).getBySourceAndLocation(
                self.builds[1].source_package_release, self.builds[1].archive,
                self.builds[1].distro_arch_series))
        self.assertIs(
            None,
            getUtility(IBinaryPackageBuildSet).getBySourceAndLocation(
                self.builds[1].source_package_release,
                self.factory.makeArchive(), self.builds[1].distro_arch_series))
class TestBuildSet(TestCaseWithFactory):
    """Tests for `IBinaryPackageBuildSet` (publisher-based variant).

    NOTE(review): this redefines the TestBuildSet name declared earlier in
    this module, so only the later definition is collected by the test
    runner — confirm whether both copies are intended to survive.
    """

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create two virtualized architectures, builders and an archive."""
        super(TestBuildSet, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor_one = self.factory.makeProcessor()
        self.processor_two = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_one,
            supports_virtualized=True)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_two,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder_one = self.factory.makeBuilder(
                processor=self.processor_one)
            self.builder_two = self.factory.makeBuilder(
                processor=self.processor_two)
        self.builds = []
        self.spphs = []

    def setUpBuilds(self):
        """Create five sources, each with a build on both architectures.

        The first four sources end up FULLYBUILT; the fifth FAILEDTOBUILD.
        """
        for i in range(5):
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries, architecturehintlist='any')
            self.spphs.append(spph)
            builds = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                for build in builds:
                    build.updateStatus(BuildStatus.BUILDING)
                    if i == 4:
                        build.updateStatus(BuildStatus.FAILEDTOBUILD)
                    else:
                        build.updateStatus(BuildStatus.FULLYBUILT)
                    # Drop the queue entry so the build counts as finished.
                    build.buildqueue_record.destroySelf()
            self.builds += builds

    def test_get_for_distro_distribution(self):
        # Test fetching builds for a distro's main archives.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution)
        self.assertEqual(result.count(), 10)

    def test_get_for_distro_distroseries(self):
        # Test fetching builds for a distroseries' main archives.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distroseries)
        self.assertEqual(result.count(), 10)

    def test_get_for_distro_distroarchseries(self):
        # Test fetching builds for a distroarchseries' main archives.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.das_one)
        self.assertEqual(result.count(), 5)

    def test_get_for_distro_filter_build_status(self):
        # The result can be filtered based on the build status.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, status=BuildStatus.FULLYBUILT)
        self.assertEqual(result.count(), 8)

    def test_get_for_distro_filter_name(self):
        # The result can be filtered based on the name.
        self.setUpBuilds()
        spn = self.builds[2].source_package_release.sourcepackagename.name
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, name=spn)
        self.assertEqual(result.count(), 2)

    def test_get_for_distro_filter_pocket(self):
        # The result can be filtered based on the pocket of the build.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.RELEASE)
        self.assertEqual(result.count(), 10)
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.UPDATES)
        self.assertEqual(result.count(), 0)

    def test_get_for_distro_filter_arch_tag(self):
        # The result can be filtered based on the archtag of the build.
        self.setUpBuilds()
        result = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, arch_tag=self.das_one.architecturetag)
        self.assertEqual(result.count(), 5)

    def test_get_status_summary_for_builds(self):
        # We can query for the status summary of a number of builds.
        self.setUpBuilds()
        relevant_builds = [self.builds[0], self.builds[2], self.builds[-2]]
        summary = getUtility(
            IBinaryPackageBuildSet).getStatusSummaryForBuilds(
                relevant_builds)
        self.assertEqual(summary['status'], BuildSetStatus.FAILEDTOBUILD)
        self.assertEqual(summary['builds'], [self.builds[-2]])

    def test_preload_data(self):
        # The BuildSet class allows data to be preloaded.
        # Note, it is an internal method, so we have to push past the
        # security proxy.
        self.setUpBuilds()
        builds = self.builds[:4]
        rset = removeSecurityProxy(
            getUtility(IBinaryPackageBuildSet))._prefetchBuildData(builds)
        self.assertEqual(len(rset), 4)

    def test_get_builds_by_source_package_release(self):
        # We are able to return all of the builds for the source package
        # release ids passed in.
        self.setUpBuilds()
        spphs = self.spphs[:2]
        ids = [spph.sourcepackagerelease.id for spph in spphs]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(ids)
        expected_titles = []
        for spph in spphs:
            for das in (self.das_one, self.das_two):
                expected_titles.append(
                    '%s build of %s %s in %s %s RELEASE' % (
                        das.architecturetag, spph.source_package_name,
                        spph.source_package_version,
                        self.distroseries.distribution.name,
                        self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))

    def test_get_builds_by_source_package_release_filtering(self):
        # The lookup can additionally be filtered by build state.
        self.setUpBuilds()
        ids = [self.spphs[-1].sourcepackagerelease.id]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.FAILEDTOBUILD)
        expected_titles = []
        for das in (self.das_one, self.das_two):
            expected_titles.append(
                '%s build of %s %s in %s %s RELEASE' % (
                    das.architecturetag, self.spphs[-1].source_package_name,
                    self.spphs[-1].source_package_version,
                    self.distroseries.distribution.name,
                    self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.CHROOTWAIT)
        self.assertEqual([], list(builds))

    def test_no_get_builds_by_source_package_release(self):
        # If no ids or None are passed into .getBuildsBySourcePackageRelease,
        # an empty list is returned.
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(None)
        self.assertEqual([], builds)
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease([])
        self.assertEqual([], builds)
# Beispiel #19
# 0
class TestSourcePublicationListingExtra(BrowserTestCase):
    """Tests for the '+listing-archive-extra' view of a source publication."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create a DistroArchSeries, archive and builder for test builds."""
        super(TestSourcePublicationListingExtra, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create everything we need to create builds, such as a
        # DistroArchSeries and a builder.
        self.processor = self.factory.makeProcessor(supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries(
            distribution=getUtility(IDistributionSet)['ubuntu'])
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(
                processors=[self.processor])

    def test_view_with_source_package_recipe(self):
        # When a SourcePackageRelease is linked to a
        # SourcePackageRecipeBuild, the view shows which recipe was
        # responsible for creating the SPR.
        sprb = self.factory.makeSourcePackageRecipeBuild(archive=self.archive)
        recipe = sprb.recipe
        requester = sprb.requester
        spph = self.publisher.getPubSource(
            archive=self.archive, status=PackagePublishingStatus.PUBLISHED)
        spph.sourcepackagerelease.source_package_recipe_build = sprb
        # The rendered page must link to the recipe build, the recipe
        # itself and the person who requested the build.
        recipe_link_matches = soupmatchers.HTMLContains(
            soupmatchers.Tag('link to build',
                             'a',
                             attrs={'href': canonical_url(sprb)},
                             text='Built'),
            soupmatchers.Tag('recipe name',
                             'a',
                             attrs={'href': canonical_url(recipe)},
                             text=recipe.name),
            soupmatchers.Tag('requester',
                             'a',
                             attrs={'href': canonical_url(requester)},
                             text=requester.displayname))
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertThat(browser.contents, recipe_link_matches)

    def test_view_without_source_package_recipe(self):
        # And if a SourcePackageRelease is not linked, there is no sign of it
        # in the view.
        spph = self.publisher.getPubSource(
            archive=self.archive, status=PackagePublishingStatus.PUBLISHED)
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertNotIn('Built by recipe', browser.contents)

    def test_view_with_deleted_source_package_recipe(self):
        # If a SourcePackageRelease is linked to a deleted recipe, the text
        # 'deleted recipe' is displayed, rather than a link.
        sprb = self.factory.makeSourcePackageRecipeBuild(archive=self.archive)
        recipe = sprb.recipe
        requester = sprb.requester
        spph = self.publisher.getPubSource(
            archive=self.archive, status=PackagePublishingStatus.PUBLISHED)
        spph.sourcepackagerelease.source_package_recipe_build = sprb
        with person_logged_in(recipe.owner):
            recipe.destroySelf()
        # Only the build and requester links remain; the recipe link is
        # deliberately absent from the expected markup.
        recipe_link_matches = soupmatchers.HTMLContains(
            soupmatchers.Tag('link to build',
                             'a',
                             attrs={'href': canonical_url(sprb)},
                             text='Built'),
            soupmatchers.Tag('requester',
                             'a',
                             attrs={'href': canonical_url(requester)},
                             text=requester.displayname))
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertThat(browser.contents, recipe_link_matches)
        self.assertIn('deleted recipe', browser.contents)
# Beispiel #20
# 0
class TestBuild(TestCaseWithFactory):

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create a distroseries, one architecture, a chroot and a builder,
        ready for creating test builds."""
        super(TestBuild, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        # Publisher setup and builder creation require admin privileges.
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)
        # Reference timestamp for tests that manipulate build dates.
        self.now = datetime.now(pytz.UTC)

    def test_title(self):
        # A build has a title which describes the context source version and
        # in which series and architecture it is targeted for.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_title = '%s build of %s %s in %s %s RELEASE' % (
            self.das.architecturetag, spph.source_package_name,
            spph.source_package_version, self.distroseries.distribution.name,
            self.distroseries.name)
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(expected_title, build.title)

    def test_linking(self):
        # A build directly links to the archive, distribution, distroseries,
        # distroarchseries, pocket in its context and also the source version
        # that generated it.  (assertEquals is a deprecated alias of
        # assertEqual.)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(self.distroseries.main_archive, build.archive)
        self.assertEqual(self.distroseries.distribution, build.distribution)
        self.assertEqual(self.distroseries, build.distro_series)
        self.assertEqual(self.das, build.distro_arch_series)
        self.assertEqual(PackagePublishingPocket.RELEASE, build.pocket)
        self.assertEqual(self.das.architecturetag, build.arch_tag)
        self.assertTrue(build.is_virtualized)
        self.assertEqual(
            '%s - %s' % (spph.source_package_name,
                spph.source_package_version),
            build.source_package_release.title)

    def test_processed_builds(self):
        # Builds which were already processed also offer additional
        # information about its process such as the time it was started and
        # finished and its 'log' and 'upload_changesfile' as librarian files.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version,
            distroseries=self.distroseries,
            status=PackagePublishingStatus.PUBLISHED)
        with person_logged_in(self.admin):
            binary = self.publisher.getPubBinaries(binaryname=spn,
                distroseries=self.distroseries, pub_source=spph,
                version=version, builder=self.builder)
        build = binary[0].binarypackagerelease.build
        self.assertTrue(build.was_built)
        self.assertEqual(
            PackageUploadStatus.DONE, build.package_upload.status)
        # Leading-zero literals (01) are invalid in Python 3; use plain
        # decimal.  assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(
            datetime(2008, 1, 1, 0, 0, 0, tzinfo=pytz.UTC),
            build.date_started)
        self.assertEqual(
            datetime(2008, 1, 1, 0, 5, 0, tzinfo=pytz.UTC),
            build.date_finished)
        self.assertEqual(timedelta(minutes=5), build.duration)
        expected_buildlog = 'buildlog_%s-%s-%s.%s_%s_FULLYBUILT.txt.gz' % (
            self.distroseries.distribution.name, self.distroseries.name,
            self.das.architecturetag, spn, version)
        self.assertEqual(expected_buildlog, build.log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' % (
                self.distroseries.distribution.name, spn, version, build.id))
        expected_buildlog_url = '%s/%s' % (url_start, expected_buildlog)
        self.assertEqual(expected_buildlog_url, build.log_url)
        expected_changesfile = '%s_%s_%s.changes' % (
            spn, version, self.das.architecturetag)
        self.assertEqual(
            expected_changesfile, build.upload_changesfile.filename)
        expected_changesfile_url = '%s/%s' % (url_start, expected_changesfile)
        self.assertEqual(expected_changesfile_url, build.changesfile_url)
        # Since this build was successful, it can not be retried
        self.assertFalse(build.can_be_retried)

    def test_current_component(self):
        # The currently published component is provided via the
        # 'current_component' property.  It looks over the publishing records
        # and finds the current publication of the source in question.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual('main', build.current_component.name)
        # It may not be the same as
        self.assertEqual('main', build.source_package_release.component.name)
        # If the package has no uploads, its package_upload is None
        self.assertIsNone(build.package_upload)

    def test_current_component_when_unpublished(self):
        """current_component is None when the build's source has no
        publication in the build's own distroarchseries (a buggy state
        observed in production)."""
        source_pub = self.publisher.getPubSource()
        unrelated_das = self.factory.makeDistroArchSeries()
        orphan_build = source_pub.sourcepackagerelease.createBuild(
            unrelated_das, PackagePublishingPocket.RELEASE, source_pub.archive)
        self.assertIs(None, orphan_build.current_component)

    def test_retry_for_released_series(self):
        """Builds can not be retried once their distroseries is released."""
        obsolete_series = self.factory.makeDistroSeries()
        arch_series = self.factory.makeDistroArchSeries(
            distroseries=obsolete_series, processor=self.processor,
            supports_virtualized=True)
        with person_logged_in(self.admin):
            obsolete_series.nominatedarchindep = arch_series
            obsolete_series.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=obsolete_series)
        source_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=obsolete_series)
        [build] = source_pub.createMissingBuilds()
        self.assertFalse(build.can_be_retried)

    def test_partner_retry_for_released_series(self):
        """Builds for PARTNER archives can be retried even after the
        distroseries has been released."""
        obsolete_series = self.factory.makeDistroSeries()
        arch_series = self.factory.makeDistroArchSeries(
            distroseries=obsolete_series, processor=self.processor,
            supports_virtualized=True)
        partner_archive = self.factory.makeArchive(
            purpose=ArchivePurpose.PARTNER,
            distribution=obsolete_series.distribution)
        with person_logged_in(self.admin):
            obsolete_series.nominatedarchindep = arch_series
            obsolete_series.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=obsolete_series)
        source_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=obsolete_series, archive=partner_archive)
        [failed_build] = source_pub.createMissingBuilds()
        failed_build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(failed_build.can_be_retried)

    def test_retry(self):
        """A failed build can be retried."""
        source_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [failed_build] = source_pub.createMissingBuilds()
        failed_build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(failed_build.can_be_retried)

    def test_retry_cancelled(self):
        """A cancelled build can be retried."""
        source_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [cancelled_build] = source_pub.createMissingBuilds()
        cancelled_build.updateStatus(BuildStatus.CANCELLED)
        self.assertTrue(cancelled_build.can_be_retried)

    def test_uploadlog(self):
        # The upload log can be attached to a build.  A fresh build has
        # neither an upload log nor a URL for one.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        # assertEquals is a deprecated alias of assertEqual.
        self.assertIsNone(build.upload_log)
        self.assertIsNone(build.upload_log_url)
        build.storeUploadLog('sample upload log')
        expected_filename = 'upload_%s_log.txt' % build.id
        self.assertEqual(expected_filename, build.upload_log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' % (
                self.distroseries.distribution.name, spph.source_package_name,
                spph.source_package_version, build.id))
        expected_url = '%s/%s' % (url_start, expected_filename)
        self.assertEqual(expected_url, build.upload_log_url)

    def test_retry_resets_state(self):
        # Retrying a build clears most of its state (log, upload log,
        # failure count) but keeps the original first-dispatch time.
        build = self.factory.makeBinaryPackageBuild()
        build.updateStatus(BuildStatus.BUILDING, date_started=self.now)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        with person_logged_in(self.admin):
            build.retry()
        self.assertEquals(BuildStatus.NEEDSBUILD, build.status)
        self.assertEquals(self.now, build.date_first_dispatched)
        self.assertIsNone(build.log)
        self.assertIsNone(build.upload_log)
        self.assertEquals(0, build.failure_count)

    def test_create_bpr(self):
        # A BinaryPackageRelease can be created from a build.
        name = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        bpn = self.factory.makeBinaryPackageName(name=name)
        publication = self.publisher.getPubSource(
            sourcename=name, version=version,
            distroseries=self.distroseries)
        [build] = publication.createMissingBuilds()
        spr = publication.sourcepackagerelease
        bpr = build.createBinaryPackageRelease(
            binarypackagename=bpn, version=version, summary='',
            description='', binpackageformat=BinaryPackageFormat.DEB,
            component=spr.component.id, section=spr.section.id,
            priority=PackagePublishingPriority.STANDARD, installedsize=0,
            architecturespecific=False)
        self.assertEquals(1, build.binarypackages.count())
        self.assertEquals([bpr], list(build.binarypackages))

    def test_multiple_create_bpr(self):
        # Several BinaryPackageReleases can be attached to one build.
        source_name = self.factory.getUniqueString()
        source_version = "%s.1" % self.factory.getUniqueInteger()
        publication = self.publisher.getPubSource(
            sourcename=source_name, version=source_version,
            distroseries=self.distroseries)
        [build] = publication.createMissingBuilds()
        spr = publication.sourcepackagerelease
        expected_names = [
            '%s-%s' % (source_name, index) for index in range(15)]
        for index, bpn_name in enumerate(expected_names):
            bpn = self.factory.makeBinaryPackageName(bpn_name)
            build.createBinaryPackageRelease(
                binarypackagename=bpn, version=str(index), summary='',
                description='', binpackageformat=BinaryPackageFormat.DEB,
                component=spr.component.id, section=spr.section.id,
                priority=PackagePublishingPriority.STANDARD, installedsize=0,
                architecturespecific=False)
        self.assertEquals(15, build.binarypackages.count())
        # .binarypackages returns its rows sorted by name.
        self.assertEquals(
            sorted(expected_names),
            [bpr.name for bpr in build.binarypackages])

    def test_cannot_rescore_non_needsbuilds_builds(self):
        # Only builds still in NEEDSBUILD can be rescored; rescoring a
        # completed build raises CannotBeRescored.  Rescoring requires
        # admin privileges, so log in as an admin.
        with person_logged_in(self.admin):
            [bpph] = self.publisher.getPubBinaries(
                binaryname=self.factory.getUniqueString(),
                version="%s.1" % self.factory.getUniqueInteger(),
                distroseries=self.distroseries)
            completed_build = bpph.binarypackagerelease.build
            self.assertRaises(
                CannotBeRescored, completed_build.rescore, 20)

    def test_rescore_builds(self):
        # A user with build-admin privileges can rescore a pending build.
        publication = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = publication.createMissingBuilds()
        # Freshly-created builds start at the default score.
        self.assertEquals(BuildStatus.NEEDSBUILD, build.status)
        self.assertEquals(2505, build.buildqueue_record.lastscore)
        with person_logged_in(self.admin):
            build.rescore(5000)
            transaction.commit()
        self.assertEquals(5000, build.buildqueue_record.lastscore)

    def test_source_publication_override(self):
        # Overriding a source publication's component is reflected in the
        # build's current_source_publication.
        original_spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = original_spph.createMissingBuilds()
        self.assertEquals(original_spph, build.current_source_publication)
        universe_component = getUtility(IComponentSet)['universe']
        overridden_spph = original_spph.changeOverride(
            new_component=universe_component)
        # The build now points at the overridden publication rather than
        # the one it was created from.
        self.assertNotEquals(
            original_spph, build.current_source_publication)
        self.assertEquals(
            overridden_spph, build.current_source_publication)

    def test_estimated_duration(self):
        # A build's estimated duration is taken from the duration of a
        # previous build of the same source; with no history it falls
        # back to a package-size-based default.
        package_name = self.factory.getUniqueString()
        first_pub = self.publisher.getPubSource(
            sourcename=package_name,
            status=PackagePublishingStatus.PUBLISHED)
        [first_build] = first_pub.createMissingBuilds()
        # No previous build exists, so the estimate is the default.
        self.assertEquals(
            timedelta(0, 60),
            first_build.buildqueue_record.estimated_duration)
        # Finish the first build with a duration of 72 minutes.
        first_build.updateStatus(
            BuildStatus.BUILDING,
            date_started=self.now - timedelta(minutes=72))
        first_build.updateStatus(
            BuildStatus.FULLYBUILT, date_finished=self.now)
        first_build.buildqueue_record.destroySelf()
        second_pub = self.publisher.getPubSource(
            sourcename=package_name,
            status=PackagePublishingStatus.PUBLISHED)
        [second_build] = second_pub.createMissingBuilds()
        # The new build inherits the 72-minute estimate.
        self.assertEquals(
            timedelta(0, 72 * 60),
            second_build.buildqueue_record.estimated_duration)

    def test_store_uploadlog_refuses_to_overwrite(self):
        # A second call to storeUploadLog() on the same build fails
        # rather than silently replacing the existing log.
        publication = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = publication.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        build.storeUploadLog('foo')
        self.assertRaises(AssertionError, build.storeUploadLog, 'bar')
class TestSourcePublicationListingExtra(BrowserTestCase):
    """Tests for the +listing-archive-extra source publication view.

    The two recipe-link tests previously duplicated both the fixture and
    the matcher construction; that shared code now lives in the private
    helpers below.
    """

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestSourcePublicationListingExtra, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create everything we need to create builds, such as a
        # DistroArchSeries and a builder.
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)

    def _makeRecipeBuildSPPH(self):
        """Publish a source linked to a new SourcePackageRecipeBuild.

        :return: a (spph, sprb) tuple.
        """
        sprb = self.factory.makeSourcePackageRecipeBuild(
            archive=self.archive)
        spph = self.publisher.getPubSource(
            archive=self.archive, status=PackagePublishingStatus.PUBLISHED)
        spph.sourcepackagerelease.source_package_recipe_build = sprb
        return spph, sprb

    def _makeRecipeLinkMatcher(self, sprb, requester, recipe=None):
        """Build a matcher for the recipe build details shown in the view.

        :param sprb: the SourcePackageRecipeBuild linked to the SPR.
        :param requester: the person who requested the recipe build.
        :param recipe: the recipe, or None when it has been deleted, in
            which case no recipe name link is expected.
        """
        tags = [
            soupmatchers.Tag(
                'link to build', 'a', attrs={'href': canonical_url(sprb)},
                text='Built'),
            ]
        if recipe is not None:
            tags.append(soupmatchers.Tag(
                'recipe name', 'a', attrs={'href': canonical_url(recipe)},
                text=recipe.name))
        tags.append(soupmatchers.Tag(
            'requester', 'a',
            attrs={'href': canonical_url(requester)},
            text=requester.displayname))
        return soupmatchers.HTMLContains(*tags)

    def test_view_with_source_package_recipe(self):
        # When a SourcePackageRelease is linked to a
        # SourcePackageRecipeBuild, the view shows which recipe was
        # responsible for creating the SPR.
        spph, sprb = self._makeRecipeBuildSPPH()
        recipe_link_matches = self._makeRecipeLinkMatcher(
            sprb, sprb.requester, recipe=sprb.recipe)
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertThat(browser.contents, recipe_link_matches)

    def test_view_without_source_package_recipe(self):
        # And if a SourcePackageRelease is not linked, there is no sign of
        # it in the view.
        spph = self.publisher.getPubSource(
            archive=self.archive, status=PackagePublishingStatus.PUBLISHED)
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertNotIn('Built by recipe', browser.contents)

    def test_view_with_deleted_source_package_recipe(self):
        # If a SourcePackageRelease is linked to a deleted recipe, the text
        # 'deleted recipe' is displayed, rather than a link.
        spph, sprb = self._makeRecipeBuildSPPH()
        recipe = sprb.recipe
        requester = sprb.requester
        with person_logged_in(recipe.owner):
            recipe.destroySelf()
        # No recipe link is expected once the recipe is gone.
        recipe_link_matches = self._makeRecipeLinkMatcher(sprb, requester)
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertThat(browser.contents, recipe_link_matches)
        self.assertIn('deleted recipe', browser.contents)
# Beispiel #22 (scraped example separator; commented out so the file parses)
# 0
class TestBuildDepWait(TestCaseWithFactory):
    """Tests for dependency resolution on builds in MANUALDEPWAIT.

    These exercise build.updateDependencies(), which removes entries
    from a build's recorded dependency list once a satisfying binary is
    published where the build can reach it.
    """

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildDepWait, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create everything we need to create builds, such as a
        # DistroArchSeries and a builder.
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        # PRIMARY archive: the component checks below depend on it.
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)

    # NOTE(review): "dependancies" is a typo for "dependencies"; renaming
    # the method would change the test id, so the name is left as-is.
    def test_update_dependancies(self):
        # Calling .updateDependencies() on a build will remove those which
        # are reachable.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries, archive=self.archive)
        [build] = spph.createMissingBuilds()
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        with person_logged_in(self.admin):
            # Record spn as an unsatisfied dependency, then publish a
            # binary with that exact name so it becomes satisfiable.
            build.updateStatus(
                BuildStatus.MANUALDEPWAIT,
                slave_status={'dependencies': unicode(spn)})
            [bpph] = self.publisher.getPubBinaries(
                binaryname=spn, distroseries=self.distroseries,
                version=version, builder=self.builder, archive=self.archive,
                status=PackagePublishingStatus.PUBLISHED)
            # Commit to make sure stuff hits the database.
            transaction.commit()
        build.updateDependencies()
        # The dependency was reachable, so the list is now empty.
        self.assertEquals(u'', build.dependencies)

    def test_update_dependancies_respects_component(self):
        # Since main can only utilise packages that are published in main,
        # dependencies are not satisfied if they are not in main.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries, archive=self.archive)
        [build] = spph.createMissingBuilds()
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        with person_logged_in(self.admin):
            build.updateStatus(
                BuildStatus.MANUALDEPWAIT,
                slave_status={'dependencies': unicode(spn)})
            # Publish the dependency in universe, not main.
            [bpph] = self.publisher.getPubBinaries(
                binaryname=spn, distroseries=self.distroseries,
                version=version, builder=self.builder, archive=self.archive,
                status=PackagePublishingStatus.PUBLISHED,
                component='universe')
            # Commit to make sure stuff hits the database.
            transaction.commit()
        build.updateDependencies()
        # Since the dependency is in universe, we still can't see it.
        self.assertEquals(unicode(spn), build.dependencies)
        with person_logged_in(self.admin):
            bpph.component = getUtility(IComponentSet)['main']
            transaction.commit()
        # Now that we have moved it main, we can see it.
        build.updateDependencies()
        self.assertEquals(u'', build.dependencies)
# Beispiel #23 (scraped example separator; commented out so the file parses)
# 0
class TestBuildNotify(TestCaseWithFactory):
    """Tests for the notification mails sent by build.notify().

    NOTE(review): literal e-mail addresses throughout this class appear
    scrubbed/anonymized ('*****@*****.**') by whatever scraped this file;
    the originals cannot be recovered from here.
    """

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildNotify, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create all of the items we need to create builds
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries,
            processor=self.processor,
            supports_virtualized=True)
        self.creator = self.factory.makePerson(email='*****@*****.**')
        self.gpgkey = self.factory.makeGPGKey(owner=self.creator)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.ppa = self.factory.makeArchive()
        buildd_admins = getUtility(IPersonSet).getByName(
            'launchpad-buildd-admins')
        # Collect the admins' addresses so tests can assert on recipients.
        self.buildd_admins_email = []
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)
            for member in buildd_admins.activemembers:
                self.buildd_admins_email.append(member.preferredemail.email)
        self.builds = []

    def create_builds(self, archive):
        """Create one build per BuildStatus value in `archive`.

        Each build is moved into its status; the version string embeds
        status.value so every source package is unique, and self.builds
        is indexable by BuildStatus value.
        """
        for status in BuildStatus.items:
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" %
                (self.factory.getUniqueInteger(), status.value),
                distroseries=self.distroseries,
                architecturehintlist='any',
                creator=self.creator,
                archive=archive)
            spph.sourcepackagerelease.dscsigningkey = self.gpgkey
            [build] = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                build.updateStatus(BuildStatus.BUILDING, builder=self.builder)
                # Give each build a distinct, status-dependent duration.
                build.updateStatus(
                    status,
                    date_finished=(build.date_started +
                                   timedelta(minutes=5 * (status.value + 1))))
                # Only a BUILDING build keeps its queue entry (pinned to
                # the builder); every other state drops it.
                if status != BuildStatus.BUILDING:
                    build.buildqueue_record.destroySelf()
                else:
                    build.buildqueue_record.builder = self.builder
            self.builds.append(build)

    def _assert_mail_is_correct(self, build, notification, ppa=False):
        """Check the headers and body of one build notification mail.

        :param build: the build the mail describes.
        :param notification: the message object popped from the mail queue.
        :param ppa: if True, expect the PPA-specific header and body text.
        """
        # Assert that the mail sent (which is in notification), matches
        # the data from the build
        self.assertEquals('*****@*****.**',
                          notification['X-Creator-Recipient'])
        self.assertEquals(self.das.architecturetag,
                          notification['X-Launchpad-Build-Arch'])
        self.assertEquals('main', notification['X-Launchpad-Build-Component'])
        self.assertEquals(build.status.name,
                          notification['X-Launchpad-Build-State'])
        if ppa is True:
            self.assertEquals(get_ppa_reference(self.ppa),
                              notification['X-Launchpad-PPA'])
        body = notification.get_payload(decode=True)
        build_log = 'None'
        # Archive description and source link differ between PPA and
        # primary archive notifications.
        if ppa is True:
            archive = '%s PPA' % get_ppa_reference(build.archive)
            source = 'not available'
        else:
            archive = '%s primary archive' % (
                self.distroseries.distribution.name)
            source = canonical_url(build.distributionsourcepackagerelease)
        builder = canonical_url(build.builder)
        # Duration/log/builder lines depend on the build's state.
        if build.status == BuildStatus.BUILDING:
            duration = 'not finished'
            build_log = 'see builder page'
        elif (build.status == BuildStatus.SUPERSEDED
              or build.status == BuildStatus.NEEDSBUILD):
            duration = 'not available'
            build_log = 'not available'
            builder = 'not available'
        elif build.status == BuildStatus.UPLOADING:
            duration = 'uploading'
            build_log = 'see builder page'
            builder = 'not available'
        else:
            duration = DurationFormatterAPI(
                build.duration).approximateduration()
        expected_body = dedent("""
         * Source Package: %s
         * Version: %s
         * Architecture: %s
         * Archive: %s
         * Component: main
         * State: %s
         * Duration: %s
         * Build Log: %s
         * Builder: %s
         * Source: %s



        If you want further information about this situation, feel free to
        contact a member of the Launchpad Buildd Administrators team.

        --
        %s
        %s
        """ % (build.source_package_release.sourcepackagename.name,
               build.source_package_release.version, self.das.architecturetag,
               archive, build.status.title, duration, build_log, builder,
               source, build.title, canonical_url(build)))
        self.assertEquals(expected_body, body)

    def test_notify_buildd_admins(self):
        # A build will cause an e-mail to be sent out to the buildd-admins,
        # for primary archive builds.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        expected_emails = self.buildd_admins_email + ['*****@*****.**']
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        self.assertEquals(expected_emails, actual_emails)

    def test_ppa_does_not_notify_buildd_admins(self):
        # A build for a PPA does not notify the buildd admins.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notifications = pop_notifications()
        # An e-mail is sent to the archive owner, as well as the creator
        self.assertEquals(2, len(notifications))

    def test_notify_failed_to_build(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        # NOTE(review): index [1] is presumably the creator's copy, with
        # [0] going to the buildd admins -- this relies on the sample
        # data producing a single admin notification; confirm.
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_failed_to_build_ppa(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(archive=self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_needs_building(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_needs_building_ppa(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_successfully_built(self):
        # Successful builds don't notify anyone.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        build.notify()
        self.assertEqual([], pop_notifications())

    def test_notify_dependency_wait(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_dependency_wait_ppa(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_chroot_problem(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_chroot_problem_ppa(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_build_for_superseded_source(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_build_for_superseded_source_ppa(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_currently_building(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_currently_building_ppa(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_uploading_build(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_uploading_build_ppa(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_copied_into_ppa_does_not_spam(self):
        # When a package is copied into a PPA, we don't send mail to the
        # original creator of the source package.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        spph = build.current_source_publication
        ppa_spph = spph.copyTo(self.distroseries,
                               PackagePublishingPocket.RELEASE, self.ppa)
        [ppa_build] = ppa_spph.createMissingBuilds()
        ppa_build.notify()
        notifications = pop_notifications()
        self.assertEquals(1, len(notifications))

    def test_notify_owner_supresses_mail(self):
        # When the 'notify_owner' config option is False, we don't send mail
        # to the owner of the SPR.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        notify_owner = dedent("""
            [builddmaster]
            send_build_notification: True
            notify_owner: False
            """)
        config.push('notify_owner', notify_owner)
        build.notify()
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        # Only the buildd admins are mailed; the creator is skipped.
        self.assertEquals(self.buildd_admins_email, actual_emails)
        # And undo what we just did.
        config.pop('notify_owner')

    def test_build_notification_supresses_mail(self):
        # When the 'build_notification' config option is False, we don't
        # send any mail at all.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        send_build_notification = dedent("""
            [builddmaster]
            send_build_notification: False
            """)
        config.push('send_build_notification', send_build_notification)
        build.notify()
        notifications = pop_notifications()
        self.assertEquals(0, len(notifications))
        # And undo what we just did.
        config.pop('send_build_notification')

    def test_sponsored_upload_notification(self):
        # If the signing key is different to the creator, they are both
        # notified.
        sponsor = self.factory.makePerson('*****@*****.**')
        key = self.factory.makeGPGKey(owner=sponsor)
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        spr = build.current_source_publication.sourcepackagerelease
        # Push past the security proxy
        removeSecurityProxy(spr).dscsigningkey = key
        build.notify()
        notifications = pop_notifications()
        expected_emails = self.buildd_admins_email + [
            '*****@*****.**', '*****@*****.**'
        ]
        actual_emails = [n['To'] for n in notifications]
        self.assertEquals(expected_emails, actual_emails)
# Beispiel #24 (scraped example separator; commented out so the file parses)
# 0
class TestDetermineArchitecturesToBuild(TestCaseWithFactory):
    """Test that determine_architectures_to_build correctly interprets hints.
    """

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Add a non-virtualized armel architecture to breezy-autotest.

        Together with the series' existing architectures this gives the
        tests armel, hppa and i386 to resolve hints against.
        """
        super(TestDetermineArchitecturesToBuild, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()
        armel = self.factory.makeProcessor('armel', 'armel', 'armel')
        # newArch's third argument is False: armel is not the official
        # architecture of the series.
        self.publisher.breezy_autotest.newArch(
            'armel', armel, False, self.publisher.person)
        self.publisher.addFakeChroots()

    def assertArchitecturesToBuild(self, expected_arch_tags, pub,
                                   allowed_arch_tags=None):
        """Assert `pub` resolves to exactly `expected_arch_tags`.

        :param allowed_arch_tags: if given, only these architectures of
            breezy-autotest are offered to determine_architectures_to_build;
            otherwise all of them are.
        """
        if allowed_arch_tags is None:
            allowed_archs = self.publisher.breezy_autotest.architectures
        else:
            allowed_archs = [
                arch for arch in self.publisher.breezy_autotest.architectures
                if arch.architecturetag in allowed_arch_tags]
        architectures = determine_architectures_to_build(
            pub.sourcepackagerelease.architecturehintlist, pub.archive,
            self.publisher.breezy_autotest, allowed_archs)
        # Order does not matter, only the set of architecture tags.
        self.assertContentEqual(
            expected_arch_tags, [a.architecturetag for a in architectures])

    def assertArchsForHint(self, hint_string, expected_arch_tags,
                           allowed_arch_tags=None, sourcename=None):
        """Assert that the given hint resolves to the expected archtags."""
        pub = self.publisher.getPubSource(
            sourcename=sourcename, architecturehintlist=hint_string)
        self.assertArchitecturesToBuild(
            expected_arch_tags, pub, allowed_arch_tags=allowed_arch_tags)

    def test_single_architecture(self):
        # A hint string with a single arch resolves to just that arch.
        self.assertArchsForHint('hppa', ['hppa'])

    def test_three_architectures(self):
        # A hint string with multiple archs resolves to just those
        # archs.  amd64 is not an architecture of breezy-autotest, so no
        # build is created for it.
        self.assertArchsForHint('amd64 i386 hppa', ['hppa', 'i386'])

    def test_independent(self):
        # 'all' is special, meaning just a single build. The
        # nominatedarchindep architecture is used -- in this case i386.
        self.assertArchsForHint('all', ['i386'])

    def test_one_and_independent(self):
        # 'all' is redundant if we have another build anyway.
        self.assertArchsForHint('hppa all', ['hppa'])

    def test_fictional_and_independent(self):
        # But 'all' is useful if present with an arch that wouldn't
        # generate a build.
        self.assertArchsForHint('foo all', ['i386'])

    def test_wildcard(self):
        # 'any' is a wildcard that matches all available archs.
        self.assertArchsForHint('any', ['armel', 'hppa', 'i386'])

    def test_kernel_specific_architecture(self):
        # Since we only support Linux-based architectures, 'linux-foo'
        # is treated the same as 'foo'.
        self.assertArchsForHint('linux-hppa', ['hppa'])

    def test_unknown_kernel_specific_architecture(self):
        # Non-Linux architectures aren't supported.
        self.assertArchsForHint('kfreebsd-hppa', [])

    def test_kernel_wildcard_architecture(self):
        # Wildcards work for kernels: 'any-foo' is treated like 'foo'.
        self.assertArchsForHint('any-hppa', ['hppa'])

    def test_kernel_wildcard_architecture_arm(self):
        # The second part of a wildcard matches the canonical CPU name, not
        # on the Debian architecture, so 'any-arm' matches 'armel'.
        self.assertArchsForHint('any-arm', ['armel'])

    def test_kernel_specific_architecture_wildcard(self):
        # Wildcards work for archs too: 'linux-any' is treated like 'any'.
        self.assertArchsForHint('linux-any', ['armel', 'hppa', 'i386'])

    def test_unknown_kernel_specific_architecture_wildcard(self):
        # But unknown kernels continue to result in nothing.
        self.assertArchsForHint('kfreebsd-any', [])

    def test_wildcard_and_independent(self):
        # 'all' continues to be ignored alongside a valid wildcard.
        self.assertArchsForHint('all linux-any', ['armel', 'hppa', 'i386'])

    def test_kernel_independent_is_invalid(self):
        # 'linux-all' isn't supported.
        self.assertArchsForHint('linux-all', [])

    def test_double_wildcard_is_same_as_single(self):
        # 'any-any' is redundant with 'any', but dpkg-architecture supports
        # it anyway.
        self.assertArchsForHint('any-any', ['armel', 'hppa', 'i386'])

    def test_disabled_architectures_omitted(self):
        # Disabled architectures are not buildable, so are excluded.
        self.publisher.breezy_autotest['hppa'].enabled = False
        self.assertArchsForHint('any', ['armel', 'i386'])

    def test_virtualized_archives_have_only_virtualized_archs(self):
        # For archives which must build on virtual builders, only
        # virtual archs are returned.
        self.publisher.breezy_autotest.main_archive.require_virtualized = True
        self.assertArchsForHint('any', ['i386'])

    def test_no_all_builds_when_nominatedarchindep_not_permitted(self):
        # Some archives (eg. armel rebuilds) don't want arch-indep
        # builds. If the nominatedarchindep architecture (normally
        # i386) is omitted, no builds will be created for arch-indep
        # sources.
        self.assertArchsForHint('all', [], allowed_arch_tags=['hppa'])
class TestBuildNotify(TestCaseWithFactory):
    """Tests for the notification e-mails sent by build.notify()."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create the series, archives, builder and publisher the tests use."""
        super(TestBuildNotify, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create all of the items we need to create builds
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        # The creator's address appears in X-Creator-Recipient headers.
        self.creator = self.factory.makePerson(email='*****@*****.**')
        self.gpgkey = self.factory.makeGPGKey(owner=self.creator)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.ppa = self.factory.makeArchive()
        buildd_admins = getUtility(IPersonSet).getByName(
            'launchpad-buildd-admins')
        self.buildd_admins_email = []
        # These steps are done while logged in as the admin user.
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)
            for member in buildd_admins.activemembers:
                self.buildd_admins_email.append(member.preferredemail.email)
        self.builds = []

    def create_builds(self, archive):
        """Create one build in `archive` for every BuildStatus.

        Builds are appended to self.builds in BuildStatus.items order, so
        self.builds[status.value] is the build with that status.
        """
        for status in BuildStatus.items:
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (
                    self.factory.getUniqueInteger(), status.value),
                distroseries=self.distroseries, architecturehintlist='any',
                creator=self.creator, archive=archive)
            spph.sourcepackagerelease.dscsigningkey = self.gpgkey
            [build] = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                build.updateStatus(BuildStatus.BUILDING, builder=self.builder)
                build.updateStatus(status,
                    date_finished=(
                        build.date_started + timedelta(
                            minutes=5 * (status.value + 1))))
                if status != BuildStatus.BUILDING:
                    build.buildqueue_record.destroySelf()
                else:
                    build.buildqueue_record.builder = self.builder
            self.builds.append(build)

    def _assert_mail_is_correct(self, build, notification, ppa=False):
        """Assert `notification`'s headers and body match `build`'s data."""
        # Assert that the mail sent (which is in notification), matches
        # the data from the build
        self.assertEqual('*****@*****.**',
            notification['X-Creator-Recipient'])
        self.assertEqual(
            self.das.architecturetag, notification['X-Launchpad-Build-Arch'])
        self.assertEqual(
            'main', notification['X-Launchpad-Build-Component'])
        self.assertEqual(
            build.status.name, notification['X-Launchpad-Build-State'])
        if ppa is True:
            self.assertEqual(
                get_ppa_reference(self.ppa), notification['X-Launchpad-PPA'])
        body = notification.get_payload(decode=True)
        build_log = 'None'
        if ppa is True:
            archive = '%s PPA' % get_ppa_reference(build.archive)
            source = 'not available'
        else:
            archive = '%s primary archive' % (
                self.distroseries.distribution.name)
            source = canonical_url(build.distributionsourcepackagerelease)
        builder = canonical_url(build.builder)
        # Reconstruct the duration/log/builder fields the mail reports for
        # each build state.
        if build.status == BuildStatus.BUILDING:
            duration = 'not finished'
            build_log = 'see builder page'
        elif (
            build.status == BuildStatus.SUPERSEDED or
            build.status == BuildStatus.NEEDSBUILD):
            duration = 'not available'
            build_log = 'not available'
            builder = 'not available'
        elif build.status == BuildStatus.UPLOADING:
            duration = 'uploading'
            build_log = 'see builder page'
            builder = 'not available'
        else:
            duration = DurationFormatterAPI(
                build.duration).approximateduration()
        expected_body = dedent("""
         * Source Package: %s
         * Version: %s
         * Architecture: %s
         * Archive: %s
         * Component: main
         * State: %s
         * Duration: %s
         * Build Log: %s
         * Builder: %s
         * Source: %s



        If you want further information about this situation, feel free to
        contact a member of the Launchpad Buildd Administrators team.

        --
        %s
        %s
        """ % (
            build.source_package_release.sourcepackagename.name,
            build.source_package_release.version, self.das.architecturetag,
            archive, build.status.title, duration, build_log, builder,
            source, build.title, canonical_url(build)))
        self.assertEqual(expected_body, body)

    def test_notify_buildd_admins(self):
        # A build will cause an e-mail to be sent out to the buildd-admins,
        # for primary archive builds.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        expected_emails = self.buildd_admins_email + ['*****@*****.**']
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        self.assertEqual(expected_emails, actual_emails)

    def test_ppa_does_not_notify_buildd_admins(self):
        # A build for a PPA does not notify the buildd admins.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notifications = pop_notifications()
        # An e-mail is sent to the archive owner, as well as the creator
        self.assertEqual(2, len(notifications))

    def test_notify_failed_to_build(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_failed_to_build_ppa(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(archive=self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_needs_building(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_needs_building_ppa(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_successfully_built(self):
        # Successful builds don't notify anyone.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        build.notify()
        self.assertEqual([], pop_notifications())

    def test_notify_dependency_wait(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_dependency_wait_ppa(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_chroot_problem(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_chroot_problem_ppa(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_build_for_superseded_source(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_build_for_superseded_source_ppa(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_currently_building(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_currently_building_ppa(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_uploading_build(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_uploading_build_ppa(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_copied_into_ppa_does_not_spam(self):
        # When a package is copied into a PPA, we don't send mail to the
        # original creator of the source package.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        spph = build.current_source_publication
        ppa_spph = spph.copyTo(
            self.distroseries, PackagePublishingPocket.RELEASE, self.ppa)
        [ppa_build] = ppa_spph.createMissingBuilds()
        ppa_build.notify()
        notifications = pop_notifications()
        self.assertEqual(1, len(notifications))

    def test_notify_owner_supresses_mail(self):
        # When the 'notify_owner' config option is False, we don't send mail
        # to the owner of the SPR.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        notify_owner = dedent("""
            [builddmaster]
            send_build_notification: True
            notify_owner: False
            """)
        config.push('notify_owner', notify_owner)
        # Undo the config change even if an assertion below fails.
        self.addCleanup(config.pop, 'notify_owner')
        build.notify()
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        self.assertEqual(self.buildd_admins_email, actual_emails)

    def test_build_notification_supresses_mail(self):
        # When the 'build_notification' config option is False, we don't
        # send any mail at all.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        send_build_notification = dedent("""
            [builddmaster]
            send_build_notification: False
            """)
        config.push('send_build_notification', send_build_notification)
        # Undo the config change even if an assertion below fails.
        self.addCleanup(config.pop, 'send_build_notification')
        build.notify()
        notifications = pop_notifications()
        self.assertEqual(0, len(notifications))

    def test_sponsored_upload_notification(self):
        # If the signing key is different to the creator, they are both
        # notified.
        sponsor = self.factory.makePerson('*****@*****.**')
        key = self.factory.makeGPGKey(owner=sponsor)
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        spr = build.current_source_publication.sourcepackagerelease
        # Push past the security proxy
        removeSecurityProxy(spr).dscsigningkey = key
        build.notify()
        notifications = pop_notifications()
        expected_emails = self.buildd_admins_email + [
            '*****@*****.**', '*****@*****.**']
        actual_emails = [n['To'] for n in notifications]
        self.assertEqual(expected_emails, actual_emails)
# Beispiel #26 (scraped-example separator; commented out so the file parses)
# 0
 def getTestPublisher(self, distroseries):
     """Return a `SoyuzTestPublisher` instance.

     The publisher is given fake chroots and is set up with
     `distroseries` as its default series.
     """
     stp = SoyuzTestPublisher()
     stp.addFakeChroots(distroseries)
     stp.setUpDefaultDistroSeries(distroseries)
     return stp
class TestBuildNotify(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create the series, archives, builder and publisher the tests use.
        """
        super(TestBuildNotify, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create all of the items we need to create builds
        self.processor = self.factory.makeProcessor(supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor)
        # The creator's address appears in X-Creator-Recipient headers.
        self.creator = self.factory.makePerson(email='*****@*****.**')
        self.gpgkey = self.factory.makeGPGKey(owner=self.creator)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.ppa = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PPA)
        buildd_admins = getUtility(IPersonSet).getByName(
            'launchpad-buildd-admins')
        # These steps are done while logged in as the admin user.
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(
                processors=[self.processor])
            self.buildd_admins_members = list(buildd_admins.activemembers)
        self.builds = []

    def create_builds(self, archive):
        """Create one build in `archive` for every BuildStatus.

        Builds are appended to self.builds in BuildStatus.items order, so
        self.builds[status.value] is the build with that status.  Each
        build is signed by self.gpgkey and created by self.creator.
        """
        for status in BuildStatus.items:
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" %
                (self.factory.getUniqueInteger(), status.value),
                distroseries=self.distroseries,
                architecturehintlist='any',
                creator=self.creator,
                archive=archive)
            spph.sourcepackagerelease.signing_key_fingerprint = (
                self.gpgkey.fingerprint)
            spph.sourcepackagerelease.signing_key_owner = (self.gpgkey.owner)
            [build] = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                # Drive the build through BUILDING to its final status, with
                # a distinct duration per status.
                build.updateStatus(BuildStatus.BUILDING, builder=self.builder)
                build.updateStatus(
                    status,
                    date_finished=(build.date_started +
                                   timedelta(minutes=5 * (status.value + 1))))
                if status != BuildStatus.BUILDING:
                    build.buildqueue_record.destroySelf()
                else:
                    build.buildqueue_record.builder = self.builder
            self.builds.append(build)

    def _assert_mail_is_correct(self,
                                build,
                                notification,
                                recipient,
                                reason,
                                ppa=False):
        """Assert `notification` matches `build`, `recipient` and `reason`.

        Checks the X-Launchpad-* headers and reconstructs the expected
        message body (including the per-reason footer from REASONS) from
        the build's metadata.
        """
        # Assert that the mail sent (which is in notification), matches
        # the data from the build
        self.assertEqual(format_address_for_person(recipient),
                         notification['To'])
        if reason == "buildd-admin":
            rationale = "Buildd-Admin @launchpad-buildd-admins"
            expected_for = "launchpad-buildd-admins"
        else:
            rationale = reason.title()
            expected_for = recipient.name
        self.assertEqual(rationale,
                         notification['X-Launchpad-Message-Rationale'])
        self.assertEqual(expected_for, notification['X-Launchpad-Message-For'])
        self.assertEqual('package-build-status',
                         notification['X-Launchpad-Notification-Type'])
        self.assertEqual('*****@*****.**',
                         notification['X-Creator-Recipient'])
        self.assertEqual(self.das.architecturetag,
                         notification['X-Launchpad-Build-Arch'])
        self.assertEqual('main', notification['X-Launchpad-Build-Component'])
        self.assertEqual(build.status.name,
                         notification['X-Launchpad-Build-State'])
        self.assertEqual(build.archive.reference,
                         notification['X-Launchpad-Archive'])
        # The X-Launchpad-PPA header is only set for Ubuntu PPAs.
        if ppa and build.archive.distribution.name == 'ubuntu':
            self.assertEqual(get_ppa_reference(self.ppa),
                             notification['X-Launchpad-PPA'])
        body = notification.get_payload(decode=True)
        build_log = 'None'
        if ppa:
            source = 'not available'
        else:
            source = canonical_url(build.distributionsourcepackagerelease)
        # Reconstruct the duration/log/builder fields the mail reports for
        # each build state.
        if build.status == BuildStatus.BUILDING:
            duration = 'not finished'
            build_log = 'see builder page'
            builder = canonical_url(build.builder)
        elif (build.status == BuildStatus.SUPERSEDED
              or build.status == BuildStatus.NEEDSBUILD):
            duration = 'not available'
            build_log = 'not available'
            builder = 'not available'
        elif build.status == BuildStatus.UPLOADING:
            duration = 'uploading'
            build_log = 'see builder page'
            builder = 'not available'
        else:
            duration = DurationFormatterAPI(
                build.duration).approximateduration()
            builder = canonical_url(build.builder)
        expected_body = dedent(
            """
         * Source Package: %s
         * Version: %s
         * Architecture: %s
         * Archive: %s
         * Component: main
         * State: %s
         * Duration: %s
         * Build Log: %s
         * Builder: %s
         * Source: %s



        If you want further information about this situation, feel free to
        contact us by asking a question on Launchpad
        (https://answers.launchpad.net/launchpad/+addquestion).

        %s
        %s
        %s
        """ %
            (build.source_package_release.sourcepackagename.name,
             build.source_package_release.version, self.das.architecturetag,
             build.archive.reference, build.status.title, duration, build_log,
             builder, source, "-- ", build.title, canonical_url(build)))
        expected_body += "\n" + REASONS[reason] + "\n"
        self.assertEqual(expected_body, body)

    def _assert_mails_are_correct(self, build, reasons, ppa=False):
        """Check each sent notification against its (recipient, reason) pair.

        The expected pairs are ordered by the recipient's formatted
        address, matching the order in which the mails were sent.
        """
        sent = pop_notifications()
        ordered = sorted(
            reasons, key=lambda pair: format_address_for_person(pair[0]))
        for mail, (recipient, reason) in zip(sent, ordered):
            self._assert_mail_is_correct(
                build, mail, recipient, reason, ppa=ppa)

    def test_notify_failed_to_build(self):
        """A primary archive build failure notifies the buildd admins and
        the source package creator."""
        self.create_builds(self.archive)
        failed_build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        with dbuser(config.builddmaster.dbuser):
            failed_build.notify()
        recipients = [
            (admin, "buildd-admin") for admin in self.buildd_admins_members
        ] + [(self.creator, "creator")]
        self._assert_mails_are_correct(failed_build, recipients)

    def test_notify_failed_to_build_ppa(self):
        """A PPA build failure notifies the signer and the archive owner,
        but not the buildd admins."""
        self.create_builds(self.ppa)
        failed_build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        with dbuser(config.builddmaster.dbuser):
            failed_build.notify()
        recipients = [(self.creator, "signer"), (self.ppa.owner, "owner")]
        self._assert_mails_are_correct(failed_build, recipients, ppa=True)

    def test_notify_needs_building(self):
        """The creator and buildd admins are notified when a build still
        needs to be built."""
        self.create_builds(self.archive)
        pending_build = self.builds[BuildStatus.NEEDSBUILD.value]
        with dbuser(config.builddmaster.dbuser):
            pending_build.notify()
        recipients = [
            (admin, "buildd-admin") for admin in self.buildd_admins_members
        ] + [(self.creator, "creator")]
        self._assert_mails_are_correct(pending_build, recipients)

    def test_notify_needs_building_ppa(self):
        """The signer and archive owner are notified when a PPA build still
        needs to be built."""
        self.create_builds(self.ppa)
        pending_build = self.builds[BuildStatus.NEEDSBUILD.value]
        with dbuser(config.builddmaster.dbuser):
            pending_build.notify()
        recipients = [(self.creator, "signer"), (self.ppa.owner, "owner")]
        self._assert_mails_are_correct(pending_build, recipients, ppa=True)

    def test_notify_successfully_built(self):
        """Fully-built builds generate no notifications at all."""
        self.create_builds(self.archive)
        ok_build = self.builds[BuildStatus.FULLYBUILT.value]
        with dbuser(config.builddmaster.dbuser):
            ok_build.notify()
        self.assertEqual([], pop_notifications())

    def test_notify_dependency_wait(self):
        """The creator and buildd admins are notified when a build cannot
        find a dependency."""
        self.create_builds(self.archive)
        depwait_build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        with dbuser(config.builddmaster.dbuser):
            depwait_build.notify()
        recipients = [
            (admin, "buildd-admin") for admin in self.buildd_admins_members
        ] + [(self.creator, "creator")]
        self._assert_mails_are_correct(depwait_build, recipients)

    def test_notify_dependency_wait_ppa(self):
        """The signer and archive owner are notified when a PPA build
        cannot find a dependency."""
        self.create_builds(self.ppa)
        depwait_build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        with dbuser(config.builddmaster.dbuser):
            depwait_build.notify()
        recipients = [(self.creator, "signer"), (self.ppa.owner, "owner")]
        self._assert_mails_are_correct(depwait_build, recipients, ppa=True)

    def test_notify_chroot_problem(self):
        """The creator and buildd admins are notified when the builder has
        an internal (chroot) problem."""
        self.create_builds(self.archive)
        chrootwait_build = self.builds[BuildStatus.CHROOTWAIT.value]
        with dbuser(config.builddmaster.dbuser):
            chrootwait_build.notify()
        recipients = [
            (admin, "buildd-admin") for admin in self.buildd_admins_members
        ] + [(self.creator, "creator")]
        self._assert_mails_are_correct(chrootwait_build, recipients)

    def test_notify_chroot_problem_ppa(self):
        """The signer and archive owner are notified when the builder for a
        PPA build has an internal (chroot) problem."""
        self.create_builds(self.ppa)
        chrootwait_build = self.builds[BuildStatus.CHROOTWAIT.value]
        with dbuser(config.builddmaster.dbuser):
            chrootwait_build.notify()
        recipients = [(self.creator, "signer"), (self.ppa.owner, "owner")]
        self._assert_mails_are_correct(chrootwait_build, recipients, ppa=True)

    def test_notify_build_for_superseded_source(self):
        """The creator and buildd admins are notified when the source was
        superseded by a newer upload before the build was dispatched."""
        self.create_builds(self.archive)
        superseded_build = self.builds[BuildStatus.SUPERSEDED.value]
        with dbuser(config.builddmaster.dbuser):
            superseded_build.notify()
        recipients = [
            (admin, "buildd-admin") for admin in self.buildd_admins_members
        ] + [(self.creator, "creator")]
        self._assert_mails_are_correct(superseded_build, recipients)

    def test_notify_build_for_superseded_source_ppa(self):
        # If a newer source version arrived before this PPA build could be
        # dispatched, the signer and the archive owner are notified.
        self.create_builds(self.ppa)
        superseded_build = self.builds[BuildStatus.SUPERSEDED.value]
        with dbuser(config.builddmaster.dbuser):
            superseded_build.notify()
        recipients = [(self.creator, "signer")]
        recipients.append((self.ppa.owner, "owner"))
        self._assert_mails_are_correct(superseded_build, recipients, ppa=True)

    def test_notify_currently_building(self):
        # While a build is in progress, notification reaches the creator
        # and the buildd admins.
        self.create_builds(self.archive)
        in_progress_build = self.builds[BuildStatus.BUILDING.value]
        with dbuser(config.builddmaster.dbuser):
            in_progress_build.notify()
        # Buildd admins plus the package creator receive mail.
        recipients = [
            (admin, "buildd-admin") for admin in self.buildd_admins_members]
        recipients += [(self.creator, "creator")]
        self._assert_mails_are_correct(in_progress_build, recipients)

    def test_notify_currently_building_ppa(self):
        # While a PPA build is in progress, notification reaches the
        # signer and the archive owner.
        self.create_builds(self.ppa)
        in_progress_build = self.builds[BuildStatus.BUILDING.value]
        with dbuser(config.builddmaster.dbuser):
            in_progress_build.notify()
        recipients = [(self.creator, "signer")]
        recipients.append((self.ppa.owner, "owner"))
        self._assert_mails_are_correct(in_progress_build, recipients, ppa=True)

    def test_notify_uploading_build(self):
        # Once a build finishes and the builder is uploading binaries,
        # the creator and the buildd admins are notified.
        self.create_builds(self.archive)
        uploading_build = self.builds[BuildStatus.UPLOADING.value]
        with dbuser(config.builddmaster.dbuser):
            uploading_build.notify()
        # Buildd admins plus the package creator receive mail.
        recipients = [
            (admin, "buildd-admin") for admin in self.buildd_admins_members]
        recipients += [(self.creator, "creator")]
        self._assert_mails_are_correct(uploading_build, recipients)

    def test_notify_uploading_build_ppa(self):
        # Once a PPA build finishes and the builder is uploading binaries,
        # the signer and the archive owner are notified.
        self.create_builds(self.ppa)
        uploading_build = self.builds[BuildStatus.UPLOADING.value]
        with dbuser(config.builddmaster.dbuser):
            uploading_build.notify()
        recipients = [(self.creator, "signer")]
        recipients.append((self.ppa.owner, "owner"))
        self._assert_mails_are_correct(uploading_build, recipients, ppa=True)

    def test_copied_into_ppa_does_not_spam(self):
        # Copying a package into a PPA must not mail the original creator
        # of the source package.
        self.create_builds(self.archive)
        source_build = self.builds[BuildStatus.FULLYBUILT.value]
        copied_spph = source_build.current_source_publication.copyTo(
            self.distroseries, PackagePublishingPocket.RELEASE, self.ppa)
        [ppa_build] = copied_spph.createMissingBuilds()
        with dbuser(config.builddmaster.dbuser):
            ppa_build.notify()
        # Only the PPA owner hears about the copied build.
        self._assert_mails_are_correct(
            ppa_build, [(self.ppa.owner, "owner")], ppa=True)

    def test_notify_owner_suppresses_mail(self):
        # When the 'notify_owner' config option is False, we don't send mail
        # to the owner of the SPR; buildd admins are still notified.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        notify_owner = dedent("""
            [builddmaster]
            send_build_notification: True
            notify_owner: False
            """)
        config.push('notify_owner', notify_owner)
        # Pop the override even if the assertion below fails; otherwise the
        # pushed config would leak into subsequent tests.
        self.addCleanup(config.pop, 'notify_owner')
        with dbuser(config.builddmaster.dbuser):
            build.notify()
        self._assert_mails_are_correct(
            build, [(person, "buildd-admin")
                    for person in self.buildd_admins_members])

    def test_build_notification_suppresses_mail(self):
        # When the 'send_build_notification' config option is False, we
        # don't send any mail at all.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        send_build_notification = dedent("""
            [builddmaster]
            send_build_notification: False
            """)
        config.push('send_build_notification', send_build_notification)
        # Pop the override even if the assertion below fails; otherwise the
        # pushed config would leak into subsequent tests.
        self.addCleanup(config.pop, 'send_build_notification')
        with dbuser(config.builddmaster.dbuser):
            build.notify()
        notifications = pop_notifications()
        self.assertEqual(0, len(notifications))

    def test_sponsored_upload_notification(self):
        # When the signing key's owner differs from the creator, both
        # people are notified, alongside the buildd admins.
        sponsor = self.factory.makePerson('*****@*****.**')
        signing_key = self.factory.makeGPGKey(owner=sponsor)
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        spr = build.current_source_publication.sourcepackagerelease
        # The signing fields aren't writable through the security proxy.
        naked_spr = removeSecurityProxy(spr)
        naked_spr.signing_key_owner = signing_key.owner
        naked_spr.signing_key_fingerprint = signing_key.fingerprint
        with dbuser(config.builddmaster.dbuser):
            build.notify()
        recipients = [(admin, "buildd-admin")
                      for admin in self.buildd_admins_members]
        recipients.append((self.creator, "creator"))
        recipients.append((sponsor, "signer"))
        self._assert_mails_are_correct(build, recipients)