def test_copy_archive_without_leak(self):
    """Copying a publication to a COPY archive must not make its build
    "leak" into SourcePackage.getBuildRecords().
    """
    admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
    # Set up a distroseries and related bits, so we can create builds.
    source_name = self.factory.getUniqueString()
    spn = self.factory.makeSourcePackageName(name=source_name)
    processor = self.factory.makeProcessor()
    distroseries = self.factory.makeDistroSeries()
    das = self.factory.makeDistroArchSeries(
        distroseries=distroseries, processor=processor,
        supports_virtualized=True)
    with person_logged_in(admin):
        publisher = SoyuzTestPublisher()
        publisher.prepareBreezyAutotest()
        publisher.addFakeChroots(distroseries=distroseries)
        distroseries.nominatedarchindep = das
        self.factory.makeBuilder(processor=processor)
    spph = self.factory.makeSourcePackagePublishingHistory(
        sourcepackagename=spn, distroseries=distroseries)
    spph.createMissingBuilds()
    # Create a copy archive.
    copy = self.factory.makeArchive(
        purpose=ArchivePurpose.COPY,
        distribution=distroseries.distribution)
    # And copy the publication into it.
    copy_spph = spph.copyTo(
        distroseries, PackagePublishingPocket.RELEASE, copy)
    [copy_build] = copy_spph.createMissingBuilds()
    builds = copy.getBuildRecords()
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual([copy_build], list(builds))
    source = SourcePackage(spn, spph.distroseries)
    # SourcePackage.getBuildRecords() must report only the original
    # build, not the copy archive's build as well.
    build_count = source.getBuildRecords().count()
    self.assertEqual(1, build_count)
 def test_copy_archive_without_leak(self):
     # If source publications are copied to a .COPY archive, they don't
     # "leak" into SourcePackage.getBuildRecords().
     admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
     # Set up a distroseries and related bits, so we can create builds.
     source_name = self.factory.getUniqueString()
     spn = self.factory.makeSourcePackageName(name=source_name)
     processor = self.factory.makeProcessor()
     distroseries = self.factory.makeDistroSeries()
     das = self.factory.makeDistroArchSeries(distroseries=distroseries,
                                             processor=processor,
                                             supports_virtualized=True)
     with person_logged_in(admin):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         publisher.addFakeChroots(distroseries=distroseries)
         distroseries.nominatedarchindep = das
         self.factory.makeBuilder(processor=processor)
     spph = self.factory.makeSourcePackagePublishingHistory(
         sourcepackagename=spn, distroseries=distroseries)
     spph.createMissingBuilds()
     # Create a copy archive.
     copy = self.factory.makeArchive(purpose=ArchivePurpose.COPY,
                                     distribution=distroseries.distribution)
     # And copy the publication into it.
     copy_spph = spph.copyTo(distroseries, PackagePublishingPocket.RELEASE,
                             copy)
     [copy_build] = copy_spph.createMissingBuilds()
     builds = copy.getBuildRecords()
     self.assertEquals([copy_build], list(builds))
     source = SourcePackage(spn, spph.distroseries)
     # SourcePackage.getBuildRecords() doesn't have two build records.
     builds = source.getBuildRecords().count()
     self.assertEquals(1, builds)
 def setUp(self):
     super(TestBuildPrivacy, self).setUp()
     # Add everything we need to create builds.
     self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
     processor = self.factory.makeProcessor(supports_virtualized=True)
     distroseries = self.factory.makeDistroSeries()
     das = self.factory.makeDistroArchSeries(
         distroseries=distroseries, processor=processor)
     with person_logged_in(self.admin):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         distroseries.nominatedarchindep = das
         publisher.addFakeChroots(distroseries=distroseries)
         self.factory.makeBuilder(processors=[processor])
     self.public_archive = self.factory.makeArchive()
     self.private_archive = self.factory.makeArchive(private=True)
     # Create one public and one private build.
     public_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.public_archive)
     [public_build] = public_spph.createMissingBuilds()
     private_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.private_archive)
     with person_logged_in(self.admin):
         [private_build] = private_spph.createMissingBuilds()
     self.expected_title = '%s build of %s %s in %s %s RELEASE' % (
         das.architecturetag, private_spph.source_package_name,
         private_spph.source_package_version,
         distroseries.distribution.name, distroseries.name)
class TestBuildJobBase(TestCaseWithFactory):
    """Setup the test publisher and some builders."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestBuildJobBase, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        # Two native x86 builders.
        self.i8 = self.factory.makeBuilder(name='i386-n-8', virtualized=False)
        self.i9 = self.factory.makeBuilder(name='i386-n-9', virtualized=False)

        # Two native hppa builders.
        hppa = getUtility(IProcessorSet).getByName('hppa')
        self.h6 = self.factory.makeBuilder(
            name='hppa-n-6', processor=hppa, virtualized=False)
        self.h7 = self.factory.makeBuilder(
            name='hppa-n-7', processor=hppa, virtualized=False)

        self.builders = {
            # x86 native
            (1, False): [self.i8, self.i9],
            # hppa native
            (3, True): [self.h6, self.h7],
        }

        # Ensure all builders are operational.
        for builder_group in self.builders.values():
            for builder in builder_group:
                builder.builderok = True
                builder.manual = False

        # Disable the sample data builders.
        for sample_name in ('bob', 'frog'):
            getUtility(IBuilderSet)[sample_name].builderok = False
 def setUp(self):
     super(TestBuildPrivacy, self).setUp()
     # Add everything we need to create builds.
     self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
     processor = self.factory.makeProcessor()
     distroseries = self.factory.makeDistroSeries()
     das = self.factory.makeDistroArchSeries(
         distroseries=distroseries, processor=processor,
         supports_virtualized=True)
     with person_logged_in(self.admin):
         publisher = SoyuzTestPublisher()
         publisher.prepareBreezyAutotest()
         distroseries.nominatedarchindep = das
         publisher.addFakeChroots(distroseries=distroseries)
         self.factory.makeBuilder(processor=processor)
     self.public_archive = self.factory.makeArchive()
     self.private_archive = self.factory.makeArchive(private=True)
     # Create one public and one private build.
     public_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.public_archive)
     [public_build] = public_spph.createMissingBuilds()
     private_spph = publisher.getPubSource(
         sourcename=self.factory.getUniqueString(),
         version="%s.1" % self.factory.getUniqueInteger(),
         distroseries=distroseries, archive=self.private_archive)
     with person_logged_in(self.admin):
         [private_build] = private_spph.createMissingBuilds()
     self.expected_title = '%s build of %s %s in %s %s RELEASE' % (
         das.architecturetag, private_spph.source_package_name,
         private_spph.source_package_version,
         distroseries.distribution.name, distroseries.name)
def create_child(factory):
    """Build a populated parent series plus an empty child series.

    Returns a (parent, child, test1_packageset_id) tuple.
    """
    processor = factory.makeProcessor()
    parent = factory.makeDistroSeries()
    parent_das = factory.makeDistroArchSeries(
        distroseries=parent, processor=processor)
    chroot_lfa = factory.makeLibraryFileAlias()
    # The LFA has to reach the librarian before it can be used, so commit.
    transaction.commit()
    parent_das.addOrUpdateChroot(chroot_lfa)
    with celebrity_logged_in("admin"):
        parent_das.supports_virtualized = True
        parent.nominatedarchindep = parent_das
        publisher = SoyuzTestPublisher()
        publisher.prepareBreezyAutotest()
        for name, version in {"udev": "0.1-1", "libc6": "2.8-1"}.items():
            publisher.getPubBinaries(
                distroseries=parent, binaryname=name, version=version,
                status=PackagePublishingStatus.PUBLISHED)
        packageset = getUtility(IPackagesetSet).new(
            u"test1", u"test 1 packageset", parent.owner,
            distroseries=parent)
        test1_packageset_id = str(packageset.id)
        packageset.addSources("udev")
    parent.updatePackageCount()
    child = factory.makeDistroSeries()
    getUtility(ISourcePackageFormatSelectionSet).add(
        child, SourcePackageFormat.FORMAT_1_0)
    # Everything must hit the database; switching db users aborts.
    transaction.commit()
    return parent, child, test1_packageset_id
def create_child(factory):
    """Return (parent, child, test1_packageset_id) ready for derivation.

    The parent series gets an arch series with a chroot, two published
    binaries and a packageset; the child series is left empty apart from
    its source package format selection.
    """
    parent = factory.makeDistroSeries()
    parent_das = factory.makeDistroArchSeries(
        distroseries=parent, processor=factory.makeProcessor())
    chroot = factory.makeLibraryFileAlias()
    # Since the LFA needs to be in the librarian, commit.
    transaction.commit()
    parent_das.addOrUpdateChroot(chroot)
    with celebrity_logged_in('admin'):
        parent_das.supports_virtualized = True
        parent.nominatedarchindep = parent_das
        soyuz = SoyuzTestPublisher()
        soyuz.prepareBreezyAutotest()
        for binary_name, binary_version in (
                ('udev', '0.1-1'), ('libc6', '2.8-1')):
            soyuz.getPubBinaries(
                distroseries=parent, binaryname=binary_name,
                version=binary_version,
                status=PackagePublishingStatus.PUBLISHED)
        test1 = getUtility(IPackagesetSet).new(
            u'test1', u'test 1 packageset', parent.owner,
            distroseries=parent)
        test1_packageset_id = str(test1.id)
        test1.addSources('udev')
    parent.updatePackageCount()
    child = factory.makeDistroSeries()
    getUtility(ISourcePackageFormatSelectionSet).add(
        child, SourcePackageFormat.FORMAT_1_0)
    # Make sure everything hits the database, switching db users aborts.
    transaction.commit()
    return parent, child, test1_packageset_id
# Example 8
# 0
class TestBuildStartEstimation(TestCaseWithFactory):

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Enable all sample builders and prepare an i386 distroseries."""
        super(TestBuildStartEstimation, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            for builder in getUtility(IBuilderSet):
                builder.builderok = True
        self.distroseries = self.factory.makeDistroSeries()
        self.bob = getUtility(IBuilderSet).getByName(BOB_THE_BUILDER_NAME)
        das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.bob.processor,
            architecturetag='i386')
        with person_logged_in(self.admin):
            self.distroseries.nominatedarchindep = das
        self.publisher.addFakeChroots(distroseries=self.distroseries)

    def job_start_estimate(self, build):
        """Shortcut to the estimated start time of a build's queue entry."""
        return build.buildqueue_record.getEstimatedJobStartTime()

    def test_estimation(self):
        # A freshly created build is estimated to start in the future.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        builds = spph.createMissingBuilds()
        build = builds[0]
        now = datetime.now(pytz.UTC)
        self.assertTrue(self.job_start_estimate(build) > now)

    def test_disabled_archives(self):
        # build1 outranks build2, so build2's estimate includes build1's
        # estimated duration -- until build1's archive is disabled.
        first_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build1] = first_pub.createMissingBuilds()
        build1.buildqueue_record.lastscore = 1000
        # No user-serviceable parts inside
        naked_record = removeSecurityProxy(build1.buildqueue_record)
        naked_record.estimated_duration = timedelta(minutes=10)
        second_pub = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build2] = second_pub.createMissingBuilds()
        build2.buildqueue_record.lastscore = 100
        now = datetime.now(pytz.UTC)
        # build1 has the higher score, so it should dispatch right away.
        self.assertEqual(5, (self.job_start_estimate(build1) - now).seconds)
        # build2 queues up behind build1's ten-minute duration.
        self.assertEqual(
            600, (self.job_start_estimate(build2) - now).seconds)
        # Disabling build1's archive promotes build2 to the front.
        with person_logged_in(self.admin):
            build1.archive.disable()
        self.assertEqual(5, (self.job_start_estimate(build2) - now).seconds)
class TestDistributionHasBuildRecords(TestCaseWithFactory):
    """Populate a distroseries with builds"""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestDistributionHasBuildRecords, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create the machinery we need to create builds, such as
        # DistroArchSeries and builders.
        self.processor_one = self.factory.makeProcessor()
        self.processor_two = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries,
            processor=self.processor_one,
            supports_virtualized=True)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries,
            processor=self.processor_two,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.arch_ids = [arch.id for arch in self.distroseries.architectures]
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder_one = self.factory.makeBuilder(
                processor=self.processor_one)
            self.builder_two = self.factory.makeBuilder(
                processor=self.processor_two)
        self.builds = []
        self.createBuilds()

    def createBuilds(self):
        """Create five source publications and their builds.

        Each source builds on both architectures; the builds of the last
        iteration fail, all others complete successfully.
        """
        for i in range(5):
            # Create some test builds.
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries,
                architecturehintlist='any')
            builds = spph.createMissingBuilds()
            for b in builds:
                b.updateStatus(BuildStatus.BUILDING)
                if i == 4:
                    b.updateStatus(BuildStatus.FAILEDTOBUILD)
                else:
                    b.updateStatus(BuildStatus.FULLYBUILT)
                b.buildqueue_record.destroySelf()
            self.builds += builds

    def test_get_build_records(self):
        # A Distribution also implements IHasBuildRecords.
        builds = self.distribution.getBuildRecords().count()
        # Five sources x two architectures = ten builds.
        # (assertEquals is a deprecated alias of assertEqual.)
        self.assertEqual(10, builds)
class TestBuildStartEstimation(TestCaseWithFactory):
    """Tests for the estimated dispatch time of build queue entries."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildStartEstimation, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            # Estimation only considers builders that are OK.
            for buildd in getUtility(IBuilderSet):
                buildd.builderok = True
        self.distroseries = self.factory.makeDistroSeries()
        self.bob = getUtility(IBuilderSet).getByName(BOB_THE_BUILDER_NAME)
        das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.bob.processor,
            architecturetag='i386', supports_virtualized=True)
        with person_logged_in(self.admin):
            self.distroseries.nominatedarchindep = das
        self.publisher.addFakeChroots(distroseries=self.distroseries)

    def job_start_estimate(self, build):
        """Return the estimated start time of the build's queue entry."""
        return build.buildqueue_record.getEstimatedJobStartTime()

    def test_estimation(self):
        # A new build's estimated start time lies in the future.
        pkg = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        build = pkg.createMissingBuilds()[0]
        now = datetime.now(pytz.UTC)
        estimate = self.job_start_estimate(build)
        self.assertTrue(estimate > now)

    def test_disabled_archives(self):
        pkg1 = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build1] = pkg1.createMissingBuilds()
        build1.buildqueue_record.lastscore = 1000
        # No user-serviceable parts inside
        removeSecurityProxy(build1.buildqueue_record).estimated_duration = (
            timedelta(minutes=10))
        pkg2 = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            distroseries=self.distroseries)
        [build2] = pkg2.createMissingBuilds()
        build2.buildqueue_record.lastscore = 100
        now = datetime.now(pytz.UTC)
        # Since build1 is higher priority, its estimated dispatch time is
        # now.  (assertEquals is a deprecated alias of assertEqual.)
        estimate = self.job_start_estimate(build1)
        self.assertEqual(5, (estimate - now).seconds)
        # And build2 is next, so must take build1's duration into account
        estimate = self.job_start_estimate(build2)
        self.assertEqual(600, (estimate - now).seconds)
        # If we disable build1's archive, build2 is next
        with person_logged_in(self.admin):
            build1.archive.disable()
        estimate = self.job_start_estimate(build2)
        self.assertEqual(5, (estimate - now).seconds)
class TestDistributionHasBuildRecords(TestCaseWithFactory):
    """Populate a distroseries with builds"""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestDistributionHasBuildRecords, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create the machinery we need to create builds, such as
        # DistroArchSeries and builders.
        self.processor_one = self.factory.makeProcessor()
        self.processor_two = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_one,
            supports_virtualized=True)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_two,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.arch_ids = [arch.id for arch in self.distroseries.architectures]
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder_one = self.factory.makeBuilder(
                processor=self.processor_one)
            self.builder_two = self.factory.makeBuilder(
                processor=self.processor_two)
        self.builds = []
        self.createBuilds()

    def createBuilds(self):
        """Create five sources, each building on both architectures.

        The builds of the last iteration are marked failed; all others
        are marked fully built.
        """
        for i in range(5):
            # Create some test builds.
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries, architecturehintlist='any')
            builds = spph.createMissingBuilds()
            for b in builds:
                b.updateStatus(BuildStatus.BUILDING)
                if i == 4:
                    b.updateStatus(BuildStatus.FAILEDTOBUILD)
                else:
                    b.updateStatus(BuildStatus.FULLYBUILT)
                b.buildqueue_record.destroySelf()
            self.builds += builds

    def test_get_build_records(self):
        # A Distribution also implements IHasBuildRecords.
        builds = self.distribution.getBuildRecords().count()
        # Five sources x two architectures = ten builds.
        # (assertEquals is a deprecated alias of assertEqual.)
        self.assertEqual(10, builds)
    def test_switch_privacy_with_pubs_fails(self):
        """An archive that already has published sources cannot change
        its privacy setting, in either direction."""
        public_ppa = self.factory.makeArchive(private=False)
        private_ppa = self.factory.makeArchive(private=True)
        publisher = SoyuzTestPublisher()
        publisher.prepareBreezyAutotest()
        for ppa in (public_ppa, private_ppa):
            publisher.getPubSource(archive=ppa)

        # Neither making the public PPA private...
        self.assertRaises(
            CannotSwitchPrivacy, setattr, public_ppa, 'private', True)
        # ...nor making the private PPA public is permitted.
        self.assertRaises(
            CannotSwitchPrivacy, setattr, private_ppa, 'private', False)
    def test_switch_privacy_with_pubs_fails(self):
        """Archives with existing publications refuse privacy flips."""
        publisher = SoyuzTestPublisher()
        publisher.prepareBreezyAutotest()

        public_ppa = self.factory.makeArchive(private=False)
        private_ppa = self.factory.makeArchive(private=True)
        publisher.getPubSource(archive=public_ppa)
        publisher.getPubSource(archive=private_ppa)

        # Flipping privacy must fail in both directions.
        for ppa, flipped in ((public_ppa, True), (private_ppa, False)):
            self.assertRaises(
                CannotSwitchPrivacy, setattr, ppa, 'private', flipped)
class TestPlatformData(TestCaseWithFactory):
    """Tests covering the processor/virtualized properties."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Set up a native x86 build for the test archive."""
        super(TestPlatformData, self).setUp()

        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        # First mark all builds in the sample data as already built.
        store = IStore(BinaryPackageBuild)
        for sample_build in store.find(BinaryPackageBuild):
            sample_build.buildstate = BuildStatus.FULLYBUILT
        store.flush()

        # We test builds that target a primary archive.
        self.non_ppa = self.factory.makeArchive(
            name="primary", purpose=ArchivePurpose.PRIMARY)
        self.non_ppa.require_virtualized = False

        # One published 'gedit' source and its builds.
        gedit_pub = self.publisher.getPubSource(
            sourcename="gedit", status=PackagePublishingStatus.PUBLISHED,
            archive=self.non_ppa)
        self.builds = []
        self.builds.extend(gedit_pub.createMissingBuilds())

    def test_JobPlatformSettings(self):
        """The `BuildQueue` instance shares the processor/virtualized
        properties with the associated `Build`."""
        build, bq = find_job(self, 'gedit')

        # Both platform settings must agree between queue entry and build.
        self.assertEqual(
            bq.processor, build.processor,
            "The 'processor' property deviates.")
        self.assertEqual(
            bq.virtualized, build.virtualized,
            "The 'virtualized' property deviates.")
class TestBuildJobBase(TestCaseWithFactory):
    """Setup the test publisher and some builders."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestBuildJobBase, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        def make_native_builder(name, processor=None):
            # All builders in this fixture are native (non-virtualized).
            if processor is None:
                return self.factory.makeBuilder(name=name, virtualized=False)
            return self.factory.makeBuilder(
                name=name, processor=processor, virtualized=False)

        self.i8 = make_native_builder('i386-n-8')
        self.i9 = make_native_builder('i386-n-9')

        hppa_processor = getUtility(IProcessorSet).getByName('hppa')
        self.h6 = make_native_builder('hppa-n-6', hppa_processor)
        self.h7 = make_native_builder('hppa-n-7', hppa_processor)

        self.builders = dict()
        # x86 native
        self.builders[(1, False)] = [self.i8, self.i9]
        # hppa native
        self.builders[(3, True)] = [self.h6, self.h7]

        # Ensure all builders are operational.
        for group in self.builders.values():
            for builder in group:
                builder.builderok = True
                builder.manual = False

        # Disable the sample data builders.
        builder_set = getUtility(IBuilderSet)
        builder_set['bob'].builderok = False
        builder_set['frog'].builderok = False
# Example 16
# 0
class TestSourcePublicationListingExtra(BrowserTestCase):
    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestSourcePublicationListingExtra, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create everything we need to create builds, such as a
        # DistroArchSeries and a builder.
        self.processor = self.factory.makeProcessor(supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries(
            distribution=getUtility(IDistributionSet)['ubuntu'])
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(
                processors=[self.processor])

    def _link_tag(self, tag_name, target, text):
        # Matcher for an anchor pointing at target's canonical URL.
        return soupmatchers.Tag(
            tag_name, 'a', attrs={'href': canonical_url(target)}, text=text)

    def _make_recipe_built_spph(self):
        # A published source whose release is linked to a recipe build.
        sprb = self.factory.makeSourcePackageRecipeBuild(archive=self.archive)
        spph = self.publisher.getPubSource(
            archive=self.archive, status=PackagePublishingStatus.PUBLISHED)
        spph.sourcepackagerelease.source_package_recipe_build = sprb
        return sprb, spph

    def test_view_with_source_package_recipe(self):
        # When a SourcePackageRelease is linked to a
        # SourcePackageRecipeBuild, the view shows which recipe was
        # responsible for creating the SPR.
        sprb, spph = self._make_recipe_built_spph()
        recipe = sprb.recipe
        requester = sprb.requester
        recipe_link_matches = soupmatchers.HTMLContains(
            self._link_tag('link to build', sprb, 'Built'),
            self._link_tag('recipe name', recipe, recipe.name),
            self._link_tag('requester', requester, requester.displayname))
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertThat(browser.contents, recipe_link_matches)

    def test_view_without_source_package_recipe(self):
        # And if a SourcePackageRelease is not linked, there is no sign of it
        # in the view.
        spph = self.publisher.getPubSource(
            archive=self.archive, status=PackagePublishingStatus.PUBLISHED)
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertNotIn('Built by recipe', browser.contents)

    def test_view_with_deleted_source_package_recipe(self):
        # If a SourcePackageRelease is linked to a deleted recipe, the text
        # 'deleted recipe' is displayed, rather than a link.
        sprb, spph = self._make_recipe_built_spph()
        recipe = sprb.recipe
        requester = sprb.requester
        with person_logged_in(recipe.owner):
            recipe.destroySelf()
        recipe_link_matches = soupmatchers.HTMLContains(
            self._link_tag('link to build', sprb, 'Built'),
            self._link_tag('requester', requester, requester.displayname))
        browser = self.getViewBrowser(spph, '+listing-archive-extra')
        self.assertThat(browser.contents, recipe_link_matches)
        self.assertIn('deleted recipe', browser.contents)
# Example 17
# 0
class TestScriptRunning(TestCaseWithFactory):
    """Run parse-ppa-apache-access-logs.py and test its outcome."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Publish the binaries referenced by the sample logs and commit."""
        super(TestScriptRunning, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        self.store = IStore(BinaryPackageReleaseDownloadCount)

        # The sample Apache access logs refer to cprov's PPA, so the
        # binaries must be published there for the parser to count them.
        self.archive = getUtility(IPersonSet).getByName('cprov').archive
        self.archive.require_virtualized = False
        self.archive.setProcessors(getUtility(IProcessorSet).getAll())

        # foo is architecture-specific (distinct i386/hppa releases);
        # bar-bin is arch-independent (one release for both).
        self.foo_i386, self.foo_hppa = self.publisher.getPubBinaries(
            archive=self.archive, architecturespecific=True)
        self.bar_i386, self.bar_hppa = self.publisher.getPubBinaries(
            binaryname='bar-bin',
            archive=self.archive,
            architecturespecific=False)

        # Commit so the script can see our changes.
        import transaction
        transaction.commit()

    def test_script_run(self):
        # Before we run the script, there are no binary package
        # downloads in the database.
        # After the script's run, we will check that the results in the
        # database match the sample log files we use for this test:
        # lib/lp/soyuz/scripts/tests/ppa-apache-log-files
        # In addition to the wanted access log file, there is also an
        # error log that will be skipped by the configured glob.
        self.assertEqual(
            0,
            self.store.find(BinaryPackageReleaseDownloadCount).count())

        # Run the log parser as a real subprocess, the way cron would.
        process = subprocess.Popen(
            'cronscripts/parse-ppa-apache-access-logs.py',
            shell=True,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        (out, err) = process.communicate()
        self.assertEqual(process.returncode, 0,
                         "stdout:%s, stderr:%s" % (out, err))

        # The error log does not match the glob, so it is not processed,
        # and no OOPS is generated.
        self.oops_capture.sync()
        self.assertEqual([], self.oopses)

        # Must commit because the changes were done in another transaction.
        import transaction
        transaction.commit()
        results = self.store.find(BinaryPackageReleaseDownloadCount)

        australia = getUtility(ICountrySet)['AU']
        austria = getUtility(ICountrySet)['AT']

        # Expected per-day, per-country counts taken from the sample log
        # files; sort by (release id, day, country name) so the
        # comparison is deterministic (None country sorts first).
        self.assertEqual(
            [(self.foo_hppa.binarypackagerelease, self.archive,
              date(2008, 6, 13), australia, 1),
             (self.foo_i386.binarypackagerelease, self.archive,
              date(2008, 6, 13), australia, 1),
             (self.foo_i386.binarypackagerelease, self.archive,
              date(2008, 6, 13), austria, 1),
             (self.bar_i386.binarypackagerelease, self.archive,
              date(2008, 6, 14), None, 1),
             (self.bar_i386.binarypackagerelease, self.archive,
              date(2008, 6, 14), austria, 1)],
            sorted([(result.binary_package_release, result.archive, result.day,
                     result.country, result.count) for result in results],
                   key=lambda r: (r[0].id, r[2], r[3].name if r[3] else None)))
class TestScriptRunning(TestCaseWithFactory):
    """Run parse-ppa-apache-access-logs.py and test its outcome."""

    # NOTE(review): this class re-defines TestScriptRunning; at import
    # time this second definition shadows the earlier one, and only this
    # one's tests run. Unlike the earlier definition, this setUp does not
    # call archive.setProcessors() -- confirm which variant is intended.

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Publish the binaries referenced by the sample logs and commit."""
        super(TestScriptRunning, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        self.store = IStore(BinaryPackageReleaseDownloadCount)

        # The sample Apache access logs refer to cprov's PPA.
        self.archive = getUtility(IPersonSet).getByName('cprov').archive
        self.archive.require_virtualized = False

        # foo is architecture-specific; bar-bin is arch-independent.
        self.foo_i386, self.foo_hppa = self.publisher.getPubBinaries(
                archive=self.archive, architecturespecific=True)
        self.bar_i386, self.bar_hppa = self.publisher.getPubBinaries(
                binaryname='bar-bin', archive=self.archive,
                architecturespecific=False)

        # Commit so the script can see our changes.
        import transaction
        transaction.commit()

    def test_script_run(self):
        # Before we run the script, there are no binary package
        # downloads in the database.
        # After the script's run, we will check that the results in the
        # database match the sample log files we use for this test:
        # lib/lp/soyuz/scripts/tests/ppa-apache-log-files
        # In addition to the wanted access log file, there is also an
        # error log that will be skipped by the configured glob.
        self.assertEqual(
            0, self.store.find(BinaryPackageReleaseDownloadCount).count())

        # Run the log parser as a real subprocess, the way cron would.
        process = subprocess.Popen(
            'cronscripts/parse-ppa-apache-access-logs.py', shell=True,
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        (out, err) = process.communicate()
        self.assertEqual(
            process.returncode, 0, "stdout:%s, stderr:%s" % (out, err))

        # The error log does not match the glob, so it is not processed,
        # and no OOPS is generated.
        self.oops_capture.sync()
        self.assertEqual([], self.oopses)

        # Must commit because the changes were done in another transaction.
        import transaction
        transaction.commit()
        results = self.store.find(BinaryPackageReleaseDownloadCount)

        australia = getUtility(ICountrySet)['AU']
        austria = getUtility(ICountrySet)['AT']

        # Expected per-day, per-country counts taken from the sample log
        # files; sorted by (release id, day, country name) so the
        # comparison is deterministic (None country sorts first).
        self.assertEqual(
            [(self.foo_hppa.binarypackagerelease,
              self.archive,
              date(2008, 6, 13),
              australia,
              1),
             (self.foo_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 13),
              australia,
              1),
             (self.foo_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 13),
              austria,
              1),
             (self.bar_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 14),
              None,
              1),
             (self.bar_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 14),
              austria,
              1)],
            sorted(
                [(result.binary_package_release, result.archive, result.day,
                  result.country, result.count) for result in results],
                 key=lambda r: (r[0].id, r[2], r[3].name if r[3] else None)))
class TestBuildUpdateDependencies(TestCaseWithFactory):
    """Tests for `IBinaryPackageBuild.updateDependencies` and buildqueue
    cleanup for builds in MANUALDEPWAIT."""

    layer = LaunchpadZopelessLayer

    def _setupSimpleDepwaitContext(self):
        """Use `SoyuzTestPublisher` to setup a simple depwait context.

        Return an `IBinaryPackageBuild` in MANUALDEPWAIT state and depending
        on a binary that exists and is reachable.
        """
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        depwait_source = self.publisher.getPubSource(
            sourcename='depwait-source')

        # Publish the binary the depwait build will depend on.
        self.publisher.getPubBinaries(
            binaryname='dep-bin',
            status=PackagePublishingStatus.PUBLISHED)

        [depwait_build] = depwait_source.createMissingBuilds()
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin'})
        return depwait_build

    def testBuildqueueRemoval(self):
        """Test removing buildqueue items.

        Removing a Buildqueue row should also remove its associated
        BuildPackageJob and Job rows.
        """
        # Create a build in depwait.
        depwait_build = self._setupSimpleDepwaitContext()
        depwait_build_id = depwait_build.id

        # Grab the relevant db records for later comparison.
        store = Store.of(depwait_build)
        build_package_job = store.find(
            BuildPackageJob,
            depwait_build.id == BuildPackageJob.build).one()
        build_package_job_id = build_package_job.id
        job_id = store.find(Job, Job.id == build_package_job.job.id).one().id
        build_queue_id = store.find(
            BuildQueue, BuildQueue.job == job_id).one().id

        depwait_build.buildqueue_record.destroySelf()

        # Test that the records above no longer exist in the db.
        self.assertEqual(
            store.find(
                BuildPackageJob,
                BuildPackageJob.id == build_package_job_id).count(),
            0)
        self.assertEqual(
            store.find(Job, Job.id == job_id).count(),
            0)
        self.assertEqual(
            store.find(BuildQueue, BuildQueue.id == build_queue_id).count(),
            0)
        # But the build itself still exists.
        self.assertEqual(
            store.find(
                BinaryPackageBuild,
                BinaryPackageBuild.id == depwait_build_id).count(),
            1)

    def testUpdateDependenciesWorks(self):
        # Calling `IBinaryPackageBuild.updateDependencies` makes the build
        # record ready for dispatch.
        depwait_build = self._setupSimpleDepwaitContext()
        # Commit so updateDependencies sees the published dep-bin.
        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def assertRaisesUnparsableDependencies(self, depwait_build, dependencies):
        """Assert that updateDependencies rejects `dependencies`.

        Helper: puts the build back into MANUALDEPWAIT with the given
        dependency string and expects UnparsableDependencies.
        """
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': dependencies})
        self.assertRaises(
            UnparsableDependencies, depwait_build.updateDependencies)

    def testInvalidDependencies(self):
        # Calling `IBinaryPackageBuild.updateDependencies` on a build with
        # invalid 'dependencies' raises an AssertionError.
        # Anything not following '<name> [([relation] <version>)][, ...]'
        depwait_build = self._setupSimpleDepwaitContext()

        # None is not a valid dependency values.
        self.assertRaisesUnparsableDependencies(depwait_build, None)

        # Missing 'name'.
        self.assertRaisesUnparsableDependencies(depwait_build, u'(>> version)')

        # Missing 'version'.
        self.assertRaisesUnparsableDependencies(depwait_build, u'name (>>)')

        # Missing comma between dependencies.
        self.assertRaisesUnparsableDependencies(depwait_build, u'name1 name2')

    def testBug378828(self):
        # `IBinaryPackageBuild.updateDependencies` copes with the
        # scenario where the corresponding source publication is not
        # active (deleted) and the source original component is not a
        # valid ubuntu component.
        depwait_build = self._setupSimpleDepwaitContext()

        spr = depwait_build.source_package_release
        depwait_build.current_source_publication.requestDeletion(
            spr.creator)
        contrib = getUtility(IComponentSet).new('contrib')
        # Force the invalid component past the security proxy.
        removeSecurityProxy(spr).component = contrib

        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testVersionedDependencies(self):
        # `IBinaryPackageBuild.updateDependencies` supports versioned
        # dependencies. A build will not be retried unless the candidate
        # complies with the version restriction.
        # In this case, dep-bin 666 is available. >> 666 isn't
        # satisfied, but >= 666 is.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        # Unsatisfied: the dependency string is left in place.
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (>> 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'dep-bin (>> 666)')
        # Satisfied: the dependency is cleared.
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (>= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')

    def testVersionedDependencyOnOldPublication(self):
        # `IBinaryPackageBuild.updateDependencies` doesn't just consider
        # the latest publication. There may be older publications which
        # satisfy the version constraints (in other archives or pockets).
        # In this case, dep-bin 666 and 999 are available, so both = 666
        # and = 999 are satisfied.
        depwait_build = self._setupSimpleDepwaitContext()
        self.publisher.getPubBinaries(
            binaryname='dep-bin', version='999',
            status=PackagePublishingStatus.PUBLISHED)
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (= 999)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')
class TestBuildSet(TestCaseWithFactory):
    """Tests for the `IBinaryPackageBuildSet` utility.

    Fixes over the original: local variable ``set`` no longer shadows
    the builtin, the deprecated ``assertEquals`` alias is replaced by
    ``assertEqual``, and the misleading ``build_ids`` local (which held
    build objects, not ids) is renamed.
    """

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create two architectures, a primary archive and builders."""
        super(TestBuildSet, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor_one = self.factory.makeProcessor()
        self.processor_two = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_one,
            supports_virtualized=True)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_two,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder_one = self.factory.makeBuilder(
                processor=self.processor_one)
            self.builder_two = self.factory.makeBuilder(
                processor=self.processor_two)
        self.builds = []
        self.spphs = []

    def setUpBuilds(self):
        """Create five sources, each building on both architectures.

        The fifth source's builds end FAILEDTOBUILD; all others end
        FULLYBUILT. Ten builds in total.
        """
        for i in range(5):
            # Create some test builds
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries, architecturehintlist='any')
            self.spphs.append(spph)
            builds = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                for b in builds:
                    b.updateStatus(BuildStatus.BUILDING)
                    if i == 4:
                        b.updateStatus(BuildStatus.FAILEDTOBUILD)
                    else:
                        b.updateStatus(BuildStatus.FULLYBUILT)
                    b.buildqueue_record.destroySelf()
            self.builds += builds

    def test_get_for_distro_distribution(self):
        # Test fetching builds for a distro's main archives
        self.setUpBuilds()
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution)
        self.assertEqual(found.count(), 10)

    def test_get_for_distro_distroseries(self):
        # Test fetching builds for a distroseries' main archives
        self.setUpBuilds()
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distroseries)
        self.assertEqual(found.count(), 10)

    def test_get_for_distro_distroarchseries(self):
        # Test fetching builds for a distroarchseries' main archives
        self.setUpBuilds()
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.das_one)
        self.assertEqual(found.count(), 5)

    def test_get_for_distro_filter_build_status(self):
        # The result can be filtered based on the build status
        self.setUpBuilds()
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, status=BuildStatus.FULLYBUILT)
        self.assertEqual(found.count(), 8)

    def test_get_for_distro_filter_name(self):
        # The result can be filtered based on the name
        self.setUpBuilds()
        spn = self.builds[2].source_package_release.sourcepackagename.name
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, name=spn)
        self.assertEqual(found.count(), 2)

    def test_get_for_distro_filter_pocket(self):
        # The result can be filtered based on the pocket of the build
        self.setUpBuilds()
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.RELEASE)
        self.assertEqual(found.count(), 10)
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.UPDATES)
        self.assertEqual(found.count(), 0)

    def test_get_for_distro_filter_arch_tag(self):
        # The result can be filtered based on the archtag of the build
        self.setUpBuilds()
        found = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, arch_tag=self.das_one.architecturetag)
        self.assertEqual(found.count(), 5)

    def test_get_status_summary_for_builds(self):
        # We can query for the status summary of a number of builds
        self.setUpBuilds()
        relevant_builds = [self.builds[0], self.builds[2], self.builds[-2]]
        summary = getUtility(
            IBinaryPackageBuildSet).getStatusSummaryForBuilds(
                relevant_builds)
        self.assertEqual(summary['status'], BuildSetStatus.FAILEDTOBUILD)
        self.assertEqual(summary['builds'], [self.builds[-2]])

    def test_preload_data(self):
        # The BuildSet class allows data to be preloaded
        # Note, it is an internal method, so we have to push past the security
        # proxy
        self.setUpBuilds()
        # _prefetchBuildData takes build objects, not their ids.
        builds_to_fetch = [self.builds[i] for i in (0, 1, 2, 3)]
        rset = removeSecurityProxy(
            getUtility(IBinaryPackageBuildSet))._prefetchBuildData(
                builds_to_fetch)
        self.assertEqual(len(rset), 4)

    def test_get_builds_by_source_package_release(self):
        # We are able to return all of the builds for the source package
        # release ids passed in.
        self.setUpBuilds()
        spphs = self.spphs[:2]
        ids = [spph.sourcepackagerelease.id for spph in spphs]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(ids)
        expected_titles = []
        for spph in spphs:
            for das in (self.das_one, self.das_two):
                expected_titles.append(
                    '%s build of %s %s in %s %s RELEASE' % (
                        das.architecturetag, spph.source_package_name,
                        spph.source_package_version,
                        self.distroseries.distribution.name,
                        self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))

    def test_get_builds_by_source_package_release_filtering(self):
        # The buildstate parameter restricts the result to builds in the
        # given status.
        self.setUpBuilds()
        ids = [self.spphs[-1].sourcepackagerelease.id]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.FAILEDTOBUILD)
        expected_titles = []
        for das in (self.das_one, self.das_two):
            expected_titles.append(
                '%s build of %s %s in %s %s RELEASE' % (
                    das.architecturetag, self.spphs[-1].source_package_name,
                    self.spphs[-1].source_package_version,
                    self.distroseries.distribution.name,
                    self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.CHROOTWAIT)
        self.assertEqual([], list(builds))

    def test_no_get_builds_by_source_package_release(self):
        # If no ids or None are passed into .getBuildsBySourcePackageRelease,
        # an empty list is returned.
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(None)
        self.assertEqual([], builds)
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease([])
        self.assertEqual([], builds)
class TestDistroSeriesBinaryPackage(TestCaseWithFactory):
    """Tests for `DistroSeriesBinaryPackage`.

    Fix over the original: the deprecated ``failUnlessEqual`` alias is
    replaced with ``assertEqual``.
    """

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create a distroseriesbinarypackage to play with."""
        super(TestDistroSeriesBinaryPackage, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()
        self.distroseries = self.publisher.distroseries
        self.distribution = self.distroseries.distribution
        binaries = self.publisher.getPubBinaries(binaryname='foo-bin',
                                                 summary='Foo is the best')
        binary_pub = binaries[0]
        self.binary_package_name = (
            binary_pub.binarypackagerelease.binarypackagename)
        self.distroseries_binary_package = DistroSeriesBinaryPackage(
            self.distroseries, self.binary_package_name)

    def test_cache_attribute_when_two_cache_objects(self):
        # We have situations where there are cache objects for each
        # distro archive - we need to handle this situation without
        # OOPSing - see bug 580181.
        distro_archive_1 = self.distribution.main_archive
        distro_archive_2 = self.distribution.all_distro_archives[1]

        # Publish the same binary in another distro archive.
        self.publisher.getPubBinaries(binaryname='foo-bin',
                                      summary='Foo is the best',
                                      archive=distro_archive_2)

        logger = BufferLogger()
        with dbuser(config.statistician.dbuser):
            DistroSeriesPackageCache._update(self.distroseries,
                                             self.binary_package_name,
                                             distro_archive_1, logger)

            DistroSeriesPackageCache._update(self.distroseries,
                                             self.binary_package_name,
                                             distro_archive_2, logger)

        self.assertEqual('Foo is the best',
                         self.distroseries_binary_package.summary)

    def test_none_cache_passed_at_init_counts_as_cached(self):
        # If the value None is passed as the constructor parameter
        # "cache", it is considered as a valid value.
        # Accessing the property DistroSeriesBinaryPackage.cache
        # later does not lead to the execution of an SQL query to
        # retrieve a DistroSeriesPackageCache record.
        binary_package = DistroSeriesBinaryPackage(self.distroseries,
                                                   self.binary_package_name,
                                                   cache=None)
        with StormStatementRecorder() as recorder:
            binary_package.cache
        self.assertThat(recorder, HasQueryCount(Equals(0)))

        # If the parameter "cache" was not passed, accessing
        # DistroSeriesBinaryPackage.cache for the first time requires
        # at least one SQL query.
        with StormStatementRecorder() as recorder:
            self.distroseries_binary_package.cache
        self.assertThat(recorder, HasQueryCount(NotEquals(0)))
class TestSourcePublicationListingExtra(BrowserTestCase):
    """Tests for the '+listing-archive-extra' view of a publication."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestSourcePublicationListingExtra, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Build infrastructure needed for creating builds: an
        # architecture series plus a matching builder.
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)

    def test_view_with_source_package_recipe(self):
        # A SourcePackageRelease linked to a SourcePackageRecipeBuild is
        # rendered with links to the build, the recipe and the requester.
        sprb = self.factory.makeSourcePackageRecipeBuild(
            archive=self.archive)
        requester = sprb.requester
        recipe = sprb.recipe
        pub = self.publisher.getPubSource(
            status=PackagePublishingStatus.PUBLISHED, archive=self.archive)
        pub.sourcepackagerelease.source_package_recipe_build = sprb
        matcher = soupmatchers.HTMLContains(
            soupmatchers.Tag(
                'link to build', 'a', text='Built',
                attrs={'href': canonical_url(sprb)}),
            soupmatchers.Tag(
                'recipe name', 'a', text=recipe.name,
                attrs={'href': canonical_url(recipe)}),
            soupmatchers.Tag(
                'requester', 'a', text=requester.displayname,
                attrs={'href': canonical_url(requester)}))
        browser = self.getViewBrowser(pub, '+listing-archive-extra')
        self.assertThat(browser.contents, matcher)

    def test_view_without_source_package_recipe(self):
        # Without a recipe build link, the view shows no trace of
        # recipes at all.
        pub = self.publisher.getPubSource(
            status=PackagePublishingStatus.PUBLISHED, archive=self.archive)
        browser = self.getViewBrowser(pub, '+listing-archive-extra')
        self.assertNotIn('Built by recipe', browser.contents)

    def test_view_with_deleted_source_package_recipe(self):
        # When the linked recipe has been deleted, 'deleted recipe' is
        # shown in place of a recipe link; build and requester links
        # remain.
        sprb = self.factory.makeSourcePackageRecipeBuild(
            archive=self.archive)
        requester = sprb.requester
        recipe = sprb.recipe
        pub = self.publisher.getPubSource(
            status=PackagePublishingStatus.PUBLISHED, archive=self.archive)
        pub.sourcepackagerelease.source_package_recipe_build = sprb
        with person_logged_in(recipe.owner):
            recipe.destroySelf()
        matcher = soupmatchers.HTMLContains(
            soupmatchers.Tag(
                'link to build', 'a', text='Built',
                attrs={'href': canonical_url(sprb)}),
            soupmatchers.Tag(
                'requester', 'a', text=requester.displayname,
                attrs={'href': canonical_url(requester)}))
        browser = self.getViewBrowser(pub, '+listing-archive-extra')
        self.assertThat(browser.contents, matcher)
        self.assertIn('deleted recipe', browser.contents)
class TestBuildNotify(TestCaseWithFactory):
    """Tests for build status notification mail (`build.notify`)."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildNotify, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create all of the items we need to create builds
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        self.creator = self.factory.makePerson(email='*****@*****.**')
        self.gpgkey = self.factory.makeGPGKey(owner=self.creator)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.ppa = self.factory.makeArchive()
        buildd_admins = getUtility(IPersonSet).getByName(
            'launchpad-buildd-admins')
        self.buildd_admins_email = []
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)
            for member in buildd_admins.activemembers:
                self.buildd_admins_email.append(member.preferredemail.email)
        self.builds = []

    def create_builds(self, archive):
        """Create one build in `archive` for every `BuildStatus`.

        The builds are appended to self.builds, so that
        self.builds[some_status.value] is a build in that status.
        """
        for status in BuildStatus.items:
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (
                    self.factory.getUniqueInteger(), status.value),
                distroseries=self.distroseries, architecturehintlist='any',
                creator=self.creator, archive=archive)
            spph.sourcepackagerelease.dscsigningkey = self.gpgkey
            [build] = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                build.updateStatus(BuildStatus.BUILDING, builder=self.builder)
                build.updateStatus(status,
                    date_finished=(
                        build.date_started + timedelta(
                            minutes=5 * (status.value + 1))))
                if status != BuildStatus.BUILDING:
                    build.buildqueue_record.destroySelf()
                else:
                    build.buildqueue_record.builder = self.builder
            self.builds.append(build)

    def _assert_mail_is_correct(self, build, notification, ppa=False):
        # Assert that the mail sent (which is in notification), matches
        # the data from the build
        self.assertEqual('*****@*****.**',
            notification['X-Creator-Recipient'])
        self.assertEqual(
            self.das.architecturetag, notification['X-Launchpad-Build-Arch'])
        self.assertEqual(
            'main', notification['X-Launchpad-Build-Component'])
        self.assertEqual(
            build.status.name, notification['X-Launchpad-Build-State'])
        if ppa is True:
            self.assertEqual(
                get_ppa_reference(self.ppa), notification['X-Launchpad-PPA'])
        body = notification.get_payload(decode=True)
        build_log = 'None'
        if ppa is True:
            archive = '%s PPA' % get_ppa_reference(build.archive)
            source = 'not available'
        else:
            archive = '%s primary archive' % (
                self.distroseries.distribution.name)
            source = canonical_url(build.distributionsourcepackagerelease)
        builder = canonical_url(build.builder)
        # The expected body text varies with the build's final status.
        if build.status == BuildStatus.BUILDING:
            duration = 'not finished'
            build_log = 'see builder page'
        elif (
            build.status == BuildStatus.SUPERSEDED or
            build.status == BuildStatus.NEEDSBUILD):
            duration = 'not available'
            build_log = 'not available'
            builder = 'not available'
        elif build.status == BuildStatus.UPLOADING:
            duration = 'uploading'
            build_log = 'see builder page'
            builder = 'not available'
        else:
            duration = DurationFormatterAPI(
                build.duration).approximateduration()
        expected_body = dedent("""
         * Source Package: %s
         * Version: %s
         * Architecture: %s
         * Archive: %s
         * Component: main
         * State: %s
         * Duration: %s
         * Build Log: %s
         * Builder: %s
         * Source: %s



        If you want further information about this situation, feel free to
        contact a member of the Launchpad Buildd Administrators team.

        --
        %s
        %s
        """ % (
            build.source_package_release.sourcepackagename.name,
            build.source_package_release.version, self.das.architecturetag,
            archive, build.status.title, duration, build_log, builder,
            source, build.title, canonical_url(build)))
        self.assertEqual(expected_body, body)

    def test_notify_buildd_admins(self):
        # A build will cause an e-mail to be sent out to the buildd-admins,
        # for primary archive builds.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        expected_emails = self.buildd_admins_email + ['*****@*****.**']
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        self.assertEqual(expected_emails, actual_emails)

    def test_ppa_does_not_notify_buildd_admins(self):
        # A build for a PPA does not notify the buildd admins.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notifications = pop_notifications()
        # An e-mail is sent to the archive owner, as well as the creator
        self.assertEqual(2, len(notifications))

    def test_notify_failed_to_build(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_failed_to_build_ppa(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(archive=self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_needs_building(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_needs_building_ppa(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_successfully_built(self):
        # Successful builds don't notify anyone.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        build.notify()
        self.assertEqual([], pop_notifications())

    def test_notify_dependency_wait(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_dependency_wait_ppa(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_chroot_problem(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_chroot_problem_ppa(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_build_for_superseded_source(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_build_for_superseded_source_ppa(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_currently_building(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_currently_building_ppa(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_uploading_build(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_uploading_build_ppa(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_copied_into_ppa_does_not_spam(self):
        # When a package is copied into a PPA, we don't send mail to the
        # original creator of the source package.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        spph = build.current_source_publication
        ppa_spph = spph.copyTo(
            self.distroseries, PackagePublishingPocket.RELEASE, self.ppa)
        [ppa_build] = ppa_spph.createMissingBuilds()
        ppa_build.notify()
        notifications = pop_notifications()
        self.assertEqual(1, len(notifications))

    def test_notify_owner_supresses_mail(self):
        # When the 'notify_owner' config option is False, we don't send mail
        # to the owner of the SPR.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        notify_owner = dedent("""
            [builddmaster]
            send_build_notification: True
            notify_owner: False
            """)
        config.push('notify_owner', notify_owner)
        # Undo the config override even if an assertion below fails.
        self.addCleanup(config.pop, 'notify_owner')
        build.notify()
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        self.assertEqual(self.buildd_admins_email, actual_emails)

    def test_build_notification_supresses_mail(self):
        # When the 'build_notification' config option is False, we don't
        # send any mail at all.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        send_build_notification = dedent("""
            [builddmaster]
            send_build_notification: False
            """)
        config.push('send_build_notification', send_build_notification)
        # Undo the config override even if an assertion below fails.
        self.addCleanup(config.pop, 'send_build_notification')
        build.notify()
        notifications = pop_notifications()
        self.assertEqual(0, len(notifications))

    def test_sponsored_upload_notification(self):
        # If the signing key is different to the creator, they are both
        # notified.
        sponsor = self.factory.makePerson('*****@*****.**')
        key = self.factory.makeGPGKey(owner=sponsor)
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        spr = build.current_source_publication.sourcepackagerelease
        # Push past the security proxy
        removeSecurityProxy(spr).dscsigningkey = key
        build.notify()
        notifications = pop_notifications()
        expected_emails = self.buildd_admins_email + [
            '*****@*****.**', '*****@*****.**']
        actual_emails = [n['To'] for n in notifications]
        self.assertEqual(expected_emails, actual_emails)
 def publish_to_ppa(self, ppa):
     """Helper method to publish a package in a PPA."""
     test_publisher = SoyuzTestPublisher()
     test_publisher.prepareBreezyAutotest()
     test_publisher.getPubSource(archive=ppa)
class TestDistributionSourcePackageFindRelatedArchives(TestCaseWithFactory):
    """Tests for `IDistributionSourcePackage.findRelatedArchives`.

    Related archives are ordered by the soyuz karma of the people who
    uploaded the package to them; copied (rather than uploaded) packages
    are excluded.
    """

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Publish some gedit sources in main and PPAs."""
        super(TestDistributionSourcePackageFindRelatedArchives, self).setUp()

        self.distribution = getUtility(IDistributionSet)['ubuntutest']

        # Create two PPAs for gedit.
        self.archives = {}
        self.archives['ubuntu-main'] = self.distribution.main_archive
        self.archives['gedit-nightly'] = self.factory.makeArchive(
            name="gedit-nightly", distribution=self.distribution)
        self.archives['gedit-beta'] = self.factory.makeArchive(
            name="gedit-beta", distribution=self.distribution)

        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        # Publish gedit in all three archives.
        self.person_nightly = self.factory.makePerson()
        self.gedit_nightly_src_hist = self.publisher.getPubSource(
            sourcename="gedit", archive=self.archives['gedit-nightly'],
            creator=self.person_nightly,
            status=PackagePublishingStatus.PUBLISHED)

        self.person_beta = self.factory.makePerson()
        self.gedit_beta_src_hist = self.publisher.getPubSource(
            sourcename="gedit", archive=self.archives['gedit-beta'],
            creator=self.person_beta,
            status=PackagePublishingStatus.PUBLISHED)
        self.gedit_main_src_hist = self.publisher.getPubSource(
            sourcename="gedit", archive=self.archives['ubuntu-main'],
            status=PackagePublishingStatus.PUBLISHED)

        # Save the gedit source package for easy access.
        self.source_package = self.distribution.getSourcePackage('gedit')

        # Add slightly more soyuz karma for person_nightly for this package.
        # The karma cache may only be written by the 'karma' db user, so
        # switch users around the writes.
        switch_dbuser('karma')
        self.person_beta_karma = KarmaTotalCache(
            person=self.person_beta, karma_total=200)
        self.person_nightly_karma = KarmaTotalCache(
            person=self.person_nightly, karma_total=201)
        switch_dbuser('launchpad')

    def test_order_by_soyuz_package_karma(self):
        # Returned archives are ordered by the soyuz karma of the
        # package uploaders for the particular package

        related_archives = self.source_package.findRelatedArchives()
        related_archive_names = [
            archive.name for archive in related_archives]

        # person_nightly has the higher karma (201 vs 200), so its
        # archive sorts first.
        self.assertEqual(related_archive_names, [
            'gedit-nightly',
            'gedit-beta',
            ])

        # Update the soyuz karma for person_beta for this package so that
        # it is greater than person_nightly's.
        switch_dbuser('karma')
        self.person_beta_karma.karma_total = 202
        switch_dbuser('launchpad')

        related_archives = self.source_package.findRelatedArchives()
        related_archive_names = [
            archive.name for archive in related_archives]

        self.assertEqual(related_archive_names, [
            'gedit-beta',
            'gedit-nightly',
            ])

    def test_require_package_karma(self):
        # Only archives where the related package was created by a person
        # with the required soyuz karma for this package.

        related_archives = self.source_package.findRelatedArchives(
            required_karma=201)
        related_archive_names = [
            archive.name for archive in related_archives]

        # Only person_nightly meets the karma threshold of 201.
        self.assertEqual(related_archive_names, ['gedit-nightly'])

    def test_development_version(self):
        # IDistributionSourcePackage.development_version is the ISourcePackage
        # for the current series of the distribution.
        dsp = self.factory.makeDistributionSourcePackage()
        series = self.factory.makeDistroSeries(distribution=dsp.distribution)
        self.assertEqual(series, dsp.distribution.currentseries)
        development_version = dsp.distribution.currentseries.getSourcePackage(
            dsp.sourcepackagename)
        self.assertEqual(development_version, dsp.development_version)

    def test_development_version_no_current_series(self):
        # IDistributionSourcePackage.development_version is the ISourcePackage
        # for the current series of the distribution.
        dsp = self.factory.makeDistributionSourcePackage()
        currentseries = dsp.distribution.currentseries
        # The current series is None by default.
        self.assertIs(None, currentseries)
        self.assertEqual(None, dsp.development_version)

    def test_does_not_include_copied_packages(self):
        # Packages that have been copied rather than uploaded are not
        # included when determining related archives.

        # Ensure that the gedit package in gedit-nightly was originally
        # uploaded to gedit-beta (ie. copied from there).
        gedit_release = self.gedit_nightly_src_hist.sourcepackagerelease
        gedit_release.upload_archive = self.archives['gedit-beta']

        related_archives = self.source_package.findRelatedArchives()
        related_archive_names = [
            archive.name for archive in related_archives]

        self.assertEqual(related_archive_names, ['gedit-beta'])
class TestProcessAccepted(TestCaseWithFactory):
    """Tests for the `ProcessAccepted` queue-processing script."""

    layer = LaunchpadZopelessLayer
    dbuser = config.uploadqueue.dbuser

    def setUp(self):
        """Create the Soyuz test publisher."""
        # Use super() for consistency with the other test classes in this
        # file, so cooperative setUp chains are honoured.
        super(TestProcessAccepted, self).setUp()
        self.stp = SoyuzTestPublisher()
        self.stp.prepareBreezyAutotest()
        self.test_package_name = u"accept-test"
        self.distro = self.factory.makeDistribution()

    def getScript(self, test_args=None):
        """Return a ProcessAccepted instance."""
        if test_args is None:
            test_args = []
        test_args.append(self.distro.name)
        script = ProcessAccepted("process accepted", test_args=test_args)
        script.logger = BufferLogger()
        script.txn = self.layer.txn
        return script

    def createWaitingAcceptancePackage(self, distroseries, archive=None,
                                       sourcename=None):
        """Create some pending publications."""
        if archive is None:
            archive = self.distro.main_archive
        if sourcename is None:
            sourcename = self.test_package_name
        return self.stp.getPubSource(
            archive=archive, sourcename=sourcename, distroseries=distroseries,
            spr_only=True)

    def test_robustness(self):
        """Test that a broken package doesn't block the publication of other
        packages."""
        # Attempt to upload one source to a supported series.
        # The record is created first and then the status of the series
        # is changed from DEVELOPMENT to SUPPORTED, otherwise it's impossible
        # to create the record.
        distroseries = self.factory.makeDistroSeries(distribution=self.distro)
        # This creates a broken publication.
        self.createWaitingAcceptancePackage(
            distroseries=distroseries, sourcename="notaccepted")
        distroseries.status = SeriesStatus.SUPPORTED
        # Also upload some other things.
        other_distroseries = self.factory.makeDistroSeries(
            distribution=self.distro)
        self.createWaitingAcceptancePackage(distroseries=other_distroseries)
        script = self.getScript([])
        switch_dbuser(self.dbuser)
        script.main()

        # The other source should be published now.
        published_main = self.distro.main_archive.getPublishedSources(
            name=self.test_package_name)
        self.assertEqual(published_main.count(), 1)

        # And an oops should be filed for the first.
        self.assertEqual(1, len(self.oopses))
        error_report = self.oopses[0]
        expected_error = "Failure processing queue_item"
        self.assertStartsWith(
                error_report['req_vars']['error-explanation'],
                expected_error)

    def test_accept_copy_archives(self):
        """Test that publications in a copy archive are accepted properly."""
        # Upload some pending packages in a copy archive.
        distroseries = self.factory.makeDistroSeries(distribution=self.distro)
        copy_archive = self.factory.makeArchive(
            distribution=self.distro, purpose=ArchivePurpose.COPY)
        copy_source = self.createWaitingAcceptancePackage(
            archive=copy_archive, distroseries=distroseries)
        # Also upload some stuff in the main archive.
        self.createWaitingAcceptancePackage(distroseries=distroseries)

        # Before accepting, the package should not be published at all.
        published_copy = copy_archive.getPublishedSources(
            name=self.test_package_name)
        # Using .count() until Storm fixes __nonzero__ on SQLObj result
        # sets, then we can use bool() which is far more efficient than
        # counting.
        self.assertEqual(published_copy.count(), 0)

        # Accept the packages.
        script = self.getScript(['--copy-archives'])
        switch_dbuser(self.dbuser)
        script.main()

        # Packages in main archive should not be accepted and published.
        published_main = self.distro.main_archive.getPublishedSources(
            name=self.test_package_name)
        self.assertEqual(published_main.count(), 0)

        # Check the copy archive source was accepted.
        published_copy = copy_archive.getPublishedSources(
            name=self.test_package_name).one()
        self.assertEqual(
            published_copy.status, PackagePublishingStatus.PENDING)
        self.assertEqual(copy_source, published_copy.sourcepackagerelease)

    def test_commits_after_each_item(self):
        # Test that the script commits after each item, not just at the end.
        uploads = [
            self.createWaitingAcceptancePackage(
                distroseries=self.factory.makeDistroSeries(
                    distribution=self.distro),
                sourcename='source%d' % i)
            for i in range(3)]

        class UploadCheckingSynchronizer:
            """Transaction synchronizer that checks upload progress on
            every commit performed by the script."""

            commit_count = 0

            def beforeCompletion(inner_self, txn):
                pass

            def afterCompletion(inner_self, txn):
                if txn.status != 'Committed':
                    return
                inner_self.commit_count += 1
                # Each commit must correspond to at most one more upload
                # having reached the DONE state.
                done_count = len([
                    upload for upload in uploads
                    if upload.package_upload.status ==
                        PackageUploadStatus.DONE])
                self.assertEqual(
                    min(len(uploads), inner_self.commit_count),
                    done_count)

        script = self.getScript([])
        switch_dbuser(self.dbuser)
        synch = UploadCheckingSynchronizer()
        transaction.manager.registerSynch(synch)
        script.main()
        self.assertThat(len(uploads), LessThan(synch.commit_count))

    def test_commits_work(self):
        upload = self.factory.makeSourcePackageUpload(
            distroseries=self.factory.makeDistroSeries(
                distribution=self.distro))
        upload_id = upload.id
        self.getScript([]).main()
        # Aborting after main() proves the script committed its own work.
        self.layer.txn.abort()
        self.assertEqual(
            upload, IStore(PackageUpload).get(PackageUpload, upload_id))

    def test_validateArguments_requires_distro_by_default(self):
        self.assertRaises(
            OptionValueError, ProcessAccepted(test_args=[]).validateArguments)

    def test_validateArguments_requires_no_distro_for_derived_run(self):
        ProcessAccepted(test_args=['--derived']).validateArguments()
        # The test is that this does not raise an exception.

    def test_validateArguments_does_not_accept_distro_for_derived_run(self):
        distro = self.factory.makeDistribution()
        script = ProcessAccepted(test_args=['--derived', distro.name])
        self.assertRaises(OptionValueError, script.validateArguments)

    def test_findTargetDistros_finds_named_distro(self):
        distro = self.factory.makeDistribution()
        script = ProcessAccepted(test_args=[distro.name])
        self.assertContentEqual([distro], script.findTargetDistros())

    def test_findNamedDistro_raises_error_if_not_found(self):
        nonexistent_distro = self.factory.getUniqueString()
        script = ProcessAccepted(test_args=[nonexistent_distro])
        self.assertRaises(
            LaunchpadScriptFailure,
            script.findNamedDistro, nonexistent_distro)

    def test_findTargetDistros_for_derived_finds_derived_distro(self):
        dsp = self.factory.makeDistroSeriesParent()
        script = ProcessAccepted(test_args=['--derived'])
        self.assertIn(
            dsp.derived_series.distribution, script.findTargetDistros())
class TestAddMissingBuilds(TestCaseWithFactory):
    """Test the add-missing-builds.py script. """

    layer = LaunchpadZopelessLayer
    dbuser = config.builddmaster.dbuser

    def setUp(self):
        """Make a PPA and publish some sources that need builds."""
        # Use super() for consistency with the other test classes in this
        # file, so cooperative setUp chains are honoured.
        super(TestAddMissingBuilds, self).setUp()
        self.stp = SoyuzTestPublisher()
        self.stp.prepareBreezyAutotest()

        # i386 and hppa are enabled by STP but we need to mark hppa as
        # PPA-enabled.
        self.stp.breezy_autotest_hppa.supports_virtualized = True

        # Create an arch-any and an arch-all source in a PPA.
        self.ppa = self.factory.makeArchive(
            purpose=ArchivePurpose.PPA, distribution=self.stp.ubuntutest)
        self.all = self.stp.getPubSource(
            sourcename="all", architecturehintlist="all", archive=self.ppa,
            status=PackagePublishingStatus.PUBLISHED)
        self.any = self.stp.getPubSource(
            sourcename="any", architecturehintlist="any", archive=self.ppa,
            status=PackagePublishingStatus.PUBLISHED)
        self.required_arches = [
            self.stp.breezy_autotest_hppa,
            self.stp.breezy_autotest_i386]

    def runScript(self, test_args=None):
        """Run the script itself, returning the result and output.

        Return a tuple of the process's return code, stdout output and
        stderr output.
        """
        if test_args is None:
            test_args = []
        script = os.path.join(
            config.root, "scripts", "add-missing-builds.py")
        args = [sys.executable, script]
        args.extend(test_args)
        process = subprocess.Popen(
            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        return (process.returncode, stdout, stderr)

    def getScript(self):
        """Return an instance of the script object."""
        script = AddMissingBuilds("test", test_args=[])
        script.logger = BufferLogger()
        return script

    def getBuilds(self):
        """Helper to return build records."""
        any_build_i386 = self.any.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_i386, self.ppa)
        any_build_hppa = self.any.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_hppa, self.ppa)
        all_build_i386 = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_i386, self.ppa)
        all_build_hppa = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_hppa, self.ppa)
        return (
            any_build_i386, any_build_hppa, all_build_i386, all_build_hppa)

    def assertBuildsForAny(self):
        """Helper to assert that builds were created for the 'Any' package."""
        (
            any_build_i386, any_build_hppa, all_build_i386,
            all_build_hppa
            ) = self.getBuilds()
        self.assertIsNot(any_build_i386, None)
        self.assertIsNot(any_build_hppa, None)

    def assertNoBuilds(self):
        """Helper to assert that no builds were created."""
        (
            any_build_i386, any_build_hppa, all_build_i386,
            all_build_hppa
            ) = self.getBuilds()
        self.assertIs(any_build_i386, None)
        self.assertIs(any_build_hppa, None)
        self.assertIs(all_build_i386, None)
        self.assertIs(all_build_hppa, None)

    def testSimpleRun(self):
        """Try a simple script run.

        This test ensures that the script starts up and runs.
        It should create some missing builds.
        """
        # Commit the changes made in setUp()
        self.layer.txn.commit()

        args = [
            "-d", "ubuntutest",
            "-s", "breezy-autotest",
            "-a", "i386",
            "-a", "hppa",
            "--ppa", "%s" % self.ppa.owner.name,
            "--ppa-name", self.ppa.name,
            ]
        code, stdout, stderr = self.runScript(args)
        self.assertEqual(
            code, 0,
            "The script returned with a non zero exit code: %s\n%s\n%s" % (
                code, stdout, stderr))

        # Sync database changes made in the external process.
        flush_database_updates()
        clear_current_connection_cache()

        # The arch-any package will get builds for all architectures.
        self.assertBuildsForAny()

        # The arch-all package is architecture-independent, so it will
        # only get a build for i386 which is the nominated architecture-
        # independent build arch.
        all_build_i386 = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_i386, self.ppa)
        all_build_hppa = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_hppa, self.ppa)
        self.assertIsNot(all_build_i386, None)
        self.assertIs(all_build_hppa, None)

    def testNoActionForNoSources(self):
        """Test that if nothing is published, no builds are created."""
        self.all.requestDeletion(self.ppa.owner)
        self.any.requestDeletion(self.ppa.owner)

        script = self.getScript()
        script.add_missing_builds(
            self.ppa, self.required_arches, self.stp.breezy_autotest,
            PackagePublishingPocket.RELEASE)
        self.assertNoBuilds()
class TestAcceptRejectQueueUploads(TestCaseWithFactory):
    """Uploads can be accepted or rejected with the relevant permissions."""

    layer = LaunchpadFunctionalLayer

    def makeSPR(self, sourcename, component, archive, changes_file_content,
                pocket=None, distroseries=None):
        """Create a source package release with an attached package upload.

        :param pocket: defaults to the RELEASE pocket.
        :param distroseries: defaults to the test publisher's distroseries.
        :return: the new source package release.
        """
        if pocket is None:
            pocket = PackagePublishingPocket.RELEASE
        if distroseries is None:
            distroseries = self.test_publisher.distroseries
        spr = self.factory.makeSourcePackageRelease(
            sourcepackagename=sourcename, component=component, archive=archive,
            distroseries=distroseries)
        packageupload = self.factory.makePackageUpload(
            archive=archive, pocket=pocket, distroseries=distroseries,
            changes_file_content=changes_file_content)
        packageupload.addSource(spr)
        return spr

    def setUp(self):
        """Create two new uploads in the new state and a person with
        permission to upload to the partner archive."""
        super(TestAcceptRejectQueueUploads, self).setUp()
        login('*****@*****.**')
        self.test_publisher = SoyuzTestPublisher()
        self.test_publisher.prepareBreezyAutotest()
        distribution = self.test_publisher.distroseries.distribution
        self.second_series = self.factory.makeDistroSeries(
            distribution=distribution)
        self.factory.makeComponentSelection(self.second_series, 'main')
        self.main_archive = distribution.getArchiveByComponent('main')
        self.partner_archive = distribution.getArchiveByComponent('partner')

        # Get some sample changes file content for the new uploads.
        with open(datadir('suite/bar_1.0-1/bar_1.0-1_source.changes')) as cf:
            changes_file_content = cf.read()

        self.partner_spr = self.makeSPR(
            'partner-upload', 'partner', self.partner_archive,
            changes_file_content,
            distroseries=self.test_publisher.distroseries)
        self.main_spr = self.makeSPR(
            'main-upload', 'main', self.main_archive, changes_file_content,
            distroseries=self.test_publisher.distroseries)
        self.proposed_spr = self.makeSPR(
            'proposed-upload', 'main', self.main_archive, changes_file_content,
            pocket=PackagePublishingPocket.PROPOSED,
            distroseries=self.test_publisher.distroseries)
        self.proposed_series_spr = self.makeSPR(
            'proposed-series-upload', 'main', self.main_archive,
            changes_file_content, pocket=PackagePublishingPocket.PROPOSED,
            distroseries=self.second_series)

        # Define the form that will be used to post to the view.
        self.form = {
            'queue_state': PackageUploadStatus.NEW.value,
            'Accept': 'Accept',
            }

        # Create a user with queue admin rights for main, and a separate
        # user with queue admin rights for partner (on the partner
        # archive).
        self.main_queue_admin = self.factory.makePerson()
        getUtility(IArchivePermissionSet).newQueueAdmin(
            distribution.getArchiveByComponent('main'),
            self.main_queue_admin, self.main_spr.component)
        self.partner_queue_admin = self.factory.makePerson()
        getUtility(IArchivePermissionSet).newQueueAdmin(
            distribution.getArchiveByComponent('partner'),
            self.partner_queue_admin, self.partner_spr.component)

        # Create users with various pocket queue admin rights.
        self.proposed_queue_admin = self.factory.makePerson()
        getUtility(IArchivePermissionSet).newPocketQueueAdmin(
            self.main_archive, self.proposed_queue_admin,
            PackagePublishingPocket.PROPOSED)
        self.proposed_series_queue_admin = self.factory.makePerson()
        getUtility(IArchivePermissionSet).newPocketQueueAdmin(
            self.main_archive, self.proposed_series_queue_admin,
            PackagePublishingPocket.PROPOSED, distroseries=self.second_series)

        # We need to commit to ensure the changes file exists in the
        # librarian.
        transaction.commit()
        logout()

    def setupQueueView(self, request, series=None):
        """A helper to create and setup the view for testing.

        :param series: the distroseries whose queue to view; defaults to
            the test publisher's distroseries.
        """
        if series is None:
            series = self.test_publisher.distroseries
        view = queryMultiAdapter((series, request), name="+queue")
        view.setupQueueList()
        view.performQueueAction()
        return view

    def assertStatus(self, package_upload_id, status):
        """Assert that the given package upload has the expected status."""
        self.assertEqual(
            status,
            getUtility(IPackageUploadSet).get(package_upload_id).status)

    def test_main_admin_can_accept_main_upload(self):
        # A person with queue admin access for main
        # can accept uploads to the main archive.
        login_person(self.main_queue_admin)
        self.assertTrue(
            self.main_archive.canAdministerQueue(
                self.main_queue_admin, self.main_spr.component))

        package_upload_id = self.main_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        self.setupQueueView(request)
        self.assertStatus(package_upload_id, PackageUploadStatus.DONE)

    def test_main_admin_cannot_accept_partner_upload(self):
        # A person with queue admin access for main cannot necessarily
        # accept uploads to partner.
        login_person(self.main_queue_admin)
        self.assertFalse(
            self.partner_archive.canAdministerQueue(
                self.main_queue_admin, self.partner_spr.component))

        package_upload_id = self.partner_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        view = self.setupQueueView(request)

        # assertEqual (not the deprecated assertEquals alias).
        self.assertEqual(
            html_escape(
                "FAILED: partner-upload (You have no rights to accept "
                "component(s) 'partner')"),
            view.request.response.notifications[0].message)
        self.assertStatus(package_upload_id, PackageUploadStatus.NEW)

    def test_admin_can_accept_partner_upload(self):
        # An admin can always accept packages, even for the
        # partner archive (note, this is *not* an archive admin).
        login('*****@*****.**')

        package_upload_id = self.partner_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        self.setupQueueView(request)
        self.assertStatus(package_upload_id, PackageUploadStatus.DONE)

    def test_partner_admin_can_accept_partner_upload(self):
        # A person with queue admin access for partner
        # can accept uploads to the partner archive.
        login_person(self.partner_queue_admin)
        self.assertTrue(
            self.partner_archive.canAdministerQueue(
                self.partner_queue_admin, self.partner_spr.component))

        package_upload_id = self.partner_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        self.setupQueueView(request)
        self.assertStatus(package_upload_id, PackageUploadStatus.DONE)

    def test_partner_admin_cannot_accept_main_upload(self):
        # A person with queue admin access for partner cannot necessarily
        # accept uploads to main.
        login_person(self.partner_queue_admin)
        self.assertFalse(
            self.main_archive.canAdministerQueue(
                self.partner_queue_admin, self.main_spr.component))

        package_upload_id = self.main_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        view = self.setupQueueView(request)

        # assertEqual (not the deprecated assertEquals alias).
        self.assertEqual(
            html_escape(
                "FAILED: main-upload (You have no rights to accept "
                "component(s) 'main')"),
            view.request.response.notifications[0].message)
        self.assertStatus(package_upload_id, PackageUploadStatus.NEW)

    def test_proposed_admin_can_accept_proposed_upload(self):
        # A person with queue admin access for proposed can accept uploads
        # to the proposed pocket for any series.
        login_person(self.proposed_queue_admin)
        self.assertTrue(
            self.main_archive.canAdministerQueue(
                self.proposed_queue_admin,
                pocket=PackagePublishingPocket.PROPOSED))
        for distroseries in self.test_publisher.distroseries.distribution:
            self.assertTrue(
                self.main_archive.canAdministerQueue(
                    self.proposed_queue_admin,
                    pocket=PackagePublishingPocket.PROPOSED,
                    distroseries=distroseries))

        for spr in (self.proposed_spr, self.proposed_series_spr):
            package_upload_id = spr.package_upload.id
            self.form['QUEUE_ID'] = [package_upload_id]
            request = LaunchpadTestRequest(form=self.form)
            request.method = 'POST'
            self.setupQueueView(request, series=spr.upload_distroseries)
            self.assertStatus(package_upload_id, PackageUploadStatus.DONE)

    def test_proposed_admin_cannot_accept_release_upload(self):
        # A person with queue admin access for proposed cannot necessarly
        # accept uploads to the release pocket.
        login_person(self.proposed_queue_admin)
        self.assertFalse(
            self.main_archive.canAdministerQueue(
                self.proposed_queue_admin,
                pocket=PackagePublishingPocket.RELEASE))

        package_upload_id = self.main_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        view = self.setupQueueView(request)

        self.assertEqual(
            html_escape(
                "FAILED: main-upload (You have no rights to accept "
                "component(s) 'main')"),
            view.request.response.notifications[0].message)
        self.assertStatus(package_upload_id, PackageUploadStatus.NEW)

    def test_proposed_series_admin_can_accept_that_series_upload(self):
        # A person with queue admin access for proposed for one series can
        # accept uploads to that series.
        login_person(self.proposed_series_queue_admin)
        self.assertTrue(
            self.main_archive.canAdministerQueue(
                self.proposed_series_queue_admin,
                pocket=PackagePublishingPocket.PROPOSED,
                distroseries=self.second_series))

        package_upload_id = self.proposed_series_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        self.setupQueueView(request, series=self.second_series)
        self.assertStatus(package_upload_id, PackageUploadStatus.DONE)

    def test_proposed_series_admin_cannot_accept_other_series_upload(self):
        # A person with queue admin access for proposed for one series
        # cannot necessarily accept uploads to other series.
        login_person(self.proposed_series_queue_admin)
        self.assertFalse(
            self.main_archive.canAdministerQueue(
                self.proposed_series_queue_admin,
                pocket=PackagePublishingPocket.PROPOSED,
                distroseries=self.test_publisher.distroseries))

        package_upload_id = self.proposed_spr.package_upload.id
        self.form['QUEUE_ID'] = [package_upload_id]
        request = LaunchpadTestRequest(form=self.form)
        request.method = 'POST'
        view = self.setupQueueView(request)

        self.assertEqual(
            "You do not have permission to act on queue items.", view.error)
        self.assertStatus(package_upload_id, PackageUploadStatus.NEW)

    def test_cannot_reject_without_comment(self):
        # Rejecting an upload requires a rejection comment.
        login_person(self.proposed_queue_admin)
        package_upload_id = self.proposed_spr.package_upload.id
        form = {
            'Reject': 'Reject',
            'QUEUE_ID': [package_upload_id]}
        request = LaunchpadTestRequest(form=form)
        request.method = 'POST'
        view = self.setupQueueView(request)
        self.assertEqual('Rejection comment required.', view.error)
        self.assertStatus(package_upload_id, PackageUploadStatus.NEW)

    def test_reject_with_comment(self):
        # Supplying a rejection comment allows the upload to be rejected.
        # (Re-indented to the standard four spaces; this method previously
        # used a three-space body indent.)
        login_person(self.proposed_queue_admin)
        package_upload_id = self.proposed_spr.package_upload.id
        form = {
            'Reject': 'Reject',
            'rejection_comment': 'Because I can.',
            'QUEUE_ID': [package_upload_id]}
        request = LaunchpadTestRequest(form=form)
        request.method = 'POST'
        self.setupQueueView(request)
        self.assertStatus(package_upload_id, PackageUploadStatus.REJECTED)
# Beispiel #29
class TestBuildSet(TestCaseWithFactory):
    """Tests for `IBinaryPackageBuildSet` lookups and creation."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildSet, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor_one = self.factory.makeProcessor(
            supports_virtualized=True)
        self.processor_two = self.factory.makeProcessor(
            supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries()
        self.distribution = self.distroseries.distribution
        self.das_one = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_one)
        self.das_two = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor_two)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das_one
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.factory.makeBuilder(processors=[self.processor_one])
            self.factory.makeBuilder(processors=[self.processor_two])
        self.builds = []
        self.spphs = []

    def setUpBuilds(self):
        """Publish five sources and create their builds.

        The last source's builds are marked FAILEDTOBUILD; the rest are
        FULLYBUILT.
        """
        for i in range(5):
            # Create some test builds
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" % (self.factory.getUniqueInteger(), i),
                distroseries=self.distroseries,
                architecturehintlist='any')
            self.spphs.append(spph)
            builds = removeSecurityProxy(
                getUtility(IBinaryPackageBuildSet).createForSource(
                    spph.sourcepackagerelease, spph.archive, spph.distroseries,
                    spph.pocket))
            with person_logged_in(self.admin):
                for b in builds:
                    b.updateStatus(BuildStatus.BUILDING)
                    if i == 4:
                        b.updateStatus(BuildStatus.FAILEDTOBUILD)
                    else:
                        b.updateStatus(BuildStatus.FULLYBUILT)
                    b.buildqueue_record.destroySelf()
            self.builds += builds

    def test_new_virtualization(self):
        # Builds are virtualized unless Processor.support_nonvirtualized
        # and not Archive.require_virtualized.

        def make(proc_virt, proc_nonvirt, archive_virt):
            proc = self.factory.makeProcessor(
                supports_nonvirtualized=proc_nonvirt,
                supports_virtualized=proc_virt)
            das = self.factory.makeDistroArchSeries(processor=proc)
            archive = self.factory.makeArchive(
                distribution=das.distroseries.distribution,
                virtualized=archive_virt)
            bpb = getUtility(IBinaryPackageBuildSet).new(
                self.factory.makeSourcePackageRelease(), archive, das,
                PackagePublishingPocket.RELEASE)
            self.assertEqual(proc, bpb.processor)
            return bpb

        # Variable names encode (proc_virt, proc_nonvirt, archive_virt):
        # 'v' for True, 'n' for False.
        vvvbpb = make(proc_virt=True, proc_nonvirt=True, archive_virt=True)
        self.assertTrue(vvvbpb.virtualized)

        vvnbpb = make(proc_virt=True, proc_nonvirt=True, archive_virt=False)
        self.assertFalse(vvnbpb.virtualized)

        vnvbpb = make(proc_virt=True, proc_nonvirt=False, archive_virt=True)
        self.assertTrue(vnvbpb.virtualized)

        # Previously misnamed vnvbpb, which shadowed the case above.
        vnnbpb = make(proc_virt=True, proc_nonvirt=False, archive_virt=False)
        self.assertTrue(vnnbpb.virtualized)

    def test_get_for_distro_distribution(self):
        # Test fetching builds for a distro's main archives
        self.setUpBuilds()
        # Use 'build_set' rather than 'set', which shadows the builtin.
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution)
        self.assertEqual(build_set.count(), 10)

    def test_get_for_distro_distroseries(self):
        # Test fetching builds for a distroseries' main archives
        self.setUpBuilds()
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distroseries)
        self.assertEqual(build_set.count(), 10)

    def test_get_for_distro_distroarchseries(self):
        # Test fetching builds for a distroarchseries' main archives
        self.setUpBuilds()
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.das_one)
        self.assertEqual(build_set.count(), 5)

    def test_get_for_distro_filter_build_status(self):
        # The result can be filtered based on the build status
        self.setUpBuilds()
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, status=BuildStatus.FULLYBUILT)
        self.assertEqual(build_set.count(), 8)

    def test_get_for_distro_filter_name(self):
        # The result can be filtered based on the name
        self.setUpBuilds()
        spn = self.builds[2].source_package_release.sourcepackagename.name
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, name=spn)
        self.assertEqual(build_set.count(), 2)

    def test_get_for_distro_filter_pocket(self):
        # The result can be filtered based on the pocket of the build
        self.setUpBuilds()
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.RELEASE)
        self.assertEqual(build_set.count(), 10)
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, pocket=PackagePublishingPocket.UPDATES)
        self.assertEqual(build_set.count(), 0)

    def test_get_for_distro_filter_arch_tag(self):
        # The result can be filtered based on the archtag of the build
        self.setUpBuilds()
        build_set = getUtility(IBinaryPackageBuildSet).getBuildsForDistro(
            self.distribution, arch_tag=self.das_one.architecturetag)
        self.assertEqual(build_set.count(), 5)

    def test_get_status_summary_for_builds(self):
        # We can query for the status summary of a number of builds
        self.setUpBuilds()
        relevant_builds = [self.builds[0], self.builds[2], self.builds[-2]]
        summary = getUtility(IBinaryPackageBuildSet).getStatusSummaryForBuilds(
            relevant_builds)
        self.assertEqual(summary['status'], BuildSetStatus.FAILEDTOBUILD)
        self.assertEqual(summary['builds'], [self.builds[-2]])

    def test_preload_data(self):
        # The BuildSet class allows data to be preloaded
        # Note, it is an internal method, so we have to push past the security
        # proxy
        self.setUpBuilds()
        # These are build objects, not ids, so name them accordingly.
        builds = self.builds[:4]
        rset = removeSecurityProxy(
            getUtility(IBinaryPackageBuildSet))._prefetchBuildData(builds)
        self.assertEqual(len(rset), 4)

    def test_get_builds_by_source_package_release(self):
        # We are able to return all of the builds for the source package
        # release ids passed in.
        self.setUpBuilds()
        spphs = self.spphs[:2]
        ids = [spph.sourcepackagerelease.id for spph in spphs]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(ids)
        expected_titles = []
        for spph in spphs:
            for das in (self.das_one, self.das_two):
                expected_titles.append(
                    '%s build of %s %s in %s %s RELEASE' %
                    (das.architecturetag, spph.source_package_name,
                     spph.source_package_version,
                     self.distroseries.distribution.name,
                     self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))

    def test_get_builds_by_source_package_release_filtering(self):
        self.setUpBuilds()
        ids = [self.spphs[-1].sourcepackagerelease.id]
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.FAILEDTOBUILD)
        expected_titles = []
        for das in (self.das_one, self.das_two):
            expected_titles.append(
                '%s build of %s %s in %s %s RELEASE' %
                (das.architecturetag, self.spphs[-1].source_package_name,
                 self.spphs[-1].source_package_version,
                 self.distroseries.distribution.name, self.distroseries.name))
        build_titles = [build.title for build in builds]
        self.assertEqual(sorted(expected_titles), sorted(build_titles))
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(
                ids, buildstate=BuildStatus.CHROOTWAIT)
        self.assertEqual([], list(builds))

    def test_no_get_builds_by_source_package_release(self):
        # If no ids or None are passed into .getBuildsBySourcePackageRelease,
        # an empty list is returned.
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease(None)
        self.assertEqual([], builds)
        builds = getUtility(
            IBinaryPackageBuildSet).getBuildsBySourcePackageRelease([])
        self.assertEqual([], builds)

    def test_getBySourceAndLocation(self):
        self.setUpBuilds()
        self.assertEqual(
            self.builds[0],
            getUtility(IBinaryPackageBuildSet).getBySourceAndLocation(
                self.builds[0].source_package_release, self.builds[0].archive,
                self.builds[0].distro_arch_series))
        self.assertEqual(
            self.builds[1],
            getUtility(IBinaryPackageBuildSet).getBySourceAndLocation(
                self.builds[1].source_package_release, self.builds[1].archive,
                self.builds[1].distro_arch_series))
        self.assertIs(
            None,
            getUtility(IBinaryPackageBuildSet).getBySourceAndLocation(
                self.builds[1].source_package_release,
                self.factory.makeArchive(), self.builds[1].distro_arch_series))
class TestDistroSeriesBinaryPackage(TestCaseWithFactory):
    """Tests for `DistroSeriesBinaryPackage`."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create a distroseriesbinarypackage to play with."""
        super(TestDistroSeriesBinaryPackage, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()
        self.distroseries = self.publisher.distroseries
        self.distribution = self.distroseries.distribution
        binaries = self.publisher.getPubBinaries(
            binaryname='foo-bin', summary='Foo is the best')
        binary_pub = binaries[0]
        self.binary_package_name = (
            binary_pub.binarypackagerelease.binarypackagename)
        self.distroseries_binary_package = DistroSeriesBinaryPackage(
            self.distroseries, self.binary_package_name)

    def test_cache_attribute_when_two_cache_objects(self):
        # We have situations where there are cache objects for each
        # distro archive - we need to handle this situation without
        # OOPSing - see bug 580181.
        distro_archive_1 = self.distribution.main_archive
        distro_archive_2 = self.distribution.all_distro_archives[1]

        # Publish the same binary in another distro archive.
        self.publisher.getPubBinaries(
            binaryname='foo-bin', summary='Foo is the best',
            archive=distro_archive_2)

        logger = BufferLogger()
        with dbuser(config.statistician.dbuser):
            DistroSeriesPackageCache._update(
                self.distroseries, self.binary_package_name, distro_archive_1,
                logger)

            DistroSeriesPackageCache._update(
                self.distroseries, self.binary_package_name, distro_archive_2,
                logger)

        # assertEqual, not the long-deprecated failUnlessEqual alias.
        self.assertEqual(
            'Foo is the best', self.distroseries_binary_package.summary)

    def test_none_cache_passed_at_init_counts_as_cached(self):
        # If the value None is passed as the constructor parameter
        # "cache", it is considered as a valid value.
        # Accessing the property DistroSeriesBinaryPackage.cache
        # later does not lead to the execution of an SQL query to
        # retrieve a DistroSeriesPackageCache record.
        binary_package = DistroSeriesBinaryPackage(
            self.distroseries, self.binary_package_name, cache=None)
        with StormStatementRecorder() as recorder:
            binary_package.cache
        self.assertThat(recorder, HasQueryCount(Equals(0)))

        # If the parameter "cache" was not passed, accessing
        # DistroSeriesBinaryPackage.cache for the first time requires
        # at least one SQL query.
        with StormStatementRecorder() as recorder:
            self.distroseries_binary_package.cache
        self.assertThat(recorder, HasQueryCount(NotEquals(0)))
# Beispiel #31
class TestProcessAcceptedBugsJob(TestCaseWithFactory):
    """Tests for `ProcessAcceptedBugsJob` and its job source."""

    layer = LaunchpadZopelessLayer
    dbuser = config.IProcessAcceptedBugsJobSource.dbuser

    def setUp(self):
        super(TestProcessAcceptedBugsJob, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()
        self.distroseries = self.publisher.breezy_autotest

    def makeJob(self, distroseries=None, spr=None, bug_ids=None):
        """Create a `ProcessAcceptedBugsJob`.

        :param bug_ids: bug ids to close; defaults to [1]. (A None
            sentinel is used to avoid a mutable default argument.)
        """
        if bug_ids is None:
            bug_ids = [1]
        if distroseries is None:
            distroseries = self.distroseries
        if spr is None:
            spr = self.factory.makeSourcePackageRelease(
                distroseries=distroseries, changelog_entry="changelog")
        return getUtility(IProcessAcceptedBugsJobSource).create(
            distroseries, spr, bug_ids)

    def test_job_implements_IProcessAcceptedBugsJob(self):
        job = self.makeJob()
        self.assertTrue(verifyObject(IProcessAcceptedBugsJob, job))

    def test_job_source_implements_IProcessAcceptedBugsJobSource(self):
        job_source = getUtility(IProcessAcceptedBugsJobSource)
        self.assertTrue(
            verifyObject(IProcessAcceptedBugsJobSource, job_source))

    def test_create(self):
        # A ProcessAcceptedBugsJob can be created and stores its arguments.
        spr = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        bug_ids = [1, 2]
        job = self.makeJob(spr=spr, bug_ids=bug_ids)
        self.assertProvides(job, IProcessAcceptedBugsJob)
        self.assertEqual(self.distroseries, job.distroseries)
        self.assertEqual(spr, job.sourcepackagerelease)
        self.assertEqual(bug_ids, job.bug_ids)

    def test_run_raises_errors(self):
        # A job reports unexpected errors as exceptions.
        class Boom(Exception):
            pass

        distroseries = self.factory.makeDistroSeries()
        removeSecurityProxy(distroseries).getSourcePackage = FakeMethod(
            failure=Boom())
        job = self.makeJob(distroseries=distroseries)
        self.assertRaises(Boom, job.run)

    def test___repr__(self):
        spr = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        bug_ids = [1, 2]
        job = self.makeJob(spr=spr, bug_ids=bug_ids)
        self.assertEqual(
            ("<ProcessAcceptedBugsJob to close bugs [1, 2] for "
             "{spr.name}/{spr.version} ({distroseries.distribution.name} "
             "{distroseries.name})>").format(
                distroseries=self.distroseries, spr=spr),
            repr(job))

    def test_run(self):
        # A proper test run closes bugs.
        spr = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        bug = self.factory.makeBug()
        bugtask = self.factory.makeBugTask(
            target=self.distroseries.getSourcePackage(spr.sourcepackagename),
            bug=bug)
        self.assertEqual(BugTaskStatus.NEW, bugtask.status)
        job = self.makeJob(spr=spr, bug_ids=[bug.id])
        JobRunner([job]).runAll()
        self.assertEqual(BugTaskStatus.FIXRELEASED, bugtask.status)

    def test_smoke(self):
        spr = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        bug = self.factory.makeBug()
        bugtask = self.factory.makeBugTask(
            target=self.distroseries.getSourcePackage(spr.sourcepackagename),
            bug=bug)
        self.assertEqual(BugTaskStatus.NEW, bugtask.status)
        self.makeJob(spr=spr, bug_ids=[bug.id])
        transaction.commit()

        out, err, exit_code = run_script(
            "LP_DEBUG_SQL=1 cronscripts/process-job-source.py -vv %s" % (
                IProcessAcceptedBugsJobSource.getName()))

        self.addDetail("stdout", text_content(out))
        self.addDetail("stderr", text_content(err))

        self.assertEqual(0, exit_code)
        self.assertEqual(BugTaskStatus.FIXRELEASED, bugtask.status)
class TestBuildUpdateDependencies(TestCaseWithFactory):
    """Tests for `IBinaryPackageBuild.updateDependencies`."""

    layer = LaunchpadZopelessLayer

    def _setupSimpleDepwaitContext(self):
        """Use `SoyuzTestPublisher` to set up a simple depwait context.

        Return an `IBinaryPackageBuild` in MANUALDEPWAIT state and depending
        on a binary that exists and is reachable.
        """
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        depwait_source = self.publisher.getPubSource(
            sourcename='depwait-source')

        # Publish the binary the build will depend on, so that
        # updateDependencies() can find a satisfying candidate.
        self.publisher.getPubBinaries(
            binaryname='dep-bin',
            status=PackagePublishingStatus.PUBLISHED)

        [depwait_build] = depwait_source.createMissingBuilds()
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin'})
        return depwait_build

    def testUpdateDependenciesWorks(self):
        # Calling `IBinaryPackageBuild.updateDependencies` makes the build
        # record ready for dispatch.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()
        depwait_build.updateDependencies()
        # The only dependency is satisfied, so nothing remains outstanding.
        self.assertEqual(depwait_build.dependencies, '')

    def assertRaisesUnparsableDependencies(self, depwait_build, dependencies):
        """Assert `updateDependencies` rejects the given dependency string."""
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': dependencies})
        self.assertRaises(
            UnparsableDependencies, depwait_build.updateDependencies)

    def testInvalidDependencies(self):
        # Calling `IBinaryPackageBuild.updateDependencies` on a build with
        # invalid 'dependencies' raises UnparsableDependencies.
        # Anything not following '<name> [([relation] <version>)][, ...]'
        depwait_build = self._setupSimpleDepwaitContext()

        # None is not a valid dependency value.
        self.assertRaisesUnparsableDependencies(depwait_build, None)

        # Missing 'name'.
        self.assertRaisesUnparsableDependencies(depwait_build, '(>> version)')

        # Missing 'version'.
        self.assertRaisesUnparsableDependencies(depwait_build, 'name (>>)')

        # Missing comma between dependencies.
        self.assertRaisesUnparsableDependencies(depwait_build, 'name1 name2')

    def testBug378828(self):
        # `IBinaryPackageBuild.updateDependencies` copes with the
        # scenario where the corresponding source publication is not
        # active (deleted) and the source original component is not a
        # valid ubuntu component.
        depwait_build = self._setupSimpleDepwaitContext()

        spr = depwait_build.source_package_release
        depwait_build.current_source_publication.requestDeletion(
            spr.creator)
        contrib = getUtility(IComponentSet).new('contrib')
        # Bypass the security proxy to force the non-ubuntu component
        # onto the source package release.
        removeSecurityProxy(spr).component = contrib

        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testVersionedDependencies(self):
        # `IBinaryPackageBuild.updateDependencies` supports versioned
        # dependencies. A build will not be retried unless the candidate
        # complies with the version restriction.
        # In this case, dep-bin 666 is available. >> 666 isn't
        # satisfied, but >= 666 is.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (>> 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, 'dep-bin (>> 666)')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (>= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testVersionedDependencyOnOldPublication(self):
        # `IBinaryPackageBuild.updateDependencies` doesn't just consider
        # the latest publication. There may be older publications which
        # satisfy the version constraints (in other archives or pockets).
        # In this case, dep-bin 666 and 999 are available, so both = 666
        # and = 999 are satisfied.
        depwait_build = self._setupSimpleDepwaitContext()
        self.publisher.getPubBinaries(
            binaryname='dep-bin', version='999',
            status=PackagePublishingStatus.PUBLISHED)
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (= 999)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testStrictInequalities(self):
        # Strict inequalities (<< and >>) are honoured: dep-bin 666 is
        # published, so only constraints excluding 666 stay outstanding.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        for dep, expected in (
                ('dep-bin (<< 444)', 'dep-bin (<< 444)'),
                ('dep-bin (>> 444)', ''),
                ('dep-bin (<< 888)', ''),
                ('dep-bin (>> 888)', 'dep-bin (>> 888)'),
                ):
            depwait_build.updateStatus(
                BuildStatus.MANUALDEPWAIT, slave_status={'dependencies': dep})
            depwait_build.updateDependencies()
            self.assertEqual(expected, depwait_build.dependencies)

    def testDisjunctions(self):
        # If one of a set of alternatives becomes available, that set of
        # alternatives is dropped from the outstanding dependencies.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={
                'dependencies': 'dep-bin (>= 999) | alt-bin, dep-tools'})
        depwait_build.updateDependencies()
        # Neither alternative is satisfiable yet, so nothing is dropped.
        self.assertEqual(
            'dep-bin (>= 999) | alt-bin, dep-tools',
            depwait_build.dependencies)

        self.publisher.getPubBinaries(
            binaryname='alt-bin', status=PackagePublishingStatus.PUBLISHED)
        self.layer.txn.commit()

        depwait_build.updateDependencies()
        self.assertEqual('dep-tools', depwait_build.dependencies)

    def testAptVersionConstraints(self):
        # launchpad-buildd can return apt-style version constraints
        # using < and > rather than << and >>.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (> 666), dep-bin (< 777)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, 'dep-bin (> 666)')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (> 665)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')
class TestAddMissingBuilds(TestCaseWithFactory):
    """Test the add-missing-builds.py script. """

    layer = LaunchpadZopelessLayer
    dbuser = config.builddmaster.dbuser

    def setUp(self):
        """Make a PPA and publish some sources that need builds."""
        TestCaseWithFactory.setUp(self)
        self.stp = SoyuzTestPublisher()
        self.stp.prepareBreezyAutotest()

        # i386 and hppa are enabled by STP but we need to mark hppa as
        # PPA-enabled.
        self.stp.breezy_autotest_hppa.supports_virtualized = True

        # Create an arch-any and an arch-all source in a PPA.
        self.ppa = self.factory.makeArchive(purpose=ArchivePurpose.PPA,
                                            distribution=self.stp.ubuntutest)
        self.all = self.stp.getPubSource(
            sourcename="all",
            architecturehintlist="all",
            archive=self.ppa,
            status=PackagePublishingStatus.PUBLISHED)
        self.any = self.stp.getPubSource(
            sourcename="any",
            architecturehintlist="any",
            archive=self.ppa,
            status=PackagePublishingStatus.PUBLISHED)
        self.required_arches = [
            self.stp.breezy_autotest_hppa, self.stp.breezy_autotest_i386
        ]

    def runScript(self, test_args=None):
        """Run the script itself, returning the result and output.

        :param test_args: optional list of extra command-line arguments.
        :return: a tuple of the process's return code, stdout output and
            stderr output.
        """
        if test_args is None:
            test_args = []
        script = os.path.join(config.root, "scripts", "add-missing-builds.py")
        args = [sys.executable, script]
        args.extend(test_args)
        process = subprocess.Popen(args,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        return (process.returncode, stdout, stderr)

    def getScript(self):
        """Return an instance of the script object."""
        script = AddMissingBuilds("test", test_args=[])
        script.logger = BufferLogger()
        return script

    def getBuilds(self):
        """Return the build records for both sources on both arches.

        :return: a 4-tuple (any_i386, any_hppa, all_i386, all_hppa); each
            entry is the build record or None if no build exists.
        """
        any_build_i386 = self.any.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_i386, self.ppa)
        any_build_hppa = self.any.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_hppa, self.ppa)
        all_build_i386 = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_i386, self.ppa)
        all_build_hppa = self.all.sourcepackagerelease.getBuildByArch(
            self.stp.breezy_autotest_hppa, self.ppa)
        return (any_build_i386, any_build_hppa, all_build_i386, all_build_hppa)

    def assertBuildsForAny(self):
        """Helper to assert that builds were created for the 'Any' package."""
        # Only the arch-any records matter here; the arch-all ones are
        # checked by the callers that care about them.
        any_build_i386, any_build_hppa = self.getBuilds()[:2]
        self.assertIsNot(any_build_i386, None)
        self.assertIsNot(any_build_hppa, None)

    def assertNoBuilds(self):
        """Helper to assert that no builds were created."""
        (any_build_i386, any_build_hppa, all_build_i386,
         all_build_hppa) = self.getBuilds()
        self.assertIs(any_build_i386, None)
        self.assertIs(any_build_hppa, None)
        self.assertIs(all_build_i386, None)
        self.assertIs(all_build_hppa, None)

    def testSimpleRun(self):
        """Try a simple script run.

        This test ensures that the script starts up and runs.
        It should create some missing builds.
        """
        # Commit the changes made in setUp() so that the external script
        # process can see them.
        self.layer.txn.commit()

        args = [
            "-d",
            "ubuntutest",
            "-s",
            "breezy-autotest",
            "-a",
            "i386",
            "-a",
            "hppa",
            "--ppa",
            "%s" % self.ppa.owner.name,
            "--ppa-name",
            self.ppa.name,
        ]
        code, stdout, stderr = self.runScript(args)
        self.assertEqual(
            code, 0,
            "The script returned with a non zero exit code: %s\n%s\n%s" %
            (code, stdout, stderr))

        # Sync database changes made in the external process.
        flush_database_updates()
        clear_current_connection_cache()

        # The arch-any package will get builds for all architectures.
        self.assertBuildsForAny()

        # The arch-all package is architecture-independent, so it will
        # only get a build for i386 which is the nominated architecture-
        # independent build arch.
        _, _, all_build_i386, all_build_hppa = self.getBuilds()
        self.assertIsNot(all_build_i386, None)
        self.assertIs(all_build_hppa, None)

    def testNoActionForNoSources(self):
        """Test that if nothing is published, no builds are created."""
        self.all.requestDeletion(self.ppa.owner)
        self.any.requestDeletion(self.ppa.owner)

        script = self.getScript()
        script.add_missing_builds(self.ppa, self.required_arches,
                                  self.stp.breezy_autotest,
                                  PackagePublishingPocket.RELEASE)
        self.assertNoBuilds()
class TestBuildNotify(TestCaseWithFactory):
    """Tests for the mail sent by `IBinaryPackageBuild.notify`."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestBuildNotify, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create all of the items we need to create builds.
        self.processor = self.factory.makeProcessor(supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor)
        self.creator = self.factory.makePerson(email='*****@*****.**')
        self.gpgkey = self.factory.makeGPGKey(owner=self.creator)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.ppa = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PPA)
        buildd_admins = getUtility(IPersonSet).getByName(
            'launchpad-buildd-admins')
        # Series/chroot manipulation requires admin privileges.
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(
                processors=[self.processor])
            self.buildd_admins_members = list(buildd_admins.activemembers)
        self.builds = []

    def create_builds(self, archive):
        """Create one build in `archive` for every `BuildStatus`.

        The builds are appended to self.builds, indexed by status value.
        """
        for status in BuildStatus.items:
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" %
                (self.factory.getUniqueInteger(), status.value),
                distroseries=self.distroseries,
                architecturehintlist='any',
                creator=self.creator,
                archive=archive)
            spph.sourcepackagerelease.signing_key_fingerprint = (
                self.gpgkey.fingerprint)
            spph.sourcepackagerelease.signing_key_owner = (self.gpgkey.owner)
            [build] = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                build.updateStatus(BuildStatus.BUILDING, builder=self.builder)
                build.updateStatus(
                    status,
                    date_finished=(build.date_started +
                                   timedelta(minutes=5 * (status.value + 1))))
                # Only builds that are still building keep a queue entry.
                if status != BuildStatus.BUILDING:
                    build.buildqueue_record.destroySelf()
                else:
                    build.buildqueue_record.builder = self.builder
            self.builds.append(build)

    def _assert_mail_is_correct(self,
                                build,
                                notification,
                                recipient,
                                reason,
                                ppa=False):
        """Assert that one sent mail matches the data from `build`.

        :param notification: the email message to check.
        :param recipient: the person expected in the To: header.
        :param reason: the subscription reason ('buildd-admin', 'creator',
            'signer' or 'owner') driving the rationale headers.
        :param ppa: whether the build belongs to a PPA.
        """
        self.assertEqual(format_address_for_person(recipient),
                         notification['To'])
        if reason == "buildd-admin":
            rationale = "Buildd-Admin @launchpad-buildd-admins"
            expected_for = "launchpad-buildd-admins"
        else:
            rationale = reason.title()
            expected_for = recipient.name
        self.assertEqual(rationale,
                         notification['X-Launchpad-Message-Rationale'])
        self.assertEqual(expected_for, notification['X-Launchpad-Message-For'])
        self.assertEqual('package-build-status',
                         notification['X-Launchpad-Notification-Type'])
        self.assertEqual('*****@*****.**',
                         notification['X-Creator-Recipient'])
        self.assertEqual(self.das.architecturetag,
                         notification['X-Launchpad-Build-Arch'])
        self.assertEqual('main', notification['X-Launchpad-Build-Component'])
        self.assertEqual(build.status.name,
                         notification['X-Launchpad-Build-State'])
        self.assertEqual(build.archive.reference,
                         notification['X-Launchpad-Archive'])
        if ppa and build.archive.distribution.name == 'ubuntu':
            self.assertEqual(get_ppa_reference(self.ppa),
                             notification['X-Launchpad-PPA'])
        body = notification.get_payload(decode=True)
        build_log = 'None'
        if ppa:
            source = 'not available'
        else:
            source = canonical_url(build.distributionsourcepackagerelease)
        # Duration/log/builder lines vary with the build's final state.
        if build.status == BuildStatus.BUILDING:
            duration = 'not finished'
            build_log = 'see builder page'
            builder = canonical_url(build.builder)
        elif (build.status == BuildStatus.SUPERSEDED
              or build.status == BuildStatus.NEEDSBUILD):
            duration = 'not available'
            build_log = 'not available'
            builder = 'not available'
        elif build.status == BuildStatus.UPLOADING:
            duration = 'uploading'
            build_log = 'see builder page'
            builder = 'not available'
        else:
            duration = DurationFormatterAPI(
                build.duration).approximateduration()
            builder = canonical_url(build.builder)
        expected_body = dedent(
            """
         * Source Package: %s
         * Version: %s
         * Architecture: %s
         * Archive: %s
         * Component: main
         * State: %s
         * Duration: %s
         * Build Log: %s
         * Builder: %s
         * Source: %s



        If you want further information about this situation, feel free to
        contact us by asking a question on Launchpad
        (https://answers.launchpad.net/launchpad/+addquestion).

        %s
        %s
        %s
        """ %
            (build.source_package_release.sourcepackagename.name,
             build.source_package_release.version, self.das.architecturetag,
             build.archive.reference, build.status.title, duration, build_log,
             builder, source, "-- ", build.title, canonical_url(build)))
        expected_body += "\n" + REASONS[reason] + "\n"
        self.assertEqual(expected_body, body)

    def _assert_mails_are_correct(self, build, reasons, ppa=False):
        """Assert one correct mail was sent per (recipient, reason) pair."""
        notifications = pop_notifications()
        reasons = sorted(reasons,
                         key=lambda r: format_address_for_person(r[0]))
        # zip() would silently ignore unexpected extra notifications (or
        # missing ones), so pin the counts down first.
        self.assertEqual(len(reasons), len(notifications))
        for notification, (recipient, reason) in zip(notifications, reasons):
            self._assert_mail_is_correct(build,
                                         notification,
                                         recipient,
                                         reason,
                                         ppa=ppa)

    def _notify_build(self, archive, status):
        """Create builds in `archive`, notify for the one in `status`.

        :return: the build that was notified.
        """
        self.create_builds(archive)
        build = self.builds[status.value]
        with dbuser(config.builddmaster.dbuser):
            build.notify()
        return build

    def _expected_primary_reasons(self):
        """Primary archive mail goes to the buildd admins and the creator."""
        reasons = [(person, "buildd-admin")
                   for person in self.buildd_admins_members]
        reasons.append((self.creator, "creator"))
        return reasons

    def _expected_ppa_reasons(self):
        """PPA mail goes to the signer and the archive owner."""
        return [
            (self.creator, "signer"),
            (self.ppa.owner, "owner"),
        ]

    def test_notify_failed_to_build(self):
        # For primary archive builds, a build failure notifies the buildd
        # admins and the source package creator.
        build = self._notify_build(self.archive, BuildStatus.FAILEDTOBUILD)
        self._assert_mails_are_correct(build, self._expected_primary_reasons())

    def test_notify_failed_to_build_ppa(self):
        # For PPA builds, a build failure notifies the source package signer
        # and the archive owner, but not the buildd admins.
        build = self._notify_build(self.ppa, BuildStatus.FAILEDTOBUILD)
        self._assert_mails_are_correct(
            build, self._expected_ppa_reasons(), ppa=True)

    def test_notify_needs_building(self):
        # We can notify the creator and buildd admins when a build needs to
        # be built.
        build = self._notify_build(self.archive, BuildStatus.NEEDSBUILD)
        self._assert_mails_are_correct(build, self._expected_primary_reasons())

    def test_notify_needs_building_ppa(self):
        # We can notify the signer and the archive owner when a build needs
        # to be built.
        build = self._notify_build(self.ppa, BuildStatus.NEEDSBUILD)
        self._assert_mails_are_correct(
            build, self._expected_ppa_reasons(), ppa=True)

    def test_notify_successfully_built(self):
        # Successful builds don't notify anyone.
        self._notify_build(self.archive, BuildStatus.FULLYBUILT)
        self.assertEqual([], pop_notifications())

    def test_notify_dependency_wait(self):
        # We can notify the creator and buildd admins when a build can't
        # find a dependency.
        build = self._notify_build(self.archive, BuildStatus.MANUALDEPWAIT)
        self._assert_mails_are_correct(build, self._expected_primary_reasons())

    def test_notify_dependency_wait_ppa(self):
        # We can notify the signer and the archive owner when the build
        # can't find a dependency.
        build = self._notify_build(self.ppa, BuildStatus.MANUALDEPWAIT)
        self._assert_mails_are_correct(
            build, self._expected_ppa_reasons(), ppa=True)

    def test_notify_chroot_problem(self):
        # We can notify the creator and buildd admins when the builder a
        # build attempted to be built on has an internal problem.
        build = self._notify_build(self.archive, BuildStatus.CHROOTWAIT)
        self._assert_mails_are_correct(build, self._expected_primary_reasons())

    def test_notify_chroot_problem_ppa(self):
        # We can notify the signer and the archive owner when the builder a
        # build attempted to be built on has an internal problem.
        build = self._notify_build(self.ppa, BuildStatus.CHROOTWAIT)
        self._assert_mails_are_correct(
            build, self._expected_ppa_reasons(), ppa=True)

    def test_notify_build_for_superseded_source(self):
        # We can notify the creator and buildd admins when the source
        # package had a newer version uploaded before this build had a
        # chance to be dispatched.
        build = self._notify_build(self.archive, BuildStatus.SUPERSEDED)
        self._assert_mails_are_correct(build, self._expected_primary_reasons())

    def test_notify_build_for_superseded_source_ppa(self):
        # We can notify the signer and the archive owner when the source
        # package had a newer version uploaded before this build had a
        # chance to be dispatched.
        build = self._notify_build(self.ppa, BuildStatus.SUPERSEDED)
        self._assert_mails_are_correct(
            build, self._expected_ppa_reasons(), ppa=True)

    def test_notify_currently_building(self):
        # We can notify the creator and buildd admins when the build is
        # currently building.
        build = self._notify_build(self.archive, BuildStatus.BUILDING)
        self._assert_mails_are_correct(build, self._expected_primary_reasons())

    def test_notify_currently_building_ppa(self):
        # We can notify the signer and the archive owner when the build is
        # currently building.
        build = self._notify_build(self.ppa, BuildStatus.BUILDING)
        self._assert_mails_are_correct(
            build, self._expected_ppa_reasons(), ppa=True)

    def test_notify_uploading_build(self):
        # We can notify the creator and buildd admins when the build has
        # completed, and binary packages are being uploaded by the builder.
        build = self._notify_build(self.archive, BuildStatus.UPLOADING)
        self._assert_mails_are_correct(build, self._expected_primary_reasons())

    def test_notify_uploading_build_ppa(self):
        # We can notify the signer and the archive owner when the build has
        # completed, and binary packages are being uploaded by the builder.
        build = self._notify_build(self.ppa, BuildStatus.UPLOADING)
        self._assert_mails_are_correct(
            build, self._expected_ppa_reasons(), ppa=True)

    def test_copied_into_ppa_does_not_spam(self):
        # When a package is copied into a PPA, we don't send mail to the
        # original creator of the source package.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        spph = build.current_source_publication
        ppa_spph = spph.copyTo(self.distroseries,
                               PackagePublishingPocket.RELEASE, self.ppa)
        [ppa_build] = ppa_spph.createMissingBuilds()
        with dbuser(config.builddmaster.dbuser):
            ppa_build.notify()
        self._assert_mails_are_correct(ppa_build, [(self.ppa.owner, "owner")],
                                       ppa=True)

    def test_notify_owner_suppresses_mail(self):
        # When the 'notify_owner' config option is False, we don't send mail
        # to the owner of the SPR.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        notify_owner = dedent("""
            [builddmaster]
            send_build_notification: True
            notify_owner: False
            """)
        config.push('notify_owner', notify_owner)
        try:
            with dbuser(config.builddmaster.dbuser):
                build.notify()
            self._assert_mails_are_correct(
                build, [(person, "buildd-admin")
                        for person in self.buildd_admins_members])
        finally:
            # Undo the config change even if the assertions fail, so it
            # doesn't leak into later tests.
            config.pop('notify_owner')

    def test_build_notification_suppresses_mail(self):
        # When the 'build_notification' config option is False, we don't
        # send any mail at all.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        send_build_notification = dedent("""
            [builddmaster]
            send_build_notification: False
            """)
        config.push('send_build_notification', send_build_notification)
        try:
            with dbuser(config.builddmaster.dbuser):
                build.notify()
            notifications = pop_notifications()
            self.assertEqual(0, len(notifications))
        finally:
            # Undo the config change even if the assertions fail.
            config.pop('send_build_notification')

    def test_sponsored_upload_notification(self):
        # If the signing key is different from the creator, they are both
        # notified.
        sponsor = self.factory.makePerson('*****@*****.**')
        key = self.factory.makeGPGKey(owner=sponsor)
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        spr = build.current_source_publication.sourcepackagerelease
        # Push past the security proxy.
        removeSecurityProxy(spr).signing_key_owner = key.owner
        removeSecurityProxy(spr).signing_key_fingerprint = key.fingerprint
        with dbuser(config.builddmaster.dbuser):
            build.notify()
        expected_reasons = self._expected_primary_reasons()
        expected_reasons.append((sponsor, "signer"))
        self._assert_mails_are_correct(build, expected_reasons)
Beispiel #35
0
class TestBuild(TestCaseWithFactory):
    """Unit tests for BinaryPackageBuild: title, context links, retrying,
    rescoring, logs, estimated durations and binary package creation."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        # Build a minimal fixture: a distroseries with one architecture,
        # fake chroots and a builder, so createMissingBuilds() can create
        # build records for published sources.
        super(TestBuild, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor = self.factory.makeProcessor(supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(
                processors=[self.processor])
        self.now = datetime.now(pytz.UTC)

    def test_title(self):
        # A build has a title which describes the context source version and
        # in which series and architecture it is targeted for.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_title = '%s build of %s %s in %s %s RELEASE' % (
            self.das.architecturetag, spph.source_package_name,
            spph.source_package_version, self.distroseries.distribution.name,
            self.distroseries.name)
        self.assertEqual(expected_title, build.title)

    def test_linking(self):
        # A build directly links to the archive, distribution, distroseries,
        # distroarchseries, pocket in its context and also the source version
        # that generated it.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(self.distroseries.main_archive, build.archive)
        self.assertEqual(self.distroseries.distribution, build.distribution)
        self.assertEqual(self.distroseries, build.distro_series)
        self.assertEqual(self.das, build.distro_arch_series)
        self.assertEqual(PackagePublishingPocket.RELEASE, build.pocket)
        self.assertEqual(self.das.architecturetag, build.arch_tag)
        self.assertTrue(build.virtualized)
        self.assertEqual(
            '%s - %s' %
            (spph.source_package_name, spph.source_package_version),
            build.source_package_release.title)

    def test_processed_builds(self):
        # Builds which were already processed also offer additional
        # information about its process such as the time it was started and
        # finished and its 'log' and 'upload_changesfile' as librarian files.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn,
            version=version,
            distroseries=self.distroseries,
            status=PackagePublishingStatus.PUBLISHED)
        with person_logged_in(self.admin):
            binary = self.publisher.getPubBinaries(
                binaryname=spn,
                distroseries=self.distroseries,
                pub_source=spph,
                version=version,
                builder=self.builder)
        build = binary[0].binarypackagerelease.build
        self.assertTrue(build.was_built)
        self.assertEqual(PackageUploadStatus.DONE, build.package_upload.status)
        # NOTE(review): the fixed 2008 timestamps presumably come from
        # SoyuzTestPublisher's canned build data -- confirm against it.
        self.assertEqual(datetime(2008, 1, 1, 0, 0, 0, tzinfo=pytz.UTC),
                         build.date_started)
        self.assertEqual(datetime(2008, 1, 1, 0, 5, 0, tzinfo=pytz.UTC),
                         build.date_finished)
        self.assertEqual(timedelta(minutes=5), build.duration)
        expected_buildlog = 'buildlog_%s-%s-%s.%s_%s_FULLYBUILT.txt.gz' % (
            self.distroseries.distribution.name, self.distroseries.name,
            self.das.architecturetag, spn, version)
        self.assertEqual(expected_buildlog, build.log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' %
            (self.distroseries.distribution.name, spn, version, build.id))
        expected_buildlog_url = '%s/%s' % (url_start, expected_buildlog)
        self.assertEqual(expected_buildlog_url, build.log_url)
        expected_changesfile = '%s_%s_%s.changes' % (spn, version,
                                                     self.das.architecturetag)
        self.assertEqual(expected_changesfile,
                         build.upload_changesfile.filename)
        expected_changesfile_url = '%s/%s' % (url_start, expected_changesfile)
        self.assertEqual(expected_changesfile_url, build.changesfile_url)
        # Since this build was successful, it can not be retried
        self.assertFalse(build.can_be_retried)

    def test_current_component(self):
        # The currently published component is provided via the
        # 'current_component' property.  It looks over the publishing records
        # and finds the current publication of the source in question.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual('main', build.current_component.name)
        # It may not be the same as
        self.assertEqual('main', build.source_package_release.component.name)
        # If the package has no uploads, its package_upload is None
        self.assertIsNone(build.package_upload)

    def test_current_component_when_unpublished(self):
        # Production has some buggy builds without source publications.
        # current_component returns None in that case.
        spph = self.publisher.getPubSource()
        other_das = self.factory.makeDistroArchSeries()
        build = getUtility(IBinaryPackageBuildSet).new(
            spph.sourcepackagerelease, spph.archive, other_das,
            PackagePublishingPocket.RELEASE)
        self.assertIs(None, build.current_component)

    def test_retry_for_released_series(self):
        # Builds can not be retried for released distroseries
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(distroseries=distroseries,
                                                processor=self.processor)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries)
        [build] = spph.createMissingBuilds()
        self.assertFalse(build.can_be_retried)

    def test_partner_retry_for_released_series(self):
        # Builds for PARTNER can be retried -- even if the distroseries is
        # released.
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(distroseries=distroseries,
                                                processor=self.processor)
        archive = self.factory.makeArchive(
            purpose=ArchivePurpose.PARTNER,
            distribution=distroseries.distribution)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries,
            archive=archive)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry(self):
        # A build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry_cancelled(self):
        # A cancelled build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.CANCELLED)
        self.assertTrue(build.can_be_retried)

    def test_retry_superseded(self):
        # A superseded build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.SUPERSEDED)
        self.assertTrue(build.can_be_retried)

    def test_uploadlog(self):
        # The upload log can be attached to a build
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertIsNone(build.upload_log)
        self.assertIsNone(build.upload_log_url)
        build.storeUploadLog('sample upload log')
        expected_filename = 'upload_%s_log.txt' % build.id
        self.assertEqual(expected_filename, build.upload_log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' %
            (self.distroseries.distribution.name, spph.source_package_name,
             spph.source_package_version, build.id))
        expected_url = '%s/%s' % (url_start, expected_filename)
        self.assertEqual(expected_url, build.upload_log_url)

    def test_retry_resets_state(self):
        # Retrying a build resets most of the state attributes, but does
        # not modify the first dispatch time.
        build = self.factory.makeBinaryPackageBuild()
        build.updateStatus(BuildStatus.BUILDING, date_started=self.now)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        with person_logged_in(self.admin):
            build.retry()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertEqual(self.now, build.date_first_dispatched)
        self.assertIsNone(build.log)
        self.assertIsNone(build.upload_log)
        self.assertEqual(0, build.failure_count)

    def test_retry_resets_virtualized(self):
        # Retrying a build recalculates its virtualization.
        archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution, virtualized=False)
        build = self.factory.makeBinaryPackageBuild(distroarchseries=self.das,
                                                    archive=archive,
                                                    processor=self.processor)
        self.assertFalse(build.virtualized)
        build.updateStatus(BuildStatus.BUILDING)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        self.processor.supports_nonvirtualized = False
        with person_logged_in(self.admin):
            build.retry()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertTrue(build.virtualized)

    def test_create_bpr(self):
        # Test that we can create a BPR from a given build.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        bpn = self.factory.makeBinaryPackageName(name=spn)
        spph = self.publisher.getPubSource(sourcename=spn,
                                           version=version,
                                           distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        binary = build.createBinaryPackageRelease(
            binarypackagename=bpn,
            version=version,
            summary='',
            description='',
            binpackageformat=BinaryPackageFormat.DEB,
            component=spph.sourcepackagerelease.component.id,
            section=spph.sourcepackagerelease.section.id,
            priority=PackagePublishingPriority.STANDARD,
            installedsize=0,
            architecturespecific=False)
        self.assertEqual(1, build.binarypackages.count())
        self.assertEqual([binary], list(build.binarypackages))

    def test_multiple_create_bpr(self):
        # We can create multiple BPRs from a build
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(sourcename=spn,
                                           version=version,
                                           distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_names = []
        for i in range(15):
            bpn_name = '%s-%s' % (spn, i)
            bpn = self.factory.makeBinaryPackageName(bpn_name)
            expected_names.append(bpn_name)
            build.createBinaryPackageRelease(
                binarypackagename=bpn,
                version=str(i),
                summary='',
                description='',
                binpackageformat=BinaryPackageFormat.DEB,
                component=spph.sourcepackagerelease.component.id,
                section=spph.sourcepackagerelease.section.id,
                priority=PackagePublishingPriority.STANDARD,
                installedsize=0,
                architecturespecific=False)
        self.assertEqual(15, build.binarypackages.count())
        bin_names = [b.name for b in build.binarypackages]
        # Verify .binarypackages returns sorted by name
        expected_names.sort()
        self.assertEqual(expected_names, bin_names)

    def test_cannot_rescore_non_needsbuilds_builds(self):
        # If a build record isn't in NEEDSBUILD, it can not be rescored.
        # We will also need to log into an admin to do the rescore.
        with person_logged_in(self.admin):
            [bpph] = self.publisher.getPubBinaries(
                binaryname=self.factory.getUniqueString(),
                version="%s.1" % self.factory.getUniqueInteger(),
                distroseries=self.distroseries)
            build = bpph.binarypackagerelease.build
            self.assertRaises(CannotBeRescored, build.rescore, 20)

    def test_rescore_builds(self):
        # If the user has build-admin privileges, they can rescore builds
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        # NOTE(review): 2505 is apparently the default initial score for
        # this fixture -- confirm against the build scoring policy.
        self.assertEqual(2505, build.buildqueue_record.lastscore)
        with person_logged_in(self.admin):
            build.rescore(5000)
            transaction.commit()
        self.assertEqual(5000, build.buildqueue_record.lastscore)

    def test_source_publication_override(self):
        # Components can be overridden in builds.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(spph, build.current_source_publication)
        universe = getUtility(IComponentSet)['universe']
        overridden_spph = spph.changeOverride(new_component=universe)
        # We can now see current source publication points to the overridden
        # publication.
        self.assertNotEqual(spph, build.current_source_publication)
        self.assertEqual(overridden_spph, build.current_source_publication)

    def test_estimated_duration(self):
        # Builds will have an estimated duration that is set to a
        # previous build of the same sources duration.
        spn = self.factory.getUniqueString()
        spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [build] = spph.createMissingBuilds()
        # Duration is based on package size if there is no previous build.
        self.assertEqual(timedelta(0, 60),
                         build.buildqueue_record.estimated_duration)
        # Set the build as done, and its duration.
        build.updateStatus(BuildStatus.BUILDING,
                           date_started=self.now - timedelta(minutes=72))
        build.updateStatus(BuildStatus.FULLYBUILT, date_finished=self.now)
        build.buildqueue_record.destroySelf()
        new_spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [new_build] = new_spph.createMissingBuilds()
        # The duration for this build is now 72 minutes.
        self.assertEqual(timedelta(0, 72 * 60),
                         new_build.buildqueue_record.estimated_duration)

    def test_store_uploadlog_refuses_to_overwrite(self):
        # Storing an upload log for a build will fail if the build already
        # has an upload log.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        build.storeUploadLog('foo')
        self.assertRaises(AssertionError, build.storeUploadLog, 'bar')
# Beispiel #36
# 0
class TestBuild(TestCaseWithFactory):
    """Unit tests for BinaryPackageBuild: title, context links, retrying,
    rescoring, logs, estimated durations and binary package creation.

    NOTE(review): this class re-uses the name TestBuild and therefore
    shadows the earlier definition in this module.
    """

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        # Build a minimal fixture: a distroseries with one architecture,
        # fake chroots and a builder, so createMissingBuilds() can create
        # build records for published sources.
        super(TestBuild, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)
        self.now = datetime.now(pytz.UTC)

    def test_title(self):
        # A build has a title which describes the context source version and
        # in which series and architecture it is targeted for.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_title = '%s build of %s %s in %s %s RELEASE' % (
            self.das.architecturetag, spph.source_package_name,
            spph.source_package_version, self.distroseries.distribution.name,
            self.distroseries.name)
        self.assertEqual(expected_title, build.title)

    def test_linking(self):
        # A build directly links to the archive, distribution, distroseries,
        # distroarchseries, pocket in its context and also the source version
        # that generated it.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(self.distroseries.main_archive, build.archive)
        self.assertEqual(self.distroseries.distribution, build.distribution)
        self.assertEqual(self.distroseries, build.distro_series)
        self.assertEqual(self.das, build.distro_arch_series)
        self.assertEqual(PackagePublishingPocket.RELEASE, build.pocket)
        self.assertEqual(self.das.architecturetag, build.arch_tag)
        self.assertTrue(build.is_virtualized)
        self.assertEqual(
            '%s - %s' % (spph.source_package_name,
                spph.source_package_version),
            build.source_package_release.title)

    def test_processed_builds(self):
        # Builds which were already processed also offer additional
        # information about its process such as the time it was started and
        # finished and its 'log' and 'upload_changesfile' as librarian files.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version,
            distroseries=self.distroseries,
            status=PackagePublishingStatus.PUBLISHED)
        with person_logged_in(self.admin):
            binary = self.publisher.getPubBinaries(binaryname=spn,
                distroseries=self.distroseries, pub_source=spph,
                version=version, builder=self.builder)
        build = binary[0].binarypackagerelease.build
        self.assertTrue(build.was_built)
        self.assertEqual(
            PackageUploadStatus.DONE, build.package_upload.status)
        # The month/day arguments are plain decimal 1: leading-zero forms
        # like 01 are invalid octal-style literals on Python 3.
        self.assertEqual(
            datetime(2008, 1, 1, 0, 0, 0, tzinfo=pytz.UTC),
            build.date_started)
        self.assertEqual(
            datetime(2008, 1, 1, 0, 5, 0, tzinfo=pytz.UTC),
            build.date_finished)
        self.assertEqual(timedelta(minutes=5), build.duration)
        expected_buildlog = 'buildlog_%s-%s-%s.%s_%s_FULLYBUILT.txt.gz' % (
            self.distroseries.distribution.name, self.distroseries.name,
            self.das.architecturetag, spn, version)
        self.assertEqual(expected_buildlog, build.log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' % (
                self.distroseries.distribution.name, spn, version, build.id))
        expected_buildlog_url = '%s/%s' % (url_start, expected_buildlog)
        self.assertEqual(expected_buildlog_url, build.log_url)
        expected_changesfile = '%s_%s_%s.changes' % (
            spn, version, self.das.architecturetag)
        self.assertEqual(
            expected_changesfile, build.upload_changesfile.filename)
        expected_changesfile_url = '%s/%s' % (url_start, expected_changesfile)
        self.assertEqual(expected_changesfile_url, build.changesfile_url)
        # Since this build was successful, it can not be retried
        self.assertFalse(build.can_be_retried)

    def test_current_component(self):
        # The currently published component is provided via the
        # 'current_component' property.  It looks over the publishing records
        # and finds the current publication of the source in question.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual('main', build.current_component.name)
        # It may not be the same as
        self.assertEqual('main', build.source_package_release.component.name)
        # If the package has no uploads, its package_upload is None
        self.assertIsNone(build.package_upload)

    def test_current_component_when_unpublished(self):
        # Production has some buggy builds without source publications.
        # current_component returns None in that case.
        spph = self.publisher.getPubSource()
        other_das = self.factory.makeDistroArchSeries()
        build = spph.sourcepackagerelease.createBuild(
            other_das, PackagePublishingPocket.RELEASE, spph.archive)
        self.assertIs(None, build.current_component)

    def test_retry_for_released_series(self):
        # Builds can not be retried for released distroseries
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(
            distroseries=distroseries, processor=self.processor,
            supports_virtualized=True)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries)
        [build] = spph.createMissingBuilds()
        self.assertFalse(build.can_be_retried)

    def test_partner_retry_for_released_series(self):
        # Builds for PARTNER can be retried -- even if the distroseries is
        # released.
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(
            distroseries=distroseries, processor=self.processor,
            supports_virtualized=True)
        archive = self.factory.makeArchive(
            purpose=ArchivePurpose.PARTNER,
            distribution=distroseries.distribution)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries, archive=archive)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry(self):
        # A build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry_cancelled(self):
        # A cancelled build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.CANCELLED)
        self.assertTrue(build.can_be_retried)

    def test_uploadlog(self):
        # The upload log can be attached to a build
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertIsNone(build.upload_log)
        self.assertIsNone(build.upload_log_url)
        build.storeUploadLog('sample upload log')
        expected_filename = 'upload_%s_log.txt' % build.id
        self.assertEqual(expected_filename, build.upload_log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' % (
                self.distroseries.distribution.name, spph.source_package_name,
                spph.source_package_version, build.id))
        expected_url = '%s/%s' % (url_start, expected_filename)
        self.assertEqual(expected_url, build.upload_log_url)

    def test_retry_resets_state(self):
        # Retrying a build resets most of the state attributes, but does
        # not modify the first dispatch time.
        build = self.factory.makeBinaryPackageBuild()
        build.updateStatus(BuildStatus.BUILDING, date_started=self.now)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        with person_logged_in(self.admin):
            build.retry()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertEqual(self.now, build.date_first_dispatched)
        self.assertIsNone(build.log)
        self.assertIsNone(build.upload_log)
        self.assertEqual(0, build.failure_count)

    def test_create_bpr(self):
        # Test that we can create a BPR from a given build.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        bpn = self.factory.makeBinaryPackageName(name=spn)
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version, distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        binary = build.createBinaryPackageRelease(
            binarypackagename=bpn, version=version, summary='',
            description='', binpackageformat=BinaryPackageFormat.DEB,
            component=spph.sourcepackagerelease.component.id,
            section=spph.sourcepackagerelease.section.id,
            priority=PackagePublishingPriority.STANDARD, installedsize=0,
            architecturespecific=False)
        self.assertEqual(1, build.binarypackages.count())
        self.assertEqual([binary], list(build.binarypackages))

    def test_multiple_create_bpr(self):
        # We can create multiple BPRs from a build
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version, distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_names = []
        for i in range(15):
            bpn_name = '%s-%s' % (spn, i)
            bpn = self.factory.makeBinaryPackageName(bpn_name)
            expected_names.append(bpn_name)
            build.createBinaryPackageRelease(
                binarypackagename=bpn, version=str(i), summary='',
                description='', binpackageformat=BinaryPackageFormat.DEB,
                component=spph.sourcepackagerelease.component.id,
                section=spph.sourcepackagerelease.section.id,
                priority=PackagePublishingPriority.STANDARD, installedsize=0,
                architecturespecific=False)
        self.assertEqual(15, build.binarypackages.count())
        bin_names = [b.name for b in build.binarypackages]
        # Verify .binarypackages returns sorted by name
        expected_names.sort()
        self.assertEqual(expected_names, bin_names)

    def test_cannot_rescore_non_needsbuilds_builds(self):
        # If a build record isn't in NEEDSBUILD, it can not be rescored.
        # We will also need to log into an admin to do the rescore.
        with person_logged_in(self.admin):
            [bpph] = self.publisher.getPubBinaries(
                binaryname=self.factory.getUniqueString(),
                version="%s.1" % self.factory.getUniqueInteger(),
                distroseries=self.distroseries)
            build = bpph.binarypackagerelease.build
            self.assertRaises(CannotBeRescored, build.rescore, 20)

    def test_rescore_builds(self):
        # If the user has build-admin privileges, they can rescore builds
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertEqual(2505, build.buildqueue_record.lastscore)
        with person_logged_in(self.admin):
            build.rescore(5000)
            transaction.commit()
        self.assertEqual(5000, build.buildqueue_record.lastscore)

    def test_source_publication_override(self):
        # Components can be overridden in builds.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(spph, build.current_source_publication)
        universe = getUtility(IComponentSet)['universe']
        overridden_spph = spph.changeOverride(new_component=universe)
        # We can now see current source publication points to the overridden
        # publication.
        self.assertNotEqual(spph, build.current_source_publication)
        self.assertEqual(overridden_spph, build.current_source_publication)

    def test_estimated_duration(self):
        # Builds will have an estimated duration that is set to a
        # previous build of the same sources duration.
        spn = self.factory.getUniqueString()
        spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [build] = spph.createMissingBuilds()
        # Duration is based on package size if there is no previous build.
        self.assertEqual(
            timedelta(0, 60), build.buildqueue_record.estimated_duration)
        # Set the build as done, and its duration.
        build.updateStatus(
            BuildStatus.BUILDING,
            date_started=self.now - timedelta(minutes=72))
        build.updateStatus(BuildStatus.FULLYBUILT, date_finished=self.now)
        build.buildqueue_record.destroySelf()
        new_spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [new_build] = new_spph.createMissingBuilds()
        # The duration for this build is now 72 minutes.
        self.assertEqual(
            timedelta(0, 72 * 60),
            new_build.buildqueue_record.estimated_duration)

    def test_store_uploadlog_refuses_to_overwrite(self):
        # Storing an upload log for a build will fail if the build already
        # has an upload log.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        build.storeUploadLog('foo')
        self.assertRaises(AssertionError, build.storeUploadLog, 'bar')
# Beispiel #37
# 0
class TestDistributionSourcePackageFindRelatedArchives(TestCaseWithFactory):
    """Tests for IDistributionSourcePackage.findRelatedArchives().

    Related archives are PPAs that contain uploads of the same source
    package, ordered by the uploaders' soyuz karma for that package.
    """

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Publish some gedit sources in main and PPAs."""
        super(TestDistributionSourcePackageFindRelatedArchives, self).setUp()

        self.distribution = getUtility(IDistributionSet)['ubuntutest']

        # Create two PPAs for gedit, alongside the distribution's main
        # archive.
        self.archives = {}
        self.archives['ubuntu-main'] = self.distribution.main_archive
        self.archives['gedit-nightly'] = self.factory.makeArchive(
            name="gedit-nightly", distribution=self.distribution)
        self.archives['gedit-beta'] = self.factory.makeArchive(
            name="gedit-beta", distribution=self.distribution)

        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        # Publish gedit in all three archives, keeping a reference to the
        # creators so their karma can be tuned below.
        self.person_nightly = self.factory.makePerson()
        self.gedit_nightly_src_hist = self.publisher.getPubSource(
            sourcename="gedit",
            archive=self.archives['gedit-nightly'],
            creator=self.person_nightly,
            status=PackagePublishingStatus.PUBLISHED)

        self.person_beta = self.factory.makePerson()
        self.gedit_beta_src_hist = self.publisher.getPubSource(
            sourcename="gedit",
            archive=self.archives['gedit-beta'],
            creator=self.person_beta,
            status=PackagePublishingStatus.PUBLISHED)
        self.gedit_main_src_hist = self.publisher.getPubSource(
            sourcename="gedit",
            archive=self.archives['ubuntu-main'],
            status=PackagePublishingStatus.PUBLISHED)

        # Save the gedit source package for easy access.
        self.source_package = self.distribution.getSourcePackage('gedit')

        # Add slightly more soyuz karma for person_nightly for this package.
        # Karma rows may only be written under the 'karma' db user.
        switch_dbuser('karma')
        self.person_beta_karma = KarmaTotalCache(person=self.person_beta,
                                                 karma_total=200)
        self.person_nightly_karma = KarmaTotalCache(person=self.person_nightly,
                                                    karma_total=201)
        switch_dbuser('launchpad')

    def test_order_by_soyuz_package_karma(self):
        # Returned archives are ordered by the soyuz karma of the
        # package uploaders for the particular package

        related_archives = self.source_package.findRelatedArchives()
        related_archive_names = [archive.name for archive in related_archives]

        # person_nightly has the higher karma (201 > 200), so its PPA
        # sorts first.
        self.assertEqual(related_archive_names, [
            'gedit-nightly',
            'gedit-beta',
        ])

        # Update the soyuz karma for person_beta for this package so that
        # it is greater than person_nightly's.
        switch_dbuser('karma')
        self.person_beta_karma.karma_total = 202
        switch_dbuser('launchpad')

        related_archives = self.source_package.findRelatedArchives()
        related_archive_names = [archive.name for archive in related_archives]

        # The ordering now flips to reflect the updated karma.
        self.assertEqual(related_archive_names, [
            'gedit-beta',
            'gedit-nightly',
        ])

    def test_require_package_karma(self):
        # Only archives where the related package was created by a person
        # with the required soyuz karma for this package.

        related_archives = self.source_package.findRelatedArchives(
            required_karma=201)
        related_archive_names = [archive.name for archive in related_archives]

        # Only person_nightly (karma 201) meets the threshold.
        self.assertEqual(related_archive_names, ['gedit-nightly'])

    def test_development_version(self):
        # IDistributionSourcePackage.development_version is the ISourcePackage
        # for the current series of the distribution.
        # NOTE(review): this and the next test exercise
        # DistributionSourcePackage generally, not findRelatedArchives;
        # they may belong in a different test case class.
        dsp = self.factory.makeDistributionSourcePackage()
        series = self.factory.makeDistroSeries(distribution=dsp.distribution)
        self.assertEqual(series, dsp.distribution.currentseries)
        development_version = dsp.distribution.currentseries.getSourcePackage(
            dsp.sourcepackagename)
        self.assertEqual(development_version, dsp.development_version)

    def test_development_version_no_current_series(self):
        # IDistributionSourcePackage.development_version is the ISourcePackage
        # for the current series of the distribution.
        dsp = self.factory.makeDistributionSourcePackage()
        currentseries = dsp.distribution.currentseries
        # The current series is None by default.
        self.assertIs(None, currentseries)
        self.assertEqual(None, dsp.development_version)

    def test_does_not_include_copied_packages(self):
        # Packages that have been copied rather than uploaded are not
        # included when determining related archives.

        # Ensure that the gedit package in gedit-nightly was originally
        # uploaded to gedit-beta (ie. copied from there).
        gedit_release = self.gedit_nightly_src_hist.sourcepackagerelease
        gedit_release.upload_archive = self.archives['gedit-beta']

        related_archives = self.source_package.findRelatedArchives()
        related_archive_names = [archive.name for archive in related_archives]

        self.assertEqual(related_archive_names, ['gedit-beta'])
# Beispiel #38
# 0
class TestBuildQueueBase(TestCaseWithFactory):
    """Setup the test publisher and some builders.

    Creates 21 builders across three processors (386, hppa, amd64), both
    virtualized and native, and indexes them in self.builders keyed by
    (processor_id, virtualized) for use by subclasses.
    """

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create the builders and the (processor, virtualized) index."""
        super(TestBuildQueueBase, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        # First make nine 'i386' builders: five virtual ('-v-') and four
        # native ('-n-').
        self.i1 = self.factory.makeBuilder(name='i386-v-1')
        self.i2 = self.factory.makeBuilder(name='i386-v-2')
        self.i3 = self.factory.makeBuilder(name='i386-v-3')
        self.i4 = self.factory.makeBuilder(name='i386-v-4')
        self.i5 = self.factory.makeBuilder(name='i386-v-5')
        self.i6 = self.factory.makeBuilder(name='i386-n-6', virtualized=False)
        self.i7 = self.factory.makeBuilder(name='i386-n-7', virtualized=False)
        self.i8 = self.factory.makeBuilder(name='i386-n-8', virtualized=False)
        self.i9 = self.factory.makeBuilder(name='i386-n-9', virtualized=False)

        # Next make seven 'hppa' builders.
        self.hppa_proc = getUtility(IProcessorSet).getByName('hppa')
        self.h1 = self.factory.makeBuilder(
            name='hppa-v-1', processors=[self.hppa_proc])
        self.h2 = self.factory.makeBuilder(
            name='hppa-v-2', processors=[self.hppa_proc])
        self.h3 = self.factory.makeBuilder(
            name='hppa-v-3', processors=[self.hppa_proc])
        self.h4 = self.factory.makeBuilder(
            name='hppa-v-4', processors=[self.hppa_proc])
        self.h5 = self.factory.makeBuilder(
            name='hppa-n-5', processors=[self.hppa_proc], virtualized=False)
        self.h6 = self.factory.makeBuilder(
            name='hppa-n-6', processors=[self.hppa_proc], virtualized=False)
        self.h7 = self.factory.makeBuilder(
            name='hppa-n-7', processors=[self.hppa_proc], virtualized=False)

        # Finally make five 'amd64' builders.
        self.amd_proc = getUtility(IProcessorSet).getByName('amd64')
        self.a1 = self.factory.makeBuilder(
            name='amd64-v-1', processors=[self.amd_proc])
        self.a2 = self.factory.makeBuilder(
            name='amd64-v-2', processors=[self.amd_proc])
        self.a3 = self.factory.makeBuilder(
            name='amd64-v-3', processors=[self.amd_proc])
        self.a4 = self.factory.makeBuilder(
            name='amd64-n-4', processors=[self.amd_proc], virtualized=False)
        self.a5 = self.factory.makeBuilder(
            name='amd64-n-5', processors=[self.amd_proc], virtualized=False)

        # Index all builders by (processor id, virtualized) so tests can
        # select a builder population directly.
        self.builders = dict()
        self.x86_proc = getUtility(IProcessorSet).getByName('386')
        # x86 native
        self.builders[(self.x86_proc.id, False)] = [
            self.i6, self.i7, self.i8, self.i9]
        # x86 virtual
        self.builders[(self.x86_proc.id, True)] = [
            self.i1, self.i2, self.i3, self.i4, self.i5]

        # amd64 native
        self.builders[(self.amd_proc.id, False)] = [self.a4, self.a5]
        # amd64 virtual
        self.builders[(self.amd_proc.id, True)] = [self.a1, self.a2, self.a3]

        # hppa native
        self.builders[(self.hppa_proc.id, False)] = [
            self.h5,
            self.h6,
            self.h7,
            ]
        # hppa virtual
        self.builders[(self.hppa_proc.id, True)] = [
            self.h1, self.h2, self.h3, self.h4]

        # Ensure all builders are operational.
        for builders in self.builders.values():
            for builder in builders:
                builder.builderok = True
                builder.manual = False

        # Native builders irrespective of processor.
        self.builders[(None, False)] = []
        self.builders[(None, False)].extend(
            self.builders[(self.x86_proc.id, False)])
        self.builders[(None, False)].extend(
            self.builders[(self.amd_proc.id, False)])
        self.builders[(None, False)].extend(
            self.builders[(self.hppa_proc.id, False)])

        # Virtual builders irrespective of processor.
        self.builders[(None, True)] = []
        self.builders[(None, True)].extend(
            self.builders[(self.x86_proc.id, True)])
        self.builders[(None, True)].extend(
            self.builders[(self.amd_proc.id, True)])
        self.builders[(None, True)].extend(
            self.builders[(self.hppa_proc.id, True)])

        # Disable the sample data builders so only the ones created above
        # participate in dispatch calculations.
        getUtility(IBuilderSet)['bob'].builderok = False
        getUtility(IBuilderSet)['frog'].builderok = False

    def makeCustomBuildQueue(self, score=9876, virtualized=True,
                             estimated_duration=64, sourcename=None,
                             recipe_build=None):
        """Create a `SourcePackageRecipeBuild` and a `BuildQueue` for
        testing.

        :param score: lastscore to give the new BuildQueue entry.
        :param virtualized: whether the queue entry requires a virtual
            builder.
        :param estimated_duration: estimate, in seconds.
        :param sourcename: optional source name for the recipe build
            (ignored when recipe_build is supplied).
        :param recipe_build: reuse this build instead of creating one.
        :return: the new BuildQueue, already added to the store.
        """
        if recipe_build is None:
            recipe_build = self.factory.makeSourcePackageRecipeBuild(
                sourcename=sourcename)
        bq = BuildQueue(
            build_farm_job=recipe_build.build_farm_job, lastscore=score,
            estimated_duration=timedelta(seconds=estimated_duration),
            virtualized=virtualized)
        IStore(BuildQueue).add(bq)
        return bq
 def publish_to_ppa(self, ppa):
     """Publish a single source package into the given PPA."""
     test_publisher = SoyuzTestPublisher()
     test_publisher.prepareBreezyAutotest()
     test_publisher.getPubSource(archive=ppa)
# Beispiel #40
# 0
class TestBuildNotify(TestCaseWithFactory):
    """Tests for the e-mails produced by Build.notify().

    Exercises each BuildStatus for both a primary archive and a PPA, and
    verifies recipients and the rendered message body.
    """

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create the DAS, archives, builder and creator used by tests."""
        super(TestBuildNotify, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create all of the items we need to create builds
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries,
            processor=self.processor,
            supports_virtualized=True)
        self.creator = self.factory.makePerson(email='*****@*****.**')
        self.gpgkey = self.factory.makeGPGKey(owner=self.creator)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        self.ppa = self.factory.makeArchive()
        buildd_admins = getUtility(IPersonSet).getByName(
            'launchpad-buildd-admins')
        self.buildd_admins_email = []
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)
            # Record the buildd admins' addresses for recipient assertions.
            for member in buildd_admins.activemembers:
                self.buildd_admins_email.append(member.preferredemail.email)
        # Populated by create_builds(); indexed by BuildStatus value.
        self.builds = []

    def create_builds(self, archive):
        """Create one build per BuildStatus value in `archive`.

        Appends each build to self.builds so tests can look builds up by
        BuildStatus value.
        """
        for status in BuildStatus.items:
            spph = self.publisher.getPubSource(
                sourcename=self.factory.getUniqueString(),
                version="%s.%s" %
                (self.factory.getUniqueInteger(), status.value),
                distroseries=self.distroseries,
                architecturehintlist='any',
                creator=self.creator,
                archive=archive)
            spph.sourcepackagerelease.dscsigningkey = self.gpgkey
            [build] = spph.createMissingBuilds()
            with person_logged_in(self.admin):
                build.updateStatus(BuildStatus.BUILDING, builder=self.builder)
                build.updateStatus(
                    status,
                    date_finished=(build.date_started +
                                   timedelta(minutes=5 * (status.value + 1))))
                if status != BuildStatus.BUILDING:
                    build.buildqueue_record.destroySelf()
                else:
                    build.buildqueue_record.builder = self.builder
            self.builds.append(build)

    def _assert_mail_is_correct(self, build, notification, ppa=False):
        """Assert `notification` matches what `build` should have sent.

        Checks the custom X-Launchpad headers and renders the expected
        plain-text body for comparison.
        """
        # Assert that the mail sent (which is in notification), matches
        # the data from the build
        self.assertEquals('*****@*****.**',
                          notification['X-Creator-Recipient'])
        self.assertEquals(self.das.architecturetag,
                          notification['X-Launchpad-Build-Arch'])
        self.assertEquals('main', notification['X-Launchpad-Build-Component'])
        self.assertEquals(build.status.name,
                          notification['X-Launchpad-Build-State'])
        if ppa is True:
            self.assertEquals(get_ppa_reference(self.ppa),
                              notification['X-Launchpad-PPA'])
        # NOTE(review): get_payload(decode=True) returns bytes on Python 3;
        # this comparison assumes Python 2 str semantics.
        body = notification.get_payload(decode=True)
        build_log = 'None'
        if ppa is True:
            archive = '%s PPA' % get_ppa_reference(build.archive)
            source = 'not available'
        else:
            archive = '%s primary archive' % (
                self.distroseries.distribution.name)
            source = canonical_url(build.distributionsourcepackagerelease)
        builder = canonical_url(build.builder)
        # Several fields in the body depend on the build's final state.
        if build.status == BuildStatus.BUILDING:
            duration = 'not finished'
            build_log = 'see builder page'
        elif (build.status == BuildStatus.SUPERSEDED
              or build.status == BuildStatus.NEEDSBUILD):
            duration = 'not available'
            build_log = 'not available'
            builder = 'not available'
        elif build.status == BuildStatus.UPLOADING:
            duration = 'uploading'
            build_log = 'see builder page'
            builder = 'not available'
        else:
            duration = DurationFormatterAPI(
                build.duration).approximateduration()
        expected_body = dedent("""
         * Source Package: %s
         * Version: %s
         * Architecture: %s
         * Archive: %s
         * Component: main
         * State: %s
         * Duration: %s
         * Build Log: %s
         * Builder: %s
         * Source: %s



        If you want further information about this situation, feel free to
        contact a member of the Launchpad Buildd Administrators team.

        --
        %s
        %s
        """ % (build.source_package_release.sourcepackagename.name,
               build.source_package_release.version, self.das.architecturetag,
               archive, build.status.title, duration, build_log, builder,
               source, build.title, canonical_url(build)))
        self.assertEquals(expected_body, body)

    def test_notify_buildd_admins(self):
        # A build will cause an e-mail to be sent out to the buildd-admins,
        # for primary archive builds.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        expected_emails = self.buildd_admins_email + ['*****@*****.**']
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        self.assertEquals(expected_emails, actual_emails)

    def test_ppa_does_not_notify_buildd_admins(self):
        # A build for a PPA does not notify the buildd admins.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notifications = pop_notifications()
        # An e-mail is sent to the archive owner, as well as the creator
        self.assertEquals(2, len(notifications))

    def test_notify_failed_to_build(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_failed_to_build_ppa(self):
        # An e-mail is sent to the source package creator on build failures.
        self.create_builds(archive=self.ppa)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_needs_building(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_needs_building_ppa(self):
        # We can notify the creator when the build is needing to be built.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.NEEDSBUILD.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_successfully_built(self):
        # Successful builds don't notify anyone.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        build.notify()
        self.assertEqual([], pop_notifications())

    def test_notify_dependency_wait(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_dependency_wait_ppa(self):
        # We can notify the creator when the build can't find a dependency.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.MANUALDEPWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_chroot_problem(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_chroot_problem_ppa(self):
        # We can notify the creator when the builder the build attempted to
        # be built on has an internal problem.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.CHROOTWAIT.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_build_for_superseded_source(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_build_for_superseded_source_ppa(self):
        # We can notify the creator when the source package had a newer
        # version uploaded before this build had a chance to be dispatched.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.SUPERSEDED.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_currently_building(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_currently_building_ppa(self):
        # We can notify the creator when the build is currently building.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.BUILDING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_notify_uploading_build(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification)

    def test_notify_uploading_build_ppa(self):
        # We can notify the creator when the build has completed, and binary
        # packages are being uploaded by the builder.
        self.create_builds(self.ppa)
        build = self.builds[BuildStatus.UPLOADING.value]
        build.notify()
        notification = pop_notifications()[1]
        self._assert_mail_is_correct(build, notification, ppa=True)

    def test_copied_into_ppa_does_not_spam(self):
        # When a package is copied into a PPA, we don't send mail to the
        # original creator of the source package.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        spph = build.current_source_publication
        ppa_spph = spph.copyTo(self.distroseries,
                               PackagePublishingPocket.RELEASE, self.ppa)
        [ppa_build] = ppa_spph.createMissingBuilds()
        ppa_build.notify()
        notifications = pop_notifications()
        self.assertEquals(1, len(notifications))

    def test_notify_owner_supresses_mail(self):
        # When the 'notify_owner' config option is False, we don't send mail
        # to the owner of the SPR.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        notify_owner = dedent("""
            [builddmaster]
            send_build_notification: True
            notify_owner: False
            """)
        config.push('notify_owner', notify_owner)
        build.notify()
        notifications = pop_notifications()
        actual_emails = [n['To'] for n in notifications]
        self.assertEquals(self.buildd_admins_email, actual_emails)
        # And undo what we just did.
        config.pop('notify_owner')

    def test_build_notification_supresses_mail(self):
        # When the 'build_notification' config option is False, we don't
        # send any mail at all.
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FULLYBUILT.value]
        send_build_notification = dedent("""
            [builddmaster]
            send_build_notification: False
            """)
        config.push('send_build_notification', send_build_notification)
        build.notify()
        notifications = pop_notifications()
        self.assertEquals(0, len(notifications))
        # And undo what we just did.
        config.pop('send_build_notification')

    def test_sponsored_upload_notification(self):
        # If the signing key is different to the creator, they are both
        # notified.
        sponsor = self.factory.makePerson('*****@*****.**')
        key = self.factory.makeGPGKey(owner=sponsor)
        self.create_builds(self.archive)
        build = self.builds[BuildStatus.FAILEDTOBUILD.value]
        spr = build.current_source_publication.sourcepackagerelease
        # Push past the security proxy
        removeSecurityProxy(spr).dscsigningkey = key
        build.notify()
        notifications = pop_notifications()
        expected_emails = self.buildd_admins_email + [
            '*****@*****.**', '*****@*****.**'
        ]
        actual_emails = [n['To'] for n in notifications]
        self.assertEquals(expected_emails, actual_emails)
# Beispiel #41
# 0
class TestBuildDepWait(TestCaseWithFactory):
    """Tests for resolving a build's MANUALDEPWAIT dependencies."""

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        """Create a DistroArchSeries, archive and builder for builds."""
        super(TestBuildDepWait, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create everything we need to create builds, such as a
        # DistroArchSeries and a builder.
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)

    # NOTE: "dependancies" is a typo preserved in the test names; renaming
    # would change which tests runners and reports refer to.
    def test_update_dependancies(self):
        # Calling .updateDependencies() on a build will remove those which
        # are reachable.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries, archive=self.archive)
        [build] = spph.createMissingBuilds()
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        with person_logged_in(self.admin):
            # Mark the build as waiting on a binary named spn, then
            # publish that binary so the dependency becomes satisfiable.
            build.updateStatus(
                BuildStatus.MANUALDEPWAIT,
                slave_status={'dependencies': unicode(spn)})
            [bpph] = self.publisher.getPubBinaries(
                binaryname=spn, distroseries=self.distroseries,
                version=version, builder=self.builder, archive=self.archive,
                status=PackagePublishingStatus.PUBLISHED)
            # Commit to make sure stuff hits the database.
            transaction.commit()
        build.updateDependencies()
        # The satisfied dependency has been removed.
        self.assertEquals(u'', build.dependencies)

    def test_update_dependancies_respects_component(self):
        # Since main can only utilise packages that are published in main,
        # dependencies are not satisfied if they are not in main.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries, archive=self.archive)
        [build] = spph.createMissingBuilds()
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        with person_logged_in(self.admin):
            build.updateStatus(
                BuildStatus.MANUALDEPWAIT,
                slave_status={'dependencies': unicode(spn)})
            [bpph] = self.publisher.getPubBinaries(
                binaryname=spn, distroseries=self.distroseries,
                version=version, builder=self.builder, archive=self.archive,
                status=PackagePublishingStatus.PUBLISHED,
                component='universe')
            # Commit to make sure stuff hits the database.
            transaction.commit()
        build.updateDependencies()
        # Since the dependency is in universe, we still can't see it.
        self.assertEquals(unicode(spn), build.dependencies)
        with person_logged_in(self.admin):
            bpph.component = getUtility(IComponentSet)['main']
            transaction.commit()
        # Now that we have moved it main, we can see it.
        build.updateDependencies()
        self.assertEquals(u'', build.dependencies)
# Beispiel #42
# 0
class TestDetermineArchitecturesToBuild(TestCaseWithFactory):
    """Test that determine_architectures_to_build correctly interprets hints.

    The available architectures are breezy-autotest's i386, hppa and a
    newly added armel (created in setUp).
    """

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Add an 'armel' architecture and fake chroots to the test series."""
        super(TestDetermineArchitecturesToBuild, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()
        armel = self.factory.makeProcessor('armel', 'armel', 'armel')
        self.publisher.breezy_autotest.newArch(
            'armel', armel, False, self.publisher.person)
        self.publisher.addFakeChroots()

    def assertArchitecturesToBuild(self, expected_arch_tags, pub,
                                   allowed_arch_tags=None):
        """Assert `pub`'s hint list resolves to `expected_arch_tags`.

        :param allowed_arch_tags: if given, restrict the candidate
            architectures to these tags; otherwise all of the series'
            architectures are candidates.
        """
        if allowed_arch_tags is None:
            allowed_archs = self.publisher.breezy_autotest.architectures
        else:
            allowed_archs = [
                arch for arch in self.publisher.breezy_autotest.architectures
                if arch.architecturetag in allowed_arch_tags]
        architectures = determine_architectures_to_build(
            pub.sourcepackagerelease.architecturehintlist, pub.archive,
            self.publisher.breezy_autotest, allowed_archs)
        self.assertContentEqual(
            expected_arch_tags, [a.architecturetag for a in architectures])

    def assertArchsForHint(self, hint_string, expected_arch_tags,
                           allowed_arch_tags=None, sourcename=None):
        """Assert that the given hint resolves to the expected archtags."""
        pub = self.publisher.getPubSource(
            sourcename=sourcename, architecturehintlist=hint_string)
        self.assertArchitecturesToBuild(
            expected_arch_tags, pub, allowed_arch_tags=allowed_arch_tags)

    def test_single_architecture(self):
        # A hint string with a single arch resolves to just that arch.
        self.assertArchsForHint('hppa', ['hppa'])

    def test_three_architectures(self):
        # A hint string with multiple archs resolves to just those
        # archs.  (amd64 is not an available arch in this series.)
        self.assertArchsForHint('amd64 i386 hppa', ['hppa', 'i386'])

    def test_independent(self):
        # 'all' is special, meaning just a single build. The
        # nominatedarchindep architecture is used -- in this case i386.
        self.assertArchsForHint('all', ['i386'])

    def test_one_and_independent(self):
        # 'all' is redundant if we have another build anyway.
        self.assertArchsForHint('hppa all', ['hppa'])

    def test_fictional_and_independent(self):
        # But 'all' is useful if present with an arch that wouldn't
        # generate a build.
        self.assertArchsForHint('foo all', ['i386'])

    def test_wildcard(self):
        # 'any' is a wildcard that matches all available archs.
        self.assertArchsForHint('any', ['armel', 'hppa', 'i386'])

    def test_kernel_specific_architecture(self):
        # Since we only support Linux-based architectures, 'linux-foo'
        # is treated the same as 'foo'.
        self.assertArchsForHint('linux-hppa', ['hppa'])

    def test_unknown_kernel_specific_architecture(self):
        # Non-Linux architectures aren't supported.
        self.assertArchsForHint('kfreebsd-hppa', [])

    def test_kernel_wildcard_architecture(self):
        # Wildcards work for kernels: 'any-foo' is treated like 'foo'.
        self.assertArchsForHint('any-hppa', ['hppa'])

    def test_kernel_wildcard_architecture_arm(self):
        # The second part of a wildcard matches the canonical CPU name, not
        # on the Debian architecture, so 'any-arm' matches 'armel'.
        self.assertArchsForHint('any-arm', ['armel'])

    def test_kernel_specific_architecture_wildcard(self):
        # Wildcards work for archs too: 'linux-any' is treated like 'any'.
        self.assertArchsForHint('linux-any', ['armel', 'hppa', 'i386'])

    def test_unknown_kernel_specific_architecture_wildcard(self):
        # But unknown kernels continue to result in nothing.
        self.assertArchsForHint('kfreebsd-any', [])

    def test_wildcard_and_independent(self):
        # 'all' continues to be ignored alongside a valid wildcard.
        self.assertArchsForHint('all linux-any', ['armel', 'hppa', 'i386'])

    def test_kernel_independent_is_invalid(self):
        # 'linux-all' isn't supported.
        self.assertArchsForHint('linux-all', [])

    def test_double_wildcard_is_same_as_single(self):
        # 'any-any' is redundant with 'any', but dpkg-architecture supports
        # it anyway.
        self.assertArchsForHint('any-any', ['armel', 'hppa', 'i386'])

    def test_disabled_architectures_omitted(self):
        # Disabled architectures are not buildable, so are excluded.
        self.publisher.breezy_autotest['hppa'].enabled = False
        self.assertArchsForHint('any', ['armel', 'i386'])

    def test_virtualized_archives_have_only_virtualized_archs(self):
        # For archives which must build on virtual builders, only
        # virtual archs are returned.
        self.publisher.breezy_autotest.main_archive.require_virtualized = True
        self.assertArchsForHint('any', ['i386'])

    def test_no_all_builds_when_nominatedarchindep_not_permitted(self):
        # Some archives (eg. armel rebuilds) don't want arch-indep
        # builds. If the nominatedarchindep architecture (normally
        # i386) is omitted, no builds will be created for arch-indep
        # sources.
        self.assertArchsForHint('all', [], allowed_arch_tags=['hppa'])
class TestProcessAccepted(TestCaseWithFactory):
    """Tests for the `ProcessAccepted` queue-processing script."""

    layer = LaunchpadZopelessLayer
    # The script runs under the upload queue's restricted database user.
    dbuser = config.uploadqueue.dbuser

    def setUp(self):
        """Create the Soyuz test publisher."""
        TestCaseWithFactory.setUp(self)
        self.stp = SoyuzTestPublisher()
        self.stp.prepareBreezyAutotest()
        self.test_package_name = u"accept-test"
        self.distro = self.factory.makeDistribution()

    def getScript(self, test_args=None):
        """Return a ProcessAccepted instance.

        :param test_args: extra command-line arguments; the target
            distribution's name is always appended.
        """
        if test_args is None:
            test_args = []
        test_args.append(self.distro.name)
        script = ProcessAccepted("process accepted", test_args=test_args)
        script.logger = BufferLogger()
        script.txn = self.layer.txn
        return script

    def createWaitingAcceptancePackage(self,
                                       distroseries,
                                       archive=None,
                                       sourcename=None):
        """Create some pending publications.

        :param distroseries: series to upload to.
        :param archive: target archive; defaults to the distribution's
            main archive.
        :param sourcename: source name; defaults to self.test_package_name.
        :return: the source package release (spr_only=True).
        """
        if archive is None:
            archive = self.distro.main_archive
        if sourcename is None:
            sourcename = self.test_package_name
        return self.stp.getPubSource(archive=archive,
                                     sourcename=sourcename,
                                     distroseries=distroseries,
                                     spr_only=True)

    def test_robustness(self):
        """Test that a broken package doesn't block the publication of other
        packages."""
        # Attempt to upload one source to a supported series.
        # The record is created first and then the status of the series
        # is changed from DEVELOPMENT to SUPPORTED, otherwise it's impossible
        # to create the record.
        distroseries = self.factory.makeDistroSeries(distribution=self.distro)
        # This creates a broken publication.
        self.createWaitingAcceptancePackage(distroseries=distroseries,
                                            sourcename="notaccepted")
        distroseries.status = SeriesStatus.SUPPORTED
        # Also upload some other things.
        other_distroseries = self.factory.makeDistroSeries(
            distribution=self.distro)
        self.createWaitingAcceptancePackage(distroseries=other_distroseries)
        script = self.getScript([])
        switch_dbuser(self.dbuser)
        script.main()

        # The other source should be published now.
        published_main = self.distro.main_archive.getPublishedSources(
            name=self.test_package_name)
        self.assertEqual(published_main.count(), 1)

        # And an oops should be filed for the first.
        # NOTE(review): self.oopses is provided by the test infrastructure's
        # oops capturing — confirm against TestCaseWithFactory.
        self.assertEqual(1, len(self.oopses))
        error_report = self.oopses[0]
        expected_error = "Failure processing queue_item"
        self.assertStartsWith(error_report['req_vars']['error-explanation'],
                              expected_error)

    def test_accept_copy_archives(self):
        """Test that publications in a copy archive are accepted properly."""
        # Upload some pending packages in a copy archive.
        distroseries = self.factory.makeDistroSeries(distribution=self.distro)
        copy_archive = self.factory.makeArchive(distribution=self.distro,
                                                purpose=ArchivePurpose.COPY)
        copy_source = self.createWaitingAcceptancePackage(
            archive=copy_archive, distroseries=distroseries)
        # Also upload some stuff in the main archive.
        self.createWaitingAcceptancePackage(distroseries=distroseries)

        # Before accepting, the package should not be published at all.
        published_copy = copy_archive.getPublishedSources(
            name=self.test_package_name)
        # Using .count() until Storm fixes __nonzero__ on SQLObj result
        # sets, then we can use bool() which is far more efficient than
        # counting.
        self.assertEqual(published_copy.count(), 0)

        # Accept the packages.
        script = self.getScript(['--copy-archives'])
        switch_dbuser(self.dbuser)
        script.main()

        # Packages in main archive should not be accepted and published.
        published_main = self.distro.main_archive.getPublishedSources(
            name=self.test_package_name)
        self.assertEqual(published_main.count(), 0)

        # Check the copy archive source was accepted.
        published_copy = copy_archive.getPublishedSources(
            name=self.test_package_name).one()
        self.assertEqual(published_copy.status,
                         PackagePublishingStatus.PENDING)
        self.assertEqual(copy_source, published_copy.sourcepackagerelease)

    def test_commits_after_each_item(self):
        # Test that the script commits after each item, not just at the end.
        uploads = [
            self.createWaitingAcceptancePackage(
                distroseries=self.factory.makeDistroSeries(
                    distribution=self.distro),
                sourcename='source%d' % i) for i in range(3)
        ]

        class UploadCheckingSynchronizer:
            # Transaction synchronizer that, on every commit, checks that
            # the number of DONE uploads never exceeds the commit count —
            # i.e. each processed item was committed individually.

            commit_count = 0

            def beforeCompletion(inner_self, txn):
                pass

            def afterCompletion(inner_self, txn):
                if txn.status != 'Committed':
                    return
                inner_self.commit_count += 1
                done_count = len([
                    upload for upload in uploads
                    if upload.package_upload.status == PackageUploadStatus.DONE
                ])
                self.assertEqual(min(len(uploads), inner_self.commit_count),
                                 done_count)

        script = self.getScript([])
        switch_dbuser(self.dbuser)
        synch = UploadCheckingSynchronizer()
        transaction.manager.registerSynch(synch)
        script.main()
        # More commits than uploads: at least one commit per item, plus
        # bookkeeping commits.
        self.assertThat(len(uploads), LessThan(synch.commit_count))

    def test_commits_work(self):
        # A run of the script leaves its work committed: the upload is
        # still present after an abort of the test transaction.
        upload = self.factory.makeSourcePackageUpload(
            distroseries=self.factory.makeDistroSeries(
                distribution=self.distro))
        upload_id = upload.id
        self.getScript([]).main()
        self.layer.txn.abort()
        self.assertEqual(upload,
                         IStore(PackageUpload).get(PackageUpload, upload_id))

    def test_validateArguments_requires_distro_by_default(self):
        self.assertRaises(OptionValueError,
                          ProcessAccepted(test_args=[]).validateArguments)

    def test_validateArguments_requires_no_distro_for_derived_run(self):
        # The test is that this does not raise an exception.
        ProcessAccepted(test_args=['--derived']).validateArguments()

    def test_validateArguments_does_not_accept_distro_for_derived_run(self):
        distro = self.factory.makeDistribution()
        script = ProcessAccepted(test_args=['--derived', distro.name])
        self.assertRaises(OptionValueError, script.validateArguments)

    def test_findTargetDistros_finds_named_distro(self):
        distro = self.factory.makeDistribution()
        script = ProcessAccepted(test_args=[distro.name])
        self.assertContentEqual([distro], script.findTargetDistros())

    def test_findNamedDistro_raises_error_if_not_found(self):
        nonexistent_distro = self.factory.getUniqueString()
        script = ProcessAccepted(test_args=[nonexistent_distro])
        self.assertRaises(LaunchpadScriptFailure, script.findNamedDistro,
                          nonexistent_distro)

    def test_findTargetDistros_for_derived_finds_derived_distro(self):
        dsp = self.factory.makeDistroSeriesParent()
        script = ProcessAccepted(test_args=['--derived'])
        self.assertIn(dsp.derived_series.distribution,
                      script.findTargetDistros())
# Beispiel #44
# 0
class ArchiveExpiryTestBase(TestCaseWithFactory):
    """base class for the expire-archive-files.py script tests."""
    layer = LaunchpadZopelessLayer
    # The expiry script runs under its own restricted database user.
    dbuser = config.binaryfile_expire.dbuser

    # NOTE(review): subclasses are expected to provide self.archive and
    # self.archive2 (used by _setUpExpirablePublications) — confirm in the
    # concrete test classes.

    def setUp(self):
        """Set up some test publications."""
        super(ArchiveExpiryTestBase, self).setUp()
        # Configure the test publisher.
        switch_dbuser("launchpad")
        self.stp = SoyuzTestPublisher()
        self.stp.prepareBreezyAutotest()

        # Prepare some date properties for the tests to use.
        # getScript passes --expire-after 30, so 29 days ago is just inside
        # the threshold and 31 days ago just outside it.
        self.now = datetime.now(pytz.UTC)
        self.under_threshold_date = self.now - timedelta(days=29)
        self.over_threshold_date = self.now - timedelta(days=31)

    def getScript(self, test_args=None):
        """Return a ArchiveExpirer instance.

        Always appends '--expire-after 30' to any supplied arguments.
        """
        if test_args is None:
            test_args = []
        test_args.extend(['--expire-after', '30'])
        script = ArchiveExpirer("test expirer", test_args=test_args)
        script.logger = BufferLogger()
        script.txn = self.layer.txn
        return script

    def runScript(self):
        """Run the expiry script and return."""
        script = self.getScript()
        # The script must run as its own database user.
        switch_dbuser(self.dbuser)
        script.main()

    def _setUpExpirablePublications(self, archive=None):
        """Helper to set up two publications that are both expirable.

        Creates a source (and its binary) in `archive` with dateremoved
        past the expiry threshold, plus copies of both in self.archive2
        that are also past the threshold.

        :param archive: archive to publish in; defaults to self.archive.
        :return: (source publication, binary publication) in `archive`.
        """
        if archive is None:
            archive = self.archive
        pkg5 = self.stp.getPubSource(
            sourcename="pkg5", architecturehintlist="i386", archive=archive,
            dateremoved=self.over_threshold_date)
        other_source = pkg5.copyTo(
            pkg5.distroseries, pkg5.pocket, self.archive2)
        other_source.dateremoved = self.over_threshold_date
        [pub] = self.stp.getPubBinaries(
            pub_source=pkg5, dateremoved=self.over_threshold_date,
            archive=archive)
        [other_binary] = pub.copyTo(
            pub.distroarchseries.distroseries, pub.pocket, self.archive2)
        other_binary.dateremoved = self.over_threshold_date
        return pkg5, pub

    def assertBinaryExpired(self, publication):
        """Assert the binary publication's library file has been expired."""
        self.assertNotEqual(
            publication.binarypackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be set, but it's not.")

    def assertBinaryNotExpired(self, publication):
        """Assert the binary publication's library file was left alone."""
        self.assertEqual(
            publication.binarypackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be None, but it's not.")

    def assertSourceExpired(self, publication):
        """Assert the source publication's library file has been expired."""
        self.assertNotEqual(
            publication.sourcepackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be set, but it's not.")

    def assertSourceNotExpired(self, publication):
        """Assert the source publication's library file was left alone."""
        self.assertEqual(
            publication.sourcepackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be None, but it's not.")
class TestBuildUpdateDependencies(TestCaseWithFactory):
    """Tests for `IBinaryPackageBuild.updateDependencies` and buildqueue
    record removal."""

    layer = LaunchpadZopelessLayer

    def _setupSimpleDepwaitContext(self):
        """Use `SoyuzTestPublisher` to setup a simple depwait context.

        Return an `IBinaryPackageBuild` in MANUALDEPWAIT state and depending
        on a binary that exists and is reachable.
        """
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        depwait_source = self.publisher.getPubSource(
            sourcename='depwait-source')

        # Publish the binary the build will wait on ('dep-bin').
        self.publisher.getPubBinaries(
            binaryname='dep-bin', status=PackagePublishingStatus.PUBLISHED)

        [depwait_build] = depwait_source.createMissingBuilds()
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin'})
        return depwait_build

    def testBuildqueueRemoval(self):
        """Test removing buildqueue items.

        Removing a Buildqueue row should also remove its associated
        BuildPackageJob and Job rows.
        """
        # Create a build in depwait.
        depwait_build = self._setupSimpleDepwaitContext()
        depwait_build_id = depwait_build.id

        # Grab the relevant db records for later comparison.
        store = Store.of(depwait_build)
        build_package_job = store.find(
            BuildPackageJob, depwait_build.id == BuildPackageJob.build).one()
        build_package_job_id = build_package_job.id
        job_id = store.find(Job, Job.id == build_package_job.job.id).one().id
        build_queue_id = store.find(BuildQueue,
                                    BuildQueue.job == job_id).one().id

        depwait_build.buildqueue_record.destroySelf()

        # Test that the records above no longer exist in the db.
        self.assertEqual(
            store.find(BuildPackageJob,
                       BuildPackageJob.id == build_package_job_id).count(), 0)
        self.assertEqual(store.find(Job, Job.id == job_id).count(), 0)
        self.assertEqual(
            store.find(BuildQueue, BuildQueue.id == build_queue_id).count(), 0)
        # But the build itself still exists.
        self.assertEqual(
            store.find(BinaryPackageBuild,
                       BinaryPackageBuild.id == depwait_build_id).count(), 1)

    def testUpdateDependenciesWorks(self):
        # Calling `IBinaryPackageBuild.updateDependencies` makes the build
        # record ready for dispatch.
        depwait_build = self._setupSimpleDepwaitContext()
        # Commit so updateDependencies sees the published dependency.
        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def assertRaisesUnparsableDependencies(self, depwait_build, dependencies):
        """Assert that updateDependencies raises UnparsableDependencies for
        the given 'dependencies' slave status value."""
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': dependencies})
        self.assertRaises(UnparsableDependencies,
                          depwait_build.updateDependencies)

    def testInvalidDependencies(self):
        # Calling `IBinaryPackageBuild.updateDependencies` on a build with
        # invalid 'dependencies' raises an AssertionError.
        # Anything not following '<name> [([relation] <version>)][, ...]'
        depwait_build = self._setupSimpleDepwaitContext()

        # None is not a valid dependency values.
        self.assertRaisesUnparsableDependencies(depwait_build, None)

        # Missing 'name'.
        self.assertRaisesUnparsableDependencies(depwait_build, u'(>> version)')

        # Missing 'version'.
        self.assertRaisesUnparsableDependencies(depwait_build, u'name (>>)')

        # Missing comma between dependencies.
        self.assertRaisesUnparsableDependencies(depwait_build, u'name1 name2')

    def testBug378828(self):
        # `IBinaryPackageBuild.updateDependencies` copes with the
        # scenario where the corresponding source publication is not
        # active (deleted) and the source original component is not a
        # valid ubuntu component.
        depwait_build = self._setupSimpleDepwaitContext()

        spr = depwait_build.source_package_release
        depwait_build.current_source_publication.requestDeletion(spr.creator)
        contrib = getUtility(IComponentSet).new('contrib')
        removeSecurityProxy(spr).component = contrib

        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testVersionedDependencies(self):
        # `IBinaryPackageBuild.updateDependencies` supports versioned
        # dependencies. A build will not be retried unless the candidate
        # complies with the version restriction.
        # In this case, dep-bin 666 is available. >> 666 isn't
        # satisfied, but >= 666 is.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        # Unsatisfied: the dependency string is left in place.
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (>> 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'dep-bin (>> 666)')
        # Satisfied: the dependency is cleared.
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (>= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')

    def testVersionedDependencyOnOldPublication(self):
        # `IBinaryPackageBuild.updateDependencies` doesn't just consider
        # the latest publication. There may be older publications which
        # satisfy the version constraints (in other archives or pockets).
        # In this case, dep-bin 666 and 999 are available, so both = 666
        # and = 999 are satisfied.
        depwait_build = self._setupSimpleDepwaitContext()
        self.publisher.getPubBinaries(
            binaryname='dep-bin',
            version='999',
            status=PackagePublishingStatus.PUBLISHED)
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (= 999)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')
class TestProcessAcceptedBugsJob(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer
    dbuser = config.IProcessAcceptedBugsJobSource.dbuser

    def setUp(self):
        """Prepare a Soyuz publisher and the breezy-autotest series."""
        super(TestProcessAcceptedBugsJob, self).setUp()
        publisher = SoyuzTestPublisher()
        publisher.prepareBreezyAutotest()
        self.publisher = publisher
        self.distroseries = publisher.breezy_autotest

    def makeJob(self, distroseries=None, spr=None, bug_ids=None):
        """Create a `ProcessAcceptedBugsJob`.

        :param distroseries: series to target; defaults to the test series.
        :param spr: source package release; a fresh one with a changelog
            entry is created if not given.
        :param bug_ids: list of bug IDs to close; defaults to [1].
        """
        # Use a None sentinel rather than a mutable default argument
        # (a list default is shared across all calls).
        if bug_ids is None:
            bug_ids = [1]
        if distroseries is None:
            distroseries = self.distroseries
        if spr is None:
            spr = self.factory.makeSourcePackageRelease(
                distroseries=distroseries, changelog_entry="changelog")
        return getUtility(IProcessAcceptedBugsJobSource).create(
            distroseries, spr, bug_ids)

    def test_job_implements_IProcessAcceptedBugsJob(self):
        """A created job provides the IProcessAcceptedBugsJob interface."""
        self.assertTrue(verifyObject(IProcessAcceptedBugsJob, self.makeJob()))

    def test_job_source_implements_IProcessAcceptedBugsJobSource(self):
        """The registered job source utility provides its interface."""
        source = getUtility(IProcessAcceptedBugsJobSource)
        self.assertTrue(verifyObject(IProcessAcceptedBugsJobSource, source))

    def test_create(self):
        """A ProcessAcceptedBugsJob can be created and stores its
        arguments."""
        release = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        ids = [1, 2]
        job = self.makeJob(spr=release, bug_ids=ids)
        self.assertProvides(job, IProcessAcceptedBugsJob)
        self.assertEqual(self.distroseries, job.distroseries)
        self.assertEqual(release, job.sourcepackagerelease)
        self.assertEqual(ids, job.bug_ids)

    def test_run_raises_errors(self):
        """Unexpected errors during a run surface as exceptions."""
        class Boom(Exception):
            pass

        series = self.factory.makeDistroSeries()
        # Make any source-package lookup on the series blow up.
        removeSecurityProxy(series).getSourcePackage = FakeMethod(
            failure=Boom())
        job = self.makeJob(distroseries=series)
        self.assertRaises(Boom, job.run)

    def test___repr__(self):
        """repr() names the bugs, package and series being processed."""
        release = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        job = self.makeJob(spr=release, bug_ids=[1, 2])
        expected = (
            "<ProcessAcceptedBugsJob to close bugs [1, 2] for "
            "{spr.name}/{spr.version} ({distroseries.distribution.name} "
            "{distroseries.name})>").format(
                distroseries=self.distroseries, spr=release)
        self.assertEqual(expected, repr(job))

    def test_run(self):
        """Running the job closes the referenced bugs."""
        release = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        bug = self.factory.makeBug()
        task = self.factory.makeBugTask(target=release.sourcepackage, bug=bug)
        self.assertEqual(BugTaskStatus.NEW, task.status)
        job = self.makeJob(spr=release, bug_ids=[bug.id])
        JobRunner([job]).runAll()
        self.assertEqual(BugTaskStatus.FIXRELEASED, task.status)

    def test_smoke(self):
        # End-to-end check: the real process-job-source.py cronscript picks
        # up a pending job and closes the bug.
        spr = self.factory.makeSourcePackageRelease(
            distroseries=self.distroseries, changelog_entry="changelog")
        bug = self.factory.makeBug()
        bugtask = self.factory.makeBugTask(target=spr.sourcepackage, bug=bug)
        self.assertEqual(BugTaskStatus.NEW, bugtask.status)
        self.makeJob(spr=spr, bug_ids=[bug.id])
        # Commit so the externally-run script can see the job.
        transaction.commit()

        out, err, exit_code = run_script(
            "LP_DEBUG_SQL=1 cronscripts/process-job-source.py -vv %s" % (
                IProcessAcceptedBugsJobSource.getName()))

        # Attach script output to the test result for debugging failures.
        self.addDetail("stdout", text_content(out))
        self.addDetail("stderr", text_content(err))

        self.assertEqual(0, exit_code)
        self.assertEqual(BugTaskStatus.FIXRELEASED, bugtask.status)