def create_child(factory):
    processor = factory.makeProcessor()
    parent = factory.makeDistroSeries()
    parent_das = factory.makeDistroArchSeries(
        distroseries=parent, processor=processor)
    lf = factory.makeLibraryFileAlias()
    # Since the LFA needs to be in the librarian, commit.
    transaction.commit()
    parent_das.addOrUpdateChroot(lf)
    with celebrity_logged_in('admin'):
        parent_das.supports_virtualized = True
        parent.nominatedarchindep = parent_das
        publisher = SoyuzTestPublisher()
        publisher.prepareBreezyAutotest()
        packages = {'udev': '0.1-1', 'libc6': '2.8-1'}
        for package, version in packages.items():
            publisher.getPubBinaries(
                distroseries=parent, binaryname=package,
                version=version,
                status=PackagePublishingStatus.PUBLISHED)
        test1 = getUtility(IPackagesetSet).new(
            u'test1', u'test 1 packageset', parent.owner,
            distroseries=parent)
        test1_packageset_id = str(test1.id)
        test1.addSources('udev')
    parent.updatePackageCount()
    child = factory.makeDistroSeries()
    getUtility(ISourcePackageFormatSelectionSet).add(
        child, SourcePackageFormat.FORMAT_1_0)
    # Make sure everything hits the database; switching db users aborts
    # the current transaction.
    transaction.commit()
    return parent, child, test1_packageset_id
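# A minimal usage sketch, not taken from the original test module: the class
# and test names below are hypothetical, and it assumes the same layer and
# imports as the surrounding code. It only shows how create_child() above
# might be consumed from a TestCaseWithFactory subclass.
class TestCreateChildFixture(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def test_returns_parent_child_and_packageset_id(self):
        parent, child, test1_packageset_id = create_child(self.factory)
        # The parent and child are distinct series, and the packageset id
        # refers to the 'test1' packageset created in the parent.
        self.assertNotEqual(parent, child)
        self.assertIsNotNone(test1_packageset_id)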
class TestBuildUpdateDependencies(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def _setupSimpleDepwaitContext(self):
        """Use `SoyuzTestPublisher` to setup a simple depwait context.

        Return an `IBinaryPackageBuild` in MANUALDEWAIT state and depending
        on a binary that exists and is reachable.
        """
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        depwait_source = self.publisher.getPubSource(
            sourcename='depwait-source')

        self.publisher.getPubBinaries(
            binaryname='dep-bin', status=PackagePublishingStatus.PUBLISHED)

        [depwait_build] = depwait_source.createMissingBuilds()
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin'})
        return depwait_build

    def testBuildqueueRemoval(self):
        """Test removing buildqueue items.

        Removing a Buildqueue row should also remove its associated
        BuildPackageJob and Job rows.
        """
        # Create a build in depwait.
        depwait_build = self._setupSimpleDepwaitContext()
        depwait_build_id = depwait_build.id

        # Grab the relevant db records for later comparison.
        store = Store.of(depwait_build)
        build_package_job = store.find(
            BuildPackageJob, depwait_build.id == BuildPackageJob.build).one()
        build_package_job_id = build_package_job.id
        job_id = store.find(Job, Job.id == build_package_job.job.id).one().id
        build_queue_id = store.find(BuildQueue,
                                    BuildQueue.job == job_id).one().id

        depwait_build.buildqueue_record.destroySelf()

        # Test that the records above no longer exist in the db.
        self.assertEqual(
            store.find(BuildPackageJob,
                       BuildPackageJob.id == build_package_job_id).count(), 0)
        self.assertEqual(store.find(Job, Job.id == job_id).count(), 0)
        self.assertEqual(
            store.find(BuildQueue, BuildQueue.id == build_queue_id).count(), 0)
        # But the build itself still exists.
        self.assertEqual(
            store.find(BinaryPackageBuild,
                       BinaryPackageBuild.id == depwait_build_id).count(), 1)

    def testUpdateDependenciesWorks(self):
        # Calling `IBinaryPackageBuild.updateDependencies` makes the build
        # record ready for dispatch.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def assertRaisesUnparsableDependencies(self, depwait_build, dependencies):
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': dependencies})
        self.assertRaises(UnparsableDependencies,
                          depwait_build.updateDependencies)

    def testInvalidDependencies(self):
        # Calling `IBinaryPackageBuild.updateDependencies` on a build with
        # invalid 'dependencies' raises an AssertionError.
        # Anything not following '<name> [([relation] <version>)][, ...]'
        depwait_build = self._setupSimpleDepwaitContext()

        # None is not a valid dependency value.
        self.assertRaisesUnparsableDependencies(depwait_build, None)

        # Missing 'name'.
        self.assertRaisesUnparsableDependencies(depwait_build, u'(>> version)')

        # Missing 'version'.
        self.assertRaisesUnparsableDependencies(depwait_build, u'name (>>)')

        # Missing comma between dependencies.
        self.assertRaisesUnparsableDependencies(depwait_build, u'name1 name2')

    def testBug378828(self):
        # `IBinaryPackageBuild.updateDependencies` copes with the
        # scenario where the corresponding source publication is not
        # active (deleted) and the source original component is not a
        # valid ubuntu component.
        depwait_build = self._setupSimpleDepwaitContext()

        spr = depwait_build.source_package_release
        depwait_build.current_source_publication.requestDeletion(spr.creator)
        contrib = getUtility(IComponentSet).new('contrib')
        removeSecurityProxy(spr).component = contrib

        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testVersionedDependencies(self):
        # `IBinaryPackageBuild.updateDependencies` supports versioned
        # dependencies. A build will not be retried unless the candidate
        # complies with the version restriction.
        # In this case, dep-bin 666 is available. >> 666 isn't
        # satisfied, but >= 666 is.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (>> 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'dep-bin (>> 666)')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (>= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')

    def testVersionedDependencyOnOldPublication(self):
        # `IBinaryPackageBuild.updateDependencies` doesn't just consider
        # the latest publication. There may be older publications which
        # satisfy the version constraints (in other archives or pockets).
        # In this case, dep-bin 666 and 999 are available, so both = 666
        # and = 999 are satisfied.
        depwait_build = self._setupSimpleDepwaitContext()
        self.publisher.getPubBinaries(
            binaryname='dep-bin',
            version='999',
            status=PackagePublishingStatus.PUBLISHED)
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': u'dep-bin (= 999)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, u'')
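# Illustrative sketch only, not Launchpad's actual parser: a regular
# expression for the '<name> [([relation] <version>)][, ...]' format that
# testInvalidDependencies exercises. The helper name and the exact grammar
# are assumptions; it merely shows why strings such as '(>> version)',
# 'name (>>)' and 'name1 name2' count as unparsable. Disjunctions ('|'),
# covered by a later test, are ignored here for brevity.
import re

_DEP_CLAUSE = re.compile(
    r'^\s*[a-z0-9][a-z0-9+.-]*'                        # package name
    r'(\s*\(\s*(<<|<=|<|=|>=|>>|>)\s+[^\s()]+\s*\))?'  # optional (relation version)
    r'\s*$')


def looks_parsable(dependencies):
    # None and empty strings are rejected; otherwise every comma-separated
    # clause must match the clause pattern above.
    if not dependencies:
        return False
    return all(
        _DEP_CLAUSE.match(clause) for clause in dependencies.split(','))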
class TestBuildUpdateDependencies(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def _setupSimpleDepwaitContext(self):
        """Use `SoyuzTestPublisher` to setup a simple depwait context.

        Return an `IBinaryPackageBuild` in MANUALDEWAIT state and depending
        on a binary that exists and is reachable.
        """
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        depwait_source = self.publisher.getPubSource(
            sourcename='depwait-source')

        self.publisher.getPubBinaries(
            binaryname='dep-bin',
            status=PackagePublishingStatus.PUBLISHED)

        [depwait_build] = depwait_source.createMissingBuilds()
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin'})
        return depwait_build

    def testUpdateDependenciesWorks(self):
        # Calling `IBinaryPackageBuild.updateDependencies` makes the build
        # record ready for dispatch.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def assertRaisesUnparsableDependencies(self, depwait_build, dependencies):
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': dependencies})
        self.assertRaises(
            UnparsableDependencies, depwait_build.updateDependencies)

    def testInvalidDependencies(self):
        # Calling `IBinaryPackageBuild.updateDependencies` on a build with
        # invalid 'dependencies' raises an AssertionError.
        # Anything not following '<name> [([relation] <version>)][, ...]'
        depwait_build = self._setupSimpleDepwaitContext()

        # None is not a valid dependency value.
        self.assertRaisesUnparsableDependencies(depwait_build, None)

        # Missing 'name'.
        self.assertRaisesUnparsableDependencies(depwait_build, '(>> version)')

        # Missing 'version'.
        self.assertRaisesUnparsableDependencies(depwait_build, 'name (>>)')

        # Missing comma between dependencies.
        self.assertRaisesUnparsableDependencies(depwait_build, 'name1 name2')

    def testBug378828(self):
        # `IBinaryPackageBuild.updateDependencies` copes with the
        # scenario where the corresponding source publication is not
        # active (deleted) and the source original component is not a
        # valid ubuntu component.
        depwait_build = self._setupSimpleDepwaitContext()

        spr = depwait_build.source_package_release
        depwait_build.current_source_publication.requestDeletion(
            spr.creator)
        contrib = getUtility(IComponentSet).new('contrib')
        removeSecurityProxy(spr).component = contrib

        self.layer.txn.commit()
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testVersionedDependencies(self):
        # `IBinaryPackageBuild.updateDependencies` supports versioned
        # dependencies. A build will not be retried unless the candidate
        # complies with the version restriction.
        # In this case, dep-bin 666 is available. >> 666 isn't
        # satisfied, but >= 666 is.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (>> 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, 'dep-bin (>> 666)')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (>= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testVersionedDependencyOnOldPublication(self):
        # `IBinaryPackageBuild.updateDependencies` doesn't just consider
        # the latest publication. There may be older publications which
        # satisfy the version constraints (in other archives or pockets).
        # In this case, dep-bin 666 and 999 are available, so both = 666
        # and = 999 are satisfied.
        depwait_build = self._setupSimpleDepwaitContext()
        self.publisher.getPubBinaries(
            binaryname='dep-bin', version='999',
            status=PackagePublishingStatus.PUBLISHED)
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (= 666)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (= 999)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')

    def testStrictInequalities(self):
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        for dep, expected in (
                ('dep-bin (<< 444)', 'dep-bin (<< 444)'),
                ('dep-bin (>> 444)', ''),
                ('dep-bin (<< 888)', ''),
                ('dep-bin (>> 888)', 'dep-bin (>> 888)'),
                ):
            depwait_build.updateStatus(
                BuildStatus.MANUALDEPWAIT, slave_status={'dependencies': dep})
            depwait_build.updateDependencies()
            self.assertEqual(expected, depwait_build.dependencies)

    def testDisjunctions(self):
        # If one of a set of alternatives becomes available, that set of
        # alternatives is dropped from the outstanding dependencies.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={
                'dependencies': 'dep-bin (>= 999) | alt-bin, dep-tools'})
        depwait_build.updateDependencies()
        self.assertEqual(
            'dep-bin (>= 999) | alt-bin, dep-tools',
            depwait_build.dependencies)

        self.publisher.getPubBinaries(
            binaryname='alt-bin', status=PackagePublishingStatus.PUBLISHED)
        self.layer.txn.commit()

        depwait_build.updateDependencies()
        self.assertEqual('dep-tools', depwait_build.dependencies)

    def testAptVersionConstraints(self):
        # launchpad-buildd can return apt-style version constraints
        # using < and > rather than << and >>.
        depwait_build = self._setupSimpleDepwaitContext()
        self.layer.txn.commit()

        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (> 666), dep-bin (< 777)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, 'dep-bin (> 666)')
        depwait_build.updateStatus(
            BuildStatus.MANUALDEPWAIT,
            slave_status={'dependencies': 'dep-bin (> 665)'})
        depwait_build.updateDependencies()
        self.assertEqual(depwait_build.dependencies, '')
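# Another illustrative sketch, not Launchpad's implementation: it mirrors the
# behaviour asserted above by dropping every comma-separated group in which
# at least one '|' alternative is satisfied by a published binary. The
# function name is hypothetical, and version comparison is simplified to
# integers (enough for the '665'/'666'/'999' examples); real code would need
# proper Debian version ordering.
_RELATION_CHECKS = {
    '<<': lambda a, b: a < b, '<': lambda a, b: a < b,
    '<=': lambda a, b: a <= b, '=': lambda a, b: a == b,
    '>=': lambda a, b: a >= b, '>>': lambda a, b: a > b,
    '>': lambda a, b: a > b,
}


def remaining_dependencies(dependencies, published):
    # `published` maps binary package names to lists of available versions.
    unsatisfied = []
    for group in dependencies.split(','):
        satisfied = False
        for alternative in (alt.strip() for alt in group.split('|')):
            name, _, constraint = alternative.partition(' ')
            versions = published.get(name, [])
            if not constraint:
                satisfied = satisfied or bool(versions)
                continue
            relation, wanted = constraint.strip('()').split()
            if any(_RELATION_CHECKS[relation](int(v), int(wanted))
                   for v in versions):
                satisfied = True
        if not satisfied:
            unsatisfied.append(group.strip())
    return ', '.join(unsatisfied)

# For example, remaining_dependencies('dep-bin (>> 666)', {'dep-bin': ['666']})
# evaluates to 'dep-bin (>> 666)', while the '(>= 666)' form evaluates to ''.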
class TestBuild(TestCaseWithFactory):

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuild, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor = self.factory.makeProcessor(supports_virtualized=True)
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(
                processors=[self.processor])
        self.now = datetime.now(pytz.UTC)

    def test_title(self):
        # A build has a title which describes the source version being built
        # and the series and architecture it is targeted at.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_title = '%s build of %s %s in %s %s RELEASE' % (
            self.das.architecturetag, spph.source_package_name,
            spph.source_package_version, self.distroseries.distribution.name,
            self.distroseries.name)
        self.assertEqual(expected_title, build.title)

    def test_linking(self):
        # A build directly links to the archive, distribution, distroseries,
        # distroarchseries, and pocket in its context, as well as to the
        # source version that generated it.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(self.distroseries.main_archive, build.archive)
        self.assertEqual(self.distroseries.distribution, build.distribution)
        self.assertEqual(self.distroseries, build.distro_series)
        self.assertEqual(self.das, build.distro_arch_series)
        self.assertEqual(PackagePublishingPocket.RELEASE, build.pocket)
        self.assertEqual(self.das.architecturetag, build.arch_tag)
        self.assertTrue(build.virtualized)
        self.assertEqual(
            '%s - %s' %
            (spph.source_package_name, spph.source_package_version),
            build.source_package_release.title)

    def test_processed_builds(self):
        # Builds that have already been processed also offer additional
        # information about the process, such as the times they started and
        # finished and their 'log' and 'upload_changesfile' librarian files.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn,
            version=version,
            distroseries=self.distroseries,
            status=PackagePublishingStatus.PUBLISHED)
        with person_logged_in(self.admin):
            binary = self.publisher.getPubBinaries(
                binaryname=spn,
                distroseries=self.distroseries,
                pub_source=spph,
                version=version,
                builder=self.builder)
        build = binary[0].binarypackagerelease.build
        self.assertTrue(build.was_built)
        self.assertEqual(PackageUploadStatus.DONE, build.package_upload.status)
        self.assertEqual(datetime(2008, 1, 1, 0, 0, 0, tzinfo=pytz.UTC),
                         build.date_started)
        self.assertEqual(datetime(2008, 1, 1, 0, 5, 0, tzinfo=pytz.UTC),
                         build.date_finished)
        self.assertEqual(timedelta(minutes=5), build.duration)
        expected_buildlog = 'buildlog_%s-%s-%s.%s_%s_FULLYBUILT.txt.gz' % (
            self.distroseries.distribution.name, self.distroseries.name,
            self.das.architecturetag, spn, version)
        self.assertEqual(expected_buildlog, build.log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' %
            (self.distroseries.distribution.name, spn, version, build.id))
        expected_buildlog_url = '%s/%s' % (url_start, expected_buildlog)
        self.assertEqual(expected_buildlog_url, build.log_url)
        expected_changesfile = '%s_%s_%s.changes' % (spn, version,
                                                     self.das.architecturetag)
        self.assertEqual(expected_changesfile,
                         build.upload_changesfile.filename)
        expected_changesfile_url = '%s/%s' % (url_start, expected_changesfile)
        self.assertEqual(expected_changesfile_url, build.changesfile_url)
        # Since this build was successful, it cannot be retried.
        self.assertFalse(build.can_be_retried)

    def test_current_component(self):
        # The currently published component is provided via the
        # 'current_component' property.  It looks over the publishing records
        # and finds the current publication of the source in question.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual('main', build.current_component.name)
        # It may not be the same as the source package release's original
        # component.
        self.assertEqual('main', build.source_package_release.component.name)
        # If the package has no uploads, its package_upload is None
        self.assertIsNone(build.package_upload)

    def test_current_component_when_unpublished(self):
        # Production has some buggy builds without source publications.
        # current_component returns None in that case.
        spph = self.publisher.getPubSource()
        other_das = self.factory.makeDistroArchSeries()
        build = getUtility(IBinaryPackageBuildSet).new(
            spph.sourcepackagerelease, spph.archive, other_das,
            PackagePublishingPocket.RELEASE)
        self.assertIs(None, build.current_component)

    def test_retry_for_released_series(self):
        # Builds can not be retried for released distroseries
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(distroseries=distroseries,
                                                processor=self.processor)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries)
        [build] = spph.createMissingBuilds()
        self.assertFalse(build.can_be_retried)

    def test_partner_retry_for_released_series(self):
        # Builds for PARTNER can be retried -- even if the distroseries is
        # released.
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(distroseries=distroseries,
                                                processor=self.processor)
        archive = self.factory.makeArchive(
            purpose=ArchivePurpose.PARTNER,
            distribution=distroseries.distribution)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries,
            archive=archive)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry(self):
        # A build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry_cancelled(self):
        # A cancelled build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.CANCELLED)
        self.assertTrue(build.can_be_retried)

    def test_retry_superseded(self):
        # A superseded build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.SUPERSEDED)
        self.assertTrue(build.can_be_retried)

    def test_uploadlog(self):
        # The upload log can be attached to a build
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertIsNone(build.upload_log)
        self.assertIsNone(build.upload_log_url)
        build.storeUploadLog('sample upload log')
        expected_filename = 'upload_%s_log.txt' % build.id
        self.assertEqual(expected_filename, build.upload_log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' %
            (self.distroseries.distribution.name, spph.source_package_name,
             spph.source_package_version, build.id))
        expected_url = '%s/%s' % (url_start, expected_filename)
        self.assertEqual(expected_url, build.upload_log_url)

    def test_retry_resets_state(self):
        # Retrying a build resets most of the state attributes, but does
        # not modify the first dispatch time.
        build = self.factory.makeBinaryPackageBuild()
        build.updateStatus(BuildStatus.BUILDING, date_started=self.now)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        with person_logged_in(self.admin):
            build.retry()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertEqual(self.now, build.date_first_dispatched)
        self.assertIsNone(build.log)
        self.assertIsNone(build.upload_log)
        self.assertEqual(0, build.failure_count)

    def test_retry_resets_virtualized(self):
        # Retrying a build recalculates its virtualization.
        archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution, virtualized=False)
        build = self.factory.makeBinaryPackageBuild(distroarchseries=self.das,
                                                    archive=archive,
                                                    processor=self.processor)
        self.assertFalse(build.virtualized)
        build.updateStatus(BuildStatus.BUILDING)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        self.processor.supports_nonvirtualized = False
        with person_logged_in(self.admin):
            build.retry()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertTrue(build.virtualized)

    def test_create_bpr(self):
        # Test that we can create a BPR from a given build.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        bpn = self.factory.makeBinaryPackageName(name=spn)
        spph = self.publisher.getPubSource(sourcename=spn,
                                           version=version,
                                           distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        binary = build.createBinaryPackageRelease(
            binarypackagename=bpn,
            version=version,
            summary='',
            description='',
            binpackageformat=BinaryPackageFormat.DEB,
            component=spph.sourcepackagerelease.component.id,
            section=spph.sourcepackagerelease.section.id,
            priority=PackagePublishingPriority.STANDARD,
            installedsize=0,
            architecturespecific=False)
        self.assertEqual(1, build.binarypackages.count())
        self.assertEqual([binary], list(build.binarypackages))

    def test_multiple_create_bpr(self):
        # We can create multiple BPRs from a build
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(sourcename=spn,
                                           version=version,
                                           distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_names = []
        for i in range(15):
            bpn_name = '%s-%s' % (spn, i)
            bpn = self.factory.makeBinaryPackageName(bpn_name)
            expected_names.append(bpn_name)
            build.createBinaryPackageRelease(
                binarypackagename=bpn,
                version=str(i),
                summary='',
                description='',
                binpackageformat=BinaryPackageFormat.DEB,
                component=spph.sourcepackagerelease.component.id,
                section=spph.sourcepackagerelease.section.id,
                priority=PackagePublishingPriority.STANDARD,
                installedsize=0,
                architecturespecific=False)
        self.assertEqual(15, build.binarypackages.count())
        bin_names = [b.name for b in build.binarypackages]
        # Verify that .binarypackages returns them sorted by name.
        expected_names.sort()
        self.assertEqual(expected_names, bin_names)

    def test_cannot_rescore_non_needsbuilds_builds(self):
        # If a build record isn't in NEEDSBUILD, it cannot be rescored.
        # We also need to be logged in as an admin to attempt the rescore.
        with person_logged_in(self.admin):
            [bpph] = self.publisher.getPubBinaries(
                binaryname=self.factory.getUniqueString(),
                version="%s.1" % self.factory.getUniqueInteger(),
                distroseries=self.distroseries)
            build = bpph.binarypackagerelease.build
            self.assertRaises(CannotBeRescored, build.rescore, 20)

    def test_rescore_builds(self):
        # If the user has build-admin privileges, they can rescore builds
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(BuildStatus.NEEDSBUILD, build.status)
        self.assertEqual(2505, build.buildqueue_record.lastscore)
        with person_logged_in(self.admin):
            build.rescore(5000)
            transaction.commit()
        self.assertEqual(5000, build.buildqueue_record.lastscore)

    def test_source_publication_override(self):
        # Components can be overridden in builds.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEqual(spph, build.current_source_publication)
        universe = getUtility(IComponentSet)['universe']
        overridden_spph = spph.changeOverride(new_component=universe)
        # The current source publication now points to the overridden
        # publication.
        self.assertNotEqual(spph, build.current_source_publication)
        self.assertEqual(overridden_spph, build.current_source_publication)

    def test_estimated_duration(self):
        # Builds have an estimated duration that is set to the duration of
        # a previous build of the same source.
        spn = self.factory.getUniqueString()
        spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [build] = spph.createMissingBuilds()
        # Duration is based on package size if there is no previous build.
        self.assertEqual(timedelta(0, 60),
                         build.buildqueue_record.estimated_duration)
        # Set the build as done, and its duration.
        build.updateStatus(BuildStatus.BUILDING,
                           date_started=self.now - timedelta(minutes=72))
        build.updateStatus(BuildStatus.FULLYBUILT, date_finished=self.now)
        build.buildqueue_record.destroySelf()
        new_spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [new_build] = new_spph.createMissingBuilds()
        # The duration for this build is now 72 minutes.
        self.assertEqual(timedelta(0, 72 * 60),
                         new_build.buildqueue_record.estimated_duration)

    def test_store_uploadlog_refuses_to_overwrite(self):
        # Storing an upload log for a build will fail if the build already
        # has an upload log.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        build.storeUploadLog('foo')
        self.assertRaises(AssertionError, build.storeUploadLog, 'bar')
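# A hedged sketch, an assumption rather than Launchpad's implementation: it
# condenses the rule the retry tests above describe into one function. The
# name sketch_can_be_retried is hypothetical, and the real property checks
# more states and pockets than the ones covered by these tests.
RETRYABLE_STATUSES = frozenset([
    BuildStatus.FAILEDTOBUILD,
    BuildStatus.CANCELLED,
    BuildStatus.SUPERSEDED,
])


def sketch_can_be_retried(build):
    # A build is retried only from a retryable terminal state; PARTNER
    # builds ignore the released-series restriction.
    if build.status not in RETRYABLE_STATUSES:
        return False
    if build.archive.purpose == ArchivePurpose.PARTNER:
        return True
    return build.distro_series.status != SeriesStatus.OBSOLETE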
class TestDistroSeriesBinaryPackage(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create a distroseriesbinarypackage to play with."""
        super(TestDistroSeriesBinaryPackage, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()
        self.distroseries = self.publisher.distroseries
        self.distribution = self.distroseries.distribution
        binaries = self.publisher.getPubBinaries(binaryname='foo-bin',
                                                 summary='Foo is the best')
        binary_pub = binaries[0]
        self.binary_package_name = (
            binary_pub.binarypackagerelease.binarypackagename)
        self.distroseries_binary_package = DistroSeriesBinaryPackage(
            self.distroseries, self.binary_package_name)

    def test_cache_attribute_when_two_cache_objects(self):
        # There are situations where there is a cache object for each
        # distro archive; we need to handle this without OOPSing.
        # See bug 580181.
        distro_archive_1 = self.distribution.main_archive
        distro_archive_2 = self.distribution.all_distro_archives[1]

        # Publish the same binary in another distro archive.
        self.publisher.getPubBinaries(binaryname='foo-bin',
                                      summary='Foo is the best',
                                      archive=distro_archive_2)

        logger = BufferLogger()
        with dbuser(config.statistician.dbuser):
            DistroSeriesPackageCache._update(self.distroseries,
                                             self.binary_package_name,
                                             distro_archive_1, logger)

            DistroSeriesPackageCache._update(self.distroseries,
                                             self.binary_package_name,
                                             distro_archive_2, logger)

        self.assertEqual('Foo is the best',
                         self.distroseries_binary_package.summary)

    def test_none_cache_passed_at_init_counts_as_cached(self):
        # If the value None is passed as the constructor parameter
        # "cache", it is treated as a valid cached value.
        # Accessing the property DistroSeriesBinaryPackage.cache
        # later does not lead to the execution of an SQL query to
        # retrieve a DistroSeriesPackageCache record.
        binary_package = DistroSeriesBinaryPackage(self.distroseries,
                                                   self.binary_package_name,
                                                   cache=None)
        with StormStatementRecorder() as recorder:
            binary_package.cache
        self.assertThat(recorder, HasQueryCount(Equals(0)))

        # If the parameter "cache" was not passed, accessing
        # DistroSeriesBinaryPackage.cache for the first time requires
        # at least one SQL query.
        with StormStatementRecorder() as recorder:
            self.distroseries_binary_package.cache
        self.assertThat(recorder, HasQueryCount(NotEquals(0)))
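# A minimal sketch of the caching pattern the test above relies on; this is
# an assumption about the design, not the actual DistroSeriesBinaryPackage
# code. A sentinel distinguishes "cache never fetched" from "cache fetched
# and known to be None", so an explicit cache=None avoids any later query.
_NOT_FETCHED = object()


class CachedPackageExample:

    def __init__(self, fetch_cache, cache=_NOT_FETCHED):
        # fetch_cache is a callable performing the (expensive) lookup.
        self._fetch_cache = fetch_cache
        self._cache = cache

    @property
    def cache(self):
        # Only perform the lookup if no cache value (not even None) was
        # supplied to the constructor.
        if self._cache is _NOT_FETCHED:
            self._cache = self._fetch_cache()
        return self._cache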
class TestScriptRunning(TestCaseWithFactory):
    """Run parse-ppa-apache-access-logs.py and test its outcome."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestScriptRunning, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        self.store = IStore(BinaryPackageReleaseDownloadCount)

        self.archive = getUtility(IPersonSet).getByName('cprov').archive
        self.archive.require_virtualized = False
        self.archive.setProcessors(getUtility(IProcessorSet).getAll())

        self.foo_i386, self.foo_hppa = self.publisher.getPubBinaries(
            archive=self.archive, architecturespecific=True)
        self.bar_i386, self.bar_hppa = self.publisher.getPubBinaries(
            binaryname='bar-bin',
            archive=self.archive,
            architecturespecific=False)

        # Commit so the script can see our changes.
        import transaction
        transaction.commit()

    def test_script_run(self):
        # Before we run the script, there are no binary package
        # downloads in the database.
        # After the script runs, we check that the results in the
        # database match the sample log files used for this test:
        # lib/lp/soyuz/scripts/tests/ppa-apache-log-files
        # In addition to the wanted access log file, there is also an
        # error log that will be skipped by the configured glob.
        self.assertEqual(
            0,
            self.store.find(BinaryPackageReleaseDownloadCount).count())

        process = subprocess.Popen(
            'cronscripts/parse-ppa-apache-access-logs.py',
            shell=True,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        (out, err) = process.communicate()
        self.assertEqual(process.returncode, 0,
                         "stdout:%s, stderr:%s" % (out, err))

        # The error log does not match the glob, so it is not processed,
        # and no OOPS is generated.
        self.oops_capture.sync()
        self.assertEqual([], self.oopses)

        # Must commit because the changes were done in another transaction.
        import transaction
        transaction.commit()
        results = self.store.find(BinaryPackageReleaseDownloadCount)

        australia = getUtility(ICountrySet)['AU']
        austria = getUtility(ICountrySet)['AT']

        self.assertEqual(
            [(self.foo_hppa.binarypackagerelease, self.archive,
              date(2008, 6, 13), australia, 1),
             (self.foo_i386.binarypackagerelease, self.archive,
              date(2008, 6, 13), australia, 1),
             (self.foo_i386.binarypackagerelease, self.archive,
              date(2008, 6, 13), austria, 1),
             (self.bar_i386.binarypackagerelease, self.archive,
              date(2008, 6, 14), None, 1),
             (self.bar_i386.binarypackagerelease, self.archive,
              date(2008, 6, 14), austria, 1)],
            sorted([(result.binary_package_release, result.archive, result.day,
                     result.country, result.count) for result in results],
                   key=lambda r: (r[0].id, r[2], r[3].name if r[3] else '')))
class TestBuild(TestCaseWithFactory):

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuild, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)
        self.now = datetime.now(pytz.UTC)

    def test_title(self):
        # A build has a title which describes the source version being built
        # and the series and architecture it is targeted at.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_title = '%s build of %s %s in %s %s RELEASE' % (
            self.das.architecturetag, spph.source_package_name,
            spph.source_package_version, self.distroseries.distribution.name,
            self.distroseries.name)
        self.assertEquals(expected_title, build.title)

    def test_linking(self):
        # A build directly links to the archive, distribution, distroseries,
        # distroarchseries, and pocket in its context, as well as to the
        # source version that generated it.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEquals(self.distroseries.main_archive, build.archive)
        self.assertEquals(self.distroseries.distribution, build.distribution)
        self.assertEquals(self.distroseries, build.distro_series)
        self.assertEquals(self.das, build.distro_arch_series)
        self.assertEquals(PackagePublishingPocket.RELEASE, build.pocket)
        self.assertEquals(self.das.architecturetag, build.arch_tag)
        self.assertTrue(build.is_virtualized)
        self.assertEquals(
            '%s - %s' % (spph.source_package_name,
                spph.source_package_version),
            build.source_package_release.title)

    def test_processed_builds(self):
        # Builds that have already been processed also offer additional
        # information about the process, such as the times they started and
        # finished and their 'log' and 'upload_changesfile' librarian files.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version,
            distroseries=self.distroseries,
            status=PackagePublishingStatus.PUBLISHED)
        with person_logged_in(self.admin):
            binary = self.publisher.getPubBinaries(binaryname=spn,
                distroseries=self.distroseries, pub_source=spph,
                version=version, builder=self.builder)
        build = binary[0].binarypackagerelease.build
        self.assertTrue(build.was_built)
        self.assertEquals(
            PackageUploadStatus.DONE, build.package_upload.status)
        self.assertEquals(
            datetime(2008, 1, 1, 0, 0, 0, tzinfo=pytz.UTC),
            build.date_started)
        self.assertEquals(
            datetime(2008, 1, 1, 0, 5, 0, tzinfo=pytz.UTC),
            build.date_finished)
        self.assertEquals(timedelta(minutes=5), build.duration)
        expected_buildlog = 'buildlog_%s-%s-%s.%s_%s_FULLYBUILT.txt.gz' % (
            self.distroseries.distribution.name, self.distroseries.name,
            self.das.architecturetag, spn, version)
        self.assertEquals(expected_buildlog, build.log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' % (
                self.distroseries.distribution.name, spn, version, build.id))
        expected_buildlog_url = '%s/%s' % (url_start, expected_buildlog)
        self.assertEquals(expected_buildlog_url, build.log_url)
        expected_changesfile = '%s_%s_%s.changes' % (
            spn, version, self.das.architecturetag)
        self.assertEquals(
            expected_changesfile, build.upload_changesfile.filename)
        expected_changesfile_url = '%s/%s' % (url_start, expected_changesfile)
        self.assertEquals(expected_changesfile_url, build.changesfile_url)
        # Since this build was successful, it cannot be retried.
        self.assertFalse(build.can_be_retried)

    def test_current_component(self):
        # The currently published component is provided via the
        # 'current_component' property.  It looks over the publishing records
        # and finds the current publication of the source in question.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEquals('main', build.current_component.name)
        # It may not be the same as the source package release's original
        # component.
        self.assertEquals('main', build.source_package_release.component.name)
        # If the package has no uploads, its package_upload is None
        self.assertEquals(None, build.package_upload)

    def test_current_component_when_unpublished(self):
        # Production has some buggy builds without source publications.
        # current_component returns None in that case.
        spph = self.publisher.getPubSource()
        other_das = self.factory.makeDistroArchSeries()
        build = spph.sourcepackagerelease.createBuild(
            other_das, PackagePublishingPocket.RELEASE, spph.archive)
        self.assertIs(None, build.current_component)

    def test_retry_for_released_series(self):
        # Builds can not be retried for released distroseries
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(
            distroseries=distroseries, processor=self.processor,
            supports_virtualized=True)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries)
        [build] = spph.createMissingBuilds()
        self.assertFalse(build.can_be_retried)

    def test_partner_retry_for_released_series(self):
        # Builds for PARTNER can be retried -- even if the distroseries is
        # released.
        distroseries = self.factory.makeDistroSeries()
        das = self.factory.makeDistroArchSeries(
            distroseries=distroseries, processor=self.processor,
            supports_virtualized=True)
        archive = self.factory.makeArchive(
            purpose=ArchivePurpose.PARTNER,
            distribution=distroseries.distribution)
        with person_logged_in(self.admin):
            distroseries.nominatedarchindep = das
            distroseries.status = SeriesStatus.OBSOLETE
            self.publisher.addFakeChroots(distroseries=distroseries)
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=distroseries, archive=archive)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry(self):
        # A build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        self.assertTrue(build.can_be_retried)

    def test_retry_cancelled(self):
        # A cancelled build can be retried
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.CANCELLED)
        self.assertTrue(build.can_be_retried)

    def test_uploadlog(self):
        # The upload log can be attached to a build
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEquals(None, build.upload_log)
        self.assertEquals(None, build.upload_log_url)
        build.storeUploadLog('sample upload log')
        expected_filename = 'upload_%s_log.txt' % build.id
        self.assertEquals(expected_filename, build.upload_log.filename)
        url_start = (
            'http://launchpad.dev/%s/+source/%s/%s/+build/%s/+files' % (
                self.distroseries.distribution.name, spph.source_package_name,
                spph.source_package_version, build.id))
        expected_url = '%s/%s' % (url_start, expected_filename)
        self.assertEquals(expected_url, build.upload_log_url)

    def test_retry_resets_state(self):
        # Retrying a build resets most of the state attributes, but does
        # not modify the first dispatch time.
        build = self.factory.makeBinaryPackageBuild()
        build.updateStatus(BuildStatus.BUILDING, date_started=self.now)
        build.updateStatus(BuildStatus.FAILEDTOBUILD)
        build.gotFailure()
        with person_logged_in(self.admin):
            build.retry()
        self.assertEquals(BuildStatus.NEEDSBUILD, build.status)
        self.assertEquals(self.now, build.date_first_dispatched)
        self.assertEquals(None, build.log)
        self.assertEquals(None, build.upload_log)
        self.assertEquals(0, build.failure_count)

    def test_create_bpr(self):
        # Test that we can create a BPR from a given build.
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        bpn = self.factory.makeBinaryPackageName(name=spn)
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version, distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        binary = build.createBinaryPackageRelease(
            binarypackagename=bpn, version=version, summary='',
            description='', binpackageformat=BinaryPackageFormat.DEB,
            component=spph.sourcepackagerelease.component.id,
            section=spph.sourcepackagerelease.section.id,
            priority=PackagePublishingPriority.STANDARD, installedsize=0,
            architecturespecific=False)
        self.assertEquals(1, build.binarypackages.count())
        self.assertEquals([binary], list(build.binarypackages))

    def test_multiple_create_bpr(self):
        # We can create multiple BPRs from a build
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        spph = self.publisher.getPubSource(
            sourcename=spn, version=version, distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        expected_names = []
        for i in range(15):
            bpn_name = '%s-%s' % (spn, i)
            bpn = self.factory.makeBinaryPackageName(bpn_name)
            expected_names.append(bpn_name)
            build.createBinaryPackageRelease(
                binarypackagename=bpn, version=str(i), summary='',
                description='', binpackageformat=BinaryPackageFormat.DEB,
                component=spph.sourcepackagerelease.component.id,
                section=spph.sourcepackagerelease.section.id,
                priority=PackagePublishingPriority.STANDARD, installedsize=0,
                architecturespecific=False)
        self.assertEquals(15, build.binarypackages.count())
        bin_names = [b.name for b in build.binarypackages]
        # Verify that .binarypackages returns the releases sorted by name.
        expected_names.sort()
        self.assertEquals(expected_names, bin_names)

    def test_cannot_rescore_non_needsbuilds_builds(self):
        # If a build record isn't in NEEDSBUILD, it cannot be rescored.
        # We also need to be logged in as an admin to attempt the rescore.
        with person_logged_in(self.admin):
            [bpph] = self.publisher.getPubBinaries(
                binaryname=self.factory.getUniqueString(),
                version="%s.1" % self.factory.getUniqueInteger(),
                distroseries=self.distroseries)
            build = bpph.binarypackagerelease.build
            self.assertRaises(CannotBeRescored, build.rescore, 20)

    def test_rescore_builds(self):
        # If the user has build-admin privileges, they can rescore builds
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEquals(BuildStatus.NEEDSBUILD, build.status)
        self.assertEquals(2505, build.buildqueue_record.lastscore)
        with person_logged_in(self.admin):
            build.rescore(5000)
            transaction.commit()
        self.assertEquals(5000, build.buildqueue_record.lastscore)

    def test_source_publication_override(self):
        # A build's current source publication follows component overrides.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        self.assertEquals(spph, build.current_source_publication)
        universe = getUtility(IComponentSet)['universe']
        overridden_spph = spph.changeOverride(new_component=universe)
        # The current source publication now points to the overridden
        # publication.
        self.assertNotEquals(spph, build.current_source_publication)
        self.assertEquals(overridden_spph, build.current_source_publication)

    def test_estimated_duration(self):
        # A build's estimated duration is taken from the duration of a
        # previous build of the same source.
        spn = self.factory.getUniqueString()
        spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [build] = spph.createMissingBuilds()
        # Duration is based on package size if there is no previous build.
        self.assertEquals(
            timedelta(0, 60), build.buildqueue_record.estimated_duration)
        # Set the build as done, and its duration.
        build.updateStatus(
            BuildStatus.BUILDING,
            date_started=self.now - timedelta(minutes=72))
        build.updateStatus(BuildStatus.FULLYBUILT, date_finished=self.now)
        build.buildqueue_record.destroySelf()
        new_spph = self.publisher.getPubSource(
            sourcename=spn, status=PackagePublishingStatus.PUBLISHED)
        [new_build] = new_spph.createMissingBuilds()
        # The estimated duration for the new build is now 72 minutes.
        self.assertEquals(
            timedelta(0, 72 * 60),
            new_build.buildqueue_record.estimated_duration)

    def test_store_uploadlog_refuses_to_overwrite(self):
        # Storing an upload log for a build will fail if the build already
        # has an upload log.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries)
        [build] = spph.createMissingBuilds()
        build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        build.storeUploadLog('foo')
        self.assertRaises(AssertionError, build.storeUploadLog, 'bar')
Example #10
class ArchiveExpiryTestBase(TestCaseWithFactory):
    """base class for the expire-archive-files.py script tests."""
    layer = LaunchpadZopelessLayer
    dbuser = config.binaryfile_expire.dbuser

    def setUp(self):
        """Set up some test publications."""
        super(ArchiveExpiryTestBase, self).setUp()
        # Configure the test publisher.
        switch_dbuser("launchpad")
        self.stp = SoyuzTestPublisher()
        self.stp.prepareBreezyAutotest()

        # Prepare some date properties for the tests to use.
        self.now = datetime.now(pytz.UTC)
        self.under_threshold_date = self.now - timedelta(days=29)
        self.over_threshold_date = self.now - timedelta(days=31)

    def getScript(self, test_args=None):
        """Return a ArchiveExpirer instance."""
        if test_args is None:
            test_args = []
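        # The script is always run with a 30-day threshold, matching the
        # 29/31-day dates prepared in setUp().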
        test_args.extend(['--expire-after', '30'])
        script = ArchiveExpirer("test expirer", test_args=test_args)
        script.logger = BufferLogger()
        script.txn = self.layer.txn
        return script

    def runScript(self):
        """Run the expiry script and return."""
        script = self.getScript()
        switch_dbuser(self.dbuser)
        script.main()

    def _setUpExpirablePublications(self, archive=None):
        """Helper to set up two publications that are both expirable."""
        if archive is None:
            archive = self.archive
        pkg5 = self.stp.getPubSource(
            sourcename="pkg5", architecturehintlist="i386", archive=archive,
            dateremoved=self.over_threshold_date)
        other_source = pkg5.copyTo(
            pkg5.distroseries, pkg5.pocket, self.archive2)
        other_source.dateremoved = self.over_threshold_date
        [pub] = self.stp.getPubBinaries(
            pub_source=pkg5, dateremoved=self.over_threshold_date,
            archive=archive)
        [other_binary] = pub.copyTo(
            pub.distroarchseries.distroseries, pub.pocket, self.archive2)
        other_binary.dateremoved = self.over_threshold_date
        return pkg5, pub

    def assertBinaryExpired(self, publication):
        self.assertNotEqual(
            publication.binarypackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be set, but it's not.")

    def assertBinaryNotExpired(self, publication):
        self.assertEqual(
            publication.binarypackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be None, but it's not.")

    def assertSourceExpired(self, publication):
        self.assertNotEqual(
            publication.sourcepackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be set, but it's not.")

    def assertSourceNotExpired(self, publication):
        self.assertEqual(
            publication.sourcepackagerelease.files[0].libraryfile.expires,
            None,
            "lfa.expires should be None, but it's not.")
Example #11
class TestBuildDepWait(TestCaseWithFactory):

    layer = LaunchpadFunctionalLayer

    def setUp(self):
        super(TestBuildDepWait, self).setUp()
        self.admin = getUtility(IPersonSet).getByEmail(ADMIN_EMAIL)
        # Create everything we need to create builds, such as a
        # DistroArchSeries and a builder.
        self.processor = self.factory.makeProcessor()
        self.distroseries = self.factory.makeDistroSeries()
        self.das = self.factory.makeDistroArchSeries(
            distroseries=self.distroseries, processor=self.processor,
            supports_virtualized=True)
        self.archive = self.factory.makeArchive(
            distribution=self.distroseries.distribution,
            purpose=ArchivePurpose.PRIMARY)
        with person_logged_in(self.admin):
            self.publisher = SoyuzTestPublisher()
            self.publisher.prepareBreezyAutotest()
            self.distroseries.nominatedarchindep = self.das
            self.publisher.addFakeChroots(distroseries=self.distroseries)
            self.builder = self.factory.makeBuilder(processor=self.processor)

    def test_update_dependencies(self):
        # Calling .updateDependencies() on a build removes the dependencies
        # that are now satisfied (published and reachable).
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries, archive=self.archive)
        [build] = spph.createMissingBuilds()
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        with person_logged_in(self.admin):
            build.updateStatus(
                BuildStatus.MANUALDEPWAIT,
                slave_status={'dependencies': unicode(spn)})
            [bpph] = self.publisher.getPubBinaries(
                binaryname=spn, distroseries=self.distroseries,
                version=version, builder=self.builder, archive=self.archive,
                status=PackagePublishingStatus.PUBLISHED)
            # Commit to make sure stuff hits the database.
            transaction.commit()
        build.updateDependencies()
        self.assertEquals(u'', build.dependencies)

    def test_update_dependencies_respects_component(self):
        # Since main can only utilise packages that are published in main,
        # dependencies are not satisfied if they are not in main.
        spph = self.publisher.getPubSource(
            sourcename=self.factory.getUniqueString(),
            version="%s.1" % self.factory.getUniqueInteger(),
            distroseries=self.distroseries, archive=self.archive)
        [build] = spph.createMissingBuilds()
        spn = self.factory.getUniqueString()
        version = "%s.1" % self.factory.getUniqueInteger()
        with person_logged_in(self.admin):
            build.updateStatus(
                BuildStatus.MANUALDEPWAIT,
                slave_status={'dependencies': unicode(spn)})
            [bpph] = self.publisher.getPubBinaries(
                binaryname=spn, distroseries=self.distroseries,
                version=version, builder=self.builder, archive=self.archive,
                status=PackagePublishingStatus.PUBLISHED,
                component='universe')
            # Commit to make sure stuff hits the database.
            transaction.commit()
        build.updateDependencies()
        # Since the dependency is in universe, we still can't see it.
        self.assertEquals(unicode(spn), build.dependencies)
        with person_logged_in(self.admin):
            bpph.component = getUtility(IComponentSet)['main']
            transaction.commit()
        # Now that we have moved it to main, we can see it.
        build.updateDependencies()
        self.assertEquals(u'', build.dependencies)
class TestScriptRunning(TestCaseWithFactory):
    """Run parse-ppa-apache-access-logs.py and test its outcome."""

    layer = LaunchpadZopelessLayer

    def setUp(self):
        super(TestScriptRunning, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()

        self.store = IStore(BinaryPackageReleaseDownloadCount)

        self.archive = getUtility(IPersonSet).getByName('cprov').archive
        self.archive.require_virtualized = False

        self.foo_i386, self.foo_hppa = self.publisher.getPubBinaries(
                archive=self.archive, architecturespecific=True)
        self.bar_i386, self.bar_hppa = self.publisher.getPubBinaries(
                binaryname='bar-bin', archive=self.archive,
                architecturespecific=False)

        # Commit so the script can see our changes.
        import transaction
        transaction.commit()

    def test_script_run(self):
        # Before we run the script, there are no binary package
        # downloads in the database.
        # After the script's run, we will check that the results in the
        # database match the sample log files we use for this test:
        # lib/lp/soyuz/scripts/tests/ppa-apache-log-files
        # In addition to the wanted access log file, there is also an
        # error log that will be skipped by the configured glob.
        self.assertEqual(
            0, self.store.find(BinaryPackageReleaseDownloadCount).count())

        process = subprocess.Popen(
            'cronscripts/parse-ppa-apache-access-logs.py', shell=True,
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        (out, err) = process.communicate()
        self.assertEqual(
            process.returncode, 0, "stdout:%s, stderr:%s" % (out, err))

        # The error log does not match the glob, so it is not processed,
        # and no OOPS is generated.
        self.oops_capture.sync()
        self.assertEqual([], self.oopses)

        # Must commit because the changes were done in another transaction.
        import transaction
        transaction.commit()
        results = self.store.find(BinaryPackageReleaseDownloadCount)

        australia = getUtility(ICountrySet)['AU']
        austria = getUtility(ICountrySet)['AT']

        self.assertEqual(
            [(self.foo_hppa.binarypackagerelease,
              self.archive,
              date(2008, 6, 13),
              australia,
              1),
             (self.foo_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 13),
              australia,
              1),
             (self.foo_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 13),
              austria,
              1),
             (self.bar_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 14),
              None,
              1),
             (self.bar_i386.binarypackagerelease,
              self.archive,
              date(2008, 6, 14),
              austria,
              1)],
            sorted(
                [(result.binary_package_release, result.archive, result.day,
                  result.country, result.count) for result in results],
                 key=lambda r: (r[0].id, r[2], r[3].name if r[3] else None)))
class TestDistroSeriesBinaryPackage(TestCaseWithFactory):

    layer = LaunchpadZopelessLayer

    def setUp(self):
        """Create a distroseriesbinarypackage to play with."""
        super(TestDistroSeriesBinaryPackage, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.publisher.prepareBreezyAutotest()
        self.distroseries = self.publisher.distroseries
        self.distribution = self.distroseries.distribution
        binaries = self.publisher.getPubBinaries(
            binaryname='foo-bin', summary='Foo is the best')
        binary_pub = binaries[0]
        self.binary_package_name = (
            binary_pub.binarypackagerelease.binarypackagename)
        self.distroseries_binary_package = DistroSeriesBinaryPackage(
            self.distroseries, self.binary_package_name)

    def test_cache_attribute_when_two_cache_objects(self):
        # There may be a cache object for each distro archive; we need to
        # handle this situation without OOPSing - see bug 580181.
        distro_archive_1 = self.distribution.main_archive
        distro_archive_2 = self.distribution.all_distro_archives[1]

        # Publish the same binary in another distro archive.
        self.publisher.getPubBinaries(
            binaryname='foo-bin', summary='Foo is the best',
            archive=distro_archive_2)

        logger = BufferLogger()
        with dbuser(config.statistician.dbuser):
            DistroSeriesPackageCache._update(
                self.distroseries, self.binary_package_name, distro_archive_1,
                logger)

            DistroSeriesPackageCache._update(
                self.distroseries, self.binary_package_name, distro_archive_2,
                logger)

        self.assertEqual(
            'Foo is the best', self.distroseries_binary_package.summary)

    def test_none_cache_passed_at_init_counts_as_cached(self):
        # If the value None is passed as the constructor parameter
        # "cache", it counts as an already-cached value: accessing the
        # property DistroSeriesBinaryPackage.cache later does not execute
        # an SQL query to retrieve a DistroSeriesPackageCache record.
        binary_package = DistroSeriesBinaryPackage(
            self.distroseries, self.binary_package_name, cache=None)
        with StormStatementRecorder() as recorder:
            binary_package.cache
        self.assertThat(recorder, HasQueryCount(Equals(0)))

        # If the parameter "cache" was not passed, accessing
        # DistroSeriesBinaryPackage.cache for the first time requires
        # at least one SQL query.
        with StormStatementRecorder() as recorder:
            self.distroseries_binary_package.cache
        self.assertThat(recorder, HasQueryCount(NotEquals(0)))
class TestSourcesList(TestCaseWithFactory):
    """Test sources.list contents for building, and related mechanisms."""

    layer = LaunchpadZopelessLayer
    run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=10)

    ubuntu_components = [
        "main", "restricted", "universe", "multiverse", "partner"]

    fingerprints = {
        "*****@*****.**": "0D57E99656BEFB0897606EE9A022DD1F5001B46D",
        "*****@*****.**": (
            "B7B1966662BA8D3F5A6ED89BD640F4A593B2CF67"),
        }

    def setUp(self):
        super(TestSourcesList, self).setUp()
        self.publisher = SoyuzTestPublisher()
        self.ubuntu = getUtility(IDistributionSet).getByName("ubuntu")
        self.hoary = self.ubuntu.getSeries("hoary")
        self.publisher.addFakeChroots(self.hoary)
        self.publisher.setUpDefaultDistroSeries(self.hoary)
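        # Enable every Ubuntu component for hoary so that the expected
        # sources.list lines in the tests below can include all of them.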
        for component_name in self.ubuntu_components:
            component = getUtility(IComponentSet)[component_name]
            if component not in self.hoary.components:
                self.factory.makeComponentSelection(self.hoary, component)

    def test_defaults(self):
        # Non-primary archives by default use the Release, Security and
        # Updates pockets from the primary archive, and all its available
        # components.
        self.assertEqual(
            PackagePublishingPocket.UPDATES, default_pocket_dependency)
        self.assertEqual("multiverse", default_component_dependency_name)
        self.assertEqual(
            (PackagePublishingPocket.RELEASE,
             PackagePublishingPocket.SECURITY,
             PackagePublishingPocket.UPDATES),
            pocket_dependencies[default_pocket_dependency])

    @defer.inlineCallbacks
    def makeArchive(self, signing_key_name="*****@*****.**",
                    publish_binary=False, **kwargs):
        # Start an in-process key server fixture unless the config already
        # provides one.
        try:
            getattr(config, "in-process-key-server-fixture")
        except AttributeError:
            yield self.useFixture(InProcessKeyServerFixture()).start()
        archive = self.factory.makeArchive(distribution=self.ubuntu, **kwargs)
        if signing_key_name is not None:
            key_path = os.path.join(gpgkeysdir, "%s.sec" % signing_key_name)
            yield IArchiveSigningKey(archive).setSigningKey(
                key_path, async_keyserver=True)
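        # The signing key set above is what later shows up, base64-encoded,
        # in the trusted_keys that assertSourcesListAndKeys checks whenever
        # this archive contributes a sources.list entry.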
        if publish_binary:
            self.publisher.getPubBinaries(
                archive=archive, status=PackagePublishingStatus.PUBLISHED)
        defer.returnValue(archive)

    def makeBuild(self, **kwargs):
        pub_source = self.publisher.getPubSource(**kwargs)
        [build] = pub_source.createMissingBuilds()
        return build

    def assertPrimaryCurrentComponent(self, expected, build):
        self.assertEqual(
            expected,
            get_primary_current_component(
                build.archive, build.distro_series,
                build.source_package_release.name).name)

    @defer.inlineCallbacks
    def assertSourcesListAndKeys(self, expected_sources_list,
                                 expected_key_names, build, **kwargs):
        expected_lines = []
        for archive_or_prefix, suffixes in expected_sources_list:
            if IArchive.providedBy(archive_or_prefix):
                prefix = "deb %s " % archive_or_prefix.archive_url
            else:
                prefix = archive_or_prefix + " "
            expected_lines.extend([prefix + suffix for suffix in suffixes])
        sources_list, trusted_keys = yield get_sources_list_for_building(
            build, build.distro_arch_series, build.source_package_release.name,
            **kwargs)
        self.assertEqual(expected_lines, sources_list)
        key_matchers = [
            Base64KeyMatches(self.fingerprints[key_name])
            for key_name in expected_key_names]
        self.assertThat(trusted_keys, MatchesSetwise(*key_matchers))

    @defer.inlineCallbacks
    def test_ppa_with_no_binaries(self):
        # If there are no published binaries in a PPA, only its primary
        # archive dependencies need to be considered.
        ppa = yield self.makeArchive()
        build = self.makeBuild(archive=ppa)
        self.assertEqual(
            0, ppa.getAllPublishedBinaries(
                distroarchseries=build.distro_arch_series,
                status=PackagePublishingStatus.PUBLISHED).count())
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_ppa_with_binaries(self):
        # If there are binaries published in a PPA, then the PPA is
        # considered as well as its primary dependencies.
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_dependent_ppa_with_no_binaries(self):
        # A depended-upon PPA is not considered if it has no published
        # binaries.
        lower_ppa = yield self.makeArchive(
            signing_key_name="*****@*****.**")
        upper_ppa = yield self.makeArchive(publish_binary=True)
        upper_ppa.addArchiveDependency(
            lower_ppa, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["main"])
        build = self.makeBuild(archive=upper_ppa)
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_dependent_ppa_with_binaries(self):
        # A depended-upon PPA is considered if it has published binaries.
        lower_ppa = yield self.makeArchive(
            signing_key_name="*****@*****.**",
            publish_binary=True)
        upper_ppa = yield self.makeArchive(publish_binary=True)
        upper_ppa.addArchiveDependency(
            lower_ppa, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["main"])
        build = self.makeBuild(archive=upper_ppa)
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (lower_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**", "*****@*****.**"],
            build)

    @defer.inlineCallbacks
    def test_lax_supported_component_dependencies(self):
        # Dependencies for series with
        # strict_supported_component_dependencies=False are reasonable.
        # PPAs only have the "main" component.
        lower_ppa = yield self.makeArchive(
            signing_key_name="*****@*****.**",
            publish_binary=True)
        upper_ppa = yield self.makeArchive(publish_binary=True)
        upper_ppa.addArchiveDependency(
            lower_ppa, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["main"])
        upper_ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.UPDATES,
            getUtility(IComponentSet)["restricted"])
        build = self.makeBuild(archive=upper_ppa)
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (lower_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted",
                 "hoary-security main restricted",
                 "hoary-updates main restricted",
                 ]),
             ],
            ["*****@*****.**", "*****@*****.**"],
            build)
        self.hoary.strict_supported_component_dependencies = False
        transaction.commit()
        yield self.assertSourcesListAndKeys(
            [(upper_ppa, ["hoary main"]),
             (lower_ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**", "*****@*****.**"],
            build)

    @defer.inlineCallbacks
    def test_no_op_primary_archive_dependency(self):
        # Overriding the default primary archive dependencies with exactly
        # the same values has no effect.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.UPDATES,
            getUtility(IComponentSet)["multiverse"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_security(self):
        # The primary archive dependency can be modified to behave as an
        # embargoed archive that builds security updates.  This is done by
        # setting the SECURITY pocket dependencies (RELEASE and SECURITY)
        # and following the component dependencies of the component where
        # the source was last published in the primary archive.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.SECURITY)
        build = self.makeBuild(archive=ppa)
        self.assertPrimaryCurrentComponent("universe", build)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main universe",
                 "hoary-security main universe",
                 ]),
             ], [], build)
        self.publisher.getPubSource(
            sourcename="with-ancestry", version="1.0",
            archive=self.ubuntu.main_archive)
        [build_with_ancestry] = self.publisher.getPubSource(
            sourcename="with-ancestry", version="1.1",
            archive=ppa).createMissingBuilds()
        self.assertPrimaryCurrentComponent("main", build_with_ancestry)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main",
                 "hoary-security main",
                 ]),
             ], [], build_with_ancestry)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_release(self):
        # The primary archive dependency can be modified to behave as a
        # pristine build environment based only on what was included in the
        # original release of the corresponding series.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.RELEASE,
            getUtility(IComponentSet)["restricted"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, ["hoary main restricted"])], [], build)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_proposed(self):
        # The primary archive dependency can be modified to extend the build
        # environment for PROPOSED.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.PROPOSED,
            getUtility(IComponentSet)["multiverse"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 "hoary-proposed main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_primary_archive_dependency_backports(self):
        # The primary archive dependency can be modified to extend the build
        # environment for BACKPORTS.
        ppa = yield self.makeArchive()
        ppa.addArchiveDependency(
            self.ubuntu.main_archive, PackagePublishingPocket.BACKPORTS,
            getUtility(IComponentSet)["multiverse"])
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 "hoary-backports main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_partner(self):
        # Similarly to what happens with PPA builds, partner builds may
        # depend on any component in the primary archive.  This behaviour
        # allows scenarios where partner packages may use other
        # restricted/non-free applications from multiverse, and also other
        # partner applications.
        primary, partner = self.ubuntu.all_distro_archives
        self.publisher.getPubBinaries(
            archive=partner, component="partner",
            status=PackagePublishingStatus.PUBLISHED)
        build = self.makeBuild(archive=partner, component="partner")
        yield self.assertSourcesListAndKeys(
            [(partner, ["hoary partner"]),
             (primary, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_partner_proposed(self):
        # The partner archive's PROPOSED pocket builds against itself, but
        # still uses the default UPDATES dependency for the primary archive
        # unless overridden by ArchiveDependency.
        primary, partner = self.ubuntu.all_distro_archives
        self.publisher.getPubBinaries(
            archive=partner, component="partner",
            status=PackagePublishingStatus.PUBLISHED)
        self.publisher.getPubBinaries(
            archive=partner, component="partner",
            status=PackagePublishingStatus.PUBLISHED,
            pocket=PackagePublishingPocket.PROPOSED)
        build = self.makeBuild(
            archive=partner, component="partner",
            pocket=PackagePublishingPocket.PROPOSED)
        yield self.assertSourcesListAndKeys(
            [(partner, [
                 "hoary partner",
                 "hoary-proposed partner",
                 ]),
             (primary, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], [], build)

    @defer.inlineCallbacks
    def test_archive_external_dependencies(self):
        # An archive can be manually given additional external dependencies.
        # If present, "%(series)s" is replaced with the series name for the
        # build being dispatched.
        ppa = yield self.makeArchive(publish_binary=True)
        ppa.external_dependencies = (
            "deb http://user:pass@repository zoing everything\n"
            "deb http://user:pass@repository %(series)s public private\n"
            "deb http://user:pass@repository %(series)s-extra public")
        build = self.makeBuild(archive=ppa)
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             ("deb http://user:pass@repository", [
                 "zoing everything",
                 "hoary public private",
                 "hoary-extra public",
                 ]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_build_external_dependencies(self):
        # A single build can be manually given additional external
        # dependencies.
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)
        build.api_external_dependencies = (
            "deb http://user:pass@repository foo bar")
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             ("deb http://user:pass@repository", ["foo bar"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ], ["*****@*****.**"], build)

    @defer.inlineCallbacks
    def test_build_tools(self):
        # We can force an extra build tools line to be added to
        # sources.list, which is useful for specialised build types.
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)

        # Upload the tools archive key to the keyserver.
        tools_key_name = "*****@*****.**"
        tools_key_path = os.path.join(gpgkeysdir, "%s.sec" % tools_key_name)
        with open(tools_key_path) as tools_key_file:
            secret_key_export = tools_key_file.read()
        # Remove security proxy to avoid problems with running in a thread.
        gpghandler = removeSecurityProxy(getUtility(IGPGHandler))
        gpghandler.importSecretKey(secret_key_export)
        yield deferToThread(
            gpghandler.uploadPublicKey, self.fingerprints[tools_key_name])

        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             ("deb http://example.org", ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**", tools_key_name], build,
            tools_source="deb http://example.org %(series)s main",
            tools_fingerprint=self.fingerprints[tools_key_name])

    @defer.inlineCallbacks
    def test_build_tools_bad_formatting(self):
        # If tools_source is badly formatted, we log the error but don't
        # blow up.  (Note the missing "s" at the end of "%(series)".)
        ppa = yield self.makeArchive(publish_binary=True)
        build = self.makeBuild(archive=ppa)
        logger = BufferLogger()
        yield self.assertSourcesListAndKeys(
            [(ppa, ["hoary main"]),
             (self.ubuntu.main_archive, [
                 "hoary main restricted universe multiverse",
                 "hoary-security main restricted universe multiverse",
                 "hoary-updates main restricted universe multiverse",
                 ]),
             ],
            ["*****@*****.**"], build,
            tools_source="deb http://example.org %(series) main",
            logger=logger)
        self.assertThat(logger.getLogBuffer(), StartsWith(
            "ERROR Exception processing build tools sources.list entry:\n"))

    @defer.inlineCallbacks
    def test_overlay(self):
        # An overlay distroseries is a derived series that works like a PPA.
        # This means that the parent's details get added to the
        # sources.list passed to the builders.
        depdistro = self.factory.makeDistribution(
            "depdistro", publish_base_url="http://archive.launchpad.dev/")
        depseries = self.factory.makeDistroSeries(
            distribution=depdistro, name="depseries")
        self.factory.makeDistroArchSeries(
            distroseries=depseries, architecturetag="i386")
        self.publisher.addFakeChroots(depseries)
        for component_name in self.ubuntu_components:
            component = getUtility(IComponentSet)[component_name]
            self.factory.makeComponentSelection(depseries, component)
        self.factory.makeDistroSeriesParent(
            derived_series=self.hoary, parent_series=depseries,
            initialized=True, is_overlay=True,
            pocket=PackagePublishingPocket.SECURITY,
            component=getUtility(IComponentSet)["universe"])
        build = self.makeBuild()
        yield self.assertSourcesListAndKeys(
            [(self.ubuntu.main_archive, ["hoary main"]),
             (depdistro.main_archive, [
                 "depseries main universe",
                 "depseries-security main universe",
                 ]),
             ], [], build)