Example #1
0
    def test_copy_to_swift(self):
        # to_swift() with remove_func=None copies the files into Swift
        # while leaving the local disk copies in place, and a second run
        # does not re-upload anything.
        log = BufferLogger()

        # Confirm that files exist on disk where we expect to find them.
        for lfc in self.lfcs:
            path = swift.filesystem_path(lfc.id)
            self.assertTrue(os.path.exists(path))

        # Copy all the files into Swift.
        swift.to_swift(log, remove_func=None)

        # The files must still exist on disk: remove_func was None, so
        # the local copies are not deleted after upload.
        for lfc in self.lfcs:
            path = swift.filesystem_path(lfc.id)
            self.assertTrue(os.path.exists(path))

        # Confirm all the files are also in Swift.
        swift_client = self.swift_fixture.connect()
        for lfc, contents in zip(self.lfcs, self.contents):
            container, name = swift.swift_location(lfc.id)
            headers, obj = swift_client.get_object(container, name)
            self.assertEqual(contents, obj, 'Did not round trip')

        # Running again does nothing, in particular does not reupload
        # the files to Swift: put_object would raise AssertionError.
        con_patch = patch.object(swift.swiftclient.Connection,
                                 'put_object',
                                 side_effect=AssertionError('do not call'))
        with con_patch:
            swift.to_swift(log)  # remove_func == None
Example #2
0
 def test_process_one(self):
     """Updating a single GitHub bug watch syncs its remote status."""
     # Minimal GitHub issue payload; the test asserts below that an
     # "open" state converts to BugTaskStatus.NEW.
     remote_bug = {
         "id": 12345, "number": 1234, "state": "open", "labels": [],
         }
     _add_rate_limit_response("api.github.com")
     responses.add(
         "GET", "https://api.github.com/repos/user/repository/issues/1234",
         json=remote_bug)
     bug = self.factory.makeBug()
     bug_tracker = self.factory.makeBugTracker(
         base_url="https://github.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITHUB)
     bug.addWatch(
         bug_tracker, "1234", getUtility(ILaunchpadCelebrities).janitor)
     # The watch starts with no known remote status.
     self.assertEqual(
         [("1234", None)],
         [(watch.remotebug, watch.remotestatus)
          for watch in bug_tracker.watches])
     transaction.commit()
     logger = BufferLogger()
     bug_watch_updater = CheckwatchesMaster(transaction, logger=logger)
     github = get_external_bugtracker(bug_tracker)
     bug_watch_updater.updateBugWatches(github, bug_tracker.watches)
     self.assertEqual(
         "INFO Updating 1 watches for 1 bugs on "
         "https://api.github.com/repos/user/repository\n",
         logger.getLogBuffer())
     # After the update the remote "open" state maps to NEW.
     self.assertEqual(
         [("1234", BugTaskStatus.NEW)],
         [(watch.remotebug, github.convertRemoteStatus(watch.remotestatus))
          for watch in bug_tracker.watches])
Example #3
0
    def performImport(self, job_id):
        """Perform the import job with ID job_id.

        Return a Deferred that fires when the job is done.

        This implementation does it in-process.
        """
        logger = BufferLogger()
        monitor = CIWorkerMonitorForTesting(
            job_id, logger,
            xmlrpc.Proxy(config.codeimportdispatcher.codeimportscheduler_url),
            "anything")
        deferred = monitor.run()

        def save_protocol_object(result):
            """Save the process protocol object.

            We do this in an addBoth so that it's called after the process
            protocol is actually constructed but before we drop the last
            reference to the monitor object.
            """
            self._protocol = monitor._protocol
            return result

        return deferred.addBoth(save_protocol_object)
 def test_non_virtual_ppa_dispatch(self):
     # When the BinaryPackageBuildBehaviour dispatches PPA builds to
     # non-virtual builders, it stores the chroot on the server and
     # requests a binary package build, lying to say that the archive
     # purpose is "PRIMARY" because this ensures that the package mangling
     # tools will run over the built packages.
     # NOTE(review): this method uses `yield`, so it presumably runs
     # under inlineCallbacks via the test infrastructure — confirm.
     archive = self.factory.makeArchive(virtualized=False)
     slave = OkSlave()
     builder = self.factory.makeBuilder(virtualized=False)
     builder.setCleanStatus(BuilderCleanStatus.CLEAN)
     vitals = extract_vitals_from_db(builder)
     build = self.factory.makeBinaryPackageBuild(builder=builder,
                                                 archive=archive)
     lf = self.factory.makeLibraryFileAlias()
     # Commit so the new library file is visible outside this
     # transaction before the chroot is attached.
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     bq = build.queueBuild()
     bq.markAsBuilding(builder)
     interactor = BuilderInteractor()
     yield interactor._startBuild(
         bq, vitals, builder, slave,
         interactor.getBuildBehaviour(bq, builder, slave), BufferLogger())
     # The slave call log should show a PRIMARY-purpose dispatch with
     # the 'universe' component.
     yield self.assertExpectedInteraction(slave.call_log, builder, build,
                                          lf, archive,
                                          ArchivePurpose.PRIMARY,
                                          'universe')
 def test_private_source_dispatch(self):
     # Dispatching a build from a private archive passes the source
     # file as an extra upload fetched with archive credentials.
     archive = self.factory.makeArchive(private=True)
     slave = OkSlave()
     builder = self.factory.makeBuilder()
     builder.setCleanStatus(BuilderCleanStatus.CLEAN)
     vitals = extract_vitals_from_db(builder)
     build = self.factory.makeBinaryPackageBuild(builder=builder,
                                                 archive=archive)
     sprf = build.source_package_release.addFile(
         self.factory.makeLibraryFileAlias(db_only=True),
         filetype=SourcePackageFileType.ORIG_TARBALL)
     # The URL from which the builder is expected to fetch the private
     # source file.
     sprf_url = (
         'http://private-ppa.launchpad.dev/%s/%s/ubuntu/pool/%s/%s' %
         (archive.owner.name, archive.name,
          poolify(build.source_package_release.sourcepackagename.name,
                  'main'), sprf.libraryfile.filename))
     lf = self.factory.makeLibraryFileAlias()
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     bq = build.queueBuild()
     bq.markAsBuilding(builder)
     interactor = BuilderInteractor()
     yield interactor._startBuild(
         bq, vitals, builder, slave,
         interactor.getBuildBehaviour(bq, builder, slave), BufferLogger())
     # The private file must appear as an extra upload with the
     # 'buildd'/'sekrit' credentials.
     yield self.assertExpectedInteraction(
         slave.call_log,
         builder,
         build,
         lf,
         archive,
         ArchivePurpose.PPA,
         extra_uploads=[(sprf_url, 'buildd', 'sekrit')],
         filemap_names=[sprf.libraryfile.filename])
 def test_no_signature_rejected(self):
     # Parsing an unsigned changes file yields exactly one error, and
     # that error is an UploadError.
     unsigned_path = datadir('signatures/unsigned.changes')
     changes = ChangesFile(
         unsigned_path, InsecureUploadPolicy(), BufferLogger())
     parse_errors = list(changes.parseChanges())
     self.assertIsInstance(parse_errors[0], UploadError)
     self.assertEqual(1, len(parse_errors))
    def test_get_remote_products_and_components_encounters_301(self):
        # A permanent redirect (301) from query.cgi is followed, and the
        # redirected advanced-query page is parsed as usual.
        def redirect_callback(request):
            # Redirect query.cgi to newquery.cgi, preserving the rest of
            # the URL.
            new_url = request.url.replace("query.cgi", "newquery.cgi")
            return (301, {"Location": new_url}, "")

        lp_bugtracker = self.factory.makeBugTracker(
            title="fdo-example",
            name="fdo-example")
        transaction.commit()

        finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
        responses.add_callback(
            "GET", re.compile(r".*/query\.cgi"), callback=redirect_callback)
        responses.add(
            "GET", re.compile(r".*/newquery\.cgi\?format=advanced"),
            match_querystring=True, content_type="text/html",
            body=read_test_file("bugzilla-fdo-advanced-query.html"))
        finder.getRemoteProductsAndComponents(bugtracker_name="fdo-example")

        # The expected counts and names come from the canned test page.
        self.assertEqual(
            109, len(list(lp_bugtracker.getAllRemoteComponentGroups())))
        comp_group = lp_bugtracker.getRemoteComponentGroup(u'xorg')
        self.assertIsNot(None, comp_group)
        self.assertEqual(146, len(list(comp_group.components)))
        comp = comp_group.getComponent(u'Driver/Radeon')
        self.assertIsNot(None, comp)
        self.assertEqual(u'Driver/Radeon', comp.name)
Example #8
0
    def test_pruneBugWatchActivity_leaves_most_recent(self):
        # BugWatchActivityPruner.pruneBugWatchActivity() will delete all
        # but the n most recent BugWatchActivity items for a bug watch,
        # where n is determined by checkwatches.scheduler.MAX_SAMPLE_SIZE.
        for i in range(5):
            self.bug_watch.addActivity(message="Activity %s" % i)

        # Pruning runs under the 'garbo' database user.
        switch_dbuser('garbo')
        self.pruner = BugWatchActivityPruner(BufferLogger())
        self.addCleanup(self.pruner.cleanUp)

        # MAX_SAMPLE_SIZE + 1 created in setUp(), and 5 more created
        # just above.
        self.assertEqual(MAX_SAMPLE_SIZE + 6, self.bug_watch.activity.count())

        # Run the pruner in chunks until it reports completion.
        while not self.pruner.isDone():
            self.pruner(chunk_size=3)

        # Only MAX_SAMPLE_SIZE items should be left.
        self.assertEqual(MAX_SAMPLE_SIZE, self.bug_watch.activity.count())

        # They should be the most recent items - the ones created at the
        # start of this test.
        # NOTE(review): this assumes MAX_SAMPLE_SIZE <= 5, since only
        # "Activity 0".."Activity 4" were created above — confirm.
        messages = [activity.message for activity in self.bug_watch.activity]
        for i in range(MAX_SAMPLE_SIZE):
            self.assertIn("Activity %s" % i, messages)
    def test_scan_aborts_lost_slave_with_job(self):
        # SlaveScanner.scan uses BuilderInteractor.rescueIfLost to abort
        # slaves that don't have the expected job.
        slave = BuildingSlave('nontrivial')
        bq = FakeBuildQueue()

        # Instrument updateBuild so we can assert it is never called.
        interactor = BuilderInteractor()
        interactor.updateBuild = FakeMethod()

        scanner = SlaveScanner('mock',
                               MockBuilderFactory(MockBuilder(), bq),
                               BufferLogger(),
                               interactor_factory=FakeMethod(interactor),
                               slave_factory=FakeMethod(slave),
                               behavior_factory=FakeMethod(TrivialBehavior()))
        # XXX: checkCancellation needs more than a FakeBuildQueue.
        scanner.checkCancellation = FakeMethod(defer.succeed(False))

        # A single scan will call status(), notice that the slave is
        # lost, abort() the slave, then reset() the job without calling
        # updateBuild().
        yield scanner.scan()
        self.assertEqual(['status', 'abort'], slave.call_log)
        self.assertEqual(0, interactor.updateBuild.call_count)
        self.assertEqual(1, bq.reset.call_count)
Example #10
0
    def createRepository(self, path):
        """Create a CVS repository at `path`.

        :param path: The local path to create a repository in.
        :return: A `CVS.Repository`.
        """
        return CVS.init(path, BufferLogger())
Example #11
0
 def test_recoverWorkingDists_is_quiet_normally(self):
     # In the normal case recoverWorkingDists has nothing to report:
     # nothing is logged at INFO level or above.
     script = self.makeScript()
     script.setUp()
     # Swap in a BufferLogger so output can be inspected afterwards.
     script.logger = BufferLogger()
     script.logger.setLevel(logging.INFO)
     script.recoverWorkingDists()
     self.assertEqual('', script.logger.getLogBuffer())
Example #12
0
    def test_dispatchBuildToSlave(self):
        # Ensure dispatchBuildToSlave will make the right calls to the
        # slave: upload the chroot, then request the build, logging both
        # steps.  (Uses assertEqual; assertEquals is a deprecated alias.)
        job = self.makeJob()
        test_publisher = SoyuzTestPublisher()
        test_publisher.addFakeChroots(job.build.distroseries)
        slave = OkSlave()
        builder = MockBuilder("bob-de-bouwer")
        builder.processor = getUtility(IProcessorSet).getByName('386')
        job.setBuilder(builder, slave)
        logger = BufferLogger()
        d = defer.maybeDeferred(job.dispatchBuildToSlave, "someid", logger)

        def check_dispatch(ignored):
            # The log records the chroot upload and build initiation.
            self.assertThat(
                logger.getLogBuffer(),
                StartsWith(
                    dedent("""\
                  INFO Sending chroot file for recipe build to bob-de-bouwer
                  INFO Initiating build 1-someid on http://fake:0000
                  """)))
            # The slave saw exactly an ensurepresent then a build call.
            self.assertEqual(["ensurepresent", "build"],
                             [call[0] for call in slave.call_log])
            # Inspect the arguments of the build call.
            build_args = slave.call_log[1][1:]
            self.assertEqual(build_args[0], job.getBuildCookie())
            self.assertEqual(build_args[1], "sourcepackagerecipe")
            self.assertEqual(build_args[3], [])
            distroarchseries = job.build.distroseries.architectures[0]
            self.assertEqual(build_args[4],
                             job._extraBuildArgs(distroarchseries))

        return d.addCallback(check_dispatch)
Example #13
0
 def test_logStartBuild(self):
     # logStartBuild will properly report the package that's being built.
     # (Uses assertEqual; assertEquals is a deprecated alias.)
     job = self.makeJob()
     logger = BufferLogger()
     job.logStartBuild(logger)
     self.assertEqual(logger.getLogBuffer(),
                      "INFO startBuild(Mydistro, recept, joe)\n")
    def test_makeDailyBuilds_skips_builds_already_queued(self):
        # If the recipe already has an identical build pending,
        # makeDailyBuilds() won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(owner=owner,
                                                      name='funky-recipe',
                                                      build_daily=True,
                                                      is_stale=True)
        series = list(recipe.distroseries)[0]
        # Create a pending build just over 24 hours old that matches
        # what makeDailyBuilds() would request for this recipe.
        self.factory.makeSourcePackageRecipeBuild(
            recipe=recipe,
            archive=recipe.daily_build_archive,
            requester=recipe.owner,
            distroseries=series,
            pocket=PackagePublishingPocket.RELEASE,
            date_created=datetime.now(utc) - timedelta(hours=24, seconds=1))
        # Presumably creating the build reset is_stale; force it back so
        # makeDailyBuilds() considers the recipe — confirm.
        removeSecurityProxy(recipe).is_stale = True

        logger = BufferLogger()
        daily_builds = SourcePackageRecipeBuild.makeDailyBuilds(logger)
        # No new build is created, and the log explains why.
        self.assertEqual([], daily_builds)
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - build already pending for Warty (4.10)\n',
            logger.getLogBuffer())
    def test_handleUnpushedBranch_mails_branch_owner(self):
        # When exporting fails because the translations branch has never
        # been pushed, the exporter mails the branch owner.
        exporter = ExportTranslationsToBranch(test_args=[])
        exporter.logger = BufferLogger()
        productseries = self.factory.makeProductSeries()
        email = self.factory.getUniqueEmailAddress()
        branch_owner = self.factory.makePerson(email=email)
        productseries.translations_branch = self.factory.makeBranch(
            owner=branch_owner)
        # Simulate an unpushed branch: the export raises NotBranchError.
        exporter._exportToBranch = FakeMethod(failure=NotBranchError("Ow"))
        exporter._sendMail = FakeMethod()

        self.becomeDbUser('translationstobranch')

        exporter._exportToBranches([productseries])

        # Exactly one mail is sent, to the branch owner, from the
        # noreply address, with the expected subject.
        self.assertEqual(1, exporter._sendMail.call_count)
        (sender, recipients, subject,
         text), kwargs = (exporter._sendMail.calls[-1])
        self.assertIn(config.canonical.noreply_from_address, sender)
        self.assertIn(email, recipients)
        self.assertEqual("Launchpad: translations branch has not been set up.",
                         subject)

        # The body explains the problem and how to fix it.
        self.assertIn("problem with translations branch synchronization", text)
        self.assertIn(productseries.title, text)
        self.assertIn(productseries.translations_branch.bzr_identity, text)
        self.assertIn('bzr push lp://', text)
 def _getScanner(self, clock=None):
     # Helper: build a SlaveScanner for no particular builder, logging
     # to an in-memory buffer under the name 'slave-scanner'.
     buffer_logger = BufferLogger()
     scanner = SlaveScanner(
         None, BuilderFactory(), buffer_logger, clock=clock)
     scanner.logger.name = 'slave-scanner'
     return scanner
Example #17
0
    def getSoyuz(self, version=None, component=None, arch=None,
                 suite=None, distribution_name='ubuntu',
                 ppa=None, partner=False, ppa_name='ppa'):
        """Return a SoyuzScript instance.

        Allow tests to use a set of default options and pass an
        inactive logger to SoyuzScript.
        """
        test_args = ['-d', distribution_name, '-y']

        # Map each optional value to its command-line flag; a flag/value
        # pair is appended only when the value was supplied.  Order
        # matters and mirrors the script's conventional flag order.
        for flag, value in [
                ('-s', suite),
                ('-e', version),
                ('-a', arch),
                ('-c', component)]:
            if value is not None:
                test_args.extend([flag, value])

        if ppa is not None:
            test_args.extend(['-p', ppa, '--ppa-name', ppa_name])

        if partner:
            test_args.append('-j')

        soyuz = SoyuzScript(name='soyuz-script', test_args=test_args)
        # Store output messages, for future checks.
        soyuz.logger = BufferLogger()
        soyuz.setupLocation()
        return soyuz
Example #18
0
    def disabled_test_timeout_short(self):
        """When a job exceeds its lease, an exception is raised.

        Unfortunately, timeouts include the time it takes for the zope
        machinery to start up, so we run a job that will not time out first,
        followed by a job that is sure to time out.
        """
        logger = BufferLogger()
        logger.setLevel(logging.INFO)
        # StuckJob is actually a source of two jobs. The first is fast, the
        # second slow.
        runner = TwistedJobRunner.runFromSource(ShorterStuckJob,
                                                'branchscanner', logger)
        self.oops_capture.sync()
        oops = self.oopses[0]
        self.assertEqual(
            (1, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
        # The parentheses in the regex are escaped with "\\(": a bare
        # "\(" in a non-raw string is an invalid escape sequence and a
        # SyntaxWarning on modern Pythons.
        self.assertThat(
            logger.getLogBuffer(),
            MatchesRegex(
                dedent("""\
                INFO Running through Twisted.
                INFO Running <ShorterStuckJob.*?> \\(ID .*?\\).
                INFO Running <ShorterStuckJob.*?> \\(ID .*?\\).
                INFO Job resulted in OOPS: %s
                """) % oops['id']))
        self.assertEqual(('TimeoutError', 'Job ran too long.'),
                         (oops['type'], oops['value']))
Example #19
0
    def test_bug_497141(self):
        # Regression test for bug 497141. KeyErrors raised in
        # RemoteBugUpdater.updateRemoteBug() shouldn't cause
        # checkwatches to abort.
        (bug_tracker, bug_watches) = self.factory.makeBugTrackerWithWatches()

        # Use a test XML-RPC transport to ensure no connections happen.
        test_transport = TestBugzillaAPIXMLRPCTransport(bug_tracker.baseurl)
        remote_system = NonConnectingBugzillaAPI(
            bug_tracker.baseurl, xmlrpc_transport=test_transport)

        working_base = WorkingBase()
        working_base.init(LOGIN, transaction.manager, BufferLogger())

        for bug_watch in bug_watches:
            # we want to know that an oops was raised
            oops_count = len(self.oopses)
            # NoBugWatchesByRemoteBugUpdater is the deliberately broken
            # updater used to provoke the original KeyError.
            updater = NoBugWatchesByRemoteBugUpdater(
                working_base, remote_system, bug_watch.remotebug,
                [bug_watch.id], [], datetime.now())

            # Calling updateRemoteBug() shouldn't raise a KeyError,
            # even though with our broken updater
            # _getExternalBugTrackersAndWatches() will return an empty
            # dict.
            updater.updateRemoteBug()

            # A single oops will have been logged instead of the KeyError
            # being raised.
            self.assertEqual(oops_count + 1, len(self.oopses))
            last_oops = self.oopses[-1]
            self.assertStartsWith(
                last_oops['value'], 'Spurious remote bug ID')
Example #20
0
 def test_no_jobs(self):
     # A source that yields no jobs results in zero completed and zero
     # incomplete jobs.
     buffer_logger = BufferLogger()
     buffer_logger.setLevel(logging.INFO)
     runner = TwistedJobRunner.runFromSource(
         NoJobs, 'branchscanner', buffer_logger)
     observed = (len(runner.completed_jobs), len(runner.incomplete_jobs))
     self.assertEqual((0, 0), observed)
    def test_get_remote_products_and_components_encounters_500(self):
        # A server error (500) from query.cgi must not make
        # getRemoteProductsAndComponents blow up; see the
        # assertGetRemoteProductsAndComponentsDoesNotAssert helper.
        self.factory.makeBugTracker()
        transaction.commit()
        finder = BugzillaRemoteComponentFinder(logger=BufferLogger())

        responses.add("GET", re.compile(r".*/query\.cgi"), status=500)
        self.assertGetRemoteProductsAndComponentsDoesNotAssert(finder)
Example #22
0
 def test_process_one(self):
     """Updating a single GitLab bug watch syncs its remote status."""
     # GitLab returns a list of issues when filtering by iids; the test
     # asserts below that an "opened" state converts to NEW.
     remote_bug = [
         {"id": "12345", "iid": 1234, "state": "opened", "labels": []},
         ]
     responses.add(
         "GET",
         "https://gitlab.com/api/v4/projects/user%2Frepository/issues?"
         "iids[]=1234",
         json=remote_bug, match_querystring=True)
     bug = self.factory.makeBug()
     bug_tracker = self.factory.makeBugTracker(
         base_url="https://gitlab.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITLAB)
     bug.addWatch(
         bug_tracker, "1234", getUtility(ILaunchpadCelebrities).janitor)
     # The watch starts with no known remote status.
     self.assertEqual(
         [("1234", None)],
         [(watch.remotebug, watch.remotestatus)
          for watch in bug_tracker.watches])
     transaction.commit()
     logger = BufferLogger()
     bug_watch_updater = CheckwatchesMaster(transaction, logger=logger)
     gitlab = get_external_bugtracker(bug_tracker)
     bug_watch_updater.updateBugWatches(gitlab, bug_tracker.watches)
     self.assertEqual(
         "INFO Updating 1 watches for 1 bugs on "
         "https://gitlab.com/api/v4/projects/user%2Frepository\n",
         logger.getLogBuffer())
     # After the update the remote "opened" state maps to NEW.
     self.assertEqual(
         [("1234", BugTaskStatus.NEW)],
         [(watch.remotebug, gitlab.convertRemoteStatus(watch.remotestatus))
          for watch in bug_tracker.watches])
 def test_non_virtual_ppa_dispatch_with_primary_ancestry(self):
     # If there is a primary component override, it is honoured for
     # non-virtual PPA builds too.
     archive = self.factory.makeArchive(virtualized=False)
     slave = OkSlave()
     builder = self.factory.makeBuilder(virtualized=False)
     builder.setCleanStatus(BuilderCleanStatus.CLEAN)
     vitals = extract_vitals_from_db(builder)
     build = self.factory.makeBinaryPackageBuild(builder=builder,
                                                 archive=archive)
     # Publish the same source in the primary archive's 'main'
     # component to provide the ancestry override.
     self.factory.makeSourcePackagePublishingHistory(
         distroseries=build.distro_series,
         archive=archive.distribution.main_archive,
         sourcepackagename=build.source_package_release.sourcepackagename,
         component='main')
     lf = self.factory.makeLibraryFileAlias()
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     bq = build.queueBuild()
     bq.markAsBuilding(builder)
     interactor = BuilderInteractor()
     yield interactor._startBuild(
         bq, vitals, builder, slave,
         interactor.getBuildBehaviour(bq, builder, slave), BufferLogger())
     # The dispatch uses 'main', taken from the primary ancestry, not
     # the PPA default.
     yield self.assertExpectedInteraction(slave.call_log, builder, build,
                                          lf, archive,
                                          ArchivePurpose.PRIMARY, 'main')
Example #24
0
    def test_virtual_ppa_dispatch(self):
        # Make sure the builder slave gets reset before a build is
        # dispatched to it.
        archive = self.factory.makeArchive(virtualized=True)
        slave = OkSlave()
        builder = self.factory.makeBuilder(virtualized=True, vm_host="foohost")
        vitals = extract_vitals_from_db(builder)
        build = self.factory.makeBinaryPackageBuild(builder=builder,
                                                    archive=archive)
        lf = self.factory.makeLibraryFileAlias()
        transaction.commit()
        build.distro_arch_series.addOrUpdateChroot(lf)
        bq = build.queueBuild()
        bq.markAsBuilding(builder)
        interactor = BuilderInteractor()
        d = interactor._startBuild(
            bq, vitals, builder, slave,
            interactor.getBuildBehavior(bq, builder, slave), BufferLogger())

        def check_build(ignored):
            # We expect the first call to the slave to be a resume call,
            # followed by the rest of the usual calls we expect.
            expected_resume_call = slave.call_log.pop(0)
            self.assertEqual('resume', expected_resume_call)
            # NOTE(review): unlike sibling tests, assertExpectedInteraction
            # is passed the Deferred result as its first argument here —
            # confirm this matches the helper's signature in this file.
            self.assertExpectedInteraction(ignored, slave.call_log, builder,
                                           build, lf, archive,
                                           ArchivePurpose.PPA)

        return d.addCallback(check_build)
 def test_dont_dispatch_release_builds(self):
     # verifyBuildRequest refuses a RELEASE-pocket build for a distro
     # series whose status is already CURRENT.
     archive = self.factory.makeArchive(purpose=ArchivePurpose.PRIMARY)
     builder = self.factory.makeBuilder()
     distroseries = self.factory.makeDistroSeries(
         status=SeriesStatus.CURRENT, distribution=archive.distribution)
     distro_arch_series = self.factory.makeDistroArchSeries(
         distroseries=distroseries)
     build = self.factory.makeBinaryPackageBuild(
         builder=builder,
         archive=archive,
         distroarchseries=distro_arch_series,
         pocket=PackagePublishingPocket.RELEASE)
     lf = self.factory.makeLibraryFileAlias()
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     behaviour = IBuildFarmJobBehaviour(build)
     behaviour.setBuilder(builder, None)
     # The invalid pocket/series combination must raise rather than
     # dispatch, with a message naming the build and pocket.
     e = self.assertRaises(AssertionError, behaviour.verifyBuildRequest,
                           BufferLogger())
     expected_message = (
         "%s (%s) can not be built for pocket %s: invalid pocket due "
         "to the series status of %s." %
         (build.title, build.id, build.pocket.name,
          build.distro_series.name))
     self.assertEqual(expected_message, str(e))
 def test_exportToStaleBranch(self):
     # Attempting to export to a stale branch marks it for scanning.
     self.useBzrBranches(direct_database=False)
     exporter = ExportTranslationsToBranch(test_args=[])
     exporter.logger = BufferLogger()
     productseries = self.factory.makeProductSeries()
     db_branch, tree = self.create_branch_and_tree(
         product=productseries.product)
     removeSecurityProxy(productseries).translations_branch = db_branch
     # Make the DB's idea of the branch tip diverge from the real
     # branch, so the DB info is stale.
     db_branch.last_mirrored_id = 'stale-id'
     db_branch.last_scanned_id = db_branch.last_mirrored_id
     self.becomeDbUser('translationstobranch')
     self.assertFalse(db_branch.pending_writes)
     self.assertNotEqual(db_branch.last_mirrored_id,
                         tree.branch.last_revision())
     # The export code works on a Branch from the slave store.  It
     # shouldn't stop the scan request.
     slave_series = ISlaveStore(productseries).get(ProductSeries,
                                                   productseries.id)
     exporter._exportToBranch(slave_series)
     # The DB info now matches the real tip, a scan is pending, and a
     # warning was logged about the stale state.
     self.assertEqual(db_branch.last_mirrored_id,
                      tree.branch.last_revision())
     self.assertTrue(db_branch.pending_writes)
     matches = MatchesRegex(
         "(.|\n)*WARNING Skipped .* due to stale DB info, and scheduled a "
         "new scan.")
     self.assertThat(exporter.logger.getLogBuffer(), matches)
Example #27
0
 def test_process_many(self):
     """Updating many GitHub watches maps each remote state correctly."""
     # Even-numbered issues are "open" (asserted below to map to NEW);
     # odd-numbered are "closed" (mapping to FIXRELEASED).
     remote_bugs = [
         {"id": bug_id + 1, "number": bug_id,
          "state": "open" if (bug_id % 2) == 0 else "closed",
          "labels": []}
         for bug_id in range(1000, 1010)]
     _add_rate_limit_response("api.github.com")
     responses.add(
         "GET", "https://api.github.com/repos/user/repository/issues",
         json=remote_bugs)
     bug = self.factory.makeBug()
     bug_tracker = self.factory.makeBugTracker(
         base_url="https://github.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITHUB)
     # One watch per remote issue.
     for remote_bug in remote_bugs:
         bug.addWatch(
             bug_tracker, str(remote_bug["number"]),
             getUtility(ILaunchpadCelebrities).janitor)
     transaction.commit()
     logger = BufferLogger()
     bug_watch_updater = CheckwatchesMaster(transaction, logger=logger)
     github = get_external_bugtracker(bug_tracker)
     bug_watch_updater.updateBugWatches(github, bug_tracker.watches)
     self.assertEqual(
         "INFO Updating 10 watches for 10 bugs on "
         "https://api.github.com/repos/user/repository\n",
         logger.getLogBuffer())
     self.assertContentEqual(
         [(str(bug_id), BugTaskStatus.NEW)
          for bug_id in (1000, 1002, 1004, 1006, 1008)] +
         [(str(bug_id), BugTaskStatus.FIXRELEASED)
          for bug_id in (1001, 1003, 1005, 1007, 1009)],
         [(watch.remotebug, github.convertRemoteStatus(watch.remotestatus))
          for watch in bug_tracker.watches])
    def test_exportToBranches_handles_unpushed_branches(self):
        # bzrlib raises NotBranchError when accessing a nonexistent
        # branch.  The exporter deals with that by calling
        # _handleUnpushedBranch.
        exporter = ExportTranslationsToBranch(test_args=[])
        exporter.logger = BufferLogger()
        productseries = self.factory.makeProductSeries()
        productseries.translations_branch = self.factory.makeBranch()

        self.becomeDbUser('translationstobranch')

        # _handleUnpushedBranch is called if _exportToBranch raises
        # NotBranchError.
        exporter._handleUnpushedBranch = FakeMethod()
        exporter._exportToBranch = FakeMethod(failure=NotBranchError("No!"))
        exporter._exportToBranches([productseries])
        self.assertEqual(1, exporter._handleUnpushedBranch.call_count)

        # This does not happen if the export succeeds.
        exporter._handleUnpushedBranch = FakeMethod()
        exporter._exportToBranch = FakeMethod()
        exporter._exportToBranches([productseries])
        self.assertEqual(0, exporter._handleUnpushedBranch.call_count)

        # Nor does it happen if the export fails in some other way.
        # (IndexError stands in for any non-NotBranchError failure.)
        exporter._handleUnpushedBranch = FakeMethod()
        exporter._exportToBranch = FakeMethod(failure=IndexError("Ayyeee!"))
        exporter._exportToBranches([productseries])
        self.assertEqual(0, exporter._handleUnpushedBranch.call_count)
Example #29
0
 def test_logging(self):
     # MacaroonAuth logs the root macaroon's snap-ids caveat and the
     # OpenID identifier from the discharge macaroon at DEBUG level.
     # NOTE(review): str (not bytes) arguments to hashlib/b64encode
     # imply this is Python 2 code — confirm before porting.
     r = Request()
     root_key = hashlib.sha256("root").hexdigest()
     root_macaroon = Macaroon(key=root_key)
     discharge_key = hashlib.sha256("discharge").hexdigest()
     discharge_caveat_id = '{"secret": "thing"}'
     # Tie the discharge to the root via a third-party caveat.
     root_macaroon.add_third_party_caveat(
         "sso.example", discharge_key, discharge_caveat_id)
     root_macaroon.add_first_party_caveat(
         "store.example|package_id|{}".format(
             json.dumps(["example-package"])))
     unbound_discharge_macaroon = Macaroon(
         location="sso.example", key=discharge_key,
         identifier=discharge_caveat_id)
     unbound_discharge_macaroon.add_first_party_caveat(
         "sso.example|account|{}".format(
             base64.b64encode(json.dumps({
                 "openid": "1234567",
                 "email": "*****@*****.**",
                 }))))
     logger = BufferLogger()
     MacaroonAuth(
         root_macaroon.serialize(),
         unbound_discharge_macaroon.serialize(), logger=logger)(r)
     self.assertEqual(
         ['DEBUG root macaroon: snap-ids: ["example-package"]',
          'DEBUG discharge macaroon: OpenID identifier: 1234567'],
         logger.getLogBuffer().splitlines())
 def test_verifyBuildRequest_no_chroot(self):
     # verifyBuildRequest raises CannotBuild when the DAS has no chroot,
     # and the error message mentions the missing chroot.
     build_job = self.makeJob()
     build_job.setBuilder(MockBuilder(), OkSlave())
     raised = self.assertRaises(
         CannotBuild, build_job.verifyBuildRequest, BufferLogger())
     self.assertIn("Missing chroot", str(raised))