Example #1
 def test_extraBuildArgs_withBadConfigForBzrBuilderPPA(self):
     # Ensure _extraBuildArgs doesn't blow up with a badly formatted
     # bzr_builder_sources_list in the config.
     self.pushConfig(
         "builddmaster",
         bzr_builder_sources_list="deb http://foo %(series) main")
     # (note the missing 's' in %(series))
     job = self.makeJob()
     distroarchseries = job.build.distroseries.architectures[0]
     expected_archives = get_sources_list_for_building(job.build, distroarchseries, None)
     logger = BufferLogger()
     self.assertEqual(
         {
             "archive_private": False,
             "arch_tag": "i386",
             "author_email": u"*****@*****.**",
             "suite": u"mydistro",
             "author_name": u"Joe User",
             "archive_purpose": "PPA",
             "ogrecomponent": "universe",
             "recipe_text": "# bzr-builder format 0.3 deb-version {debupstream}-0~{revno}\n"
             "lp://dev/~joe/someapp/pkg\n",
             "archives": expected_archives,
             "distroseries_name": job.build.distroseries.name,
         },
         job._extraBuildArgs(distroarchseries, logger),
     )
     self.assertIn("Exception processing bzr_builder_sources_list:", logger.getLogBuffer())
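All of the snippets on this page share one pattern: construct a BufferLogger, hand it to the code under test, then assert on getLogBuffer() (or getLogBufferAndClear()), which renders each record as a "LEVEL message" line. A minimal sketch of that pattern; the import path is an assumption based on Launchpad's source layout:

# Minimal sketch, assuming lp.services.log.logger provides BufferLogger.
from lp.services.log.logger import BufferLogger

logger = BufferLogger()
logger.info("Hello")
logger.warn("World")  # warn() is the alias these tests use
# One "LEVEL message" line is buffered per record:
assert logger.getLogBuffer() == "INFO Hello\nWARNING World\n"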
Example #2
 def test_process_one(self):
     remote_bug = [
         {"id": "12345", "iid": 1234, "state": "opened", "labels": []},
         ]
     responses.add(
         "GET",
         "https://gitlab.com/api/v4/projects/user%2Frepository/issues?"
         "iids[]=1234",
         json=remote_bug, match_querystring=True)
     bug = self.factory.makeBug()
     bug_tracker = self.factory.makeBugTracker(
         base_url="https://gitlab.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITLAB)
     bug.addWatch(
         bug_tracker, "1234", getUtility(ILaunchpadCelebrities).janitor)
     self.assertEqual(
         [("1234", None)],
         [(watch.remotebug, watch.remotestatus)
          for watch in bug_tracker.watches])
     transaction.commit()
     logger = BufferLogger()
     bug_watch_updater = CheckwatchesMaster(transaction, logger=logger)
     gitlab = get_external_bugtracker(bug_tracker)
     bug_watch_updater.updateBugWatches(gitlab, bug_tracker.watches)
     self.assertEqual(
         "INFO Updating 1 watches for 1 bugs on "
         "https://gitlab.com/api/v4/projects/user%2Frepository\n",
         logger.getLogBuffer())
     self.assertEqual(
         [("1234", BugTaskStatus.NEW)],
         [(watch.remotebug, gitlab.convertRemoteStatus(watch.remotestatus))
          for watch in bug_tracker.watches])
Example #3
    def test_makeDailyBuilds_skips_builds_already_queued(self):
        # If the recipe already has an identical build pending,
        # makeDailyBuilds() won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(owner=owner,
                                                      name='funky-recipe',
                                                      build_daily=True,
                                                      is_stale=True)
        series = list(recipe.distroseries)[0]
        self.factory.makeSourcePackageRecipeBuild(
            recipe=recipe,
            archive=recipe.daily_build_archive,
            requester=recipe.owner,
            distroseries=series,
            pocket=PackagePublishingPocket.RELEASE,
            date_created=datetime.now(utc) - timedelta(hours=24, seconds=1))
        removeSecurityProxy(recipe).is_stale = True

        logger = BufferLogger()
        daily_builds = SourcePackageRecipeBuild.makeDailyBuilds(logger)
        self.assertEqual([], daily_builds)
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - build already pending for Warty (4.10)\n',
            logger.getLogBuffer())
Example #4
 def test_content(self):
     # The content property returns a `testtools.content.Content` object
     # representing the contents of the logger's buffer.
     logger = BufferLogger()
     logger.info("Hello")
     logger.warn("World")
     self.assertEqual("INFO Hello\nWARNING World\n", "".join(logger.content.iter_text()))
Example #5
 def test_no_jobs(self):
     logger = BufferLogger()
     logger.setLevel(logging.INFO)
     runner = TwistedJobRunner.runFromSource(
         NoJobs, 'branchscanner', logger)
     self.assertEqual(
         (0, 0), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
Example #6
    def disabled_test_timeout_short(self):
        """When a job exceeds its lease, an exception is raised.

        Unfortunately, timeouts include the time it takes for the zope
        machinery to start up, so we run a job that will not time out first,
        followed by a job that is sure to time out.
        """
        logger = BufferLogger()
        logger.setLevel(logging.INFO)
        # StuckJob is actually a source of two jobs. The first is fast, the
        # second slow.
        runner = TwistedJobRunner.runFromSource(
            ShorterStuckJob, 'branchscanner', logger)
        self.oops_capture.sync()
        oops = self.oopses[0]
        self.assertEqual(
            (1, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
        self.assertThat(
            logger.getLogBuffer(), MatchesRegex(
                dedent("""\
                INFO Running through Twisted.
                INFO Running <ShorterStuckJob.*?> \(ID .*?\).
                INFO Running <ShorterStuckJob.*?> \(ID .*?\).
                INFO Job resulted in OOPS: %s
                """) % oops['id']))
        self.assertEqual(('TimeoutError', 'Job ran too long.'),
                         (oops['type'], oops['value']))
Example #7
 def test_logging(self):
     r = Request()
     root_key = hashlib.sha256("root").hexdigest()
     root_macaroon = Macaroon(key=root_key)
     discharge_key = hashlib.sha256("discharge").hexdigest()
     discharge_caveat_id = '{"secret": "thing"}'
     root_macaroon.add_third_party_caveat(
         "sso.example", discharge_key, discharge_caveat_id)
     root_macaroon.add_first_party_caveat(
         "store.example|package_id|{}".format(
             json.dumps(["example-package"])))
     unbound_discharge_macaroon = Macaroon(
         location="sso.example", key=discharge_key,
         identifier=discharge_caveat_id)
     unbound_discharge_macaroon.add_first_party_caveat(
         "sso.example|account|{}".format(
             base64.b64encode(json.dumps({
                 "openid": "1234567",
                 "email": "*****@*****.**",
                 }))))
     logger = BufferLogger()
     MacaroonAuth(
         root_macaroon.serialize(),
         unbound_discharge_macaroon.serialize(), logger=logger)(r)
     self.assertEqual(
         ['DEBUG root macaroon: snap-ids: ["example-package"]',
          'DEBUG discharge macaroon: OpenID identifier: 1234567'],
         logger.getLogBuffer().splitlines())
Example #8
 def test_process_one(self):
     remote_bug = {
         "id": 12345, "number": 1234, "state": "open", "labels": [],
         }
     _add_rate_limit_response("api.github.com")
     responses.add(
         "GET", "https://api.github.com/repos/user/repository/issues/1234",
         json=remote_bug)
     bug = self.factory.makeBug()
     bug_tracker = self.factory.makeBugTracker(
         base_url="https://github.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITHUB)
     bug.addWatch(
         bug_tracker, "1234", getUtility(ILaunchpadCelebrities).janitor)
     self.assertEqual(
         [("1234", None)],
         [(watch.remotebug, watch.remotestatus)
          for watch in bug_tracker.watches])
     transaction.commit()
     logger = BufferLogger()
     bug_watch_updater = CheckwatchesMaster(transaction, logger=logger)
     github = get_external_bugtracker(bug_tracker)
     bug_watch_updater.updateBugWatches(github, bug_tracker.watches)
     self.assertEqual(
         "INFO Updating 1 watches for 1 bugs on "
         "https://api.github.com/repos/user/repository\n",
         logger.getLogBuffer())
     self.assertEqual(
         [("1234", BugTaskStatus.NEW)],
         [(watch.remotebug, github.convertRemoteStatus(watch.remotestatus))
          for watch in bug_tracker.watches])
Example #9
 def test_process_many(self):
     remote_bugs = [
         {"id": bug_id + 1, "number": bug_id,
          "state": "open" if (bug_id % 2) == 0 else "closed",
          "labels": []}
         for bug_id in range(1000, 1010)]
     _add_rate_limit_response("api.github.com")
     responses.add(
         "GET", "https://api.github.com/repos/user/repository/issues",
         json=remote_bugs)
     bug = self.factory.makeBug()
     bug_tracker = self.factory.makeBugTracker(
         base_url="https://github.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITHUB)
     for remote_bug in remote_bugs:
         bug.addWatch(
             bug_tracker, str(remote_bug["number"]),
             getUtility(ILaunchpadCelebrities).janitor)
     transaction.commit()
     logger = BufferLogger()
     bug_watch_updater = CheckwatchesMaster(transaction, logger=logger)
     github = get_external_bugtracker(bug_tracker)
     bug_watch_updater.updateBugWatches(github, bug_tracker.watches)
     self.assertEqual(
         "INFO Updating 10 watches for 10 bugs on "
         "https://api.github.com/repos/user/repository\n",
         logger.getLogBuffer())
     self.assertContentEqual(
         [(str(bug_id), BugTaskStatus.NEW)
          for bug_id in (1000, 1002, 1004, 1006, 1008)] +
         [(str(bug_id), BugTaskStatus.FIXRELEASED)
          for bug_id in (1001, 1003, 1005, 1007, 1009)],
         [(watch.remotebug, github.convertRemoteStatus(watch.remotestatus))
          for watch in bug_tracker.watches])
Example #11
 def test_logStartBuild(self):
     # logStartBuild will properly report the package that's being built
     job = self.makeJob()
     logger = BufferLogger()
     job.logStartBuild(logger)
      self.assertEqual(logger.getLogBuffer(),
                       "INFO startBuild(Mydistro, recept, joe)\n")
Example #12
0
 def test_no_jobs(self):
     logger = BufferLogger()
     logger.setLevel(logging.INFO)
     runner = TwistedJobRunner.runFromSource(NoJobs, 'branchscanner',
                                             logger)
     self.assertEqual(
         (0, 0), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
Example #13
 def test_lease_held_handled(self):
     """Jobs that raise LeaseHeld are handled correctly."""
     logger = BufferLogger()
     logger.setLevel(logging.DEBUG)
     runner = TwistedJobRunner.runFromSource(
         LeaseHeldJob, 'branchscanner', logger)
     self.assertIn('Could not acquire lease', logger.getLogBuffer())
     self.assertEqual(
         (0, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
Example #14
 def test_verifyBuildRequest_valid(self):
     # verifyBuildRequest won't raise any exceptions when called with a
     # valid builder set.
     job = self.makeJob()
     builder = MockBuilder("bob-de-bouwer")
     job.setBuilder(builder, OkSlave())
     logger = BufferLogger()
     job.verifyBuildRequest(logger)
      self.assertEqual("", logger.getLogBuffer())
Example #17
 def test_original_to_in_body(self):
     header_to = "*****@*****.**"
     original_to = "*****@*****.**"
     alias = "librarian-somewhere"
     body = "%s: %s\n\nsome body stuff" % (ORIGINAL_TO_HEADER, original_to)
     log = BufferLogger()
     mail = self.factory.makeSignedMessage(body=body, to_address=header_to)
     addresses = extract_addresses(mail, alias, log)
     self.assertThat(addresses, Equals([header_to]))
     self.assertThat(log.getLogBuffer(), Equals("INFO Suspected spam: librarian-somewhere\n"))
Example #18
 def test_original_to_missing(self):
     header_to = '*****@*****.**'
     alias = 'librarian-somewhere'
     log = BufferLogger()
     mail = self.factory.makeSignedMessage(to_address=header_to)
     addresses = extract_addresses(mail, alias, log)
     self.assertThat(addresses, Equals([header_to]))
     self.assertThat(
         log.getLogBuffer(),
         Equals('WARNING No X-Launchpad-Original-To header was present '
                'in email: librarian-somewhere\n'))
Example #21
 def test_verifyBuildRequest_valid(self):
     # verifyBuildRequest doesn't raise any exceptions when called with a
     # valid builder set.
     job = self.makeJob()
     lfa = self.factory.makeLibraryFileAlias()
     transaction.commit()
     job.build.distro_arch_series.addOrUpdateChroot(lfa)
     builder = MockBuilder()
     job.setBuilder(builder, OkSlave())
     logger = BufferLogger()
     job.verifyBuildRequest(logger)
     self.assertEqual("", logger.getLogBuffer())
Example #22
 def test_makeDailyBuilds_logs_builds(self):
     # If a logger is passed into the makeDailyBuilds method, each recipe
     # that a build is requested for gets logged.
     owner = self.factory.makePerson(name='eric')
     self.factory.makeSourcePackageRecipe(
         owner=owner, name=u'funky-recipe', build_daily=True)
     logger = BufferLogger()
     SourcePackageRecipeBuild.makeDailyBuilds(logger)
     self.assertEqual(
         'DEBUG Recipe eric/funky-recipe is stale\n'
         'DEBUG  - build requested for Warty (4.10)\n',
         logger.getLogBuffer())
Example #23
 def disable_test_memory_hog_job(self):
     """A job with a memory limit will trigger MemoryError on excess."""
     # XXX: frankban 2012-03-29 bug=963455: This test fails intermittently,
     # especially in parallel tests.
     logger = BufferLogger()
     logger.setLevel(logging.INFO)
     runner = TwistedJobRunner.runFromSource(
         MemoryHogJob, 'branchscanner', logger)
     self.assertEqual(
         (0, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
     self.assertIn('Job resulted in OOPS', logger.getLogBuffer())
     self.oops_capture.sync()
     self.assertEqual('MemoryError', self.oopses[0]['type'])
Example #24
 def test_makeDailyBuilds_with_disallowed_series(self):
     # If a recipe is set to build into a disallowed series,
     # makeDailyBuilds won't OOPS.
     recipe = self.factory.makeSourcePackageRecipe(build_daily=True,
                                                   is_stale=True)
     self.factory.makeArchive(owner=recipe.owner)
     logger = BufferLogger()
     distroseries = list(recipe.distroseries)[0]
     removeSecurityProxy(distroseries).status = SeriesStatus.OBSOLETE
     SourcePackageRecipeBuild.makeDailyBuilds(logger)
     self.assertEqual([], self.oopses)
     self.assertIn("DEBUG  - cannot build against Warty (4.10).",
                   logger.getLogBuffer())
Example #25
 def test_uploadPublicKey_suppress_in_config(self):
     self.useFixture(KeyServerTac())
     logger = BufferLogger()
     self.pushConfig("gpghandler", upload_keys=False)
     self.populateKeyring()
     fingerprint = list(self.gpg_handler.localKeys())[0].fingerprint
     self.gpg_handler.uploadPublicKey(fingerprint, logger=logger)
     self.assertEqual(
         "INFO Not submitting key to keyserver "
         "(disabled in configuration).\n", logger.getLogBuffer())
     self.assertRaises(GPGKeyDoesNotExistOnServer,
                       removeSecurityProxy(self.gpg_handler)._getPubKey,
                       fingerprint)
Example #26
 def test_verifyBuildRequest_archive_private_owners_match(self):
     archive = self.factory.makeArchive(private=True)
     job = self.makeJob(archive=archive,
                        registrant=archive.owner,
                        owner=archive.owner)
     lfa = self.factory.makeLibraryFileAlias()
     transaction.commit()
     job.build.distro_arch_series.addOrUpdateChroot(lfa)
     builder = MockBuilder()
     job.setBuilder(builder, OkSlave())
     logger = BufferLogger()
     job.verifyBuildRequest(logger)
     self.assertEqual("", logger.getLogBuffer())
Example #28
    def test_run(self):
        # When run it merges from_person into to_person.
        self.transfer_email()
        logger = BufferLogger()
        with log.use(logger):
            self.job.run()

        self.assertEqual(self.to_person, self.from_person.merged)
        self.assertEqual(
            ["DEBUG PersonMergeJob is about to merge ~void into ~gestalt",
             "DEBUG PersonMergeJob has merged ~void into ~gestalt"],
            logger.getLogBuffer().splitlines())
        self.assertEqual(self.to_person, self.from_person.merged)
Example #32
 def test_run(self):
     # The email is sent to all the recipients.
     job = make_question_job(
         self.factory, QuestionRecipientSet.ASKER_SUBSCRIBER)
     logger = BufferLogger()
     with log.use(logger):
         job.run()
     self.assertEqual(
         ["DEBUG QuestionEmailJob will send email for question %s." %
          job.question.id,
          "DEBUG QuestionEmailJob has sent email for question %s." %
          job.question.id],
         logger.getLogBuffer().splitlines())
     transaction.commit()
     self.assertEqual(2, len(stub.test_emails))
Example #33
 def test_reject_changes_file_no_email(self):
     # If we are rejecting a mail, and the person to notify has no
     # preferred email, we should return early.
     archive = self.factory.makeArchive()
     distroseries = self.factory.makeDistroSeries()
     uploader = self.factory.makePerson()
     get_property_cache(uploader).preferredemail = None
     email = '%s <*****@*****.**>' % uploader.displayname
     changes = {'Changed-By': email, 'Maintainer': email}
     logger = BufferLogger()
     reject_changes_file(
         uploader, '/tmp/changes', changes, archive, distroseries, '',
         logger=logger)
     self.assertIn(
         'No recipients have a preferred email.', logger.getLogBuffer())
Example #34
    def test_bug_497141(self):
        # Regression test for bug 497141. KeyErrors raised in
        # RemoteBugUpdater.updateRemoteBug() shouldn't cause
        # checkwatches to abort.
        (bug_tracker, bug_watches) = self.factory.makeBugTrackerWithWatches()

        # Use a test XML-RPC transport to ensure no connections happen.
        test_transport = TestBugzillaAPIXMLRPCTransport(bug_tracker.baseurl)
        remote_system = NonConnectingBugzillaAPI(
            bug_tracker.baseurl, xmlrpc_transport=test_transport)

        working_base = WorkingBase()
        working_base.init(LOGIN, transaction.manager, BufferLogger())

        for bug_watch in bug_watches:
            # we want to know that an oops was raised
            oops_count = len(self.oopses)
            updater = NoBugWatchesByRemoteBugUpdater(
                working_base, remote_system, bug_watch.remotebug,
                [bug_watch.id], [], datetime.now())

            # Calling updateRemoteBug() shouldn't raise a KeyError,
            # even though with our broken updater
            # _getExternalBugTrackersAndWatches() will return an empty
            # dict.
            updater.updateRemoteBug()

            # A single oops will have been logged instead of the KeyError
            # being raised.
            self.assertEqual(oops_count + 1, len(self.oopses))
            last_oops = self.oopses[-1]
            self.assertStartsWith(
                last_oops['value'], 'Spurious remote bug ID')
Example #35
 def test_no_signature_rejected(self):
     # An unsigned changes file is rejected.
     path = datadir('signatures/unsigned.changes')
     changesfile = ChangesFile(path, InsecureUploadPolicy(), BufferLogger())
     errors = list(changesfile.parseChanges())
     self.assertIsInstance(errors[0], UploadError)
     self.assertEqual(1, len(errors))
Example #37
    def test_get_remote_products_and_components_encounters_500(self):
        self.factory.makeBugTracker()
        transaction.commit()
        finder = BugzillaRemoteComponentFinder(logger=BufferLogger())

        responses.add("GET", re.compile(r".*/query\.cgi"), status=500)
        self.assertGetRemoteProductsAndComponentsDoesNotAssert(finder)
Example #38
 def test_dont_dispatch_release_builds(self):
     archive = self.factory.makeArchive(purpose=ArchivePurpose.PRIMARY)
     builder = self.factory.makeBuilder()
     distroseries = self.factory.makeDistroSeries(
         status=SeriesStatus.CURRENT, distribution=archive.distribution)
     distro_arch_series = self.factory.makeDistroArchSeries(
         distroseries=distroseries)
     build = self.factory.makeBinaryPackageBuild(
         builder=builder,
         archive=archive,
         distroarchseries=distro_arch_series,
         pocket=PackagePublishingPocket.RELEASE)
     lf = self.factory.makeLibraryFileAlias()
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     behaviour = IBuildFarmJobBehaviour(build)
     behaviour.setBuilder(builder, None)
     e = self.assertRaises(AssertionError, behaviour.verifyBuildRequest,
                           BufferLogger())
     expected_message = (
         "%s (%s) can not be built for pocket %s: invalid pocket due "
         "to the series status of %s." %
         (build.title, build.id, build.pocket.name,
          build.distro_series.name))
     self.assertEqual(expected_message, str(e))
Example #39
 def test_private_source_dispatch(self):
     archive = self.factory.makeArchive(private=True)
     slave = OkSlave()
     builder = self.factory.makeBuilder()
     builder.setCleanStatus(BuilderCleanStatus.CLEAN)
     vitals = extract_vitals_from_db(builder)
     build = self.factory.makeBinaryPackageBuild(builder=builder,
                                                 archive=archive)
     sprf = build.source_package_release.addFile(
         self.factory.makeLibraryFileAlias(db_only=True),
         filetype=SourcePackageFileType.ORIG_TARBALL)
     sprf_url = (
         'http://private-ppa.launchpad.dev/%s/%s/ubuntu/pool/%s/%s' %
         (archive.owner.name, archive.name,
          poolify(build.source_package_release.sourcepackagename.name,
                  'main'), sprf.libraryfile.filename))
     lf = self.factory.makeLibraryFileAlias()
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     bq = build.queueBuild()
     bq.markAsBuilding(builder)
     interactor = BuilderInteractor()
     yield interactor._startBuild(
         bq, vitals, builder, slave,
         interactor.getBuildBehaviour(bq, builder, slave), BufferLogger())
     yield self.assertExpectedInteraction(
         slave.call_log,
         builder,
         build,
         lf,
         archive,
         ArchivePurpose.PPA,
         extra_uploads=[(sprf_url, 'buildd', 'sekrit')],
         filemap_names=[sprf.libraryfile.filename])
Example #40
    def test_get_remote_products_and_components_encounters_301(self):
        def redirect_callback(request):
            new_url = request.url.replace("query.cgi", "newquery.cgi")
            return (301, {"Location": new_url}, "")

        lp_bugtracker = self.factory.makeBugTracker(
            title="fdo-example",
            name="fdo-example")
        transaction.commit()

        finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
        responses.add_callback(
            "GET", re.compile(r".*/query\.cgi"), callback=redirect_callback)
        responses.add(
            "GET", re.compile(r".*/newquery\.cgi\?format=advanced"),
            match_querystring=True, content_type="text/html",
            body=read_test_file("bugzilla-fdo-advanced-query.html"))
        finder.getRemoteProductsAndComponents(bugtracker_name="fdo-example")

        self.assertEqual(
            109, len(list(lp_bugtracker.getAllRemoteComponentGroups())))
        comp_group = lp_bugtracker.getRemoteComponentGroup(u'xorg')
        self.assertIsNot(None, comp_group)
        self.assertEqual(146, len(list(comp_group.components)))
        comp = comp_group.getComponent(u'Driver/Radeon')
        self.assertIsNot(None, comp)
        self.assertEqual(u'Driver/Radeon', comp.name)
Example #41
    def performImport(self, job_id):
        """Perform the import job with ID job_id.

        Return a Deferred that fires when the job is done.

        This implementation does it in-process.
        """
        logger = BufferLogger()
        monitor = CIWorkerMonitorForTesting(
            job_id, logger,
            xmlrpc.Proxy(config.codeimportdispatcher.codeimportscheduler_url),
            "anything")
        deferred = monitor.run()

        def save_protocol_object(result):
            """Save the process protocol object.

            We do this in an addBoth so that it's called after the process
            protocol is actually constructed but before we drop the last
            reference to the monitor object.
            """
            self._protocol = monitor._protocol
            return result

        return deferred.addBoth(save_protocol_object)
Example #42
 def test_non_virtual_ppa_dispatch(self):
     # When the BinaryPackageBuildBehaviour dispatches PPA builds to
     # non-virtual builders, it stores the chroot on the server and
     # requests a binary package build, lying to say that the archive
     # purpose is "PRIMARY" because this ensures that the package mangling
     # tools will run over the built packages.
     archive = self.factory.makeArchive(virtualized=False)
     slave = OkSlave()
     builder = self.factory.makeBuilder(virtualized=False)
     builder.setCleanStatus(BuilderCleanStatus.CLEAN)
     vitals = extract_vitals_from_db(builder)
     build = self.factory.makeBinaryPackageBuild(builder=builder,
                                                 archive=archive)
     lf = self.factory.makeLibraryFileAlias()
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     bq = build.queueBuild()
     bq.markAsBuilding(builder)
     interactor = BuilderInteractor()
     yield interactor._startBuild(
         bq, vitals, builder, slave,
         interactor.getBuildBehaviour(bq, builder, slave), BufferLogger())
     yield self.assertExpectedInteraction(slave.call_log, builder, build,
                                          lf, archive,
                                          ArchivePurpose.PRIMARY,
                                          'universe')
Example #43
 def test_non_virtual_ppa_dispatch_with_primary_ancestry(self):
     # If there is a primary component override, it is honoured for
     # non-virtual PPA builds too.
     archive = self.factory.makeArchive(virtualized=False)
     slave = OkSlave()
     builder = self.factory.makeBuilder(virtualized=False)
     builder.setCleanStatus(BuilderCleanStatus.CLEAN)
     vitals = extract_vitals_from_db(builder)
     build = self.factory.makeBinaryPackageBuild(builder=builder,
                                                 archive=archive)
     self.factory.makeSourcePackagePublishingHistory(
         distroseries=build.distro_series,
         archive=archive.distribution.main_archive,
         sourcepackagename=build.source_package_release.sourcepackagename,
         component='main')
     lf = self.factory.makeLibraryFileAlias()
     transaction.commit()
     build.distro_arch_series.addOrUpdateChroot(lf)
     bq = build.queueBuild()
     bq.markAsBuilding(builder)
     interactor = BuilderInteractor()
     yield interactor._startBuild(
         bq, vitals, builder, slave,
         interactor.getBuildBehaviour(bq, builder, slave), BufferLogger())
     yield self.assertExpectedInteraction(slave.call_log, builder, build,
                                          lf, archive,
                                          ArchivePurpose.PRIMARY, 'main')
Example #44
 def test_rebuild_bugsummary_for_target(self):
     # rebuild_bugsummary_for_target rebuilds BugSummary for a
     # specific target from BugTaskFlat. Since it ignores the
     # journal, it also removes any relevant journal entries.
     product = self.factory.makeProduct()
     self.factory.makeBug(target=product)
     self.assertEqual(0, get_bugsummary_rows(product).count())
     self.assertEqual(1, get_bugsummaryjournal_rows(product).count())
     log = BufferLogger()
     with dbuser('bugsummaryrebuild'):
         rebuild_bugsummary_for_target(product, log)
     self.assertEqual(1, get_bugsummary_rows(product).count())
     self.assertEqual(0, get_bugsummaryjournal_rows(product).count())
     self.assertThat(
         log.getLogBufferAndClear(),
         MatchesRegex(
             'DEBUG Rebuilding %s\nDEBUG Added {.*: 1L}' % product.name))
Example #45
    def test_makeDailyBuilds_skips_archive_with_no_permission(self):
        # If the recipe's daily build archive cannot be uploaded to due to
        # insufficient permissions, makeDailyBuilds() won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(
            owner=owner, name=u'funky-recipe', build_daily=True,
            is_stale=True)
        archive = self.factory.makeArchive(name="ppa")
        removeSecurityProxy(recipe).daily_build_archive = archive

        logger = BufferLogger()
        daily_builds = SourcePackageRecipeBuild.makeDailyBuilds(logger)
        self.assertEqual([], daily_builds)
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - daily build failed for Warty (4.10): '
            "CannotUploadToPPA('Signer has no upload rights "
            "to this PPA.',)\n",
            logger.getLogBuffer())
Example #46
    def test_makeDailyBuilds_skips_disabled_archive(self):
        # If the recipe's daily build archive is disabled, makeDailyBuilds()
        # won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(
            owner=owner, name=u'funky-recipe', build_daily=True,
            is_stale=True)
        archive = self.factory.makeArchive(owner=recipe.owner, name="ppa")
        removeSecurityProxy(recipe).daily_build_archive = archive
        removeSecurityProxy(archive).disable()

        logger = BufferLogger()
        daily_builds = SourcePackageRecipeBuild.makeDailyBuilds(logger)
        self.assertEqual([], daily_builds)
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - daily build failed for Warty (4.10): ' +
            "ArchiveDisabled(u'PPA for Eric is disabled.',)\n",
            logger.getLogBuffer())
Example #47
class TestLibrarianLogFileParsing(TestCase):
    """Test the parsing of librarian log files."""

    layer = ZopelessLayer

    def setUp(self):
        TestCase.setUp(self)
        self.logger = BufferLogger()

    def test_request_to_lfa_is_parsed(self):
        fd = StringIO(
            '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "GET '
            '/15018215/ul_logo_64x64.png HTTP/1.1" 200 2261 '
            '"https://launchpad.net/~ubuntulite/+archive" "Mozilla"')
        downloads, parsed_bytes, ignored = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_library_file_id)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 1 download stats.')

        self.assertEqual(downloads,
            {'15018215': {datetime(2008, 6, 13): {'US': 1}}})

        self.assertEqual(parsed_bytes, fd.tell())

    def test_request_to_non_lfa_is_ignored(self):
        # A request to a path which doesn't map to a LibraryFileAlias (e.g.
        # '/') is ignored.
        fd = StringIO(
            '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "GET / HTTP/1.1" '
            '200 2261 "https://launchpad.net/~ubuntulite/+archive" "Mozilla"')
        downloads, parsed_bytes, ignored = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_library_file_id)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 0 download stats.')
        self.assertEqual(downloads, {})
        self.assertEqual(parsed_bytes, fd.tell())
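A hedged sketch of calling parse_file() directly, reusing the sample log line from the test above; parse_file, get_library_file_id, StringIO and BufferLogger are assumed to be importable as in that test module:

# Sketch: parse one librarian access-log line outside a test.
logger = BufferLogger()
fd = StringIO(
    '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "GET '
    '/15018215/ul_logo_64x64.png HTTP/1.1" 200 2261 '
    '"https://launchpad.net/~ubuntulite/+archive" "Mozilla"')
downloads, parsed_bytes, ignored = parse_file(
    fd, start_position=0, logger=logger,
    get_download_key=get_library_file_id)
# downloads maps library file ID to per-day, per-country counts, here
# {'15018215': {datetime(2008, 6, 13): {'US': 1}}}; the INFO summary
# line ends up in logger.getLogBuffer().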
Example #50
    def process(self):
        """Process an upload that is the result of a build.

        The name of the leaf is the build id of the build.
        Build uploads always contain a single package per leaf.
        """
        logger = BufferLogger()
        if self.build.status != BuildStatus.UPLOADING:
            self.processor.log.warn(
                "Expected build status to be 'UPLOADING', was %s. Ignoring." %
                self.build.status.name)
            return
        try:
            # The recipe may have been deleted, so we need to flag that
            # here and handle it below. We check so that we don't go to
            # the expense of doing an unnecessary upload. We don't just
            # exit here because we want the standard cleanup to occur.
            recipe_deleted = (ISourcePackageRecipeBuild.providedBy(self.build)
                and self.build.recipe is None)
            if recipe_deleted:
                result = UploadStatusEnum.FAILED
            else:
                self.processor.log.debug("Build %s found" % self.build.id)
                [changes_file] = self.locateChangesFiles()
                logger.debug("Considering changefile %s" % changes_file)
                result = self.processChangesFile(changes_file, logger)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            info = sys.exc_info()
            message = (
                'Exception while processing upload %s' % self.upload_path)
            properties = [('error-explanation', message)]
            request = ScriptRequest(properties)
            error_utility = ErrorReportingUtility()
            error_utility.raising(info, request)
            logger.error('%s (%s)' % (message, request.oopsid))
            result = UploadStatusEnum.FAILED
        if (result != UploadStatusEnum.ACCEPTED or
            not self.build.verifySuccessfulUpload()):
            self.build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        if self.build.status != BuildStatus.FULLYBUILT:
            if recipe_deleted:
                # For a deleted recipe, no need to notify that uploading has
                # failed - we just log a warning.
                self.processor.log.warn(
                    "Recipe for build %s was deleted. Ignoring." %
                    self.upload)
            else:
                self.build.storeUploadLog(logger.getLogBuffer())
                self.build.notify(extra_info="Uploading build %s failed." %
                                  self.upload)
        else:
            self.build.notify()
        self.processor.ztm.commit()
        self.moveProcessedUpload(result, logger)
Example #51
class TestLoggingUIFactory(TestCase):
    """Tests for `LoggingUIFactory`."""

    def setUp(self):
        TestCase.setUp(self)
        self.fake_time = FakeTime(12345)
        self.logger = BufferLogger()

    def makeLoggingUIFactory(self):
        """Make a `LoggingUIFactory` with fake time and contained output."""
        return LoggingUIFactory(
            time_source=self.fake_time.now, logger=self.logger)

    def test_first_progress_updates(self):
        # The first call to progress generates some output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.assertEqual('INFO hi\n', self.logger.getLogBuffer())

    def test_second_rapid_progress_doesnt_update(self):
        # The second of two progress calls that are less than the factory's
        # interval apart does not generate output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.fake_time.advance(factory.interval / 2)
        bar.update("there")
        self.assertEqual('INFO hi\n', self.logger.getLogBuffer())

    def test_second_slow_progress_updates(self):
        # The second of two progress calls that are more than the factory's
        # interval apart does generate output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.fake_time.advance(factory.interval * 2)
        bar.update("there")
        self.assertEqual(
            'INFO hi\n'
            'INFO there\n',
            self.logger.getLogBuffer())

    def test_first_progress_on_new_bar_updates(self):
        # The first progress on a new progress task always generates output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.fake_time.advance(factory.interval / 2)
        bar2 = factory.nested_progress_bar()
        bar2.update("there")
        self.assertEqual(
            'INFO hi\nINFO hi:there\n', self.logger.getLogBuffer())

    def test_update_with_count_formats_nicely(self):
        # When more details are passed to update, they are formatted nicely.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi", 1, 8)
        self.assertEqual('INFO hi 1/8\n', self.logger.getLogBuffer())

    def test_report_transport_activity_reports_bytes_since_last_update(self):
        # If there is no call to _progress_updated for 'interval' seconds, the
        # next call to report_transport_activity will report however many
        # bytes have been transferred since the update.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi", 1, 10)
        self.fake_time.advance(factory.interval / 2)
        # The bytes in this call will not be reported:
        factory.report_transport_activity(None, 1, 'read')
        self.fake_time.advance(factory.interval)
        bar.update("hi", 2, 10)
        self.fake_time.advance(factory.interval / 2)
        factory.report_transport_activity(None, 10, 'read')
        self.fake_time.advance(factory.interval)
        factory.report_transport_activity(None, 100, 'read')
        self.fake_time.advance(factory.interval * 2)
        # This call will cause output that does not include the transport
        # activity info.
        bar.update("hi", 3, 10)
        self.assertEqual(
            'INFO hi 1/10\n'
            'INFO hi 2/10\n'
            'INFO 110 bytes transferred | hi 2/10\n'
            'INFO hi 3/10\n',
            self.logger.getLogBuffer())

    def test_note(self):
        factory = self.makeLoggingUIFactory()
        factory.note("Banja Luka")
        self.assertEqual('INFO Banja Luka\n', self.logger.getLogBuffer())

    def test_show_error(self):
        factory = self.makeLoggingUIFactory()
        factory.show_error("Exploding Peaches")
        self.assertEqual(
            "ERROR Exploding Peaches\n", self.logger.getLogBuffer())

    def test_confirm_action(self):
        factory = self.makeLoggingUIFactory()
        self.assertTrue(factory.confirm_action(
            "How are you %(when)s?", "wellness", {"when": "today"}))

    def test_show_message(self):
        factory = self.makeLoggingUIFactory()
        factory.show_message("Peaches")
        self.assertEqual("INFO Peaches\n", self.logger.getLogBuffer())

    def test_get_username(self):
        factory = self.makeLoggingUIFactory()
        self.assertIs(
            None, factory.get_username("Who are you %(when)s?", when="today"))

    def test_get_password(self):
        factory = self.makeLoggingUIFactory()
        self.assertIs(
            None,
            factory.get_password("How is your %(drink)s", drink="coffee"))

    def test_show_warning(self):
        factory = self.makeLoggingUIFactory()
        factory.show_warning("Peaches")
        self.assertEqual("WARNING Peaches\n", self.logger.getLogBuffer())

    def test_show_warning_unicode(self):
        factory = self.makeLoggingUIFactory()
        factory.show_warning(u"Peach\xeas")
        self.assertEqual(
            "WARNING Peach\xc3\xaas\n", self.logger.getLogBuffer())

    def test_user_warning(self):
        factory = self.makeLoggingUIFactory()
        factory.show_user_warning('cross_format_fetch',
            from_format="athing", to_format="anotherthing")
        message = factory._user_warning_templates['cross_format_fetch'] % {
            "from_format": "athing",
            "to_format": "anotherthing",
            }
        self.assertEqual("WARNING %s\n" % message, self.logger.getLogBuffer())

    def test_clear_term(self):
        factory = self.makeLoggingUIFactory()
        factory.clear_term()
        self.assertEqual("", self.logger.getLogBuffer())
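Read together, these tests pin down LoggingUIFactory's throttling: the first update on a progress bar always logs, and later updates log only once factory.interval has elapsed. A sketch of that behaviour using the same fixtures (FakeTime, BufferLogger) as the tests above:

# Sketch of the throttled-progress behaviour exercised above.
logger = BufferLogger()
fake_time = FakeTime(12345)
factory = LoggingUIFactory(time_source=fake_time.now, logger=logger)
bar = factory.nested_progress_bar()
bar.update("fetching", 1, 8)   # first update always logs: "INFO fetching 1/8"
bar.update("fetching", 2, 8)   # within factory.interval, so suppressed
fake_time.advance(factory.interval * 2)
bar.update("fetching", 3, 8)   # interval elapsed, logs again
# logger.getLogBuffer() == "INFO fetching 1/8\nINFO fetching 3/8\n"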
Example #52
class Importer:
    """Perform mailing list imports for command line scripts."""

    def __init__(self, team_name, log=None):
        self.team_name = team_name
        self.team = getUtility(IPersonSet).getByName(team_name)
        assert self.team is not None, (
            'No team with name: %s' % team_name)
        self.mailing_list = getUtility(IMailingListSet).get(team_name)
        assert self.mailing_list is not None, (
            'Team has no mailing list: %s' % team_name)
        assert self.mailing_list.status == MailingListStatus.ACTIVE, (
            'Team mailing list is not active: %s' % team_name)
        if log is None:
            self.log = BufferLogger()
        else:
            self.log = log

    def importAddresses(self, addresses):
        """Import all addresses.

        Every address that is preferred or validated and connected to a person
        is made a member of the team, and is subscribed to the mailing list
        (with the address given).  If the address is not valid, or if it is
        associated with a team, the address is ignored.

        :param addresses: The email addresses to join and subscribe.
        :type addresses: sequence of strings
        """
        email_set = getUtility(IEmailAddressSet)
        person_set = getUtility(IPersonSet)
        for entry in addresses:
            real_name, address = parseaddr(entry)
            # address could be empty or None.
            if not address:
                continue
            person = person_set.getByEmail(address, filter_status=False)
            if person is None or person.is_team:
                self.log.error('No person for address: %s', address)
                continue
            email = email_set.getByEmail(address)
            assert email is not None, (
                'Address has no IEmailAddress? %s' % address)
            if email.status not in (EmailAddressStatus.PREFERRED,
                                    EmailAddressStatus.VALIDATED):
                self.log.error('No valid email for address: %s', address)
                continue
            # Turn off may_subscribe_to_list because we want to explicitly
            # force subscription without relying on the person's
            # auto-subscribe policy.
            naked_team = removeSecurityProxy(self.team)
            naked_team.addMember(person, reviewer=person,
                                 status=TeamMembershipStatus.APPROVED,
                                 force_team_add=True,
                                 may_subscribe_to_list=False)
            try:
                self.mailing_list.subscribe(person, email)
            except CannotSubscribe as error:
                self.log.error('%s', error)
            # It's okay to str()-ify these because addresses and person names
            # are guaranteed to be in the ASCII range.
            self.log.info('%s (%s) joined and subscribed',
                          str(address), str(person.name))

    def importFromFile(self, filename):
        """Import all addresses given in the named file.

        The named file has email addresses to import, one per line.  The lines
        may be formatted using any format recognized by
        `email.Utils.parseaddr()`.

        :param filename: The name of the file containing email addresses.
        :type filename: string
        """
        in_file = open(filename)
        try:
            addresses = list(in_file)
        finally:
            in_file.close()
        self.importAddresses(addresses)
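A hedged usage sketch for the Importer class above; the team name and file name are invented for illustration:

# Hypothetical driver: the team and file names are made up.
importer = Importer('my-team')            # log defaults to a BufferLogger
importer.importFromFile('addresses.txt')  # one parseaddr()-style entry per line
print(importer.log.getLogBuffer())        # joined/subscribed and error lines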
Example #54
class TestMantisBugBatchParser(TestCase):
    """Test the MantisBugBatchParser class."""

    def setUp(self):
        super(TestMantisBugBatchParser, self).setUp()
        self.logger = BufferLogger()

    def test_empty(self):
        data = []
        parser = MantisBugBatchParser(data, self.logger)
        exc = self.assertRaises(
            UnparsableBugData,
            parser.getBugs)
        self.assertThat(
            str(exc), Equals("Missing header line"))

    def test_missing_headers(self):
        data = ['some,headers']
        parser = MantisBugBatchParser(data, self.logger)
        exc = self.assertRaises(
            UnparsableBugData,
            parser.getBugs)
        self.assertThat(
            str(exc),
            Equals("CSV header ['some', 'headers'] missing fields:"
                   " ['id', 'status', 'resolution']"))

    def test_missing_some_headers(self):
        data = ['some,headers,status,resolution']
        parser = MantisBugBatchParser(data, self.logger)
        exc = self.assertRaises(
            UnparsableBugData,
            parser.getBugs)
        self.assertThat(
            str(exc),
            Equals("CSV header ['some', 'headers', 'status', 'resolution'] "
                   "missing fields: ['id']"))

    def test_no_bugs(self):
        data = ['other,fields,id,status,resolution']
        parser = MantisBugBatchParser(data, self.logger)
        self.assertThat(parser.getBugs(), Equals({}))

    def test_passing(self):
        data = [
            'ignored,id,resolution,status',
            'foo,42,not,complete',
            'boo,13,,confirmed',
            ]
        parser = MantisBugBatchParser(data, self.logger)
        bug_42 = dict(
            id=42, status='complete', resolution='not', ignored='foo')
        bug_13 = dict(
            id=13, status='confirmed', resolution='', ignored='boo')
        self.assertThat(parser.getBugs(), Equals({42: bug_42, 13: bug_13}))

    def test_incomplete_line(self):
        data = [
            'ignored,id,resolution,status',
            '42,not,complete',
            ]
        parser = MantisBugBatchParser(data, self.logger)
        self.assertThat(parser.getBugs(), Equals({}))
        log = self.logger.getLogBuffer()
        self.assertThat(
            log,
            Equals("WARNING Line ['42', 'not', 'complete'] incomplete.\n"))

    def test_non_integer_id(self):
        data = [
            'ignored,id,resolution,status',
            'foo,bar,not,complete',
            ]
        parser = MantisBugBatchParser(data, self.logger)
        self.assertThat(parser.getBugs(), Equals({}))
        log = self.logger.getLogBuffer()
        self.assertThat(
            log, Equals("WARNING Encountered invalid bug ID: 'bar'.\n"))
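The tests above fix the parser's contract: getBugs() keys bugs by integer ID and reports bad rows as warnings on the logger. A hedged sketch of direct use; the CSV lines are invented:

# Hypothetical direct use; the CSV content is made up.
logger = BufferLogger()
parser = MantisBugBatchParser(
    ['id,status,resolution', '7,confirmed,'], logger)
bugs = parser.getBugs()
# bugs == {7: {'id': 7, 'status': 'confirmed', 'resolution': ''}}
print(logger.getLogBuffer())  # any "WARNING ..." lines about bad rows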
Example #56
class TestLogFileParsing(TestCase):
    """Test the parsing of log files."""

    layer = ZopelessLayer
    sample_line = (
        '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "%(method)s '
        '/15018215/ul_logo_64x64.png HTTP/1.1" %(status)s 2261 '
        '"https://launchpad.net/~ubuntulite/+archive" "Mozilla/5.0 (X11; '
        'U; Linux i686; en-US; rv:1.9b5) Gecko/2008041514 Firefox/3.0b5"')

    def setUp(self):
        TestCase.setUp(self)
        self.logger = BufferLogger()

    def _getLastLineStart(self, fd):
        """Return the position (in bytes) where the last line of the given
        file starts.
        """
        fd.seek(0)
        lines = fd.readlines()
        return fd.tell() - len(lines[-1])

    def test_parsing(self):
        # The parse_file() function returns a tuple containing a dict (mapping
        # days and library file IDs to number of downloads), the total number
        # of bytes that have been parsed from this file, and the running total
        # of lines parsed (for testing against the maximum).  In our sample
        # log, the file with ID 8196569 has been downloaded twice (once from
        # Argentina and once from Japan) and the files with ID 12060796 and
        # 9096290 have been downloaded once.  The file with ID 15018215 has
        # also been downloaded once (last line of the sample log), but
        # parse_file() always skips the last line as it may be truncated, so
        # it doesn't show up in the dict returned.
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 5 lines resulting in 3 download stats.')
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/12060796/me-tv-icon-64x64.png', {date: {'AU': 1}}),
             ('/8196569/mediumubuntulogo.png', {date: {'AR': 1, 'JP': 1}}),
             ('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])

        # The last line is skipped, so we'll record that the file has been
        # parsed until the beginning of the last line.
        self.assertNotEqual(parsed_bytes, fd.tell())
        self.assertEqual(parsed_bytes, self._getLastLineStart(fd))

    def test_parsing_last_line(self):
        # When there's only the last line of a given file for us to parse, we
        # assume the file has been rotated and it's safe to parse its last
        # line without worrying about whether or not it's been truncated.
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        self.addCleanup(fd.close)
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=self._getLastLineStart(fd), logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 1 download stats.')
        self.assertEqual(parsed_bytes, fd.tell())

        self.assertContentEqual(
            downloads.items(),
            [('/15018215/ul_logo_64x64.png',
              {datetime(2008, 6, 13): {'US': 1}})])

    def test_unexpected_error_while_parsing(self):
        # When there's an unexpected error, we log it and return as if we had
        # parsed up to the line before the one where the failure occurred.
        # Here we force an unexpected error on the first line.
        fd = StringIO('Not a log')
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertIn('Error', self.logger.getLogBuffer())
        self.assertEqual(downloads, {})
        self.assertEqual(parsed_bytes, 0)

    def _assertResponseWithGivenStatusIsIgnored(self, status):
        """Assert that responses with the given status are ignored."""
        fd = StringIO(
            self.sample_line % dict(status=status, method='GET'))
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 0 download stats.')
        self.assertEqual(downloads, {})
        self.assertEqual(parsed_bytes, fd.tell())

    def test_responses_with_404_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('404')

    def test_responses_with_206_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('206')

    def test_responses_with_304_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('304')

    def test_responses_with_503_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('503')
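
    # Taken together, these cases indicate that only complete, successful
    # responses are counted: partial content (206), not-modified (304),
    # client errors (404) and server errors (503) all yield no stats.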

    def _assertRequestWithGivenMethodIsIgnored(self, method):
        """Assert that requests with the given method are ignored."""
        fd = StringIO(
            self.sample_line % dict(status='200', method=method))
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 0 download stats.')
        self.assertEqual(downloads, {})
        self.assertEqual(parsed_bytes, fd.tell())

    def test_HEAD_request_is_ignored(self):
        self._assertRequestWithGivenMethodIsIgnored('HEAD')

    def test_POST_request_is_ignored(self):
        self._assertRequestWithGivenMethodIsIgnored('POST')

    def test_normal_request_is_not_ignored(self):
        fd = StringIO(
            self.sample_line % dict(status=200, method='GET'))
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 1 download stats.')

        self.assertEqual(downloads,
            {'/15018215/ul_logo_64x64.png':
                {datetime(2008, 6, 13): {'US': 1}}})

        self.assertEqual(parsed_bytes, fd.tell())

    def test_max_parsed_lines(self):
        # The max_parsed_lines config option limits the number of parsed
        # lines.
        config.push(
            'log_parser config',
            '[launchpad]\nlogparser_max_parsed_lines: 2')
        self.addCleanup(config.pop, 'log_parser config')
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        self.addCleanup(fd.close)

        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)

        # We have initially parsed only the first two lines of data,
        # corresponding to one download (the first line is a 404 and
        # so ignored).
        self.assertEqual(parsed_lines, 2)
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
        fd.seek(0)
        lines = fd.readlines()
        line_lengths = [len(line) for line in lines]
        self.assertEqual(parsed_bytes, sum(line_lengths[:2]))

        # And the subsequent parse will be for the 3rd and 4th lines,
        # corresponding to two downloads of the same file.
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=parsed_bytes, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertContentEqual(
            downloads.items(),
            [('/12060796/me-tv-icon-64x64.png', {date: {'AU': 1}}),
             ('/8196569/mediumubuntulogo.png', {date: {'AR': 1}})])
        self.assertEqual(parsed_bytes, sum(line_lengths[:4]))

    def test_max_parsed_lines_exceeded(self):
        # Show that if a non-zero parsed_lines is passed in, the number of
        # lines parsed will be less than it would otherwise have been.

        # The max_parsed_lines config option limits the number of parsed
        # lines.
        config.push(
            'log_parser config',
            '[launchpad]\nlogparser_max_parsed_lines: 2')
        self.addCleanup(config.pop, 'log_parser config')
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        self.addCleanup(fd.close)

        # We want to start parsing on line 2 so we will have a value in
        # "downloads" to make a positive assertion about.  (The first line is
        # a 404 so wouldn't generate any output.)
        start_position = len(fd.readline())

        # If we have already parsed some lines, then the number of lines
        # parsed will be passed in (parsed_lines argument) and parse_file will
        # take that number into account when determining if the maximum number
        # of lines to parse has been reached.
        parsed_lines = 1
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=start_position, logger=self.logger,
            get_download_key=get_path_download_key, parsed_lines=parsed_lines)

        # The total number of lines parsed during the run (1 line) plus the
        # number of lines parsed previously (1 line, as passed in via
        # parsed_lines) is returned.
        self.assertEqual(parsed_lines, 2)
        # Since we told parse_file that we had already parsed 1 line and the
        # limit is 2 lines, it only parsed a single line.
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
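
A minimal sketch of the line-limit bookkeeping these two tests exercise, with hypothetical names (sketch_parse_file is not the real parse_file): the caller passes in the running line count, parsing stops once the configured maximum is reached, and the returned byte offset tells the next run where to resume.

def sketch_parse_file(fd, start_position, logger, get_download_key,
                      parsed_lines=0, max_parsed_lines=2):
    # 'max_parsed_lines' stands in for the logparser_max_parsed_lines
    # config option pushed in the tests above.
    fd.seek(start_position)
    parsed_bytes = start_position
    downloads = {}
    for line in fd:
        if parsed_lines >= max_parsed_lines:
            break  # limit reached; resume from parsed_bytes next run
        parsed_lines += 1
        parsed_bytes += len(line)
        # ...filter the line and update downloads via get_download_key();
        # omitted in this sketch...
    return downloads, parsed_bytes, parsed_lines
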
 def setUp(self):
     TestCase.setUp(self)
     self.logger = BufferLogger()

 def test_logStartBuild(self):
     # logStartBuild will properly report the package that's being built
     job = self.makeJob()
     logger = BufferLogger()
     job.logStartBuild(logger)
     self.assertEquals(logger.getLogBuffer(), "INFO startBuild(Mydistro, recept, joe)\n")
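
All of these examples share the same pattern: hand a BufferLogger to the code under test, then assert on getLogBuffer(). A rough stand-in built on the standard library (Python 3), assuming only the "LEVEL message\n" format that the assertions above rely on:

import logging
from io import StringIO

def make_buffer_logger(name='sketch-buffer'):
    # Collects formatted records in memory, like BufferLogger's buffer.
    buffer = StringIO()
    handler = logging.StreamHandler(buffer)
    handler.setFormatter(logging.Formatter('%(levelname)s %(message)s'))
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
    return logger, buffer

# Usage mirroring the test above: pass the logger in, assert on the buffer.
logger, buffer = make_buffer_logger()
logger.info('startBuild(Mydistro, recept, joe)')
assert buffer.getvalue() == 'INFO startBuild(Mydistro, recept, joe)\n'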