Example #1
0
    def disabled_test_timeout_short(self):
        """When a job exceeds its lease, an exception is raised.

        Unfortunately, timeouts include the time it takes for the zope
        machinery to start up, so we run a job that will not time out first,
        followed by a job that is sure to time out.
        """
        logger = BufferLogger()
        logger.setLevel(logging.INFO)
        # StuckJob is actually a source of two jobs. The first is fast, the
        # second slow.
        runner = TwistedJobRunner.runFromSource(ShorterStuckJob,
                                                'branchscanner', logger)
        self.oops_capture.sync()
        oops = self.oopses[0]
        self.assertEqual(
            (1, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
        # The parentheses are regex metacharacters and must be escaped in
        # the pattern.  "\(" in a non-raw string literal is an invalid
        # escape sequence (DeprecationWarning, and a SyntaxError in future
        # Python versions), so double the backslashes.
        self.assertThat(
            logger.getLogBuffer(),
            MatchesRegex(
                dedent("""\
                INFO Running through Twisted.
                INFO Running <ShorterStuckJob.*?> \\(ID .*?\\).
                INFO Running <ShorterStuckJob.*?> \\(ID .*?\\).
                INFO Job resulted in OOPS: %s
                """) % oops['id']))
        self.assertEqual(('TimeoutError', 'Job ran too long.'),
                         (oops['type'], oops['value']))
Example #2
0
 def test_logging(self):
     # MacaroonAuth logs details of both macaroons at DEBUG level.
     request = Request()
     root_key = hashlib.sha256("root").hexdigest()
     root = Macaroon(key=root_key)
     discharge_key = hashlib.sha256("discharge").hexdigest()
     caveat_id = '{"secret": "thing"}'
     root.add_third_party_caveat("sso.example", discharge_key, caveat_id)
     root.add_first_party_caveat(
         "store.example|package_id|{}".format(
             json.dumps(["example-package"])))
     discharge = Macaroon(
         location="sso.example", key=discharge_key, identifier=caveat_id)
     discharge.add_first_party_caveat(
         "sso.example|account|{}".format(
             base64.b64encode(json.dumps({
                 "openid": "1234567",
                 "email": "*****@*****.**",
                 }))))
     logger = BufferLogger()
     MacaroonAuth(
         root.serialize(), discharge.serialize(), logger=logger)(request)
     self.assertEqual(
         ['DEBUG root macaroon: snap-ids: ["example-package"]',
          'DEBUG discharge macaroon: OpenID identifier: 1234567'],
         logger.getLogBuffer().splitlines())
 def test_extraBuildArgs_withBadConfigForBzrBuilderPPA(self):
     # Ensure _extraBuildArgs doesn't blow up with a badly formatted
     # bzr_builder_sources_list in the config.
     # (note the missing 's' in %(series)
     self.pushConfig(
         "builddmaster",
         bzr_builder_sources_list="deb http://foo %(series) main")
     job = self.makeJob()
     distroarchseries = job.build.distroseries.architectures[0]
     expected_archives = get_sources_list_for_building(
         job.build, distroarchseries, None)
     logger = BufferLogger()
     expected_args = {
         "archive_private": False,
         "arch_tag": "i386",
         "author_email": u"*****@*****.**",
         "suite": u"mydistro",
         "author_name": u"Joe User",
         "archive_purpose": "PPA",
         "ogrecomponent": "universe",
         "recipe_text": "# bzr-builder format 0.3 deb-version {debupstream}-0~{revno}\n"
         "lp://dev/~joe/someapp/pkg\n",
         "archives": expected_archives,
         "distroseries_name": job.build.distroseries.name,
         }
     self.assertEqual(
         expected_args, job._extraBuildArgs(distroarchseries, logger))
     self.assertIn(
         "Exception processing bzr_builder_sources_list:",
         logger.getLogBuffer())
    def disabled_test_timeout_short(self):
        """When a job exceeds its lease, an exception is raised.

        Unfortunately, timeouts include the time it takes for the zope
        machinery to start up, so we run a job that will not time out first,
        followed by a job that is sure to time out.
        """
        logger = BufferLogger()
        logger.setLevel(logging.INFO)
        # StuckJob is actually a source of two jobs. The first is fast, the
        # second slow.
        runner = TwistedJobRunner.runFromSource(
            ShorterStuckJob, 'branchscanner', logger)
        self.oops_capture.sync()
        oops = self.oopses[0]
        self.assertEqual(
            (1, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
        # Double the backslashes before the parentheses: "\(" in a non-raw
        # string is an invalid escape sequence (deprecated, and an error in
        # future Python versions).
        self.assertThat(
            logger.getLogBuffer(), MatchesRegex(
                dedent("""\
                INFO Running through Twisted.
                INFO Running <ShorterStuckJob.*?> \\(ID .*?\\).
                INFO Running <ShorterStuckJob.*?> \\(ID .*?\\).
                INFO Job resulted in OOPS: %s
                """) % oops['id']))
        self.assertEqual(('TimeoutError', 'Job ran too long.'),
                         (oops['type'], oops['value']))
Example #5
0
 def test_process_one(self):
     # A single watch against a GitHub issue is updated from the remote
     # bug's state.
     remote_bug = {
         "id": 12345, "number": 1234, "state": "open", "labels": [],
         }
     _add_rate_limit_response("api.github.com")
     responses.add(
         "GET", "https://api.github.com/repos/user/repository/issues/1234",
         json=remote_bug)
     bug = self.factory.makeBug()
     tracker = self.factory.makeBugTracker(
         base_url="https://github.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITHUB)
     bug.addWatch(
         tracker, "1234", getUtility(ILaunchpadCelebrities).janitor)
     watch_info = [
         (watch.remotebug, watch.remotestatus)
         for watch in tracker.watches]
     self.assertEqual([("1234", None)], watch_info)
     transaction.commit()
     logger = BufferLogger()
     updater = CheckwatchesMaster(transaction, logger=logger)
     github = get_external_bugtracker(tracker)
     updater.updateBugWatches(github, tracker.watches)
     self.assertEqual(
         "INFO Updating 1 watches for 1 bugs on "
         "https://api.github.com/repos/user/repository\n",
         logger.getLogBuffer())
     self.assertEqual(
         [("1234", BugTaskStatus.NEW)],
         [(watch.remotebug, github.convertRemoteStatus(watch.remotestatus))
          for watch in tracker.watches])
Example #6
0
 def test_process_many(self):
     # Ten watches are updated in one batch; even-numbered remote bugs
     # are open and odd-numbered ones are closed.
     remote_bugs = []
     for bug_id in range(1000, 1010):
         state = "open" if (bug_id % 2) == 0 else "closed"
         remote_bugs.append(
             {"id": bug_id + 1, "number": bug_id, "state": state,
              "labels": []})
     _add_rate_limit_response("api.github.com")
     responses.add(
         "GET", "https://api.github.com/repos/user/repository/issues",
         json=remote_bugs)
     bug = self.factory.makeBug()
     tracker = self.factory.makeBugTracker(
         base_url="https://github.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITHUB)
     janitor = getUtility(ILaunchpadCelebrities).janitor
     for remote_bug in remote_bugs:
         bug.addWatch(tracker, str(remote_bug["number"]), janitor)
     transaction.commit()
     logger = BufferLogger()
     updater = CheckwatchesMaster(transaction, logger=logger)
     github = get_external_bugtracker(tracker)
     updater.updateBugWatches(github, tracker.watches)
     self.assertEqual(
         "INFO Updating 10 watches for 10 bugs on "
         "https://api.github.com/repos/user/repository\n",
         logger.getLogBuffer())
     expected = (
         [(str(bug_id), BugTaskStatus.NEW)
          for bug_id in (1000, 1002, 1004, 1006, 1008)] +
         [(str(bug_id), BugTaskStatus.FIXRELEASED)
          for bug_id in (1001, 1003, 1005, 1007, 1009)])
     self.assertContentEqual(
         expected,
         [(watch.remotebug, github.convertRemoteStatus(watch.remotestatus))
          for watch in tracker.watches])
Example #7
0
 def test_logStartBuild(self):
     # logStartBuild will properly report the package that's being built
     job = self.makeJob()
     logger = BufferLogger()
     job.logStartBuild(logger)
     # Use assertEqual: assertEquals is a deprecated alias, removed in
     # Python 3.12.
     self.assertEqual(logger.getLogBuffer(),
                      "INFO startBuild(Mydistro, recept, joe)\n")
    def test_makeDailyBuilds_skips_builds_already_queued(self):
        # If the recipe already has an identical build pending,
        # makeDailyBuilds() won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(
            owner=owner, name='funky-recipe', build_daily=True,
            is_stale=True)
        series = list(recipe.distroseries)[0]
        over_a_day_ago = datetime.now(utc) - timedelta(hours=24, seconds=1)
        self.factory.makeSourcePackageRecipeBuild(
            recipe=recipe, archive=recipe.daily_build_archive,
            requester=recipe.owner, distroseries=series,
            pocket=PackagePublishingPocket.RELEASE,
            date_created=over_a_day_ago)
        removeSecurityProxy(recipe).is_stale = True

        logger = BufferLogger()
        daily_builds = SourcePackageRecipeBuild.makeDailyBuilds(logger)
        self.assertEqual([], daily_builds)
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - build already pending for Warty (4.10)\n',
            logger.getLogBuffer())
Example #9
0
 def test_process_one(self):
     # A single watch against a GitLab issue is updated from the remote
     # bug's state.
     remote_bug = [
         {"id": "12345", "iid": 1234, "state": "opened", "labels": []},
         ]
     responses.add(
         "GET",
         "https://gitlab.com/api/v4/projects/user%2Frepository/issues?"
         "iids[]=1234",
         json=remote_bug, match_querystring=True)
     bug = self.factory.makeBug()
     tracker = self.factory.makeBugTracker(
         base_url="https://gitlab.com/user/repository/issues",
         bugtrackertype=BugTrackerType.GITLAB)
     bug.addWatch(
         tracker, "1234", getUtility(ILaunchpadCelebrities).janitor)
     watch_info = [
         (watch.remotebug, watch.remotestatus)
         for watch in tracker.watches]
     self.assertEqual([("1234", None)], watch_info)
     transaction.commit()
     logger = BufferLogger()
     updater = CheckwatchesMaster(transaction, logger=logger)
     gitlab = get_external_bugtracker(tracker)
     updater.updateBugWatches(gitlab, tracker.watches)
     self.assertEqual(
         "INFO Updating 1 watches for 1 bugs on "
         "https://gitlab.com/api/v4/projects/user%2Frepository\n",
         logger.getLogBuffer())
     self.assertEqual(
         [("1234", BugTaskStatus.NEW)],
         [(watch.remotebug, gitlab.convertRemoteStatus(watch.remotestatus))
          for watch in tracker.watches])
Example #10
0
class TestLibrarianLogFileParsing(TestCase):
    """Test the parsing of librarian log files."""

    layer = ZopelessLayer

    def setUp(self):
        TestCase.setUp(self)
        self.logger = BufferLogger()

    def _parse(self, line):
        # Run parse_file over a single log line, returning the open
        # file-like object along with the parse results.
        fd = StringIO(line)
        downloads, parsed_bytes, ignored = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_library_file_id)
        return fd, downloads, parsed_bytes

    def test_request_to_lfa_is_parsed(self):
        fd, downloads, parsed_bytes = self._parse(
            '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "GET '
            '/15018215/ul_logo_64x64.png HTTP/1.1" 200 2261 '
            '"https://launchpad.net/~ubuntulite/+archive" "Mozilla"')
        self.assertEqual(
            'INFO Parsed 1 lines resulting in 1 download stats.',
            self.logger.getLogBuffer().strip())
        self.assertEqual(
            {'15018215': {datetime(2008, 6, 13): {'US': 1}}}, downloads)
        self.assertEqual(fd.tell(), parsed_bytes)

    def test_request_to_non_lfa_is_ignored(self):
        # A request to a path which doesn't map to a LibraryFileAlias (e.g.
        # '/') is ignored.
        fd, downloads, parsed_bytes = self._parse(
            '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "GET / HTTP/1.1" '
            '200 2261 "https://launchpad.net/~ubuntulite/+archive" "Mozilla"')
        self.assertEqual(
            'INFO Parsed 1 lines resulting in 0 download stats.',
            self.logger.getLogBuffer().strip())
        self.assertEqual({}, downloads)
        self.assertEqual(fd.tell(), parsed_bytes)
Example #11
0
 def test_lease_held_handled(self):
     """Jobs that raise LeaseHeld are handled correctly."""
     log = BufferLogger()
     log.setLevel(logging.DEBUG)
     runner = TwistedJobRunner.runFromSource(
         LeaseHeldJob, 'branchscanner', log)
     self.assertIn('Could not acquire lease', log.getLogBuffer())
     job_counts = (len(runner.completed_jobs), len(runner.incomplete_jobs))
     self.assertEqual((0, 1), job_counts)
 def test_verifyBuildRequest_valid(self):
     # VerifyBuildRequest won't raise any exceptions when called with a
     # valid builder set.
     job = self.makeJob()
     builder = MockBuilder("bob-de-bouwer")
     job.setBuilder(builder, OkSlave())
     logger = BufferLogger()
     job.verifyBuildRequest(logger)
     # Use assertEqual: assertEquals is a deprecated alias, removed in
     # Python 3.12.
     self.assertEqual("", logger.getLogBuffer())
Example #13
0
 def test_verifyBuildRequest_valid(self):
     # VerifyBuildRequest won't raise any exceptions when called with a
     # valid builder set.
     job = self.makeJob()
     builder = MockBuilder("bob-de-bouwer")
     job.setBuilder(builder, OkSlave())
     logger = BufferLogger()
     job.verifyBuildRequest(logger)
     # Use assertEqual: assertEquals is a deprecated alias, removed in
     # Python 3.12.
     self.assertEqual("", logger.getLogBuffer())
Example #14
0
 def test_lease_held_handled(self):
     """Jobs that raise LeaseHeld are handled correctly."""
     log = BufferLogger()
     log.setLevel(logging.DEBUG)
     runner = TwistedJobRunner.runFromSource(
         LeaseHeldJob, 'branchscanner', log)
     self.assertIn('Could not acquire lease', log.getLogBuffer())
     self.assertEqual(
         (0, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
    def process(self):
        """Process an upload that is the result of a build.

        The name of the leaf is the build id of the build.
        Build uploads always contain a single package per leaf.

        On any failure the build is marked FAILEDTOUPLOAD, the captured log
        is stored against the build, and (unless the recipe was deleted)
        the build's subscribers are notified.
        """
        # Buffers messages emitted during processing; its contents become
        # the build's upload log if the upload fails.
        logger = BufferLogger()
        if self.build.status != BuildStatus.UPLOADING:
            self.processor.log.warn(
                "Expected build status to be 'UPLOADING', was %s. Ignoring." %
                self.build.status.name)
            return
        try:
            # The recipe may have been deleted so we need to flag that here
            # and will handle below. We check so that we don't go to the
            # expense of doing an unnecessary upload. We don't just exit here
            # because we want the standard cleanup to occur.
            recipe_deleted = (ISourcePackageRecipeBuild.providedBy(self.build)
                and self.build.recipe is None)
            if recipe_deleted:
                result = UploadStatusEnum.FAILED
            else:
                self.processor.log.debug("Build %s found" % self.build.id)
                [changes_file] = self.locateChangesFiles()
                logger.debug("Considering changefile %s" % changes_file)
                result = self.processChangesFile(changes_file, logger)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # Deliberate catch-all (after re-raising interrupts above):
            # any other failure is recorded as an OOPS and the upload is
            # marked failed, so the cleanup below still runs.
            info = sys.exc_info()
            message = (
                'Exception while processing upload %s' % self.upload_path)
            properties = [('error-explanation', message)]
            request = ScriptRequest(properties)
            error_utility = ErrorReportingUtility()
            error_utility.raising(info, request)
            logger.error('%s (%s)' % (message, request.oopsid))
            result = UploadStatusEnum.FAILED
        # An upload only counts as successful if the changes file was
        # accepted and verifySuccessfulUpload() agrees.
        if (result != UploadStatusEnum.ACCEPTED or
            not self.build.verifySuccessfulUpload()):
            self.build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        if self.build.status != BuildStatus.FULLYBUILT:
            if recipe_deleted:
                # For a deleted recipe, no need to notify that uploading has
                # failed - we just log a warning.
                self.processor.log.warn(
                    "Recipe for build %s was deleted. Ignoring." %
                    self.upload)
            else:
                self.build.storeUploadLog(logger.getLogBuffer())
                self.build.notify(extra_info="Uploading build %s failed." %
                                  self.upload)
        else:
            self.build.notify()
        # NOTE(review): the transaction is committed before the upload is
        # moved — presumably so the status change is durable even if the
        # move fails; confirm before reordering.
        self.processor.ztm.commit()
        self.moveProcessedUpload(result, logger)
Example #16
0
 def test_original_to_in_body(self):
     # When the original-to marker appears in the message body, it is
     # logged as suspected spam and the envelope recipient is returned.
     header_to = "*****@*****.**"
     original_to = "*****@*****.**"
     alias = "librarian-somewhere"
     body = "%s: %s\n\nsome body stuff" % (ORIGINAL_TO_HEADER, original_to)
     log = BufferLogger()
     mail = self.factory.makeSignedMessage(body=body, to_address=header_to)
     extracted = extract_addresses(mail, alias, log)
     self.assertThat(extracted, Equals([header_to]))
     self.assertThat(
         log.getLogBuffer(),
         Equals("INFO Suspected spam: librarian-somewhere\n"))
Example #17
0
    def process(self):
        """Process an upload that is the result of a build.

        The name of the leaf is the build id of the build.
        Build uploads always contain a single package per leaf.

        On any failure the build is marked FAILEDTOUPLOAD, the captured log
        is stored against the build, and (unless the recipe was deleted)
        the build's subscribers are notified.
        """
        # Buffers messages emitted during processing; its contents become
        # the build's upload log if the upload fails.
        logger = BufferLogger()
        if self.build.status != BuildStatus.UPLOADING:
            self.processor.log.warn(
                "Expected build status to be 'UPLOADING', was %s. Ignoring." %
                self.build.status.name)
            return
        try:
            # The recipe may have been deleted so we need to flag that here
            # and will handle below. We check so that we don't go to the
            # expense of doing an unnecessary upload. We don't just exit here
            # because we want the standard cleanup to occur.
            recipe_deleted = (ISourcePackageRecipeBuild.providedBy(self.build)
                              and self.build.recipe is None)
            if recipe_deleted:
                result = UploadStatusEnum.FAILED
            else:
                self.processor.log.debug("Build %s found" % self.build.id)
                [changes_file] = self.locateChangesFiles()
                logger.debug("Considering changefile %s" % changes_file)
                result = self.processChangesFile(changes_file, logger)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # Deliberate catch-all (after re-raising interrupts above):
            # any other failure is recorded as an OOPS and the upload is
            # marked failed, so the cleanup below still runs.
            info = sys.exc_info()
            message = ('Exception while processing upload %s' %
                       self.upload_path)
            properties = [('error-explanation', message)]
            request = ScriptRequest(properties)
            error_utility = ErrorReportingUtility()
            error_utility.raising(info, request)
            logger.error('%s (%s)' % (message, request.oopsid))
            result = UploadStatusEnum.FAILED
        # An upload only counts as successful if the changes file was
        # accepted and verifySuccessfulUpload() agrees.
        if (result != UploadStatusEnum.ACCEPTED
                or not self.build.verifySuccessfulUpload()):
            self.build.updateStatus(BuildStatus.FAILEDTOUPLOAD)
        if self.build.status != BuildStatus.FULLYBUILT:
            if recipe_deleted:
                # For a deleted recipe, no need to notify that uploading has
                # failed - we just log a warning.
                self.processor.log.warn(
                    "Recipe for build %s was deleted. Ignoring." % self.upload)
            else:
                self.build.storeUploadLog(logger.getLogBuffer())
                self.build.notify(extra_info="Uploading build %s failed." %
                                  self.upload)
        else:
            self.build.notify()
        # NOTE(review): the transaction is committed before the upload is
        # moved — presumably so the status change is durable even if the
        # move fails; confirm before reordering.
        self.processor.ztm.commit()
        self.moveProcessedUpload(result, logger)
 def test_original_to_in_body(self):
     # When the original-to marker appears in the message body, it is
     # logged as suspected spam and the envelope recipient is returned.
     header_to = '*****@*****.**'
     original_to = '*****@*****.**'
     alias = 'librarian-somewhere'
     body = '%s: %s\n\nsome body stuff' % (ORIGINAL_TO_HEADER, original_to)
     log = BufferLogger()
     mail = self.factory.makeSignedMessage(body=body, to_address=header_to)
     extracted = extract_addresses(mail, alias, log)
     self.assertThat(extracted, Equals([header_to]))
     self.assertThat(
         log.getLogBuffer(),
         Equals('INFO Suspected spam: librarian-somewhere\n'))
Example #19
0
 def test_original_to_missing(self):
     # Without the original-to header a warning is logged and the
     # envelope recipient is used.
     header_to = "*****@*****.**"
     alias = "librarian-somewhere"
     log = BufferLogger()
     mail = self.factory.makeSignedMessage(to_address=header_to)
     extracted = extract_addresses(mail, alias, log)
     self.assertThat(extracted, Equals([header_to]))
     expected_warning = (
         "WARNING No X-Launchpad-Original-To header was present "
         "in email: librarian-somewhere\n")
     self.assertThat(log.getLogBuffer(), Equals(expected_warning))
 def test_original_to_missing(self):
     # Without the original-to header a warning is logged and the
     # envelope recipient is used.
     header_to = '*****@*****.**'
     alias = 'librarian-somewhere'
     log = BufferLogger()
     mail = self.factory.makeSignedMessage(to_address=header_to)
     extracted = extract_addresses(mail, alias, log)
     self.assertThat(extracted, Equals([header_to]))
     expected_warning = (
         'WARNING No X-Launchpad-Original-To header was present '
         'in email: librarian-somewhere\n')
     self.assertThat(log.getLogBuffer(), Equals(expected_warning))
 def test_verifyBuildRequest_valid(self):
     # verifyBuildRequest doesn't raise any exceptions when called with a
     # valid builder set.
     job = self.makeJob()
     lfa = self.factory.makeLibraryFileAlias()
     transaction.commit()
     job.build.distro_arch_series.addOrUpdateChroot(lfa)
     job.setBuilder(MockBuilder(), OkSlave())
     logger = BufferLogger()
     job.verifyBuildRequest(logger)
     self.assertEqual("", logger.getLogBuffer())
Example #22
0
 def test_makeDailyBuilds_logs_builds(self):
     # If a logger is passed into the makeDailyBuilds method, each recipe
     # that a build is requested for gets logged.
     owner = self.factory.makePerson(name='eric')
     self.factory.makeSourcePackageRecipe(
         owner=owner, name=u'funky-recipe', build_daily=True)
     log = BufferLogger()
     SourcePackageRecipeBuild.makeDailyBuilds(log)
     expected = (
         'DEBUG Recipe eric/funky-recipe is stale\n'
         'DEBUG  - build requested for Warty (4.10)\n')
     self.assertEqual(expected, log.getLogBuffer())
class TestLibrarianLogFileParsing(TestCase):
    """Test the parsing of librarian log files."""

    layer = ZopelessLayer

    def setUp(self):
        TestCase.setUp(self)
        self.logger = BufferLogger()

    def test_request_to_lfa_is_parsed(self):
        log_file = StringIO(
            '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "GET '
            '/15018215/ul_logo_64x64.png HTTP/1.1" 200 2261 '
            '"https://launchpad.net/~ubuntulite/+archive" "Mozilla"')
        downloads, parsed_bytes, ignored = parse_file(
            log_file, start_position=0, logger=self.logger,
            get_download_key=get_library_file_id)
        self.assertEqual(
            'INFO Parsed 1 lines resulting in 1 download stats.',
            self.logger.getLogBuffer().strip())
        self.assertEqual(
            {'15018215': {datetime(2008, 6, 13): {'US': 1}}}, downloads)
        self.assertEqual(log_file.tell(), parsed_bytes)

    def test_request_to_non_lfa_is_ignored(self):
        # A request to a path which doesn't map to a LibraryFileAlias (e.g.
        # '/') is ignored.
        log_file = StringIO(
            '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "GET / HTTP/1.1" '
            '200 2261 "https://launchpad.net/~ubuntulite/+archive" "Mozilla"')
        downloads, parsed_bytes, ignored = parse_file(
            log_file, start_position=0, logger=self.logger,
            get_download_key=get_library_file_id)
        self.assertEqual(
            'INFO Parsed 1 lines resulting in 0 download stats.',
            self.logger.getLogBuffer().strip())
        self.assertEqual({}, downloads)
        self.assertEqual(log_file.tell(), parsed_bytes)
Example #24
0
 def disable_test_memory_hog_job(self):
     """A job with a memory limit will trigger MemoryError on excess."""
     # XXX: frankban 2012-03-29 bug=963455: This test fails intermittently,
     # especially in parallel tests.
     log = BufferLogger()
     log.setLevel(logging.INFO)
     runner = TwistedJobRunner.runFromSource(
         MemoryHogJob, 'branchscanner', log)
     job_counts = (len(runner.completed_jobs), len(runner.incomplete_jobs))
     self.assertEqual((0, 1), job_counts)
     self.assertIn('Job resulted in OOPS', log.getLogBuffer())
     self.oops_capture.sync()
     self.assertEqual('MemoryError', self.oopses[0]['type'])
 def test_makeDailyBuilds_with_disallowed_series(self):
     # If a recipe is set to build into a disallowed series,
     # makeDailyBuilds won't OOPS.
     recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True)
     self.factory.makeArchive(owner=recipe.owner)
     log = BufferLogger()
     series = list(recipe.distroseries)[0]
     removeSecurityProxy(series).status = SeriesStatus.OBSOLETE
     SourcePackageRecipeBuild.makeDailyBuilds(log)
     self.assertEqual([], self.oopses)
     self.assertIn(
         "DEBUG  - cannot build against Warty (4.10).", log.getLogBuffer())
 def test_uploadPublicKey_suppress_in_config(self):
     # With upload_keys disabled, uploadPublicKey only logs and the key
     # never reaches the keyserver.
     self.useFixture(KeyServerTac())
     log = BufferLogger()
     self.pushConfig("gpghandler", upload_keys=False)
     self.populateKeyring()
     fingerprint = list(self.gpg_handler.localKeys())[0].fingerprint
     self.gpg_handler.uploadPublicKey(fingerprint, logger=log)
     self.assertEqual(
         "INFO Not submitting key to keyserver "
         "(disabled in configuration).\n", log.getLogBuffer())
     self.assertRaises(
         GPGKeyDoesNotExistOnServer,
         removeSecurityProxy(self.gpg_handler)._getPubKey, fingerprint)
 def test_verifyBuildRequest_archive_private_owners_match(self):
     # Nothing is logged when the build's owner matches the owner of the
     # private archive.
     archive = self.factory.makeArchive(private=True)
     job = self.makeJob(
         archive=archive, registrant=archive.owner, owner=archive.owner)
     lfa = self.factory.makeLibraryFileAlias()
     transaction.commit()
     job.build.distro_arch_series.addOrUpdateChroot(lfa)
     job.setBuilder(MockBuilder(), OkSlave())
     logger = BufferLogger()
     job.verifyBuildRequest(logger)
     self.assertEqual("", logger.getLogBuffer())
Example #28
0
 def disable_test_memory_hog_job(self):
     """A job with a memory limit will trigger MemoryError on excess."""
     # XXX: frankban 2012-03-29 bug=963455: This test fails intermittently,
     # especially in parallel tests.
     log = BufferLogger()
     log.setLevel(logging.INFO)
     runner = TwistedJobRunner.runFromSource(
         MemoryHogJob, 'branchscanner', log)
     self.assertEqual(
         (0, 1), (len(runner.completed_jobs), len(runner.incomplete_jobs)))
     self.assertIn('Job resulted in OOPS', log.getLogBuffer())
     self.oops_capture.sync()
     self.assertEqual('MemoryError', self.oopses[0]['type'])
    def test_run(self):
        # When run it merges from_person into to_person.
        self.transfer_email()
        logger = BufferLogger()
        with log.use(logger):
            self.job.run()

        # The merged pointer on from_person now references to_person.
        # (The same assertion was previously duplicated after the log
        # check; one occurrence suffices.)
        self.assertEqual(self.to_person, self.from_person.merged)
        self.assertEqual(
            ["DEBUG PersonMergeJob is about to merge ~void into ~gestalt",
             "DEBUG PersonMergeJob has merged ~void into ~gestalt"],
            logger.getLogBuffer().splitlines())
Example #30
0
    def test_run(self):
        # When run it merges from_person into to_person.
        self.transfer_email()
        logger = BufferLogger()
        with log.use(logger):
            self.job.run()

        # The merged pointer on from_person now references to_person.
        # (The same assertion was previously duplicated after the log
        # check; one occurrence suffices.)
        self.assertEqual(self.to_person, self.from_person.merged)
        self.assertEqual(
            ["DEBUG PersonMergeJob is about to merge ~void into ~gestalt",
             "DEBUG PersonMergeJob has merged ~void into ~gestalt"],
            logger.getLogBuffer().splitlines())
 def test_makeDailyBuilds_with_disallowed_series(self):
     # If a recipe is set to build into a disallowed series,
     # makeDailyBuilds won't OOPS.
     recipe = self.factory.makeSourcePackageRecipe(
         build_daily=True, is_stale=True)
     self.factory.makeArchive(owner=recipe.owner)
     logger = BufferLogger()
     distroseries = list(recipe.distroseries)[0]
     removeSecurityProxy(distroseries).status = SeriesStatus.OBSOLETE
     SourcePackageRecipeBuild.makeDailyBuilds(logger)
     # Use assertEqual: assertEquals is a deprecated alias, removed in
     # Python 3.12.
     self.assertEqual([], self.oopses)
     self.assertIn(
         "DEBUG  - cannot build against Warty (4.10).",
         logger.getLogBuffer())
 def test_reject_changes_file_no_email(self):
     # If we are rejecting a mail, and the person to notify has no
     # preferred email, we should return early.
     archive = self.factory.makeArchive()
     distroseries = self.factory.makeDistroSeries()
     uploader = self.factory.makePerson()
     get_property_cache(uploader).preferredemail = None
     email = '%s <*****@*****.**>' % uploader.displayname
     changes = {'Changed-By': email, 'Maintainer': email}
     log = BufferLogger()
     reject_changes_file(
         uploader, '/tmp/changes', changes, archive, distroseries, '',
         logger=log)
     self.assertIn(
         'No recipients have a preferred email.', log.getLogBuffer())
 def test_run(self):
     # The email is sent to all the recipients.
     job = make_question_job(
         self.factory, QuestionRecipientSet.ASKER_SUBSCRIBER)
     logger = BufferLogger()
     with log.use(logger):
         job.run()
     question_id = job.question.id
     self.assertEqual(
         ["DEBUG QuestionEmailJob will send email for question %s." %
          question_id,
          "DEBUG QuestionEmailJob has sent email for question %s." %
          question_id],
         logger.getLogBuffer().splitlines())
     transaction.commit()
     self.assertEqual(2, len(stub.test_emails))
Example #34
0
 def test_run(self):
     # The email is sent to all the recipients.
     job = make_question_job(
         self.factory, QuestionRecipientSet.ASKER_SUBSCRIBER)
     logger = BufferLogger()
     with log.use(logger):
         job.run()
     expected_lines = [
         "DEBUG QuestionEmailJob will send email for question %s." %
         job.question.id,
         "DEBUG QuestionEmailJob has sent email for question %s." %
         job.question.id,
         ]
     self.assertEqual(expected_lines, logger.getLogBuffer().splitlines())
     transaction.commit()
     self.assertEqual(2, len(stub.test_emails))
Example #35
0
    def test_non_ubuntu_ppa(self):
        """A meta-data upload to a non-Ubuntu PPA is not published.

        The meta-data directory is currently only defined for Ubuntu PPAs.
        """
        archive = self.factory.makeArchive(
            distribution=self.factory.makeDistribution())
        packageupload = self.factory.makePackageUpload(archive=archive)
        libraryfilealias = self.factory.makeLibraryFileAlias(db_only=True)
        logger = BufferLogger()
        MetaDataUpload(logger=logger).process(packageupload, libraryfilealias)
        self.assertEqual(
            "DEBUG Skipping meta-data for archive without metaroot.\n",
            logger.getLogBuffer())
        would_be_published = os.path.join(
            getPubConfig(archive).distroroot, archive.owner.name, "meta",
            archive.name, libraryfilealias.filename)
        self.assertThat(would_be_published, Not(PathExists()))
 def test_run(self):
     """Email goes to every recipient, including team subscribers."""
     job = make_question_job(
         self.factory, QuestionRecipientSet.ASKER_SUBSCRIBER)
     job.question.subscribe(self.factory.makeTeam())
     logger = BufferLogger()
     with dbuser(config.answertracker.dbuser), log.use(logger):
         job.run()
     question_id = job.question.id
     self.assertEqual(
         ["DEBUG QuestionEmailJob will send email for question %s." %
          question_id,
          "DEBUG QuestionEmailJob has sent email for question %s." %
          question_id],
         logger.getLogBuffer().splitlines())
     transaction.commit()
     # The asker, the subscriber, and the team subscriber.
     self.assertEqual(3, len(stub.test_emails))
Example #37
0
 def test_reject_changes_file_no_email(self):
     # Rejecting a changes file returns early when the person to notify
     # has no preferred email address.
     archive = self.factory.makeArchive()
     distroseries = self.factory.makeDistroSeries()
     uploader = self.factory.makePerson()
     get_property_cache(uploader).preferredemail = None
     email = '%s <*****@*****.**>' % uploader.displayname
     changes = {'Changed-By': email, 'Maintainer': email}
     logger = BufferLogger()
     reject_changes_file(
         uploader, '/tmp/changes', changes, archive, distroseries, '',
         logger=logger)
     self.assertIn(
         'No recipients have a preferred email.', logger.getLogBuffer())
Example #38
0
    def test_ubuntu_ppa(self):
        """A meta-data upload to an Ubuntu PPA is published.

        The custom file is published to
        /<person_name>/meta/<ppa_name>/<filename>.
        """
        ubuntu = getUtility(IDistributionSet)["ubuntu"]
        ppa = self.factory.makeArchive(distribution=ubuntu)
        upload = self.factory.makePackageUpload(archive=ppa)
        content = self.factory.getUniqueString()
        alias = self.factory.makeLibraryFileAlias(content=content)
        transaction.commit()
        logger = BufferLogger()
        MetaDataUpload(logger=logger).process(upload, alias)
        # Nothing is logged and the file appears with its content intact.
        self.assertEqual("", logger.getLogBuffer())
        published_file = os.path.join(
            getPubConfig(ppa).distroroot, ppa.owner.name, "meta",
            ppa.name, alias.filename)
        self.assertThat(published_file, FileContains(content))
Example #39
0
    def test_makeDailyBuilds_skips_archive_with_no_permission(self):
        # If the recipe's daily build archive cannot be uploaded to due to
        # insufficient permissions, makeDailyBuilds() won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(
            owner=owner, name=u'funky-recipe', build_daily=True,
            is_stale=True)
        # Point the recipe at a PPA that eric has no upload rights to.
        ppa = self.factory.makeArchive(name="ppa")
        removeSecurityProxy(recipe).daily_build_archive = ppa

        logger = BufferLogger()
        self.assertEqual(
            [], SourcePackageRecipeBuild.makeDailyBuilds(logger))
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - daily build failed for Warty (4.10): '
            "CannotUploadToPPA('Signer has no upload rights "
            "to this PPA.',)\n",
            logger.getLogBuffer())
 def test_build_tools_bad_formatting(self):
     # If tools_source is badly formatted, we log the error but don't
     # blow up.  (Note the missing "s" at the end of "%(series)".)
     archive = yield self.makeArchive(publish_binary=True)
     build = self.makeBuild(archive=archive)
     logger = BufferLogger()
     ubuntu_suites = [
         "hoary main restricted universe multiverse",
         "hoary-security main restricted universe multiverse",
         "hoary-updates main restricted universe multiverse",
         ]
     expected_sources = [
         (archive, ["hoary main"]),
         (self.ubuntu.main_archive, ubuntu_suites),
         ]
     yield self.assertSourcesListAndKeys(
         expected_sources, ["*****@*****.**"], build,
         tools_source="deb http://example.org %(series) main",
         logger=logger)
     self.assertThat(logger.getLogBuffer(), StartsWith(
         "ERROR Exception processing build tools sources.list entry:\n"))
Example #41
0
    def test_makeDailyBuilds_skips_disabled_archive(self):
        # If the recipe's daily build archive is disabled, makeDailyBuilds()
        # won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(
            owner=owner, name=u'funky-recipe', build_daily=True,
            is_stale=True)
        ppa = self.factory.makeArchive(owner=recipe.owner, name="ppa")
        removeSecurityProxy(recipe).daily_build_archive = ppa
        removeSecurityProxy(ppa).disable()

        logger = BufferLogger()
        self.assertEqual(
            [], SourcePackageRecipeBuild.makeDailyBuilds(logger))
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - daily build failed for Warty (4.10): ' +
            "ArchiveDisabled(u'PPA for Eric is disabled.',)\n",
            logger.getLogBuffer())
    def test_makeDailyBuilds_skips_builds_already_queued(self):
        # If the recipe already has an identical build pending,
        # makeDailyBuilds() won't create a build.
        owner = self.factory.makePerson(name='eric')
        recipe = self.factory.makeSourcePackageRecipe(
            owner=owner, name=u'funky-recipe', build_daily=True,
            is_stale=True)
        # Queue an identical build created just over a day ago.
        series = list(recipe.distroseries)[0]
        a_day_ago = datetime.now(utc) - timedelta(hours=24, seconds=1)
        self.factory.makeSourcePackageRecipeBuild(
            recipe=recipe, archive=recipe.daily_build_archive,
            requester=recipe.owner, distroseries=series,
            pocket=PackagePublishingPocket.RELEASE,
            date_created=a_day_ago)
        removeSecurityProxy(recipe).is_stale = True

        logger = BufferLogger()
        self.assertEqual(
            [], SourcePackageRecipeBuild.makeDailyBuilds(logger))
        self.assertEqual(
            'DEBUG Recipe eric/funky-recipe is stale\n'
            'DEBUG  - build already pending for Warty (4.10)\n',
            logger.getLogBuffer())
Example #43
0
 def test_extraBuildArgs_withBadConfigForBzrBuilderPPA(self):
     # Ensure _extraBuildArgs doesn't blow up with a badly formatted
     # bzr_builder_sources_list in the config.
     self.pushConfig(
         "builddmaster",
         bzr_builder_sources_list="deb http://foo %(series) main")
     # (note the missing 's' in %(series)
     job = self.makeJob()
     distroarchseries = job.build.distroseries.architectures[0]
     expected_archives = get_sources_list_for_building(
         job.build, distroarchseries, None)
     logger = BufferLogger()
     expected_args = {
         'arch_tag': 'i386',
         'archive_private': False,
         'archive_purpose': 'PPA',
         'archives': expected_archives,
         'author_email': u'*****@*****.**',
         'author_name': u'Joe User',
         'distroseries_name': job.build.distroseries.name,
         'ogrecomponent': 'universe',
         'recipe_text':
             '# bzr-builder format 0.3 deb-version {debupstream}-0~{revno}\n'
             'lp://dev/~joe/someapp/pkg\n',
         'suite': u'mydistro',
         }
     self.assertEqual(
         expected_args, job._extraBuildArgs(distroarchseries, logger))
     # The bad entry is reported in the log rather than raised.
     self.assertIn("Exception processing bzr_builder_sources_list:",
                   logger.getLogBuffer())
 def test_extraBuildArgs_withBadConfigForBzrBuilderPPA(self):
     # Ensure extraBuildArgs doesn't blow up with a badly formatted
     # bzr_builder_sources_list in the config.
     self.pushConfig(
         "builddmaster",
         bzr_builder_sources_list="deb http://foo %(series) main")
     # (note the missing 's' in %(series)
     job = self.makeJob(with_builder=True)
     distroarchseries = job.build.distroseries.architectures[0]
     expected_archives, expected_trusted_keys = (
         yield get_sources_list_for_building(
             job.build, distroarchseries, None))
     logger = BufferLogger()
     extra_args = yield job.extraBuildArgs(logger)
     expected_args = {
         'arch_tag': 'i386',
         'archive_private': False,
         'archive_purpose': 'PPA',
         'archives': expected_archives,
         'author_email': '*****@*****.**',
         'author_name': 'Joe User',
         'build_url': canonical_url(job.build),
         'distroseries_name': job.build.distroseries.name,
         'fast_cleanup': True,
         'ogrecomponent': 'universe',
         'recipe_text':
             '# bzr-builder format 0.3 '
             'deb-version {debupstream}-0~{revno}\n'
             'lp://dev/~joe/someapp/pkg\n',
         'series': job.build.distroseries.name,
         'suite': 'mydistro',
         'trusted_keys': expected_trusted_keys,
         }
     self.assertEqual(expected_args, extra_args)
     # The bad entry is reported in the log rather than raised.
     self.assertIn(
         "Exception processing build tools sources.list entry:",
         logger.getLogBuffer())
Example #45
0
class TestMantisBugBatchParser(TestCase):
    """Test the MantisBugBatchParser class."""

    def setUp(self):
        super(TestMantisBugBatchParser, self).setUp()
        self.logger = BufferLogger()

    def test_empty(self):
        # A batch with no lines at all is unparsable.
        parser = MantisBugBatchParser([], self.logger)
        exc = self.assertRaises(UnparsableBugData, parser.getBugs)
        self.assertThat(str(exc), Equals("Missing header line"))

    def test_missing_headers(self):
        # A header row lacking all of the required fields is rejected.
        parser = MantisBugBatchParser(['some,headers'], self.logger)
        exc = self.assertRaises(UnparsableBugData, parser.getBugs)
        self.assertThat(
            str(exc),
            Equals("CSV header ['some', 'headers'] missing fields:"
                   " ['id', 'status', 'resolution']"))

    def test_missing_some_headers(self):
        # Only the fields actually absent from the header are reported.
        parser = MantisBugBatchParser(
            ['some,headers,status,resolution'], self.logger)
        exc = self.assertRaises(UnparsableBugData, parser.getBugs)
        self.assertThat(
            str(exc),
            Equals("CSV header ['some', 'headers', 'status', 'resolution'] "
                   "missing fields: ['id']"))

    def test_no_bugs(self):
        # A valid header with no data rows yields an empty dict.
        parser = MantisBugBatchParser(
            ['other,fields,id,status,resolution'], self.logger)
        self.assertThat(parser.getBugs(), Equals({}))

    def test_passing(self):
        # Each data row becomes a bug dict keyed by its integer ID.
        data = [
            'ignored,id,resolution,status',
            'foo,42,not,complete',
            'boo,13,,confirmed',
            ]
        parser = MantisBugBatchParser(data, self.logger)
        expected = {
            42: dict(
                id=42, status='complete', resolution='not', ignored='foo'),
            13: dict(
                id=13, status='confirmed', resolution='', ignored='boo'),
            }
        self.assertThat(parser.getBugs(), Equals(expected))

    def test_incomplete_line(self):
        # Rows with too few columns are skipped with a warning.
        data = [
            'ignored,id,resolution,status',
            '42,not,complete',
            ]
        parser = MantisBugBatchParser(data, self.logger)
        self.assertThat(parser.getBugs(), Equals({}))
        self.assertThat(
            self.logger.getLogBuffer(),
            Equals("WARNING Line ['42', 'not', 'complete'] incomplete.\n"))

    def test_non_integer_id(self):
        # Rows whose ID column is not an integer are skipped with a warning.
        data = [
            'ignored,id,resolution,status',
            'foo,bar,not,complete',
            ]
        parser = MantisBugBatchParser(data, self.logger)
        self.assertThat(parser.getBugs(), Equals({}))
        self.assertThat(
            self.logger.getLogBuffer(),
            Equals("WARNING Encountered invalid bug ID: 'bar'.\n"))
 def test_logStartBuild(self):
     # logStartBuild will properly report the package that's being built.
     job = self.makeJob()
     logger = BufferLogger()
     job.logStartBuild(logger)
     # Use assertEqual (assertEquals is a deprecated alias), with the
     # expected value first, matching the rest of the suite.
     self.assertEqual(
         "INFO startBuild(Mydistro, recept, joe)\n",
         logger.getLogBuffer())
class TestLogFileParsing(TestCase):
    """Test the parsing of log files."""

    layer = ZopelessLayer
    # One Apache combined-format access-log line; %(method)s and
    # %(status)s are filled in per test.
    sample_line = (
        '69.233.136.42 - - [13/Jun/2008:14:55:22 +0100] "%(method)s '
        '/15018215/ul_logo_64x64.png HTTP/1.1" %(status)s 2261 '
        '"https://launchpad.net/~ubuntulite/+archive" "Mozilla/5.0 (X11; '
        'U; Linux i686; en-US; rv:1.9b5) Gecko/2008041514 Firefox/3.0b5"')

    def setUp(self):
        TestCase.setUp(self)
        self.logger = BufferLogger()

    def _getLastLineStart(self, fd):
        """Return the position (in bytes) where the last line of the given
        file starts.
        """
        fd.seek(0)
        lines = fd.readlines()
        return fd.tell() - len(lines[-1])

    def test_parsing(self):
        # The parse_file() function returns a tuple containing a dict (mapping
        # days and library file IDs to number of downloads), the total number
        # of bytes that have been parsed from this file, and the running total
        # of lines parsed (for testing against the maximum).  In our sample
        # log, the file with ID 8196569 has been downloaded twice (once from
        # Argentina and once from Japan) and the files with ID 12060796 and
        # 9096290 have been downloaded once.  The file with ID 15018215 has
        # also been downloaded once (last line of the sample log), but
        # parse_file() always skips the last line as it may be truncated, so
        # it doesn't show up in the dict returned.
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        # Close the file at test end, as the other tests here do.
        self.addCleanup(fd.close)
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 5 lines resulting in 3 download stats.')
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/12060796/me-tv-icon-64x64.png', {date: {'AU': 1}}),
             ('/8196569/mediumubuntulogo.png', {date: {'AR': 1, 'JP': 1}}),
             ('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])

        # The last line is skipped, so we'll record that the file has been
        # parsed until the beginning of the last line.
        self.assertNotEqual(parsed_bytes, fd.tell())
        self.assertEqual(parsed_bytes, self._getLastLineStart(fd))

    def test_parsing_last_line(self):
        # When there's only the last line of a given file for us to parse, we
        # assume the file has been rotated and it's safe to parse its last
        # line without worrying about whether or not it's been truncated.
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        # Close the file at test end, as the other tests here do.
        self.addCleanup(fd.close)
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=self._getLastLineStart(fd), logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 1 download stats.')
        self.assertEqual(parsed_bytes, fd.tell())

        self.assertContentEqual(
            downloads.items(),
            [('/15018215/ul_logo_64x64.png',
              {datetime(2008, 6, 13): {'US': 1}})])

    def test_unexpected_error_while_parsing(self):
        # When there's an unexpected error, we log it and return as if we had
        # parsed up to the line before the one where the failure occurred.
        # Here we force an unexpected error on the first line.
        fd = StringIO('Not a log')
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertIn('Error', self.logger.getLogBuffer())
        self.assertEqual(downloads, {})
        self.assertEqual(parsed_bytes, 0)

    def _assertResponseWithGivenStatusIsIgnored(self, status):
        """Assert that responses with the given status are ignored."""
        fd = StringIO(
            self.sample_line % dict(status=status, method='GET'))
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 0 download stats.')
        self.assertEqual(downloads, {})
        self.assertEqual(parsed_bytes, fd.tell())

    def test_responses_with_404_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('404')

    def test_responses_with_206_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('206')

    def test_responses_with_304_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('304')

    def test_responses_with_503_status_are_ignored(self):
        self._assertResponseWithGivenStatusIsIgnored('503')

    def _assertRequestWithGivenMethodIsIgnored(self, method):
        """Assert that requests with the given method are ignored."""
        fd = StringIO(
            self.sample_line % dict(status='200', method=method))
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 0 download stats.')
        self.assertEqual(downloads, {})
        self.assertEqual(parsed_bytes, fd.tell())

    def test_HEAD_request_is_ignored(self):
        self._assertRequestWithGivenMethodIsIgnored('HEAD')

    def test_POST_request_is_ignored(self):
        self._assertRequestWithGivenMethodIsIgnored('POST')

    def test_normal_request_is_not_ignored(self):
        fd = StringIO(
            self.sample_line % dict(status=200, method='GET'))
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertEqual(
            self.logger.getLogBuffer().strip(),
            'INFO Parsed 1 lines resulting in 1 download stats.')

        self.assertEqual(downloads,
            {'/15018215/ul_logo_64x64.png':
                {datetime(2008, 6, 13): {'US': 1}}})

        self.assertEqual(parsed_bytes, fd.tell())

    def test_max_parsed_lines(self):
        # The max_parsed_lines config option limits the number of parsed
        # lines.
        config.push(
            'log_parser config',
            '[launchpad]\nlogparser_max_parsed_lines: 2')
        self.addCleanup(config.pop, 'log_parser config')
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        self.addCleanup(fd.close)

        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)

        # We have initially parsed only the first two lines of data,
        # corresponding to one download (the first line is a 404 and
        # so ignored).
        self.assertEqual(parsed_lines, 2)
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
        fd.seek(0)
        lines = fd.readlines()
        line_lengths = [len(line) for line in lines]
        self.assertEqual(parsed_bytes, sum(line_lengths[:2]))

        # And the subsequent parse will be for the 3rd and 4th lines,
        # corresponding to two downloads of the same file.
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=parsed_bytes, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertContentEqual(
            downloads.items(),
            [('/12060796/me-tv-icon-64x64.png', {date: {'AU': 1}}),
             ('/8196569/mediumubuntulogo.png', {date: {'AR': 1}})])
        self.assertEqual(parsed_bytes, sum(line_lengths[:4]))

    def test_max_parsed_lines_exceeded(self):
        # Show that if a non-zero parsed_lines is passed in, the number of
        # lines parsed will be less than it would otherwise have been.

        # The max_parsed_lines config option limits the number of parsed
        # lines.
        config.push(
            'log_parser config',
            '[launchpad]\nlogparser_max_parsed_lines: 2')
        self.addCleanup(config.pop, 'log_parser config')
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        self.addCleanup(fd.close)

        # We want to start parsing on line 2 so we will have a value in
        # "downloads" to make a positive assertion about.  (The first line is
        # a 404 so wouldn't generate any output.)
        start_position = len(fd.readline())

        # If we have already parsed some lines, then the number of lines
        # parsed will be passed in (parsed_lines argument) and parse_file will
        # take that number into account when determining if the maximum number
        # of lines to parse has been reached.
        parsed_lines = 1
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=start_position, logger=self.logger,
            get_download_key=get_path_download_key, parsed_lines=parsed_lines)

        # The total number of lines parsed during the run (1 line) plus the
        # number of lines parsed previously (1 line, as passed in via
        # parsed_lines) is returned.
        self.assertEqual(parsed_lines, 2)
        # Since we told parse_file that we had already parsed 1 line and the
        # limit is 2 lines, it only parsed a single line.
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
class TestLoggingUIFactory(TestCase):
    """Tests for `LoggingUIFactory`."""

    def setUp(self):
        TestCase.setUp(self)
        # A fixed starting clock lets each test advance time
        # deterministically via self.fake_time.advance().
        self.fake_time = FakeTime(12345)
        self.logger = BufferLogger()

    def makeLoggingUIFactory(self):
        """Make a `LoggingUIFactory` with fake time and contained output."""
        return LoggingUIFactory(
            time_source=self.fake_time.now, logger=self.logger)

    def test_first_progress_updates(self):
        # The first call to progress generates some output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.assertEqual('INFO hi\n', self.logger.getLogBuffer())

    def test_second_rapid_progress_doesnt_update(self):
        # The second of two progress calls that are less than the factory's
        # interval apart does not generate output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.fake_time.advance(factory.interval / 2)
        bar.update("there")
        # Only the first update appears in the log.
        self.assertEqual('INFO hi\n', self.logger.getLogBuffer())

    def test_second_slow_progress_updates(self):
        # The second of two progress calls that are more than the factory's
        # interval apart does generate output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.fake_time.advance(factory.interval * 2)
        bar.update("there")
        self.assertEqual(
            'INFO hi\n'
            'INFO there\n',
            self.logger.getLogBuffer())

    def test_first_progress_on_new_bar_updates(self):
        # The first progress on a new progress task always generates output.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi")
        self.fake_time.advance(factory.interval / 2)
        # A nested bar reports immediately, prefixed with its parent's task.
        bar2 = factory.nested_progress_bar()
        bar2.update("there")
        self.assertEqual(
            'INFO hi\nINFO hi:there\n', self.logger.getLogBuffer())

    def test_update_with_count_formats_nicely(self):
        # When more details are passed to update, they are formatted nicely.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi", 1, 8)
        self.assertEqual('INFO hi 1/8\n', self.logger.getLogBuffer())

    def test_report_transport_activity_reports_bytes_since_last_update(self):
        # If there is no call to _progress_updated for 'interval' seconds, the
        # next call to report_transport_activity will report however many
        # bytes have been transferred since the update.
        factory = self.makeLoggingUIFactory()
        bar = factory.nested_progress_bar()
        bar.update("hi", 1, 10)
        self.fake_time.advance(factory.interval / 2)
        # The bytes in this call will not be reported:
        factory.report_transport_activity(None, 1, 'read')
        self.fake_time.advance(factory.interval)
        bar.update("hi", 2, 10)
        self.fake_time.advance(factory.interval / 2)
        factory.report_transport_activity(None, 10, 'read')
        self.fake_time.advance(factory.interval)
        factory.report_transport_activity(None, 100, 'read')
        self.fake_time.advance(factory.interval * 2)
        # This call will cause output that does not include the transport
        # activity info.
        bar.update("hi", 3, 10)
        self.assertEqual(
            'INFO hi 1/10\n'
            'INFO hi 2/10\n'
            'INFO 110 bytes transferred | hi 2/10\n'
            'INFO hi 3/10\n',
            self.logger.getLogBuffer())

    def test_note(self):
        # note() messages are logged at INFO level.
        factory = self.makeLoggingUIFactory()
        factory.note("Banja Luka")
        self.assertEqual('INFO Banja Luka\n', self.logger.getLogBuffer())

    def test_show_error(self):
        # show_error() messages are logged at ERROR level.
        factory = self.makeLoggingUIFactory()
        factory.show_error("Exploding Peaches")
        self.assertEqual(
            "ERROR Exploding Peaches\n", self.logger.getLogBuffer())

    def test_confirm_action(self):
        # The factory confirms actions without prompting.
        factory = self.makeLoggingUIFactory()
        self.assertTrue(factory.confirm_action(
            "How are you %(when)s?", "wellness", {"when": "today"}))

    def test_show_message(self):
        # show_message() messages are logged at INFO level.
        factory = self.makeLoggingUIFactory()
        factory.show_message("Peaches")
        self.assertEqual("INFO Peaches\n", self.logger.getLogBuffer())

    def test_get_username(self):
        # No username is available from a logging-only factory.
        factory = self.makeLoggingUIFactory()
        self.assertIs(
            None, factory.get_username("Who are you %(when)s?", when="today"))

    def test_get_password(self):
        # No password is available from a logging-only factory.
        factory = self.makeLoggingUIFactory()
        self.assertIs(
            None,
            factory.get_password("How is your %(drink)s", drink="coffee"))

    def test_show_warning(self):
        # show_warning() messages are logged at WARNING level.
        factory = self.makeLoggingUIFactory()
        factory.show_warning("Peaches")
        self.assertEqual("WARNING Peaches\n", self.logger.getLogBuffer())

    def test_show_warning_unicode(self):
        # A unicode warning ends up in the buffer UTF-8 encoded
        # (\xea -> \xc3\xaa).
        factory = self.makeLoggingUIFactory()
        factory.show_warning(u"Peach\xeas")
        self.assertEqual(
            "WARNING Peach\xc3\xaas\n", self.logger.getLogBuffer())

    def test_user_warning(self):
        # show_user_warning() formats the factory's template for the given
        # warning id with the supplied keyword arguments.
        factory = self.makeLoggingUIFactory()
        factory.show_user_warning('cross_format_fetch',
            from_format="athing", to_format="anotherthing")
        message = factory._user_warning_templates['cross_format_fetch'] % {
            "from_format": "athing",
            "to_format": "anotherthing",
            }
        self.assertEqual("WARNING %s\n" % message, self.logger.getLogBuffer())

    def test_clear_term(self):
        # clear_term() produces no log output.
        factory = self.makeLoggingUIFactory()
        factory.clear_term()
        self.assertEqual("", self.logger.getLogBuffer())