Example No. 1
    def setUp(self):
        super(TestFastDowntimeRollout, self).setUp()

        self.master_dbname = DatabaseLayer._db_fixture.dbname
        self.slave_dbname = self.master_dbname + "_slave"

        self.pgbouncer_fixture = PGBouncerFixture()
        self.pgbouncer_fixture.databases[self.slave_dbname] = self.pgbouncer_fixture.databases[self.master_dbname]

        # Configure master and slave connections to go via different
        # pgbouncer aliases.
        config_key = "master-slave-separation"
        config.push(
            config_key,
            dedent(
                """\
            [database]
            rw_main_master: dbname=%s host=localhost
            rw_main_slave: dbname=%s host=localhost
            """
                % (self.master_dbname, self.slave_dbname)
            ),
        )
        self.addCleanup(lambda: config.pop(config_key))

        self.useFixture(self.pgbouncer_fixture)

        self.pgbouncer_con = psycopg2.connect("dbname=pgbouncer user=pgbouncer host=localhost")
        self.pgbouncer_con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        self.pgbouncer_cur = self.pgbouncer_con.cursor()

        transaction.abort()
Example No. 2
    def test_connect_depends_on_localhost_only_config(self):
        # If localhost_only is True and the host to which we would connect is
        # not localhost, the connect() method is not called.
        localhost_only_conf = """
            [distributionmirrorprober]
            localhost_only: True
            """
        config.push('localhost_only_conf', localhost_only_conf)
        prober = self._createFactoryAndStubConnectAndTimeoutCall()
        self.failUnless(prober.connect_host != 'localhost')
        prober.probe()
        self.failIf(prober.connectCalled)
        # Restore the config.
        config.pop('localhost_only_conf')

        # If localhost_only is False, it doesn't matter which host we
        # connect to: the connect() method will be called.
        remote_conf = """
            [distributionmirrorprober]
            localhost_only: False
            """
        config.push('remote_conf', remote_conf)
        prober = self._createFactoryAndStubConnectAndTimeoutCall()
        prober.probe()
        self.failUnless(prober.connectCalled)
        # Restore the config.
        config.pop('remote_conf')
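
All of these examples lean on the same mechanism: config.push(name, text) lays a named text overlay on top of the layered test configuration, and config.pop(name), called directly, via addCleanup, or in a finally block, removes it again and restores the previous values. The snippet below is a deliberately simplified, hypothetical model of those semantics built only on the Python 3 standard library; it is not the implementation these tests actually use, just a sketch of the stack-of-overlays idea.

# Hypothetical, simplified model of the push()/pop() semantics relied on
# above; NOT the implementation these tests actually use, just a sketch.
from configparser import ConfigParser
from textwrap import dedent


class OverlayConfig:
    """A stack of named config-text overlays; later overlays win."""

    def __init__(self, base_text=""):
        self._overlays = [("base", base_text)]

    def push(self, name, text):
        # Lay `text` over everything pushed so far.
        self._overlays.append((name, dedent(text)))

    def pop(self, name):
        # Remove overlays until the named one (inclusive) has been removed.
        while self._overlays:
            popped, _ = self._overlays.pop()
            if popped == name:
                return
        raise KeyError(name)

    def get(self, section, key):
        # Re-read all overlays in order; later values shadow earlier ones.
        parser = ConfigParser()
        for _, text in self._overlays:
            parser.read_string(text)
        return parser.get(section, key)


config = OverlayConfig("[builddmaster]\nroot: /srv/builds\n")
config.push("tmp_root", "[builddmaster]\nroot: /tmp/example\n")
assert config.get("builddmaster", "root") == "/tmp/example"
config.pop("tmp_root")
assert config.get("builddmaster", "root") == "/srv/builds"
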
Example No. 3
    def setUp(self):
        super(MasterFallbackTestCase, self).setUp()

        self.pgbouncer_fixture = PGBouncerFixture()

        # The PGBouncerFixture will set the PGPORT environment variable,
        # causing all DB connections to go via pgbouncer unless an
        # explicit port is provided.
        dbname = DatabaseLayer._db_fixture.dbname
        # Pull the direct db connection string, including explicit port.
        conn_str_direct = self.pgbouncer_fixture.databases[dbname]
        # Generate a db connection string that will go via pgbouncer.
        conn_str_pgbouncer = "dbname=%s host=localhost" % dbname

        # Configure slave connections via pgbouncer, so we can shut them
        # down. Master connections go direct so they are unaffected.
        config_key = "master-slave-separation"
        config.push(
            config_key,
            dedent(
                """\
            [database]
            rw_main_master: %s
            rw_main_slave: %s
            """
                % (conn_str_direct, conn_str_pgbouncer)
            ),
        )
        self.addCleanup(lambda: config.pop(config_key))

        self.useFixture(self.pgbouncer_fixture)
Example No. 4
 def test_staging_message_is_demo(self):
     config.push(self.id(), '')
     self.addCleanup(config.pop, self.id())
     self.useFixture(DemoMode())
     view = create_initialized_view(self.product_set, '+new')
     message = find_tag_by_id(view.render(), 'staging-message')
     self.assertEqual(None, message)
Example No. 5
    def setUp(self):
        super(TestHandleStatusMixin, self).setUp()
        self.factory = LaunchpadObjectFactory()
        self.build = self.makeBuild()
        # For the moment, we require a builder for the build so that
        # handleStatus_OK can get a reference to the slave.
        self.builder = self.factory.makeBuilder()
        self.build.buildqueue_record.markAsBuilding(self.builder)
        self.slave = WaitingSlave('BuildStatus.OK')
        self.slave.valid_file_hashes.append('test_file_hash')
        self.interactor = BuilderInteractor()
        self.behavior = self.interactor.getBuildBehavior(
            self.build.buildqueue_record, self.builder, self.slave)

        # We overwrite the buildmaster root to use a temp directory.
        tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tempdir)
        self.upload_root = tempdir
        tmp_builddmaster_root = """
        [builddmaster]
        root: %s
        """ % self.upload_root
        config.push('tmp_builddmaster_root', tmp_builddmaster_root)

        # We stub out the build's verifySuccessfulUpload() method so
        # we can check whether it was called.
        removeSecurityProxy(self.build).verifySuccessfulUpload = FakeMethod(
            result=True)
Example No. 6
    def test_resumeHost_timeout(self):
        # On a resume timeout, 'resumeHost' fires the returned deferred's
        # errback with a `TimeoutError` failure.

        # Override the configuration command-line with one that will timeout.
        timeout_config = """
        [builddmaster]
        vm_resume_command: sleep 5
        socket_timeout: 1
        """
        config.push('timeout_resume_command', timeout_config)
        self.addCleanup(config.pop, 'timeout_resume_command')

        self.slave_helper.getServerSlave()
        slave = self.slave_helper.getClientSlave()

        # On timeouts, the response is a twisted `Failure` object containing
        # a `TimeoutError` error.
        def check_resume_timeout(failure):
            self.assertIsInstance(failure, Failure)
            out, err, code = failure.value
            self.assertEqual(code, signal.SIGKILL)
        clock = Clock()
        d = slave.resume(clock=clock)
        # Move the clock beyond the socket_timeout but earlier than the
        # sleep 5.  This stops the test having to wait for the timeout.
        # Fast tests FTW!
        clock.advance(2)
        d.addBoth(check_resume_timeout)
        return d
Example No. 7
    def main(self):
        if self.options.product is None:
            if self.options.testing:
                self.options.product = self.create_test_product()
                self.logger.info("Product %s created", self.options.product)
            else:
                self.parser.error('No product specified')
        if len(self.args) != 1:
            self.parser.error('Please specify a bug XML file to import')
        bugs_filename = self.args[0]

        # don't send email
        send_email_data = """
            [immediate_mail]
            send_email: False
            """
        config.push('send_email_data', send_email_data)
        self.login('*****@*****.**')

        product = getUtility(IProductSet).getByName(self.options.product)
        if product is None:
            self.parser.error('Product %s does not exist'
                              % self.options.product)

        importer = BugImporter(
            product, bugs_filename, self.options.cache_filename,
            verify_users=self.options.verify_users, logger=self.logger)
        importer.importBugs(self.txn)
        config.pop('send_email_data')
Example No. 8
 def test_resumeSlaveHost_command_failed(self):
     reset_fail_config = """
         [builddmaster]
         vm_resume_command: /bin/false"""
     config.push('reset fail', reset_fail_config)
     self.addCleanup(config.pop, 'reset fail')
     d = self.resumeSlaveHost(MockBuilder(virtualized=True, vm_host="pop"))
     return assert_fails_with(d, CannotResumeHost)
Example No. 9
 def test_avatarAdaptsToOnlyRestrictedSession(self):
     config.push('codehosting-no-forking',
         "[codehosting]\nuse_forking_daemon: False\n")
     self.addCleanup(config.pop, 'codehosting-no-forking')
     session = ISession(self.avatar)
     self.failIf(isinstance(session, ForkingRestrictedExecOnlySession),
         "ISession(avatar) shouldn't adapt to "
         " ForkingRestrictedExecOnlySession when forking is disabled. ")
Example No. 10
 def test_HAProxyStatusView_status_code_is_configurable(self):
     config.push('change_haproxy_status_code', dedent('''
         [haproxy_status_view]
         going_down_status: 499
         '''))
     self.addCleanup(config.pop, 'change_haproxy_status_code')
     haproxy.set_going_down_flag(True)
     result = self.http(u'GET /+haproxy HTTP/1.0', handle_errors=False)
     self.assertEquals(499, result.getStatus())
Example No. 11
 def setUp(self):
     super(TestErrorReportingUtility, self).setUp()
     # ErrorReportingUtility reads the global config to get the
     # current error directory.
     tempdir = self.useFixture(TempDir()).path
     test_data = dedent("""
         [error_reports]
         error_dir: %s
         """ % tempdir)
     config.push('test_data', test_data)
     self.addCleanup(config.pop, 'test_data')
Example No. 12
 def test_resetOrFail_resume_failure(self):
     reset_fail_config = """
         [builddmaster]
         vm_resume_command: /bin/false"""
     config.push('reset fail', reset_fail_config)
     self.addCleanup(config.pop, 'reset fail')
     builder = MockBuilder(virtualized=True, vm_host="pop", builderok=True)
     vitals = extract_vitals_from_db(builder)
     d = BuilderInteractor.resetOrFail(
         vitals, BuilderInteractor.makeSlaveFromVitals(vitals), builder,
         DevNullLogger(), Exception())
     return assert_fails_with(d, CannotResumeHost)
Example No. 13
 def test_server(self):
     request = LaunchpadTestRequest()
     scopes = webapp.ScopesFromRequest(request)
     self.assertFalse(scopes.lookup('server.lpnet'))
     config.push('ensure_lpnet', dedent("""\
         [launchpad]
         is_lpnet: True
         """))
     try:
         self.assertTrue(scopes.lookup('server.lpnet'))
     finally:
         config.pop('ensure_lpnet')
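
This example and a few others (Nos. 15, 23 and 30) pop the overlay in a finally block, while most of the rest register the pop with addCleanup(); the net effect is the same. Below is a minimal, self-contained sketch of the two styles using plain unittest and a stand-in config object; the real tests use Launchpad's own TestCase base and config.

import unittest


class FakeConfig:
    # Stand-in with the same push()/pop() surface as the real config
    # object; it only records overlay names, which is enough for the sketch.
    def __init__(self):
        self.overlays = []

    def push(self, name, text):
        self.overlays.append(name)

    def pop(self, name):
        while self.overlays and self.overlays.pop() != name:
            pass


config = FakeConfig()


class ConfigOverlayPatterns(unittest.TestCase):

    def test_with_try_finally(self):
        config.push("lpnet", "[launchpad]\nis_lpnet: True\n")
        try:
            self.assertEqual(["lpnet"], config.overlays)
        finally:
            config.pop("lpnet")

    def test_with_addcleanup(self):
        config.push("lpnet", "[launchpad]\nis_lpnet: True\n")
        # The pop runs even if the assertion below fails.
        self.addCleanup(config.pop, "lpnet")
        self.assertEqual(["lpnet"], config.overlays)


if __name__ == "__main__":
    unittest.main()
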
Example No. 14
    def setUp(self):
        Fixture.setUp(self)
        config.push(
            "demo-fixture",
            """
[launchpad]
is_demo: true
site_message = This is a demo site mmk. \
<a href="http://example.com">File a bug</a>.
            """,
        )
        self.addCleanup(lambda: config.pop("demo-fixture"))
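
The fixture above pushes its overlay in setUp() and registers the pop as a cleanup, so a test only needs to activate it with useFixture() and restoration happens automatically; Example No. 4 uses a DemoMode fixture in exactly this way, and it is plausibly the same class (its name is not shown in the snippet above). A hedged usage sketch, assuming the TestCase base, DemoMode fixture, and imports used by the surrounding examples:

class DemoBannerTest(TestCase):
    # Hypothetical test; DemoMode and TestCase are assumed to be the same
    # names imported by the surrounding examples.

    def test_demo_banner_is_shown(self):
        # useFixture() runs the fixture's setUp() now and schedules its
        # cleanups (including the config.pop) for the end of the test.
        self.useFixture(DemoMode())
        # ... render a page and assert that the demo site_message appears.
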
Example No. 15
 def test_basic_auth_disabled(self):
     # Basic auth uses a single password for every user, so it must
     # never be used on production. authenticate() will skip basic
     # auth unless it's enabled.
     authsvc, request = self._make('bruce', 'test')
     self.assertEqual(authsvc.authenticate(request), Bruce)
     try:
         config.push(
             "no-basic", "[launchpad]\nbasic_auth_password: none")
         self.assertEqual(authsvc.authenticate(request), None)
     finally:
         config.pop("no-basic")
Example No. 16
 def update_db_config(**kw):
     connection_string_keys = [
         'rw_main_master',
         'rw_main_slave',
         ]
     config_data = ["[database]"]
     for con_str_key in connection_string_keys:
         con_str = ConnectionString(getattr(config.database, con_str_key))
         for kwarg, kwval in kw.items():
             setattr(con_str, kwarg, kwval)
         config_data.append("%s: %s" % (con_str_key, str(con_str)))
     config.push('update_db_config', '\n'.join(config_data))
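
Unlike most of the examples here, update_db_config() pushes its 'update_db_config' overlay but never pops it, so the caller is presumably expected to remove the overlay once the rewritten connection strings are no longer wanted. A hedged usage sketch; the keyword and port value are illustrative only and assume `port` is a valid ConnectionString field:

# Hypothetical caller of the helper above.
update_db_config(port=5433)  # rewrite rw_main_master/rw_main_slave to port 5433
try:
    pass  # ... exercise code that opens database connections ...
finally:
    # The helper does not pop, so the caller restores the previous settings.
    config.pop('update_db_config')
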
Example No. 17
    def test_resumeSlaveHost_success(self):
        reset_config = """
            [builddmaster]
            vm_resume_command: /bin/echo -n snap %(buildd_name)s %(vm_host)s
            """
        config.push('reset', reset_config)
        self.addCleanup(config.pop, 'reset')

        d = self.resumeSlaveHost(MockBuilder(
            url="http://crackle.ppa/", virtualized=True, vm_host="pop"))

        def got_resume(output):
            self.assertEqual(('snap crackle pop', ''), output)
        return d.addCallback(got_resume)
Example No. 18
 def test_build_notification_supresses_mail(self):
     # When the 'send_build_notification' config option is False, we don't
     # send any mail at all.
     self.create_builds(self.archive)
     build = self.builds[BuildStatus.FULLYBUILT.value]
     send_build_notification = dedent("""
         [builddmaster]
         send_build_notification: False
         """)
     config.push('send_build_notification', send_build_notification)
     build.notify()
     notifications = pop_notifications()
     self.assertEquals(0, len(notifications))
     # And undo what we just did.
     config.pop('send_build_notification')
Example No. 19
def production_config(host_name):
    """Simulate a production Launchpad and mailman config."""
    config.push('production', """\
        [mailman]
        build_host_name: %s
        """ % host_name)
    default_email_host = mm_cfg.DEFAULT_EMAIL_HOST
    mm_cfg.DEFAULT_EMAIL_HOST = host_name
    default_url_host = mm_cfg.DEFAULT_URL_HOST
    mm_cfg.DEFAULT_URL_HOST = host_name
    try:
        yield
    finally:
        mm_cfg.DEFAULT_URL_HOST = default_url_host
        mm_cfg.DEFAULT_EMAIL_HOST = default_email_host
        config.pop('production')
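
production_config() yields in the middle of its body, so it is presumably decorated with contextlib.contextmanager somewhere outside the snippet and used as a context manager. A hedged usage sketch with an illustrative host name:

# Hypothetical usage, assuming the @contextmanager decorator is applied
# where production_config is defined.
with production_config('lists.example.com'):
    pass  # config and mm_cfg report the production host name in here
# On exit the 'production' overlay is popped and mm_cfg is restored.
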
Example No. 20
 def test_notify_owner_supresses_mail(self):
     # When the 'notify_owner' config option is False, we don't send mail
     # to the owner of the SPR.
     self.create_builds(self.archive)
     build = self.builds[BuildStatus.FAILEDTOBUILD.value]
     notify_owner = dedent("""
         [builddmaster]
         send_build_notification: True
         notify_owner: False
         """)
     config.push('notify_owner', notify_owner)
     build.notify()
     notifications = pop_notifications()
     actual_emails = [n['To'] for n in notifications]
     self.assertEquals(self.buildd_admins_email, actual_emails)
     # And undo what we just did.
     config.pop('notify_owner')
Example No. 21
    def enableRunParts(self, parts_directory=None):
        """Set up for run-parts execution.

        :param parts_directory: Base location for the run-parts directories.
            If omitted, a temporary directory will be used.
        """
        if parts_directory is None:
            parts_directory = self.makeTemporaryDirectory()
            os.makedirs(os.path.join(
                parts_directory, "ubuntu", "publish-distro.d"))
            os.makedirs(os.path.join(parts_directory, "ubuntu", "finalize.d"))
        self.parts_directory = parts_directory

        config.push("run-parts", dedent("""\
            [archivepublisher]
            run_parts_location: %s
            """ % parts_directory))

        self.addCleanup(config.pop, "run-parts")
Example No. 22
 def test_avatarAdaptsToForkingRestrictedExecOnlySession(self):
     config.push('codehosting-forking',
         "[codehosting]\nuse_forking_daemon: True\n")
     self.addCleanup(config.pop, 'codehosting-forking')
     session = ISession(self.avatar)
     self.failUnless(
         isinstance(session, ForkingRestrictedExecOnlySession),
         "ISession(avatar) doesn't adapt to "
         " ForkingRestrictedExecOnlySession. "
         "Got %r instead." % (session,))
     executable, arguments = session.getCommandToRun(
         'bzr serve --inet --directory=/ --allow-writes')
     executable, arguments, env = session.getCommandToFork(
         executable, arguments, session.environment)
     self.assertEqual('bzr', executable)
     self.assertEqual(
          ['bzr', 'lp-serve',
           '--inet', str(self.avatar.user_id)],
          list(arguments))
Example No. 23
 def test_direct_basic_call_fails_when_disabled(self):
     # Basic auth uses a single password for every user, so it must
     # never be used on production. authenticate() won't call the
     # underlying method unless it's enabled, but even if it somehow
     # does it will fail.
     authsvc, request = self._make('bruce', 'test')
     credentials = ILoginPassword(request, None)
     self.assertEqual(
         authsvc._authenticateUsingBasicAuth(credentials, request), Bruce)
     try:
         config.push(
             "no-basic", "[launchpad]\nbasic_auth_password: none")
         exception = self.assertRaises(
             AssertionError, authsvc._authenticateUsingBasicAuth,
             credentials, request)
         self.assertEquals(
             "Attempted to use basic auth when it is disabled",
             str(exception))
     finally:
         config.pop("no-basic")
Example No. 24
 def prepareBehavior(self, fake_successful_upload=False):
     self.queue_record = self.factory.makeSourcePackageRecipeBuildJob()
     build = self.queue_record.specific_job.build
     build.updateStatus(BuildStatus.FULLYBUILT)
     if fake_successful_upload:
         removeSecurityProxy(build).verifySuccessfulUpload = FakeMethod(result=True)
         # We overwrite the buildmaster root to use a temp directory.
         tempdir = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, tempdir)
         self.upload_root = tempdir
         tmp_builddmaster_root = (
             """
         [builddmaster]
         root: %s
         """
             % self.upload_root
         )
         config.push("tmp_builddmaster_root", tmp_builddmaster_root)
         self.addCleanup(config.pop, "tmp_builddmaster_root")
     self.queue_record.builder = self.factory.makeBuilder()
     slave = WaitingSlave("BuildStatus.OK")
     return BuilderInteractor.getBuildBehavior(self.queue_record, self.queue_record.builder, slave)
Example No. 25
    def test_max_parsed_lines(self):
        # The max_parsed_lines config option limits the number of parsed
        # lines.
        config.push(
            'log_parser config',
            '[launchpad]\nlogparser_max_parsed_lines: 2')
        self.addCleanup(config.pop, 'log_parser config')
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        self.addCleanup(fd.close)

        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=0, logger=self.logger,
            get_download_key=get_path_download_key)

        # We have initially parsed only the first two lines of data,
        # corresponding to one download (the first line is a 404 and
        # so ignored).
        self.assertEqual(parsed_lines, 2)
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
        fd.seek(0)
        lines = fd.readlines()
        line_lengths = [len(line) for line in lines]
        self.assertEqual(parsed_bytes, sum(line_lengths[:2]))

        # And the subsequent parse will be for the 3rd and 4th lines,
        # corresponding to two downloads of the same file.
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=parsed_bytes, logger=self.logger,
            get_download_key=get_path_download_key)
        self.assertContentEqual(
            downloads.items(),
            [('/12060796/me-tv-icon-64x64.png', {date: {'AU': 1}}),
             ('/8196569/mediumubuntulogo.png', {date: {'AR': 1}})])
        self.assertEqual(parsed_bytes, sum(line_lengths[:4]))
Example No. 26
    def test_max_parsed_lines_exceeded(self):
        # Show that if a non-zero parsed_lines is passed in, the number of
        # lines parsed will be less than it would otherwise have been.

        # The max_parsed_lines config option limits the number of parsed
        # lines.
        config.push(
            'log_parser config',
            '[launchpad]\nlogparser_max_parsed_lines: 2')
        self.addCleanup(config.pop, 'log_parser config')
        fd = open(os.path.join(
            here, 'apache-log-files', 'launchpadlibrarian.net.access-log'))
        self.addCleanup(fd.close)

        # We want to start parsing on line 2 so we will have a value in
        # "downloads" to make a positive assertion about.  (The first line is
        # a 404 so wouldn't generate any output.)
        start_position = len(fd.readline())

        # If we have already parsed some lines, then the number of lines
        # parsed will be passed in (parsed_lines argument) and parse_file will
        # take that number into account when determining if the maximum number
        # of lines to parse has been reached.
        parsed_lines = 1
        downloads, parsed_bytes, parsed_lines = parse_file(
            fd, start_position=start_position, logger=self.logger,
            get_download_key=get_path_download_key, parsed_lines=parsed_lines)

        # The total number of lines parsed during the run (1 line) plus the
        # number of lines parsed previously (1 line, as passed in via
        # parsed_lines) is returned.
        self.assertEqual(parsed_lines, 2)
        # Since we told parse_file that we had already parsed 1 line and the
        # limit is 2 lines, it only parsed a single line.
        date = datetime(2008, 6, 13)
        self.assertContentEqual(
            downloads.items(),
            [('/9096290/me-tv-icon-14x14.png', {date: {'AU': 1}})])
Example No. 27
    def test_resumeHost_failure(self):
        # On a failed resume, 'resumeHost' fires the returned deferred's
        # errback with a `ProcessTerminated` failure.
        self.slave_helper.getServerSlave()
        slave = self.slave_helper.getClientSlave()

        # Override the configuration command-line with one that will fail.
        failed_config = """
        [builddmaster]
        vm_resume_command: test "%(vm_host)s = 'no-sir'"
        """
        config.push('failed_resume_command', failed_config)
        self.addCleanup(config.pop, 'failed_resume_command')

        # On failures, the response is a twisted `Failure` object containing
        # a tuple.
        def check_resume_failure(failure):
            out, err, code = failure.value
            # The process will exit with a return code of "1".
            self.assertEqual(code, 1)
        d = slave.resume()
        d.addBoth(check_resume_failure)
        return d
Example No. 28
    def main(self):
        """See `LaunchpadScript`."""
        team_name = None
        if len(self.args) == 0:
            self.parser.error('Missing team name')
        elif len(self.args) > 1:
            self.parser.error('Too many arguments')
        else:
            team_name = self.args[0]

        importer = Importer(team_name, self.logger)

        # Suppress sending emails based on the absence of the --notification
        # switch.  Notifications are disabled by default because they can
        # cause huge amounts of mail to be sent to the team owner.
        send_email_config = """
            [immediate_mail]
            send_email: %s
            """ % self.options.notifications
        config.push('send_email_config', send_email_config)

        if self.options.filename == '-':
            # Read all the addresses from standard input, parse them
            # here, and use the direct interface to the importer.
            addresses = []
            while True:
                line = sys.stdin.readline()
                if line == '':
                    break
                addresses.append(line[:-1])
            importer.importAddresses(addresses)
        else:
            importer.importFromFile(self.options.filename)

        # All done; commit the database changes.
        self.txn.commit()
        return 0
Example No. 29
    def test_DisabledExternallyUsedSuggestions(self):
        # If foo wants to translate "error message 936" and bar happens
        # to have a translation for that, that's an externally used
        # suggestion.
        # If global suggestions are disabled, an empty list is returned.
        text = "error message 936"
        foomsg = self.factory.makePOTMsgSet(self.foo_template, text)
        barmsg = self.factory.makePOTMsgSet(self.bar_template, text)
        translation = self.factory.makeCurrentTranslationMessage(
            pofile=self.bar_nl, current_other=False, potmsgset=barmsg)

        transaction.commit()

        # There is a global (externally used) suggestion.
        used_suggestions = foomsg.getExternallyUsedTranslationMessages(
            self.nl)
        self.assertEquals(len(used_suggestions), 1)
        used_suggestions = foomsg.getExternallySuggestedOrUsedTranslationMessages(
            used_languages=[self.nl], suggested_languages=[self.nl])[self.nl].used
        self.assertEquals(len(used_suggestions), 1)

        # Override the config option to disable global suggestions.
        new_config = ("""
            [rosetta]
            global_suggestions_enabled = False
            """)
        config.push('disabled_suggestions', new_config)
        disabled_used_suggestions = (
            foomsg.getExternallyUsedTranslationMessages(self.nl))
        self.assertEquals(len(disabled_used_suggestions), 0)
        disabled_used_suggestions = (
            foomsg.getExternallySuggestedOrUsedTranslationMessages(
                used_languages=[self.nl],
                suggested_languages=[self.nl]))[self.nl].used
        self.assertEquals(len(disabled_used_suggestions), 0)
        # Restore the old configuration.
        config.pop('disabled_suggestions')
Example No. 30
 def test_restricted_getURLForDownload(self):
     # The RestrictedLibrarianClient should use the
     # restricted_download_host and restricted_download_port, but is
     # otherwise identical to the behavior of the LibrarianClient discussed
     # and demonstrated above.
     #
     # (Set up:)
     client = RestrictedLibrarianClient()
     alias_id = client.addFile("sample.txt", 6, StringIO("sample"), "text/plain")
     config.push(
         "test config",
         textwrap.dedent(
             """\
             [librarian]
             restricted_download_host: example.com
             restricted_download_port: 5678
             """
         ),
     )
     try:
         # (Test:)
         # The RestrictedLibrarianClient should use the
         # restricted_download_host and restricted_download_port.
         expected_host = "http://example.com:5678/"
         download_url = client._getURLForDownload(alias_id)
         self.failUnless(
             download_url.startswith(expected_host), "expected %s to start with %s" % (download_url, expected_host)
         )
         # If the alias has been deleted, _getURLForDownload returns None.
         lfa = LibraryFileAlias.get(alias_id)
         lfa.content = None
         call = block_implicit_flushes(RestrictedLibrarianClient._getURLForDownload)  # Prevent a ProgrammingError
         self.assertEqual(call(client, alias_id), None)
     finally:
         # (Tear down:)
         config.pop("test config")