def main(self):
    if self.options.product is None:
        if self.options.testing:
            self.options.product = self.create_test_product()
            self.logger.info("Product %s created", self.options.product)
        else:
            self.parser.error('No product specified')
    if len(self.args) != 1:
        self.parser.error('Please specify a bug XML file to import')
    bugs_filename = self.args[0]

    # don't send email
    send_email_data = """
        [immediate_mail]
        send_email: False
        """
    config.push('send_email_data', send_email_data)
    self.login('*****@*****.**')

    product = getUtility(IProductSet).getByName(self.options.product)
    if product is None:
        self.parser.error(
            'Product %s does not exist' % self.options.product)

    importer = BugImporter(
        product, bugs_filename, self.options.cache_filename,
        verify_users=self.options.verify_users, logger=self.logger)
    importer.importBugs(self.txn)

    config.pop('send_email_data')


def test_restricted_getURLForDownload(self):
    # The RestrictedLibrarianClient should use the
    # restricted_download_host and restricted_download_port, but is
    # otherwise identical to the behaviour of the LibrarianClient
    # discussed and demonstrated above.
    #
    # (Set up:)
    client = RestrictedLibrarianClient()
    alias_id = client.addFile(
        'sample.txt', 6, StringIO('sample'), 'text/plain')
    config.push(
        'test config', textwrap.dedent('''\
            [librarian]
            restricted_download_host: example.com
            restricted_download_port: 5678
            '''))
    try:
        # (Test:)
        # The RestrictedLibrarianClient should use the
        # restricted_download_host and restricted_download_port.
        expected_host = 'http://example.com:5678/'
        download_url = client._getURLForDownload(alias_id)
        self.assertTrue(
            download_url.startswith(expected_host),
            'expected %s to start with %s' % (download_url, expected_host))
        # If the alias has been deleted, _getURLForDownload returns None.
        lfa = LibraryFileAlias.get(alias_id)
        lfa.content = None
        call = block_implicit_flushes(  # Prevent a ProgrammingError
            RestrictedLibrarianClient._getURLForDownload)
        self.assertEqual(call(client, alias_id), None)
    finally:
        # (Tear down:)
        config.pop('test config')


def main(self):
    if self.options.product is None:
        if self.options.testing:
            self.options.product = self.create_test_product()
            self.logger.info("Product %s created", self.options.product)
        else:
            self.parser.error('No product specified')
    if len(self.args) != 1:
        self.parser.error('Please specify a bug XML file to import')
    bugs_filename = self.args[0]

    # don't send email
    send_email_data = """
        [immediate_mail]
        send_email: False
        """
    config.push('send_email_data', send_email_data)
    self.login('*****@*****.**')

    product = getUtility(IProductSet).getByName(self.options.product)
    if product is None:
        self.parser.error(
            'Product %s does not exist' % self.options.product)

    importer = BugImporter(
        product, bugs_filename, self.options.cache_filename,
        verify_users=self.options.verify_users, logger=self.logger)
    importer.importBugs(self.txn)

    config.pop('send_email_data')


def test_connect_depends_on_localhost_only_config(self):
    # If localhost_only is True and the host to which we would connect
    # is not localhost, the connect() method is not called.
    localhost_only_conf = """
        [distributionmirrorprober]
        localhost_only: True
        """
    config.push('localhost_only_conf', localhost_only_conf)
    prober = self._createFactoryAndStubConnectAndTimeoutCall()
    self.failUnless(prober.connect_host != 'localhost')
    prober.probe()
    self.failIf(prober.connectCalled)
    # Restore the config.
    config.pop('localhost_only_conf')

    # If localhost_only is False, it doesn't matter which host we
    # connect to -- the connect() method will be called.
    remote_conf = """
        [distributionmirrorprober]
        localhost_only: False
        """
    config.push('remote_conf', remote_conf)
    prober = self._createFactoryAndStubConnectAndTimeoutCall()
    prober.probe()
    self.failUnless(prober.connectCalled)
    # Restore the config.
    config.pop('remote_conf')


def test__getURLForDownload(self):
    # This protected method is used by getFileByAlias. It is supposed to
    # use the internal host and port rather than the external, proxied
    # host and port. This is to provide relief for our own issues with
    # the problems reported in bug 317482.
    #
    # (Set up:)
    client = LibrarianClient()
    alias_id = client.addFile(
        'sample.txt', 6, StringIO('sample'), 'text/plain')
    config.push(
        'test config', textwrap.dedent('''\
            [librarian]
            download_host: example.org
            download_port: 1234
            '''))
    try:
        # (Test:)
        # The LibrarianClient should use the download_host and
        # download_port.
        expected_host = 'http://example.org:1234/'
        download_url = client._getURLForDownload(alias_id)
        self.assertTrue(
            download_url.startswith(expected_host),
            'expected %s to start with %s' % (download_url, expected_host))
        # If the alias has been deleted, _getURLForDownload returns None.
        lfa = LibraryFileAlias.get(alias_id)
        lfa.content = None
        call = block_implicit_flushes(  # Prevent a ProgrammingError
            LibrarianClient._getURLForDownload)
        self.assertEqual(call(client, alias_id), None)
    finally:
        # (Tear down:)
        config.pop('test config')


def test_basic_auth_disabled(self):
    # Basic auth uses a single password for every user, so it must
    # never be used on production. authenticate() will skip basic
    # auth unless it's enabled.
    authsvc, request = self._make('bruce', 'test')
    self.assertEqual(authsvc.authenticate(request), Bruce)
    try:
        config.push("no-basic", "[launchpad]\nbasic_auth_password: none")
        self.assertEqual(authsvc.authenticate(request), None)
    finally:
        config.pop("no-basic")


def test_server(self):
    request = LaunchpadTestRequest()
    scopes = webapp.ScopesFromRequest(request)
    self.assertFalse(scopes.lookup('server.lpnet'))
    config.push('ensure_lpnet', dedent("""\
        [launchpad]
        is_lpnet: True
        """))
    try:
        self.assertTrue(scopes.lookup('server.lpnet'))
    finally:
        config.pop('ensure_lpnet')


def test_basic_auth_disabled(self):
    # Basic auth uses a single password for every user, so it must
    # never be used on production. authenticate() will skip basic
    # auth unless it's enabled.
    authsvc, request = self._make('bruce', 'test')
    self.assertEqual(authsvc.authenticate(request), Bruce)
    try:
        config.push(
            "no-basic", "[launchpad]\nbasic_auth_password: none")
        self.assertEqual(authsvc.authenticate(request), None)
    finally:
        config.pop("no-basic")


def test_server(self):
    request = LaunchpadTestRequest()
    scopes = webapp.ScopesFromRequest(request)
    self.assertFalse(scopes.lookup('server.lpnet'))
    config.push(
        'ensure_lpnet', dedent("""\
            [launchpad]
            is_lpnet: True
            """))
    try:
        self.assertTrue(scopes.lookup('server.lpnet'))
    finally:
        config.pop('ensure_lpnet')


def test_build_notification_suppresses_mail(self):
    # When the 'send_build_notification' config option is False, we
    # don't send any mail at all.
    self.create_builds(self.archive)
    build = self.builds[BuildStatus.FULLYBUILT.value]
    send_build_notification = dedent("""
        [builddmaster]
        send_build_notification: False
        """)
    config.push('send_build_notification', send_build_notification)
    build.notify()
    notifications = pop_notifications()
    self.assertEquals(0, len(notifications))
    # And undo what we just did.
    config.pop('send_build_notification')


def setUp(self):
    super(MasterFallbackTestCase, self).setUp()

    self.pgbouncer_fixture = PGBouncerFixture()

    # The PGBouncerFixture will set the PGPORT environment variable,
    # causing all DB connections to go via pgbouncer unless an
    # explicit port is provided.
    dbname = DatabaseLayer._db_fixture.dbname
    # Pull the direct db connection string, including explicit port.
    conn_str_direct = self.pgbouncer_fixture.databases[dbname]
    # Generate a db connection string that will go via pgbouncer.
    conn_str_pgbouncer = "dbname=%s host=localhost" % dbname

    # Configure slave connections via pgbouncer, so we can shut them
    # down. Master connections go direct, so they are unaffected.
    config_key = "master-slave-separation"
    config.push(
        config_key,
        dedent(
            """\
            [database]
            rw_main_master: %s
            rw_main_slave: %s
            """
            % (conn_str_direct, conn_str_pgbouncer)
        ),
    )
    self.addCleanup(lambda: config.pop(config_key))

    self.useFixture(self.pgbouncer_fixture)


def production_config(host_name):
    """Simulate a production Launchpad and mailman config."""
    config.push('production', """\
        [mailman]
        build_host_name: %s
        """ % host_name)
    default_email_host = mm_cfg.DEFAULT_EMAIL_HOST
    mm_cfg.DEFAULT_EMAIL_HOST = host_name
    default_url_host = mm_cfg.DEFAULT_URL_HOST
    mm_cfg.DEFAULT_URL_HOST = host_name
    try:
        yield
    finally:
        mm_cfg.DEFAULT_URL_HOST = default_url_host
        mm_cfg.DEFAULT_EMAIL_HOST = default_email_host
        config.pop('production')


def setUp(self):
    super(TestFastDowntimeRollout, self).setUp()

    self.master_dbname = DatabaseLayer._db_fixture.dbname
    self.slave_dbname = self.master_dbname + '_slave'

    self.pgbouncer_fixture = PGBouncerFixture()
    self.pgbouncer_fixture.databases[self.slave_dbname] = (
        self.pgbouncer_fixture.databases[self.master_dbname])

    # Configure master and slave connections to go via different
    # pgbouncer aliases.
    config_key = 'master-slave-separation'
    config.push(
        config_key, dedent('''\
            [database]
            rw_main_master: dbname=%s host=localhost
            rw_main_slave: dbname=%s host=localhost
            ''' % (self.master_dbname, self.slave_dbname)))
    self.addCleanup(lambda: config.pop(config_key))

    self.useFixture(self.pgbouncer_fixture)

    self.pgbouncer_con = psycopg2.connect(
        'dbname=pgbouncer user=pgbouncer host=localhost')
    self.pgbouncer_con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    self.pgbouncer_cur = self.pgbouncer_con.cursor()

    transaction.abort()


def setUp(self):
    super(MasterFallbackTestCase, self).setUp()

    self.pgbouncer_fixture = PGBouncerFixture()

    # The PGBouncerFixture will set the PGPORT environment variable,
    # causing all DB connections to go via pgbouncer unless an
    # explicit port is provided.
    dbname = DatabaseLayer._db_fixture.dbname
    # Pull the direct db connection string, including explicit port.
    conn_str_direct = self.pgbouncer_fixture.databases[dbname]
    # Generate a db connection string that will go via pgbouncer.
    conn_str_pgbouncer = 'dbname=%s host=localhost' % dbname

    # Configure slave connections via pgbouncer, so we can shut them
    # down. Master connections go direct, so they are unaffected.
    config_key = 'master-slave-separation'
    config.push(
        config_key, dedent('''\
            [database]
            rw_main_master: %s
            rw_main_slave: %s
            ''' % (conn_str_direct, conn_str_pgbouncer)))
    self.addCleanup(lambda: config.pop(config_key))

    self.useFixture(self.pgbouncer_fixture)


def setUp(self):
    super(TestFastDowntimeRollout, self).setUp()

    self.master_dbname = DatabaseLayer._db_fixture.dbname
    self.slave_dbname = self.master_dbname + "_slave"

    self.pgbouncer_fixture = PGBouncerFixture()
    self.pgbouncer_fixture.databases[self.slave_dbname] = (
        self.pgbouncer_fixture.databases[self.master_dbname]
    )

    # Configure master and slave connections to go via different
    # pgbouncer aliases.
    config_key = "master-slave-separation"
    config.push(
        config_key,
        dedent(
            """\
            [database]
            rw_main_master: dbname=%s host=localhost
            rw_main_slave: dbname=%s host=localhost
            """
            % (self.master_dbname, self.slave_dbname)
        ),
    )
    self.addCleanup(lambda: config.pop(config_key))

    self.useFixture(self.pgbouncer_fixture)

    self.pgbouncer_con = psycopg2.connect(
        "dbname=pgbouncer user=pgbouncer host=localhost"
    )
    self.pgbouncer_con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    self.pgbouncer_cur = self.pgbouncer_con.cursor()

    transaction.abort()


def production_config(host_name):
    """Simulate a production Launchpad and mailman config."""
    config.push(
        'production', """\
        [mailman]
        build_host_name: %s
        """ % host_name)
    default_email_host = mm_cfg.DEFAULT_EMAIL_HOST
    mm_cfg.DEFAULT_EMAIL_HOST = host_name
    default_url_host = mm_cfg.DEFAULT_URL_HOST
    mm_cfg.DEFAULT_URL_HOST = host_name
    try:
        yield
    finally:
        mm_cfg.DEFAULT_URL_HOST = default_url_host
        mm_cfg.DEFAULT_EMAIL_HOST = default_email_host
        config.pop('production')


def test_notify_owner_suppresses_mail(self):
    # When the 'notify_owner' config option is False, we don't send mail
    # to the owner of the SPR.
    self.create_builds(self.archive)
    build = self.builds[BuildStatus.FAILEDTOBUILD.value]
    notify_owner = dedent("""
        [builddmaster]
        send_build_notification: True
        notify_owner: False
        """)
    config.push('notify_owner', notify_owner)
    build.notify()
    notifications = pop_notifications()
    actual_emails = [n['To'] for n in notifications]
    self.assertEquals(self.buildd_admins_email, actual_emails)
    # And undo what we just did.
    config.pop('notify_owner')


def test_notify_owner_suppresses_mail(self):
    # When the 'notify_owner' config option is False, we don't send mail
    # to the owner of the SPR.
    self.create_builds(self.archive)
    build = self.builds[BuildStatus.FAILEDTOBUILD.value]
    notify_owner = dedent("""
        [builddmaster]
        send_build_notification: True
        notify_owner: False
        """)
    config.push('notify_owner', notify_owner)
    with dbuser(config.builddmaster.dbuser):
        build.notify()
    self._assert_mails_are_correct(
        build,
        [(person, "buildd-admin")
         for person in self.buildd_admins_members])
    # And undo what we just did.
    config.pop('notify_owner')


def test_direct_basic_call_fails_when_disabled(self):
    # Basic auth uses a single password for every user, so it must
    # never be used on production. authenticate() won't call the
    # underlying method unless it's enabled, but even if it somehow
    # does, it will fail.
    authsvc, request = self._make('bruce', 'test')
    credentials = ILoginPassword(request, None)
    self.assertEqual(
        authsvc._authenticateUsingBasicAuth(credentials, request), Bruce)
    try:
        config.push("no-basic", "[launchpad]\nbasic_auth_password: none")
        exception = self.assertRaises(
            AssertionError, authsvc._authenticateUsingBasicAuth,
            credentials, request)
        self.assertEqual(
            "Attempted to use basic auth when it is disabled",
            str(exception))
    finally:
        config.pop("no-basic")


def setUp(self):
    Fixture.setUp(self)
    config.push(
        'demo-fixture', '''
        [launchpad]
        is_demo: true
        site_message = This is a demo site mmk. \
            <a href="http://example.com">File a bug</a>.
        ''')
    self.addCleanup(lambda: config.pop('demo-fixture'))


def test_direct_basic_call_fails_when_disabled(self):
    # Basic auth uses a single password for every user, so it must
    # never be used on production. authenticate() won't call the
    # underlying method unless it's enabled, but even if it somehow
    # does, it will fail.
    authsvc, request = self._make('bruce', 'test')
    credentials = ILoginPassword(request, None)
    self.assertEqual(
        authsvc._authenticateUsingBasicAuth(credentials, request), Bruce)
    try:
        config.push(
            "no-basic", "[launchpad]\nbasic_auth_password: none")
        exception = self.assertRaises(
            AssertionError, authsvc._authenticateUsingBasicAuth,
            credentials, request)
        self.assertEquals(
            "Attempted to use basic auth when it is disabled",
            str(exception))
    finally:
        config.pop("no-basic")


def setUp(self):
    Fixture.setUp(self)
    config.push(
        "demo-fixture",
        """
        [launchpad]
        is_demo: true
        site_message = This is a demo site mmk. \
            <a href="http://example.com">File a bug</a>.
        """,
    )
    self.addCleanup(lambda: config.pop("demo-fixture"))


def test_DisabledExternallyUsedSuggestions(self):
    # If foo wants to translate "error message 936" and bar happens
    # to have a translation for that, that's an externally used
    # suggestion.
    # If global suggestions are disabled, an empty list is returned.
    text = "error message 936"
    foomsg = self.factory.makePOTMsgSet(self.foo_template, text)
    barmsg = self.factory.makePOTMsgSet(self.bar_template, text)
    self.factory.makeCurrentTranslationMessage(
        pofile=self.bar_nl, current_other=False, potmsgset=barmsg)
    transaction.commit()

    # There is a global (externally used) suggestion.
    used_suggestions = foomsg.getExternallyUsedTranslationMessages(self.nl)
    self.assertEqual(len(used_suggestions), 1)
    used_suggestions = (
        foomsg.getExternallySuggestedOrUsedTranslationMessages(
            used_languages=[self.nl],
            suggested_languages=[self.nl])[self.nl].used)
    self.assertEqual(len(used_suggestions), 1)

    # Override the config option to disable global suggestions.
    new_config = ("""
        [rosetta]
        global_suggestions_enabled = False
        """)
    config.push('disabled_suggestions', new_config)
    disabled_used_suggestions = (
        foomsg.getExternallyUsedTranslationMessages(self.nl))
    self.assertEqual(len(disabled_used_suggestions), 0)
    disabled_used_suggestions = (
        foomsg.getExternallySuggestedOrUsedTranslationMessages(
            used_languages=[self.nl],
            suggested_languages=[self.nl]))[self.nl].used
    self.assertEqual(len(disabled_used_suggestions), 0)

    # Restore the old configuration.
    config.pop('disabled_suggestions')


def test_DisabledExternallyUsedSuggestions(self):
    # If foo wants to translate "error message 936" and bar happens
    # to have a translation for that, that's an externally used
    # suggestion.
    # If global suggestions are disabled, an empty list is returned.
    text = "error message 936"
    foomsg = self.factory.makePOTMsgSet(self.foo_template, text)
    barmsg = self.factory.makePOTMsgSet(self.bar_template, text)
    translation = self.factory.makeCurrentTranslationMessage(
        pofile=self.bar_nl, current_other=False, potmsgset=barmsg)
    transaction.commit()

    # There is a global (externally used) suggestion.
    used_suggestions = foomsg.getExternallyUsedTranslationMessages(
        self.nl)
    self.assertEquals(len(used_suggestions), 1)
    used_suggestions = foomsg.getExternallySuggestedOrUsedTranslationMessages(
        used_languages=[self.nl],
        suggested_languages=[self.nl])[self.nl].used
    self.assertEquals(len(used_suggestions), 1)

    # Override the config option to disable global suggestions.
    new_config = ("""
        [rosetta]
        global_suggestions_enabled = False
        """)
    config.push('disabled_suggestions', new_config)
    disabled_used_suggestions = (
        foomsg.getExternallyUsedTranslationMessages(self.nl))
    self.assertEquals(len(disabled_used_suggestions), 0)
    disabled_used_suggestions = (
        foomsg.getExternallySuggestedOrUsedTranslationMessages(
            used_languages=[self.nl],
            suggested_languages=[self.nl]))[self.nl].used
    self.assertEquals(len(disabled_used_suggestions), 0)

    # Restore the old configuration.
    config.pop('disabled_suggestions')


def test_restricted_getURLForDownload(self):
    # The RestrictedLibrarianClient should use the
    # restricted_download_host and restricted_download_port, but is
    # otherwise identical to the behavior of the LibrarianClient
    # discussed and demonstrated above.
    #
    # (Set up:)
    client = RestrictedLibrarianClient()
    alias_id = client.addFile("sample.txt", 6, StringIO("sample"), "text/plain")
    config.push(
        "test config",
        textwrap.dedent(
            """\
            [librarian]
            restricted_download_host: example.com
            restricted_download_port: 5678
            """
        ),
    )
    try:
        # (Test:)
        # The RestrictedLibrarianClient should use the
        # restricted_download_host and restricted_download_port.
        expected_host = "http://example.com:5678/"
        download_url = client._getURLForDownload(alias_id)
        self.failUnless(
            download_url.startswith(expected_host),
            "expected %s to start with %s" % (download_url, expected_host)
        )
        # If the alias has been deleted, _getURLForDownload returns None.
        lfa = LibraryFileAlias.get(alias_id)
        lfa.content = None
        call = block_implicit_flushes(
            RestrictedLibrarianClient._getURLForDownload
        )  # Prevent a ProgrammingError
        self.assertEqual(call(client, alias_id), None)
    finally:
        # (Tear down:)
        config.pop("test config")


def test__getURLForDownload(self):
    # This protected method is used by getFileByAlias. It is supposed to
    # use the internal host and port rather than the external, proxied
    # host and port. This is to provide relief for our own issues with
    # the problems reported in bug 317482.
    #
    # (Set up:)
    client = LibrarianClient()
    alias_id = client.addFile("sample.txt", 6, StringIO("sample"), "text/plain")
    config.push(
        "test config",
        textwrap.dedent(
            """\
            [librarian]
            download_host: example.org
            download_port: 1234
            """
        ),
    )
    try:
        # (Test:)
        # The LibrarianClient should use the download_host and
        # download_port.
        expected_host = "http://example.org:1234/"
        download_url = client._getURLForDownload(alias_id)
        self.failUnless(
            download_url.startswith(expected_host),
            "expected %s to start with %s" % (download_url, expected_host)
        )
        # If the alias has been deleted, _getURLForDownload returns None.
        lfa = LibraryFileAlias.get(alias_id)
        lfa.content = None
        call = block_implicit_flushes(
            LibrarianClient._getURLForDownload
        )  # Prevent a ProgrammingError
        self.assertEqual(call(client, alias_id), None)
    finally:
        # (Tear down:)
        config.pop("test config")


def changed_config(changes):
    config.push('test_changes', changes)
    yield
    config.pop('test_changes')
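

# A minimal, self-contained sketch of the config.push()/config.pop() overlay
# pattern that all of the snippets above share. This is not taken from any of
# those snippets: FakeConfig and overridden_config are hypothetical stand-ins
# for the real Launchpad config object and its test helpers, used here only to
# illustrate why the pop is wrapped in try/finally (or addCleanup), so the
# overlay is always removed even when the test body raises.
from contextlib import contextmanager


class FakeConfig:
    """Hypothetical stand-in: a stack of named config overlays."""

    def __init__(self):
        self._overlays = []

    def push(self, name, data):
        # Lay a named overlay on top of the current configuration.
        self._overlays.append((name, data))

    def pop(self, name):
        # Remove the topmost overlay; it must be the one named here.
        assert self._overlays and self._overlays[-1][0] == name
        self._overlays.pop()


config = FakeConfig()


@contextmanager
def overridden_config(name, data):
    # Push the overlay and guarantee it is popped again on exit.
    config.push(name, data)
    try:
        yield
    finally:
        config.pop(name)


# Usage: code under the with-block sees the overridden settings.
with overridden_config('example', '[launchpad]\nis_demo: true'):
    pass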