def test_compose_preseed_with_curtin_installer(self):
    """The curtin preseed for a deploying node carries MAAS datasource
    credentials, a reboot power_state stanza, the curtin metadata URL,
    and the apt proxy configuration."""
    rack = factory.make_RackController(url='')
    deploying_node = factory.make_Node(
        interface=True, status=NODE_STATUS.DEPLOYING)
    boot_nic = deploying_node.get_boot_interface()
    boot_nic.vlan.dhcp_on = True
    boot_nic.vlan.primary_rack = rack
    boot_nic.vlan.save()
    self.useFixture(RunningClusterRPCFixture())
    request = make_HttpRequest()
    apt_proxy = get_apt_proxy(
        request, deploying_node.get_boot_rack_controller())
    preseed = yaml.safe_load(
        compose_preseed(request, PRESEED_TYPE.CURTIN, deploying_node))
    self.assertIn('datasource', preseed)
    self.assertIn('MAAS', preseed['datasource'])
    maas_datasource = preseed['datasource']['MAAS']
    self.assertThat(
        maas_datasource,
        KeysEqual('metadata_url', 'consumer_key', 'token_key',
                  'token_secret'))
    expected_power_state = {
        'delay': 'now',
        'mode': 'reboot',
        'timeout': 1800,
        'condition': 'test ! -e /tmp/block-reboot',
    }
    self.assertDictEqual(expected_power_state, preseed['power_state'])
    self.assertEqual(
        request.build_absolute_uri(reverse('curtin-metadata')),
        maas_datasource['metadata_url'])
    self.assertAptConfig(preseed, apt_proxy)
def test_yields_configuration_with_ubuntu(self):
    """A netboot-disabled Ubuntu machine tagged "wedge100" yields a
    rack-controller cloud-init configuration with the expected runcmd."""
    wedge_tag = factory.make_Tag(name="wedge100")
    machine = factory.make_Node(osystem="ubuntu", netboot=False)
    machine.tags.add(wedge_tag)
    config = generate_rack_controller_configuration(
        machine, proxy="http://proxy.example.com/")
    shared_secret = "1234"
    Config.objects.set_config("rpc_shared_secret", shared_secret)
    snap_channel = version.get_maas_version_track_channel()
    maas_url = "http://%s:5240/MAAS" % get_maas_facing_server_host(
        machine.get_boot_rack_controller())
    init_cmd = "/bin/snap/maas init --mode rack"
    expected = {
        "runcmd": [
            f"snap install maas --channel={snap_channel}",
            "%s --maas-url %s --secret %s" % (
                init_cmd, maas_url, shared_secret),
        ]
    }
    # NOTE(review): KeysEqual is given a dict, so (per testtools) only the
    # top-level keys are compared — the runcmd contents are not verified.
    self.assertThat(dict(config), KeysEqual(expected))
def test_only_addError_once(self):
    # Even if the reactor is unclean and the test raises an error and the
    # cleanups raise errors, we only called addError once per test.
    reactor = self.make_reactor()

    class WhenItRains(TestCase):
        def it_pours(self):
            # Add a dirty cleanup (raises ZeroDivisionError at cleanup time).
            self.addCleanup(lambda: 3 / 0)
            # Dirty the reactor: leave a listening port behind.
            from twisted.internet.protocol import ServerFactory
            reactor.listenTCP(0, ServerFactory())
            # Unhandled error in a Deferred (not returned, so not waited on).
            defer.maybeDeferred(lambda: 2 / 0)
            # Actual error raised synchronously by the test body.
            raise RuntimeError("Excess precipitation")

    test = WhenItRains('it_pours')
    runner = self.make_runner(test)
    result = self.make_result()
    runner.run(result)
    # Exactly one addError event, despite the four distinct failures above.
    self.assertThat([event[:2] for event in result._events],
        Equals([('startTest', test), ('addError', test), ('stopTest', test)]))
    # The single error's details dict aggregates all failure sources.
    error = result._events[1][2]
    self.assertThat(
        error, KeysEqual(
            'traceback', 'traceback-1', 'traceback-2', 'twisted-log',
            'unhandled-error-in-deferred',
        ))
def test_compose_preseed_with_curtin_installer(self):
    """Curtin preseeds expose the MAAS datasource credentials, a reboot
    power_state, the curtin metadata URL, and apt proxy settings."""
    primary_rack = factory.make_RackController(url="")
    machine = factory.make_Node(interface=True, status=NODE_STATUS.DEPLOYING)
    vlan = machine.get_boot_interface().vlan
    vlan.dhcp_on = True
    vlan.primary_rack = primary_rack
    vlan.save()
    self.useFixture(RunningClusterRPCFixture())
    request = make_HttpRequest()
    proxy = get_apt_proxy(request, machine.get_boot_rack_controller())
    preseed = yaml.safe_load(
        compose_preseed(request, PRESEED_TYPE.CURTIN, machine))
    self.assertIn("datasource", preseed)
    self.assertIn("MAAS", preseed["datasource"])
    datasource = preseed["datasource"]["MAAS"]
    self.assertThat(
        datasource,
        KeysEqual("metadata_url", "consumer_key", "token_key",
                  "token_secret"))
    self.assertDictEqual(
        {
            "delay": "now",
            "mode": "reboot",
            "timeout": 1800,
            "condition": "test ! -e /tmp/block-reboot",
        },
        preseed["power_state"])
    self.assertEqual(
        request.build_absolute_uri(reverse("curtin-metadata")),
        datasource["metadata_url"])
    self.assertAptConfig(preseed, proxy)
def test_unhandled_error_from_deferred_combined_with_error(self):
    # If there's a Deferred with an unhandled error, the test fails. Each
    # unhandled error is reported with a separate traceback, and the error
    # is still reported.
    class SomeCase(TestCase):
        def test_cruft(self):
            # Note we aren't returning the Deferred so that the error will
            # be unhandled.
            defer.maybeDeferred(lambda: 1 / 0)
            # Synchronous error raised directly by the test body.
            2 / 0

    test = SomeCase('test_cruft')
    runner = self.make_runner(test)
    result = self.make_result()
    runner.run(result)
    # Stash the error details, then blank them out of the events list so
    # the full event sequence can be compared by simple equality.
    error = result._events[1][2]
    result._events[1] = ('addError', test, None)
    self.assertThat(
        result._events,
        Equals([('startTest', test), ('addError', test, None),
            ('stopTest', test)]))
    # The details carry the sync traceback plus the unhandled deferred.
    self.assertThat(
        error, KeysEqual(
            'traceback',
            'twisted-log',
            'unhandled-error-in-deferred',
        ))
def test_makeService(self):
    """makeService with default options builds a MultiService containing
    exactly the expected named services, registers it globally, sets up
    crochet, and configures logging at the requested verbosity."""
    opts = Options()
    maker = ProvisioningServiceMaker("Harry", "Hill")
    svc = maker.makeService(opts, clock=None)
    self.assertIsInstance(svc, MultiService)
    expected = [
        "dhcp_probe",
        "networks_monitor",
        "image_download",
        "lease_socket_service",
        "node_monitor",
        "ntp",
        "rpc",
        "rpc-ping",
        "tftp",
        "image_service",
        "service_monitor",
    ]
    self.assertThat(svc.namedServices, KeysEqual(*expected))
    self.assertEqual(
        len(svc.namedServices), len(svc.services),
        "Not all services are named.")
    self.assertEqual(svc, provisioningserver.services)
    self.assertThat(crochet.no_setup, MockCalledOnceWith())
    self.assertThat(
        logger.configure,
        MockCalledOnceWith(opts["verbosity"], logger.LoggingMode.TWISTD))
def test_unhandled_error_from_deferred(self):
    # If there's a Deferred with an unhandled error, the test fails. Each
    # unhandled error is reported with a separate traceback.
    # We're interested in the behavior when debugging is disabled. When
    # debugging is enabled, we get more stack traces.
    self.useFixture(DebugTwisted(False))

    class SomeCase(TestCase):
        def test_cruft(self):
            # Note we aren't returning the Deferred so that the error will
            # be unhandled.
            defer.maybeDeferred(lambda: 1 / 0)
            defer.maybeDeferred(lambda: 2 / 0)

    test = SomeCase('test_cruft')
    runner = self.make_runner(test)
    result = self.make_result()
    runner.run(result)
    # Stash the error details, then blank them out of the events list so
    # the full event sequence can be compared by simple equality.
    error = result._events[1][2]
    result._events[1] = ('addError', test, None)
    self.assertThat(
        result._events,
        Equals([('startTest', test), ('addError', test, None),
            ('stopTest', test)]))
    # One detail entry per unhandled Deferred, plus the twisted log.
    self.assertThat(
        error, KeysEqual(
            'twisted-log',
            'unhandled-error-in-deferred',
            'unhandled-error-in-deferred-1',
        ))
def test_yields_configuration_when_machine_install_rackd_true(self):
    """When install_rackd is set, the generated runcmd configures the
    snap proxy before installing and initialising the MAAS rack."""
    machine = factory.make_Node(osystem="ubuntu", netboot=False)
    machine.install_rackd = True
    http_proxy = "http://proxy.example.com/"
    config = generate_rack_controller_configuration(
        machine, proxy=http_proxy)
    shared_secret = "1234"
    Config.objects.set_config("rpc_shared_secret", shared_secret)
    snap_channel = version.get_maas_version_track_channel()
    maas_url = "http://%s:5240/MAAS" % get_maas_facing_server_host(
        machine.get_boot_rack_controller())
    init_cmd = "/bin/snap/maas init --mode rack"
    expected = {
        "runcmd": [
            "snap set system proxy.http=%s proxy.https=%s" % (
                http_proxy, http_proxy),
            f"snap install maas --channel={snap_channel}",
            "%s --maas-url %s --secret %s" % (
                init_cmd, maas_url, shared_secret),
        ]
    }
    # NOTE(review): KeysEqual is given a dict, so (per testtools) only the
    # top-level keys are compared — the runcmd contents are not verified.
    self.assertThat(dict(config), KeysEqual(expected))
def test_run_200(self):
    # A request that returns 200 is a success.
    with CaptureOops() as oopses:
        job, reqs = self.makeAndRunJob(response_status=200)
    # json_data['result'] records both sides of the HTTP exchange.
    result_matcher = ContainsDict(
        {'result': MatchesAll(
            KeysEqual('request', 'response'),
            ContainsDict(
                {'response': ContainsDict(
                    {'status_code': Equals(200)})}))})
    self.assertThat(
        job,
        MatchesStructure(
            status=Equals(JobStatus.COMPLETED),
            pending=Is(False),
            successful=Is(True),
            date_sent=Not(Is(None)),
            error_message=Is(None),
            json_data=result_matcher))
    self.assertEqual(1, len(reqs))
    expected_request = (
        'POST', 'http://example.com/ep',
        {'Content-Type': 'application/json',
         'User-Agent': 'launchpad.dev-Webhooks/r%s' % (
            versioninfo.revision),
         'X-Launchpad-Event-Type': 'test',
         'X-Launchpad-Delivery': str(job.job_id)})
    self.assertEqual([expected_request], reqs)
    # A successful delivery must not record any OOPSes.
    self.assertEqual([], oopses.oopses)
def test_includes_system_information_if_default_user(self):
    """Vendor data carries system_info.default_user (name and gecos)
    when the node's owner is also its default user."""
    user = factory.make_User()
    machine = factory.make_Node(owner=user, default_user=user)
    data = get_vendor_data(machine)
    expected = ContainsDict({
        "system_info": MatchesDict({
            "default_user": KeysEqual("name", "gecos"),
        }),
    })
    self.assertThat(data, expected)
def test_create_no_name(self):
    """Creating a token without a name returns the usual token fields
    and records an audit event."""
    creator = factory.make_User()
    handler = TokenHandler(creator, {}, None)
    token = handler.create({})
    self.assertThat(token, KeysEqual("id", "key", "secret", "consumer"))
    audit_event = Event.objects.get(type__level=AUDIT)
    self.assertIsNotNone(audit_event)
    self.assertEqual(audit_event.description, "Created token.")
def test_compose_preseed_for_commissioning_node_produces_yaml(self):
    """The commissioning preseed parses as YAML and carries the MAAS
    datasource credentials."""
    commissioning_node = factory.make_node(status=NODE_STATUS.COMMISSIONING)
    parsed = yaml.safe_load(compose_preseed(commissioning_node))
    self.assertIn('datasource', parsed)
    self.assertIn('MAAS', parsed['datasource'])
    credential_keys = (
        'metadata_url', 'consumer_key', 'token_key', 'token_secret')
    self.assertThat(
        parsed['datasource']['MAAS'], KeysEqual(*credential_keys))
def test_get(self):
    """GET on a webhook returns a representation with exactly the
    expected set of fields."""
    body = self.webservice.get(
        self.webhook_url, api_version='devel').jsonBody()
    expected_fields = (
        'active', 'date_created', 'date_last_modified',
        'deliveries_collection_link', 'delivery_url', 'event_types',
        'http_etag', 'registrant_link', 'resource_type_link',
        'self_link', 'target_link', 'web_link')
    self.assertThat(body, KeysEqual(*expected_fields))
def test_rpc_info_from_running_ipc_master(self):
    """The rpc-info view reports the ipc-master's listen endpoints once
    the IPC master, IPC worker, and RPC services are running."""
    # Run the IPC master, IPC worker, and RPC service so the endpoints
    # are updated in the database.
    region = factory.make_RegionController()
    self.useFixture(MAASIDFixture(region.system_id))
    region.owner = factory.make_admin()
    region.save()
    # `workers` is only included so ipc-master will not actually get the
    # workers service because this test runs in all-in-one mode.
    self.useFixture(
        RegionEventLoopFixture(
            "ipc-master", "ipc-worker", "rpc", "workers"
        )
    )
    eventloop.start(master=True, all_in_one=True).wait(5)
    # Tear the event loop back down when the test finishes.
    self.addCleanup(lambda: eventloop.reset().wait(5))
    getServiceNamed = eventloop.services.getServiceNamed
    ipcMaster = getServiceNamed("ipc-master")

    @wait_for(5)
    @inlineCallbacks
    def wait_for_startup():
        # Wait for the service to complete startup.
        yield ipcMaster.starting
        yield getServiceNamed("ipc-worker").starting
        yield getServiceNamed("rpc").starting
        # Force an update, because it's very hard to track when the
        # first iteration of the ipc-master service has completed.
        yield ipcMaster.update()

    wait_for_startup()
    response = self.client.get(reverse("rpc-info"))
    self.assertEqual("application/json", response["Content-Type"])
    info = json.loads(response.content.decode("unicode_escape"))
    self.assertThat(info, KeysEqual("eventloops"))
    self.assertThat(
        info["eventloops"],
        MatchesDict(
            {
                # Each entry in the endpoints dict is a mapping from an
                # event loop to a list of (host, port) tuples. Each tuple is
                # a potential endpoint for connecting into that event loop.
                eventloop.loop.name: MatchesSetwise(
                    *(
                        MatchesListwise((Equals(addr), is_valid_port))
                        for addr, _ in ipcMaster._getListenAddresses(5240)
                    )
                )
            }
        ),
    )
class TestKeysEqual(TestCase, TestMatchersInterface):
    # Exercises KeysEqual via the shared TestMatchersInterface contract:
    # the attributes below are consumed by the mixin's generic tests.

    # The matcher under test: requires keys exactly {'foo', 'bar'}.
    matches_matcher = KeysEqual('foo', 'bar')
    # Dicts that should match (exact key set, values irrelevant).
    matches_matches = [
        {'foo': 0, 'bar': 1},
    ]
    # Dicts that should NOT match: missing keys, extra keys, or
    # entirely different keys.
    matches_mismatches = [
        {},
        {'foo': 0},
        {'bar': 1},
        {'foo': 0, 'bar': 1, 'baz': 2},
        {'a': None, 'b': None, 'c': None},
    ]
    # Expected str() of the matcher.
    str_examples = [
        ("KeysEqual('foo', 'bar')", KeysEqual('foo', 'bar')),
    ]
    # (expected description, mismatching value, matcher) triples; the
    # description text depends on the exact repr of the dict literal.
    describe_examples = [
        ("['bar', 'foo'] does not match {'baz': 2, 'foo': 0, 'bar': 1}: "
         "Keys not equal",
         {'foo': 0, 'bar': 1, 'baz': 2},
         KeysEqual('foo', 'bar')),
    ]
def test_update(self):
    """Renaming a token changes its consumer name and records an
    audit event."""
    owner = factory.make_User()
    handler = TokenHandler(owner, {}, None)
    original_name = factory.make_name("name")
    token = create_auth_token(owner, original_name)
    new_name = factory.make_name("name")
    updated = handler.update({"id": token.id, "name": new_name})
    self.assertThat(
        updated, KeysEqual("id", "key", "secret", "consumer"))
    self.assertEqual(new_name, updated["consumer"]["name"])
    audit_event = Event.objects.get(type__level=AUDIT)
    self.assertIsNotNone(audit_event)
    self.assertEqual(
        audit_event.description, "Modified consumer name of token.")
def test_deferred_error(self):
    """A test that returns a failing Deferred is reported as a single
    error with traceback and twisted-log details."""
    class FailingDeferredCase(TestCase):
        def test_something(self):
            return defer.maybeDeferred(lambda: 1 / 0)

    test = FailingDeferredCase('test_something')
    runner = self.make_runner(test)
    result = self.make_result()
    runner.run(result)
    observed = [event[:2] for event in result._events]
    self.assertThat(
        observed,
        Equals([('startTest', test), ('addError', test),
                ('stopTest', test)]))
    error_details = result._events[1][2]
    self.assertThat(error_details, KeysEqual('traceback', 'twisted-log'))
def test_compose_preseed_for_commissioning_node_produces_yaml(self):
    """The commissioning preseed includes MAAS datasource credentials,
    reporting configuration, rsyslog remotes, and apt proxy settings."""
    rack = factory.make_RackController()
    commissioning_node = factory.make_Node(
        interface=True, status=NODE_STATUS.COMMISSIONING)
    vlan = commissioning_node.get_boot_interface().vlan
    vlan.dhcp_on = True
    vlan.primary_rack = rack
    vlan.save()
    request = make_HttpRequest()
    proxy = get_apt_proxy(
        request, commissioning_node.get_boot_rack_controller())
    parsed = yaml.safe_load(
        compose_preseed(
            request, PRESEED_TYPE.COMMISSIONING, commissioning_node))
    self.assertIn('datasource', parsed)
    self.assertIn('MAAS', parsed['datasource'])
    credential_keys = (
        'metadata_url', 'consumer_key', 'token_key', 'token_secret')
    self.assertThat(
        parsed['datasource']['MAAS'], KeysEqual(*credential_keys))
    self.assertThat(
        parsed['reporting']['maas'],
        KeysEqual('consumer_key', 'endpoint', 'token_key',
                  'token_secret', 'type'))
    self.assertThat(parsed['rsyslog']['remotes'], KeysEqual('maas'))
    self.assertAptConfig(parsed, proxy)
def test_compose_preseed_with_curtin_installer(self):
    """A READY node using the fast-path (curtin) installer gets a
    preseed with MAAS datasource credentials and the curtin metadata
    URL."""
    node = factory.make_node(status=NODE_STATUS.READY)
    node.use_fastpath_installer()
    # Previously compose_preseed() was called twice and the first result
    # discarded; call it once and parse the returned YAML.
    preseed = yaml.safe_load(compose_preseed(node))
    self.assertIn('datasource', preseed)
    self.assertIn('MAAS', preseed['datasource'])
    self.assertThat(
        preseed['datasource']['MAAS'],
        KeysEqual(
            'metadata_url', 'consumer_key', 'token_key', 'token_secret'))
    self.assertEqual(
        absolute_reverse('curtin-metadata'),
        preseed['datasource']['MAAS']['metadata_url'])
def test_log_in_details(self):
    """Messages written via log.msg during a failing test appear in the
    twisted-log detail attached to the error."""
    class LoggingErrorCase(TestCase):
        def test_something(self):
            log.msg("foo")
            1 / 0

    test = LoggingErrorCase('test_something')
    runner = self.make_runner(test)
    result = self.make_result()
    runner.run(result)
    observed = [event[:2] for event in result._events]
    self.assertThat(
        observed,
        Equals([('startTest', test), ('addError', test),
                ('stopTest', test)]))
    error_details = result._events[1][2]
    self.assertThat(error_details, KeysEqual('traceback', 'twisted-log'))
def test_JsonModel_custom_cache(self):
    # Adding an item to the cache in the initialize method results in it
    # being in the cache.
    class ProductModelTestView(BaseProductModelTestView):
        def initialize(self):
            request = get_current_browser_request()
            json_cache = IJSONRequestCache(request).objects
            json_cache['target_info'] = {'title': "The Title"}

    lp.services.webapp.tests.ProductModelTestView = ProductModelTestView
    self.configZCML()
    browser = self.getUserBrowser(self.url)
    page_cache = loads(browser.contents)
    self.assertThat(
        page_cache,
        KeysEqual('related_features', 'context', 'target_info'))
def test_clean_reactor(self):
    """Cruft left in the reactor (a pending delayed call) fails the
    test with traceback and twisted-log details."""
    reactor = self.make_reactor()
    timeout = self.make_timeout()

    class DirtyReactorCase(TestCase):
        def test_cruft(self):
            reactor.callLater(timeout * 10.0, lambda: None)

    test = DirtyReactorCase('test_cruft')
    runner = self.make_runner(test, timeout)
    result = self.make_result()
    runner.run(result)
    observed = [event[:2] for event in result._events]
    self.assertThat(
        observed,
        Equals([('startTest', test), ('addError', test),
                ('stopTest', test)]))
    error_details = result._events[1][2]
    self.assertThat(error_details, KeysEqual('traceback', 'twisted-log'))
def assertRecipientsEqual(self, expected, changes, blamer, maintainer,
                          changer, purpose=ArchivePurpose.PRIMARY):
    """Assert getRecipientsForAction yields exactly `expected` for an
    'accepted' upload with the given blamer/maintainer/changer."""
    distribution = self.factory.makeDistribution()
    archive = self.factory.makeArchive(
        distribution=distribution, purpose=purpose)
    distroseries = self.factory.makeDistroSeries(distribution=distribution)
    # Now set the uploaders.
    main_component = getUtility(IComponentSet).ensure('main')
    if main_component not in distroseries.components:
        self.factory.makeComponentSelection(
            distroseries=distroseries, component=main_component)
    for uploader in (maintainer, changer):
        distribution.main_archive.newComponentUploader(
            uploader, main_component)
    info = fetch_information(None, None, changes)
    recipients, _ = PackageUploadMailer.getRecipientsForAction(
        'accepted', info, blamer, None, [], archive, distroseries,
        PackagePublishingPocket.RELEASE)
    self.assertThat(recipients, KeysEqual(*expected))
def test_authenticate_default(self):
    """A correct username/password combination yields a 200 response
    whose JSON body has the successUrl and tokenId fields."""
    auth_headers = {
        OPENAM_USERNAME_HEADER: USERNAME,
        OPENAM_PASSWORD_HEADER: PASSWORD,
        "Content-Type": "application/json",
    }
    response = requests.post(AUTHENTICATE_URI, headers=auth_headers)
    self.assertThat(response.status_code, Equals(200))
    # KeysEqual is given a dict, so (per testtools) only the presence of
    # these keys is checked — the values ("") are placeholders.
    self.assertThat(response.json(), KeysEqual({
        "successUrl": "",
        "tokenId": "",
    }))
def test_log_err_is_error(self):
    """A failure passed to log.err during the test run is recorded as a
    test error with logged-error and twisted-log details."""
    class LoggedFailureCase(TestCase):
        def test_something(self):
            try:
                1 / 0
            except ZeroDivisionError:
                log.err(failure.Failure())

    test = LoggedFailureCase('test_something')
    runner = self.make_runner(test)
    result = self.make_result()
    runner.run(result)
    observed = [event[:2] for event in result._events]
    self.assertThat(
        observed,
        Equals([('startTest', test), ('addError', test),
                ('stopTest', test)]))
    error_details = result._events[1][2]
    self.assertThat(
        error_details, KeysEqual('logged-error', 'twisted-log'))
def test_yields_configuration_when_machine_install_rackd_true(self):
    """When install_rackd is set, the generated runcmd installs the
    MAAS snap in devmode and runs rack init with URL and secret."""
    machine = factory.make_Node(osystem='ubuntu', netboot=False)
    machine.install_rackd = True
    config = generate_rack_controller_configuration(machine)
    shared_secret = '1234'
    Config.objects.set_config("rpc_shared_secret", shared_secret)
    snap_channel = version.get_maas_version_track_channel()
    maas_url = "http://%s:5240/MAAS" % get_maas_facing_server_host(
        machine.get_boot_rack_controller())
    init_cmd = "/bin/snap/maas init --mode rack"
    expected = {
        "runcmd": [
            "snap install maas --devmode --channel=%s" % snap_channel,
            "%s --maas-url %s --secret %s" % (
                init_cmd, maas_url, shared_secret),
        ]
    }
    # NOTE(review): KeysEqual is given a dict, so (per testtools) only the
    # top-level keys are compared — the runcmd contents are not verified.
    self.assertThat(dict(config), KeysEqual(expected))
def test_rpc_info_when_rpc_advertise_running(self):
    """The rpc-info view reports endpoints for every non-link-local,
    non-loopback interface address once rpc and rpc-advertise run."""
    region = factory.make_RegionController()
    self.useFixture(MAASIDFixture(region.system_id))
    region.owner = factory.make_admin()
    region.save()
    self.useFixture(RegionEventLoopFixture("rpc", "rpc-advertise"))
    eventloop.start().wait(5)
    # Tear the event loop back down when the test finishes.
    self.addCleanup(lambda: eventloop.reset().wait(5))
    getServiceNamed = eventloop.services.getServiceNamed

    @wait_for(5)
    @inlineCallbacks
    def wait_for_startup():
        # Wait for the rpc and the rpc-advertise services to start.
        yield getServiceNamed("rpc").starting
        yield getServiceNamed("rpc-advertise").starting
        # Force an update, because it's very hard to track when the
        # first iteration of the rpc-advertise service has completed.
        yield getServiceNamed("rpc-advertise")._tryUpdate()

    wait_for_startup()
    response = self.client.get(reverse('rpc-info'))
    self.assertEqual("application/json", response["Content-Type"])
    info = json.loads(response.content.decode("unicode_escape"))
    self.assertThat(info, KeysEqual("eventloops"))
    self.assertThat(
        info["eventloops"], MatchesDict({
            # Each entry in the endpoints dict is a mapping from an
            # event loop to a list of (host, port) tuples. Each tuple is
            # a potential endpoint for connecting into that event loop.
            eventloop.loop.name: MatchesSetwise(*(
                MatchesListwise((Equals(addr), is_valid_port))
                for addr in get_all_interface_addresses()
                if not IPAddress(addr).is_link_local()
                and not IPAddress(addr).is_loopback())),
        }))
def test_run_404(self):
    # A request that returns a non-2xx response is a failure and
    # gets retried.
    with CaptureOops() as oopses:
        job, reqs = self.makeAndRunJob(response_status=404)
    # json_data['result'] records both sides of the HTTP exchange.
    result_matcher = ContainsDict(
        {'result': MatchesAll(
            KeysEqual('request', 'response'),
            ContainsDict(
                {'response': ContainsDict(
                    {'status_code': Equals(404)})}))})
    self.assertThat(
        job,
        MatchesStructure(
            status=Equals(JobStatus.WAITING),
            pending=Is(True),
            successful=Is(False),
            date_sent=Not(Is(None)),
            error_message=Equals('Bad HTTP response: 404'),
            json_data=result_matcher))
    self.assertEqual(1, len(reqs))
    # A failed delivery is retried, not OOPSed.
    self.assertEqual([], oopses.oopses)
def test_get(self):
    """GET on a delivery returns all expected fields, with ping payload
    values for a delivery that has not yet been sent."""
    body = self.webservice.get(
        self.delivery_url, api_version='devel').jsonBody()
    field_names = (
        'date_created', 'date_first_sent', 'date_scheduled', 'date_sent',
        'error_message', 'event_type', 'http_etag', 'payload', 'pending',
        'resource_type_link', 'self_link', 'successful', 'web_link',
        'webhook_link')
    value_matcher = ContainsDict({
        'event_type': Equals('ping'),
        'payload': Equals({'ping': True}),
        'pending': Equals(True),
        'successful': Is(None),
        'date_created': Not(Is(None)),
        'date_scheduled': Is(None),
        'date_sent': Is(None),
        'error_message': Is(None),
    })
    self.assertThat(
        body, MatchesAll(KeysEqual(*field_names), value_matcher))
def test_byEquality(self):
    """HasQueryCount.byEquality mismatches when counts differ, and the
    mismatch details expose both query lists as text."""
    # Collector representing the expected (smaller) set of queries.
    old_collector = RequestTimelineCollector()
    old_collector.count = 2
    old_collector.queries = [
        (0, 1, "SQL-main-slave", "SELECT 1 FROM Person", None),
        (2, 3, "SQL-main-slave", "SELECT 1 FROM Product", None),
    ]
    # Collector with one extra query, so counts (2 vs 3) do not match.
    new_collector = RequestTimelineCollector()
    new_collector.count = 3
    new_collector.queries = [
        (0, 1, "SQL-main-slave", "SELECT 1 FROM Person", None),
        (2, 3, "SQL-main-slave", "SELECT 1 FROM Product", None),
        (4, 5, "SQL-main-slave", "SELECT 1 FROM Distribution", None),
    ]
    matcher = HasQueryCount.byEquality(old_collector)
    mismatch = matcher.match(new_collector)
    self.assertThat(mismatch, Not(Is(None)))
    details = mismatch.get_details()
    old_lines = []
    new_lines = []
    # The mismatch details carry both sides: 'queries' (actual) and
    # 'other_queries' (expected), each as a text content object.
    self.assertThat(details, KeysEqual("queries", "other_queries"))
    self.assertEqual("text", details["other_queries"].content_type.type)
    old_lines.append("".join(details["other_queries"].iter_text()))
    self.assertEqual("text", details["queries"].content_type.type)
    new_lines.append("".join(details["queries"].iter_text()))
    # Queries are rendered one per line, separated by a 70-dash rule.
    separator = "-" * 70
    expected_old_lines = [
        "0-1@SQL-main-slave SELECT 1 FROM Person\n" + separator + "\n" +
        "2-3@SQL-main-slave SELECT 1 FROM Product\n" + separator,
    ]
    expected_new_lines = [
        "0-1@SQL-main-slave SELECT 1 FROM Person\n" + separator + "\n" +
        "2-3@SQL-main-slave SELECT 1 FROM Product\n" + separator + "\n" +
        "4-5@SQL-main-slave SELECT 1 FROM Distribution\n" + separator,
    ]
    self.assertEqual(expected_old_lines, old_lines)
    self.assertEqual(expected_new_lines, new_lines)
    # The describe() text reuses Equals' own mismatch description.
    self.assertEqual(
        "queries do not match: %s" % (Equals(2).match(3).describe(),),
        mismatch.describe())