def test_atomic_requests_are_enabled(self):
    # ATOMIC_REQUESTS *must* be set for the default connection.
    self.assertThat(
        connections.databases,
        ContainsDict(
            {"default": ContainsDict({"ATOMIC_REQUESTS": Is(True)})}),
    )
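# A minimal, self-contained sketch (illustrative, not from this suite) of the
# partial-dict matching these tests rely on: testtools' ContainsDict checks
# only the keys named in the matcher and ignores any extra keys in the
# observed dict. The class and data below are assumptions for demonstration.
from testtools import TestCase
from testtools.matchers import ContainsDict, Equals, Is


class ContainsDictExample(TestCase):

    def test_partial_match(self):
        observed = {"ATOMIC_REQUESTS": True, "NAME": "default", "PORT": 5432}
        # Passes even though "NAME" is not mentioned in the matcher.
        self.assertThat(
            observed,
            ContainsDict({
                "ATOMIC_REQUESTS": Is(True),
                "PORT": Equals(5432),
            }))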
def assertAptConfig(self, config, apt_proxy):
    archive = PackageRepository.objects.get_default_archive('amd64')
    components = set(archive.KNOWN_COMPONENTS)
    if archive.disabled_components:
        for comp in archive.COMPONENTS_TO_DISABLE:
            if comp in archive.disabled_components:
                components.remove(comp)
    components = ' '.join(components)
    sources_list = 'deb %s $RELEASE %s\n' % (archive.url, components)
    if archive.disable_sources:
        sources_list += '# '
    sources_list += 'deb-src %s $RELEASE %s\n' % (archive.url, components)
    for pocket in archive.POCKETS_TO_DISABLE:
        if pocket in archive.disabled_pockets:
            continue
        sources_list += (
            'deb %s $RELEASE-%s %s\n' % (archive.url, pocket, components))
        if archive.disable_sources:
            sources_list += '# '
        sources_list += (
            'deb-src %s $RELEASE-%s %s\n'
            % (archive.url, pocket, components))
    self.assertThat(
        config,
        ContainsDict({
            'apt': ContainsDict({
                'preserve_sources_list': Equals(False),
                'proxy': Equals(apt_proxy),
                'sources_list': Equals(sources_list),
            })
        }))
def test_patch_event_types(self):
    representation = self.webservice.get(
        self.webhook_url, api_version='devel').jsonBody()
    self.assertThat(
        representation, ContainsDict({'event_types': Equals([])}))

    # Including a valid type in event_types works.
    response = self.webservice.patch(
        self.webhook_url, 'application/json',
        json.dumps({'event_types': ['git:push:0.1']}),
        api_version='devel')
    self.assertEqual(209, response.status)
    representation = self.webservice.get(
        self.webhook_url, api_version='devel').jsonBody()
    self.assertThat(
        representation,
        ContainsDict({'event_types': Equals(['git:push:0.1'])}))

    # But an unknown type is rejected.
    response = self.webservice.patch(
        self.webhook_url, 'application/json',
        json.dumps({'event_types': ['hg:push:0.1']}),
        api_version='devel')
    self.assertThat(
        response,
        MatchesStructure.byEquality(
            status=400,
            body="event_types: u'hg:push:0.1' isn't a valid token"))
def test_basic(self):
    commits = self.ref.getCommits(self.sha1_tip)
    path = self.ref.repository.getInternalPath()
    self.assertEqual(
        [((path, self.sha1_tip),
          {"limit": None, "stop": None, "logger": None})],
        self.hosting_fixture.getLog.calls)
    self.assertThat(commits, MatchesListwise([
        ContainsDict({
            "sha1": Equals(self.sha1_tip),
            "author": MatchesStructure.byEquality(person=self.authors[0]),
            "author_date": Equals(self.dates[1]),
            "commit_message": Equals("tip"),
        }),
        ContainsDict({
            "sha1": Equals(self.sha1_root),
            "author": MatchesStructure.byEquality(person=self.authors[1]),
            "author_date": Equals(self.dates[0]),
            "commit_message": Equals("root"),
        }),
    ]))
    key = "git.launchpad.dev:git-log:%s:%s" % (path, self.sha1_tip)
    self.assertEqual(
        json.dumps(self.log),
        getUtility(IMemcacheClient).get(key.encode("UTF-8")))
def test_includes_legacy_subnet_objects(self):
    space = factory.make_Space()
    subnet = factory.make_Subnet(space=space)
    uri = get_space_uri(space)
    response = self.client.get(uri)
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_space = json.loads(
        response.content.decode(settings.DEFAULT_CHARSET))
    parsed_subnet = parsed_space["subnets"][0]
    self.assertThat(
        parsed_subnet,
        ContainsDict({
            "id": Equals(subnet.id),
            "cidr": Equals(str(subnet.cidr)),
        }),
    )
    self.assertThat(
        parsed_subnet["vlan"],
        ContainsDict({
            "id": Equals(subnet.vlan.id),
            "vid": Equals(subnet.vlan.vid),
            "fabric_id": Equals(subnet.vlan.fabric_id),
        }),
    )
def test_read(self):
    node = factory.make_Node()
    cache_block_device = factory.make_PhysicalBlockDevice(node=node)
    cache_set = factory.make_CacheSet(block_device=cache_block_device)
    uri = get_bcache_cache_set_uri(cache_set)
    response = self.client.get(uri)
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_cache_set = json_load_bytes(response.content)
    self.assertThat(
        parsed_cache_set,
        ContainsDict({
            "id": Equals(cache_set.id),
            "name": Equals(cache_set.name),
            "resource_uri": Equals(get_bcache_cache_set_uri(cache_set)),
            "cache_device": ContainsDict(
                {"id": Equals(cache_block_device.id)}),
            "system_id": Equals(cache_set.get_node().system_id),
        }),
    )
def test_read_returns_partitions(self):
    node = factory.make_Node()
    block_size = 1024
    block_device = factory.make_PhysicalBlockDevice(
        node=node, size=1000000 * block_size)
    partition_table = factory.make_PartitionTable(
        block_device=block_device, table_type="MBR")
    # Use PartitionTable methods that auto-size and position partitions.
    partition1 = partition_table.add_partition(size=50000 * block_size)
    partition2 = partition_table.add_partition()
    uri = get_blockdevice_uri(block_device)
    response = self.client.get(uri)
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_device = json_load_bytes(response.content)
    self.assertThat(
        parsed_device["partitions"][0],
        ContainsDict({
            "bootable": Equals(partition1.bootable),
            "id": Equals(partition1.id),
            "size": Equals(partition1.size),
            "uuid": Equals(partition1.uuid),
        }),
    )
    self.assertThat(
        parsed_device["partitions"][1],
        ContainsDict({
            "bootable": Equals(partition2.bootable),
            "id": Equals(partition2.id),
            "size": Equals(partition2.size),
            "uuid": Equals(partition2.uuid),
        }),
    )
def test_default(self):
    """
    Default content parsers.
    """
    interceptors = [body_params()]
    request = m(
        content_type='application/x-www-form-urlencoded; charset="utf-8"',
        character_encoding='utf-8',
        body=BytesIO(
            urllib.urlencode([
                (b'a', b'1'),
                (b'b', b'2'),
                (b'b', b'3'),
                (b'c', u'\N{SNOWMAN}'.encode('utf-8')),
                (b'd', b'')])))
    context = empty_context.set(REQUEST, request)
    self.assertThat(
        execute(context, interceptors),
        succeeded(
            ContainsDict({
                REQUEST: ContainsDict({
                    'form_params': Equals(
                        pmap({
                            u'a': u'1',
                            u'b': [u'2', u'3'],
                            u'c': u'\N{SNOWMAN}',
                            u'd': True,
                        }))
                })
            })))
def test_read(self):
    subnet = factory.make_Subnet(space=RANDOM)
    uri = get_subnet_uri(subnet)
    response = self.client.get(uri)
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_subnet = json.loads(
        response.content.decode(settings.DEFAULT_CHARSET))
    self.assertThat(
        parsed_subnet,
        ContainsDict({
            "id": Equals(subnet.id),
            "name": Equals(subnet.name),
            "vlan": ContainsDict({"vid": Equals(subnet.vlan.vid)}),
            "space": Equals(subnet.space.get_name()),
            "cidr": Equals(subnet.cidr),
            "gateway_ip": Equals(subnet.gateway_ip),
            "dns_servers": Equals(subnet.dns_servers),
            "managed": Equals(subnet.managed),
        }),
    )
def assertAptConfig(self, config, apt_proxy):
    self.assertThat(
        config,
        ContainsDict({
            'apt': ContainsDict({
                'preserve_sources_list': Equals(False),
                'primary': MatchesListwise([
                    MatchesDict({
                        "arches": Equals(["default"]),
                        "uri": Equals(
                            PackageRepository.get_main_archive().url),
                    }),
                ]),
                'proxy': Equals(apt_proxy),
                'security': MatchesListwise([
                    MatchesDict({
                        "arches": Equals(["default"]),
                        "uri": Equals(
                            PackageRepository.get_main_archive().url),
                    }),
                ]),
            })
        }))
def test_patch(self):
    representation = self.webservice.get(
        self.webhook_url, api_version='devel').jsonBody()
    self.assertThat(
        representation,
        ContainsDict({
            'active': Equals(True),
            'delivery_url': Equals('http://example.com/ep'),
            'event_types': Equals([]),
        }))
    old_mtime = representation['date_last_modified']
    patch = json.dumps({
        'active': False,
        'delivery_url': 'http://example.com/ep2',
        'event_types': ['git:push:0.1'],
    })
    self.webservice.patch(
        self.webhook_url, 'application/json', patch, api_version='devel')
    representation = self.webservice.get(
        self.webhook_url, api_version='devel').jsonBody()
    self.assertThat(
        representation,
        ContainsDict({
            'active': Equals(False),
            'delivery_url': Equals('http://example.com/ep2'),
            'date_last_modified': GreaterThan(old_mtime),
            'event_types': Equals(['git:push:0.1']),
        }))
def test_selfheal(self):
    """
    The self-heal service replaces a server that is deleted out of band.
    """
    sh_interval = float(os.environ["AS_SELFHEAL_INTERVAL"])
    group, _ = self.helper.create_group(min_entities=1)
    yield group.start(self.rcs, self)
    yield group.wait_for_state(
        self.rcs, ContainsDict({"activeCapacity": Equals(1)}))

    # Delete the server out of band.
    server_id = yield only_server_id(self.rcs, group)
    yield NovaServer(id=server_id, pool=self.helper.pool).delete(self.rcs)

    # Wait for the self-heal service to bring up a new server. This can
    # take up to 2 * the self-heal interval, because the new group may be
    # scheduled to trigger after the last scheduling is already set up.
    yield group.wait_for_state(
        self.rcs,
        ContainsDict({
            "active": MatchesListwise(
                [ContainsDict({"id": NotEquals(server_id)})]),
        }),
        timeout=(sh_interval + convergence_exec_time) * 2)

    # Delete the new server and check that it too is replaced. It should
    # be back within one self-heal interval.
    server_id = yield only_server_id(self.rcs, group)
    yield NovaServer(id=server_id, pool=self.helper.pool).delete(self.rcs)
    yield group.wait_for_state(
        self.rcs,
        ContainsDict({
            "active": MatchesListwise(
                [ContainsDict({"id": NotEquals(server_id)})]),
        }),
        timeout=sh_interval + convergence_exec_time)
def test_run_200(self):
    # A request that returns 200 is a success.
    with CaptureOops() as oopses:
        job, reqs = self.makeAndRunJob(response_status=200)
    self.assertThat(
        job,
        MatchesStructure(
            status=Equals(JobStatus.COMPLETED),
            pending=Is(False),
            successful=Is(True),
            date_sent=Not(Is(None)),
            error_message=Is(None),
            json_data=ContainsDict({
                'result': MatchesAll(
                    KeysEqual('request', 'response'),
                    ContainsDict({
                        'response': ContainsDict(
                            {'status_code': Equals(200)}),
                    })),
            })))
    self.assertEqual(1, len(reqs))
    self.assertEqual([
        ('POST', 'http://example.com/ep', {
            'Content-Type': 'application/json',
            'User-Agent': 'launchpad.dev-Webhooks/r%s' % (
                versioninfo.revision),
            'X-Launchpad-Event-Type': 'test',
            'X-Launchpad-Delivery': str(job.job_id),
        }),
    ], reqs)
    self.assertEqual([], oopses.oopses)
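# A hedged sketch of the MatchesAll + KeysEqual combination used above:
# KeysEqual asserts the exact key set of a dict (values unchecked), and
# MatchesAll requires every listed matcher to pass. The data below is made
# up for illustration.
from testtools import TestCase
from testtools.matchers import ContainsDict, Equals, KeysEqual, MatchesAll


class MatchesAllExample(TestCase):

    def test_keys_and_values(self):
        result = {"request": {"method": "POST"},
                  "response": {"status_code": 200}}
        self.assertThat(
            result,
            MatchesAll(
                # Exactly these two keys, no more, no fewer.
                KeysEqual("request", "response"),
                # And the response must report a 200.
                ContainsDict({
                    "response": ContainsDict(
                        {"status_code": Equals(200)}),
                })))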
def _cli_update_external_subnet_with_underlay_neg(self):
    underlay_states = [False, True]
    for underlay in underlay_states:
        rand_name_str = data_utils.rand_name()
        ext_network_name = "ext-fip-network-" + rand_name_str
        ext_network = self.create_network_with_args(
            ext_network_name, " --router:external")
        ext_subnet_name = (
            "ext-fip-underlay-subnet-update_not-allowed" + rand_name_str)
        underlay_str = "--underlay=" + str(underlay)
        subnet = self.create_subnet_with_args(
            ext_network['name'], nuage_data_utils.gimme_a_cidr_address(),
            "--name ", ext_subnet_name, underlay_str)
        self.assertThat(
            subnet, ContainsDict({'underlay': Equals(str(underlay))}))

        # Check OPENSTACK-721: updating the name of the subnet must work.
        new_name = ext_subnet_name + "name_upd"
        self.update_subnet_with_args(subnet['id'], "--name ", new_name)
        show_subnet = self.show_subnet(subnet['id'])
        updated_name = show_subnet['name']
        self.assertEqual(updated_name, new_name)

        # Check VSD-18778: the underlay state must not have changed.
        self.assertThat(
            show_subnet,
            ContainsDict({'underlay': Equals(str(underlay))}))

        # Updating the read-only underlay attribute must fail.
        new_underlay_str = "--underlay=" + str(not underlay)
        exp_message = "Cannot update read-only attribute underlay"
        self.assertRaisesRegex(
            exceptions.CommandFailed, exp_message,
            self.update_subnet_with_args, subnet['id'], new_underlay_str)
def test_read_includes_default_domain(self):
    defaults = GlobalDefault.objects.instance()
    old_default = Domain.objects.get_default_domain()
    domain = factory.make_Domain()
    defaults.domain = domain
    defaults.save()
    uri = get_domain_uri(domain)
    response = self.client.get(uri)
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_domain = json.loads(
        response.content.decode(settings.DEFAULT_CHARSET))
    self.assertThat(
        parsed_domain, ContainsDict({"is_default": Equals(True)}))
    uri = get_domain_uri(old_default)
    response = self.client.get(uri)
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_domain = json.loads(
        response.content.decode(settings.DEFAULT_CHARSET))
    self.assertThat(
        parsed_domain, ContainsDict({"is_default": Equals(False)}))
def test_draining(self):
    """
    When a draining timeout is provided in the launch config, the server
    is put into DRAINING for that long before being removed from the CLB.
    """
    # Create a group with a CLB and a draining timeout.
    group, _ = self.helper.create_group(
        max_entities=5, draining_timeout=30)
    yield group.start(self.rcs, self)

    # Execute a policy to scale up and extract the server's IP.
    policy = ScalingPolicy(scale_by=1, scaling_group=group)
    yield policy.start(self.rcs, self)
    yield policy.execute(self.rcs)
    yield group.wait_for_state(
        self.rcs,
        ContainsDict({
            "activeCapacity": Equals(1),
            "pendingCapacity": Equals(0),
            "desiredCapacity": Equals(1),
            "status": Equals("ACTIVE"),
        }))
    ip = (yield group.get_servicenet_ips(self.rcs)).values()[0]

    # Scale down.
    policy = ScalingPolicy(scale_by=-1, scaling_group=group)
    yield policy.start(self.rcs, self)
    yield policy.execute(self.rcs)

    # The corresponding CLB node should be draining.
    clb = self.helper.clbs[0]
    yield clb.wait_for_nodes(
        self.rcs,
        MatchesListwise([
            ContainsDict({
                "address": Equals(ip),
                "condition": Equals("DRAINING"),
            }),
        ]),
        15,  # timeout
        2)   # interval

    # After 30s the node should be removed. Allow an extra 5s for feed
    # latency plus two extra convergence intervals, assuming the 35s pass
    # at the end of one cycle: the next cycle would remove the node, and
    # we wait for one more for safety.
    yield clb.wait_for_nodes(
        self.rcs, HasLength(0), 30 + 5 + convergence_interval * 2)
    yield group.wait_for_state(
        self.rcs,
        ContainsDict({
            "activeCapacity": Equals(0),
            "pendingCapacity": Equals(0),
            "desiredCapacity": Equals(0),
            "status": Equals("ACTIVE"),
        }))
def test_scheme(self):
    """
    ``scheme`` is set according to whether the request is secure.
    """
    self.assertThat(
        _nevow_request_to_request_map(fakeNevowRequest(is_secure=False)),
        ContainsDict({'scheme': Equals(b'http')}))
    self.assertThat(
        _nevow_request_to_request_map(fakeNevowRequest(is_secure=True)),
        ContainsDict({'scheme': Equals(b'https')}))
def test_snapshot_local_parent(self, content, filename):
    """
    Create a local snapshot and then another local snapshot with the
    first as its parent. Then upload both at once.
    """
    data = io.BytesIO(content)
    # Create a LocalSnapshot.
    local_snapshot = success_result_of(
        create_snapshot(
            relpath=filename,
            author=self.alice,
            data_producer=data,
            snapshot_stash_dir=self.stash_dir,
            parents=[],
            cooperator=self.uncooperator,
        ))
    # Create another LocalSnapshot with the first as its parent.
    child_snapshot = success_result_of(
        create_snapshot(
            relpath=filename,
            author=self.alice,
            data_producer=data,
            snapshot_stash_dir=self.stash_dir,
            parents=[local_snapshot],
            cooperator=self.uncooperator,
        ))
    # Turn them both into RemoteSnapshots.
    remote_snapshot = success_result_of(
        write_snapshot_to_tahoe(
            child_snapshot, self.alice, self.tahoe_client))
    # ...the last thing we wrote is now a RemoteSnapshot and should have
    # a single parent.
    self.assertThat(
        remote_snapshot,
        MatchesStructure(
            metadata=ContainsDict({"relpath": Equals(filename)}),
            parents_raw=AfterPreprocessing(len, Equals(1)),
        ))
    # Turn the parent into a RemoteSnapshot.
    parent_snapshot = success_result_of(
        create_snapshot_from_capability(
            Capability.from_string(remote_snapshot.parents_raw[0]),
            self.tahoe_client,
        ))
    self.assertThat(
        parent_snapshot,
        MatchesStructure(
            metadata=ContainsDict({"relpath": Equals(filename)}),
            parents_raw=Equals([]),
        ))
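# A short sketch of the AfterPreprocessing + MatchesStructure combination
# used above: apply a function to an attribute of the observed object, then
# match the result. The FakeSnapshot stand-in and its capability string are
# hypothetical, purely for illustration.
from testtools import TestCase
from testtools.matchers import AfterPreprocessing, Equals, MatchesStructure


class FakeSnapshot(object):
    def __init__(self, parents_raw):
        self.parents_raw = parents_raw


class AfterPreprocessingExample(TestCase):

    def test_parent_count(self):
        snapshot = FakeSnapshot(parents_raw=["URI:example-capability"])
        # len() runs on parents_raw before Equals(1) is applied.
        self.assertThat(
            snapshot,
            MatchesStructure(
                parents_raw=AfterPreprocessing(len, Equals(1))))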
def test_pause_getstate(self):
    """
    A paused group's state will say paused:True
    """
    group, _ = self.helper.create_group()
    yield group.start(self.rcs, self)
    yield self.helper.assert_group_state(
        group, ContainsDict({"paused": Equals(False)}))
    yield group.pause(self.rcs)
    yield self.helper.assert_group_state(
        group, ContainsDict({"paused": Equals(True)}))
def test_isolation_level_is_repeatable_read(self):
    # Transactions *must* be REPEATABLE READ for the default connection.
    self.assertThat(
        connections.databases,
        ContainsDict({
            "default": ContainsDict({
                "OPTIONS": ContainsDict({
                    "isolation_level": Equals(
                        ISOLATION_LEVEL_REPEATABLE_READ),
                }),
            }),
        }),
    )
def test_default(self):
    """
    Default content parsers.
    """
    interceptors = [body_params()]
    context = empty_context.set(REQUEST, self.request())
    self.assertThat(
        execute(context, interceptors),
        succeeded(
            ContainsDict({
                REQUEST: ContainsDict(
                    {'json_params': Equals(freeze(self.payload()))}),
            })))
def test_custom(self):
    """
    Custom content parsers.
    """
    interceptors = [
        body_params(
            default_parsers(json_options=dict(object_hook=lambda _: 42))),
    ]
    context = empty_context.set(REQUEST, self.request())
    self.assertThat(
        execute(context, interceptors),
        succeeded(
            ContainsDict(
                {REQUEST: ContainsDict({'json_params': Equals(42)})})))
def assertAptConfig(self, config, apt_proxy): archive = PackageRepository.objects.get_default_archive("amd64") components = set(archive.KNOWN_COMPONENTS) if archive.disabled_components: for comp in archive.COMPONENTS_TO_DISABLE: if comp in archive.disabled_components: components.remove(comp) components = " ".join(components) sources_list = "deb %s $RELEASE %s\n" % (archive.url, components) if archive.disable_sources: sources_list += "# " sources_list += "deb-src %s $RELEASE %s\n" % (archive.url, components) for pocket in archive.POCKETS_TO_DISABLE: if pocket in archive.disabled_pockets: continue sources_list += "deb %s $RELEASE-%s %s\n" % ( archive.url, pocket, components, ) if archive.disable_sources: sources_list += "# " sources_list += "deb-src %s $RELEASE-%s %s\n" % ( archive.url, pocket, components, ) self.assertThat( config, ContainsDict({ "apt": ContainsDict({ "preserve_sources_list": Equals(False), "proxy": Equals(apt_proxy), "sources_list": Equals(sources_list), }) }), ) self.assertEqual( config["snap"], { "commands": [ f'snap set system proxy.http="{apt_proxy}" proxy.https="{apt_proxy}"', ], }, )
def test_non_standard_port(self):
    """
    Parse a non-standard port from the ``Host`` header.
    """
    request = fake_nevow_request(uri=u'http://example.com:5144/')
    self.assertThat(
        _nevow_request_to_request_map(request),
        ContainsDict({
            'headers': ContainsDict(
                {b'Host': Equals([b'example.com:5144'])}),
            'server_name': Equals(b'example.com'),
            'server_port': Equals(5144),
        }))
def test_read(self):
    node = factory.make_Node()
    cache_set = factory.make_CacheSet(node=node)
    backing_block_device = factory.make_PhysicalBlockDevice(node=node)
    backing_filesystem = factory.make_Filesystem(
        fstype=FILESYSTEM_TYPE.BCACHE_BACKING,
        block_device=backing_block_device,
    )
    bcache = factory.make_FilesystemGroup(
        group_type=FILESYSTEM_GROUP_TYPE.BCACHE,
        cache_set=cache_set,
        filesystems=[backing_filesystem],
    )
    uri = get_bcache_device_uri(bcache)
    response = self.client.get(uri)
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_bcache = json_load_bytes(response.content)
    self.assertThat(
        parsed_bcache,
        ContainsDict({
            "id": Equals(bcache.id),
            "uuid": Equals(bcache.uuid),
            "name": Equals(bcache.name),
            "size": Equals(bcache.get_size()),
            "human_size": Equals(
                human_readable_bytes(bcache.get_size())),
            "resource_uri": Equals(get_bcache_device_uri(bcache)),
            "virtual_device": ContainsDict(
                {"id": Equals(bcache.virtual_device.id)}),
            "cache_set": ContainsDict({
                "id": Equals(cache_set.id),
                "name": Equals(cache_set.name),
            }),
            "backing_device": ContainsDict(
                {"id": Equals(backing_block_device.id)}),
            "system_id": Equals(bcache.get_node().system_id),
        }),
    )
def test_requestProxyToken(self):
    branch = self.factory.makeBranch()
    job = self.makeJob(branch=branch)
    yield job.extraBuildArgs()
    self.assertThat(
        self.proxy_api.tokens.requests,
        MatchesListwise([
            MatchesDict({
                "method": Equals("POST"),
                "uri": Equals(
                    urlsplit(
                        config.snappy.builder_proxy_auth_api_endpoint
                    ).path),
                "headers": ContainsDict({
                    b"Authorization": MatchesListwise([
                        Equals(b"Basic " + base64.b64encode(
                            b"admin-launchpad.dev:admin-secret")),
                    ]),
                    b"Content-Type": MatchesListwise([
                        Equals(b"application/json; charset=UTF-8"),
                    ]),
                }),
                "content": AfterPreprocessing(
                    json.loads,
                    MatchesDict({
                        "username": StartsWith(
                            job.build.build_cookie + "-"),
                    })),
            }),
        ]))
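# An illustrative contrast (assumed data, not from the test above) between
# the two dict matchers combined in test_requestProxyToken: MatchesDict
# requires exactly the listed keys, while ContainsDict tolerates extras.
# That is why the outer request dict uses MatchesDict but the headers,
# which carry many incidental entries, use ContainsDict.
from testtools import TestCase
from testtools.matchers import ContainsDict, Equals, MatchesDict


class DictMatcherContrastExample(TestCase):

    def test_exact_versus_partial(self):
        observed = {"method": "POST", "uri": "/token"}
        # MatchesDict fails if the observed dict has unlisted keys.
        self.assertThat(observed, MatchesDict({
            "method": Equals("POST"),
            "uri": Equals("/token"),
        }))
        # ContainsDict checks only the listed keys.
        self.assertThat(observed, ContainsDict({"method": Equals("POST")}))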
def test_representation(self):
    with admin_logged_in():
        faq = self.factory.makeFAQ(title="Nothing works")
        with notify_modified(faq, ['keywords', 'content'], user=faq.owner):
            faq.keywords = "foo bar"
            faq.content = "It is all broken."
        faq_url = api_url(faq)
    webservice = webservice_for_person(self.factory.makePerson())
    representation = webservice.get(
        faq_url, api_version='devel').jsonBody()
    with admin_logged_in():
        self.assertThat(
            representation,
            ContainsDict({
                "id": Equals(faq.id),
                "title": Equals("Nothing works"),
                "keywords": Equals("foo bar"),
                "content": Equals("It is all broken."),
                "date_created": MatchesRegex(r"\d\d\d\d-\d\d-\d\dT.*"),
                "date_last_updated": MatchesRegex(
                    r"\d\d\d\d-\d\d-\d\dT.*"),
                "last_updated_by_link": Contains(
                    "/devel/~%s" % faq.owner.name),
                "target_link": Contains("/devel/%s" % faq.target.name),
            }))
def test_defaults_to_process_environment(self):
    name = factory.make_name("name")
    value = factory.make_name("value")
    with EnvironmentVariable(name, value):
        self.assertThat(
            get_env_with_locale(),
            ContainsDict({name: Equals(value)}),
        )
def test_tryConnection_logs_error(self):
    listener = PostgresListenerService()
    exception_type = factory.make_exception_type()
    exception_message = factory.make_name("message")
    startConnection = self.patch(listener, "startConnection")
    startConnection.side_effect = exception_type(exception_message)
    with TwistedLoggerFixture() as logger:
        with ExpectedException(exception_type):
            yield listener.tryConnection()
    self.assertThat(logger.events, HasLength(1))
    self.assertThat(
        logger.events[0],
        ContainsDict({
            "log_format": Equals(
                "Unable to connect to database: {error}"),
            "log_level": Equals(LogLevel.error),
            "error": Equals(exception_message),
        }),
    )
def test_unmount_unmounts_filesystem_as_user(self):
    node = factory.make_Node(status=NODE_STATUS.ALLOCATED, owner=self.user)
    partition = self.make_partition(node)
    filesystem = factory.make_Filesystem(
        partition=partition, mount_point="/mnt", acquired=True)
    uri = get_partition_uri(partition)
    response = self.client.post(uri, {"op": "unmount"})
    content = response.content.decode(settings.DEFAULT_CHARSET)
    self.assertEqual(http.client.OK, response.status_code, content)
    self.assertThat(
        json.loads(content)["filesystem"],
        ContainsDict({
            "mount_point": Is(None),
            "mount_options": Is(None),
        }),
    )
    self.assertThat(
        reload_object(filesystem),
        MatchesStructure(
            mount_point=Is(None),
            mount_options=Is(None),
            is_mounted=Is(False),
        ),
    )