def test_all_unpaired_dataset_providers_should_honor_excludes(
        dataset_provider_cls_name, patched_excluded):
    """Unpaired dataset providers must not emit excluded labels/features.

    Builds a TRAIN spec with ``with_excludes=False`` over a curated fake
    provider, scans up to 100 batches, and checks that every encountered
    (rounded) feature/label value is outside the patched exclude list and
    inside the expected remainder {0, 1, 4}.
    """
    provider_cls = from_class_name(dataset_provider_cls_name)
    raw_data_provider = FakeRawDataProvider(curated=True)
    dataset_spec = DatasetSpec(raw_data_provider, DatasetType.TRAIN,
                               with_excludes=False, encoding=False,
                               paired=False)
    provider = provider_cls(raw_data_provider)
    dataset = provider.supply_dataset(dataset_spec, batch_size=1).take(100)
    encountered_labels = set()
    for batch in dataset:
        # Features are shifted/scaled back into label space before rounding;
        # assumes fake features encode the label as (label/10 - 0.5) — TODO confirm.
        image = np.rint(
            (batch[0][consts.FEATURES].numpy().flatten()[0] + 0.5) * 10)
        label = batch[1][consts.LABELS].numpy().flatten()[0]
        encountered_labels.update((image, ))
        encountered_labels.update((label, ))
    assert_that((np.rint(list(encountered_labels))),
                only_contains(not_(is_in(list(patched_excluded.numpy())))))
    assert_that((np.rint(list(encountered_labels))),
                only_contains((is_in([0, 1, 4]))))
def test_build_nested_properties(self):
    """NestedProperties.build: None passes through; str and tuple inputs
    are accepted, with snake_case segments normalized to camelCase."""
    np1 = NestedProperties.build(None)
    assert_that(np1, equal_to(None))
    np2 = NestedProperties.build('aa.bb')
    assert_that(np2.query_fields, only_contains('aa.bb'))
    # 'aa_bb' is converted to 'aaBb' in the resulting query fields.
    np3 = NestedProperties.build(('aa_bb.cc', 'cc.dd'))
    assert_that(np3.query_fields, only_contains('aaBb.cc', 'cc.dd'))
def test_security_group_on_vrouter(server, security_group,
                                   client_contrail_vrouter_agents,
                                   os_faults_steps,
                                   neutron_create_security_group):
    """Check that server's compute vRouter "know" about security groups.

    Steps:
        #. Create server with security group
        #. Check that security group uuid is present on server's vRouter
           agent /Snh_SgListReq reply
        #. Create new security group
        #. Add created security group to server
        #. Check that new security group uuid is present on server's
           vRouter agent /Snh_SgListReq reply
    """
    # Resolve the vRouter agent client for the compute hosting the server.
    compute_host = getattr(server, stepler_config.SERVER_ATTR_HOST)
    compute_fqdn = os_faults_steps.get_fqdn_by_host_name(compute_host)
    vrouter_agent = client_contrail_vrouter_agents[compute_fqdn]
    sg_list = vrouter_agent.get_sg_list()['SgListResp']['sg_list']
    assert_that(sg_list,
                only_contains(has_entries(sg_uuid=security_group['id'])))
    new_security_group = neutron_create_security_group(
        next(utils.generate_ids()))
    server.add_security_group(new_security_group['id'])
    # After attaching, both the original and the new SG must be visible.
    sg_list = vrouter_agent.get_sg_list()['SgListResp']['sg_list']
    assert_that(
        sg_list,
        only_contains(has_entries(sg_uuid=security_group['id']),
                      has_entries(sg_uuid=new_security_group['id'])))
def test_host_with_lun(self):
    """VNXHost exposes attached LUN ids and the ALU->HLU mapping."""
    host = VNXHost.get(cli=t_cli(), name='ubuntu14')
    assert_that(host.lun_list.lun_id, only_contains(4, 15))
    # ALU (array LUN id) maps to the HLU (host LUN id) seen by the host.
    assert_that(host.alu_hlu_map[4], equal_to(14))
    assert_that(host.alu_hlu_map[15], equal_to(154))
    assert_that(host.alu_ids, only_contains(4, 15))
    assert_that(host.hlu_ids, only_contains(14, 154))
def test_accept_csv_with_CRLF_as_line_separator(self):
    """parse_csv accepts CRLF (\\r\\n) line endings."""
    csv = u"prop1,prop2\r\nvalue 1,value 2"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data,
                only_contains(only_contains(["prop1", "prop2"],
                                            ["value 1", "value 2"])))
def test_ignore_when_empty_row(self):
    """parse_csv drops rows whose cells are all empty."""
    csv = u"a,b\n,\nc,d"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data,
                only_contains(only_contains(["a", "b"], ["c", "d"])))
def test_ignore_values_in_comments_column(self):
    """parse_csv strips any column whose header is 'comment'."""
    csv = u"a,comment,b\nc,d,e"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data,
                only_contains(only_contains(["a", "b"], ["c", "e"])))
def test_accept_when_some_values_empty(self):
    """Rows with only some empty cells are kept (empty string preserved);
    fully empty rows are still dropped."""
    csv = u"a,b\n,\nc,d\nc,"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data,
                only_contains(only_contains(["a", "b"],
                                            ["c", "d"],
                                            ["c", ""])))
def check_monthly_period_range(start_date, end_date, frequency, expected):
    """Check that period_range() splits 'monthly' ranges on calendar-month
    boundaries, snapping a mid-month start back to the first of the month.

    NOTE(review): the start_date/end_date/frequency/expected parameters are
    accepted but never used; all inputs below are hard-coded. Signature kept
    for caller compatibility.
    """
    # A range fully inside one month yields a single (first, last) pair.
    # Renamed from `range` to avoid shadowing the builtin.
    month_range = period_range(date(2013, 3, 1), date(2013, 3, 31), 'monthly')
    assert_that(month_range, only_contains(
        (date(2013, 3, 1), date(2013, 3, 31))
    ))
    another_range = list(
        period_range(
            date(2012, 12, 1), date(2013, 2, 28), 'monthly')
    )
    # Debug output; call form works on both Python 2 and 3
    # (the original `print another_range` is a Python-3 syntax error).
    print(another_range)
    assert_that(another_range, only_contains(
        (date(2012, 12, 1), date(2012, 12, 31)),
        (date(2013, 1, 1), date(2013, 1, 31)),
        (date(2013, 2, 1), date(2013, 2, 28)),
    ))
    # A start date mid-month (Dec 20) still produces full calendar months.
    another_range = list(
        period_range(
            date(2012, 12, 20), date(2013, 2, 28), 'monthly')
    )
    print(another_range)
    assert_that(another_range, only_contains(
        (date(2012, 12, 1), date(2012, 12, 31)),
        (date(2013, 1, 1), date(2013, 1, 31)),
        (date(2013, 2, 1), date(2013, 2, 28)),
    ))
def check_monthly_period_range(start_date, end_date, frequency, expected):
    """Check that period_range() splits 'monthly' ranges on calendar-month
    boundaries, snapping a mid-month start back to the first of the month.

    NOTE(review): the start_date/end_date/frequency/expected parameters are
    accepted but never used; all inputs below are hard-coded. Signature kept
    for caller compatibility.
    """
    # Renamed from `range` to avoid shadowing the builtin.
    month_range = period_range(date(2013, 3, 1), date(2013, 3, 31), 'monthly')
    assert_that(month_range,
                only_contains((date(2013, 3, 1), date(2013, 3, 31))))
    another_range = list(
        period_range(date(2012, 12, 1), date(2013, 2, 28), 'monthly'))
    # Debug output; call form works on both Python 2 and 3
    # (the original `print another_range` is a Python-3 syntax error).
    print(another_range)
    assert_that(
        another_range,
        only_contains(
            (date(2012, 12, 1), date(2012, 12, 31)),
            (date(2013, 1, 1), date(2013, 1, 31)),
            (date(2013, 2, 1), date(2013, 2, 28)),
        ))
    # A start date mid-month (Dec 20) still produces full calendar months.
    another_range = list(
        period_range(date(2012, 12, 20), date(2013, 2, 28), 'monthly'))
    print(another_range)
    assert_that(
        another_range,
        only_contains(
            (date(2012, 12, 1), date(2012, 12, 31)),
            (date(2013, 1, 1), date(2013, 1, 31)),
            (date(2013, 2, 1), date(2013, 2, 28)),
        ))
def test_ignore_comments(self):
    """parse_csv skips lines beginning with '#' wherever they appear."""
    csv = u"# top comment\na,b\n# any random comment\nc,d"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data,
                only_contains(only_contains(["a", "b"], ["c", "d"])))
def test_preserve_newlines_in_quoted_values(self):
    """A newline inside a quoted cell stays part of the value
    rather than starting a new row."""
    csv = u'prop1,prop2\nvalue,"value\nwith newline"'
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data,
                only_contains(only_contains(["prop1", "prop2"],
                                            ["value",
                                             "value\nwith newline"])))
def event_received():
    """Assert accumulated events all target real_app while the parasite
    accumulator only saw events for other applications.

    Relies on ``accumulator``, ``parasite_accumulator`` and ``real_app``
    from the enclosing scope.
    """
    assert_that(accumulator.accumulate(), only_contains(
        has_entries(data=has_entries(application=real_app))
    ))
    assert_that(parasite_accumulator.accumulate(), only_contains(
        has_entries(data=has_entries(application=is_not(real_app)))
    ))
def event_received(events, app):
    """Assert all accumulated events belong to ``app`` while the parasite
    accumulator (from the enclosing scope) received none for it."""
    assert_that(events.accumulate(), only_contains(
        has_entry('application', app)
    ))
    assert_that(parasite_events.accumulate(), only_contains(
        has_entry('application', is_not(app))
    ))
def test_in_date_delta(self):
    """in_date_delta selects trading days within a signed day offset of the
    anchor date (negative = backwards, positive = forwards, inclusive)."""
    stock = self.get_stock_20day()
    assert_that(
        stock.in_date_delta(-4, 20110110).index,
        only_contains(20110106, 20110107, 20110110))
    assert_that(
        stock.in_date_delta(3, 20110110).index,
        only_contains(20110110, 20110111, 20110112, 20110113))
def test_add_same_twice(self):
    """Granting rw to the same host repeatedly must not duplicate it, and
    must remove it from the no_access list."""
    config = UnityNfsHostConfig(no_access=[host('Host_9')])
    config.allow_rw(host('Host_9'), host('Host_9'))
    config.allow_rw(host('Host_9'))
    assert_that(len(config.rw), equal_to(1))
    assert_that(config.rw, only_contains(host('Host_9')))
    assert_that(config.no_access, equal_to([]))
    assert_that(config.ro, none())
    # rw access implies root access for the same host.
    assert_that(config.root, only_contains(host('Host_9')))
def test_get_child_subtree(self):
    """get_child_subtree navigates one level of the dotted property tree
    and exposes the next-level property names."""
    nested_props = NestedProperties('ab_cd.ef.c', 'ab_cd.ef.d',
                                    'aaa_bb.ccc_dd', 'aaa_bb.ee_ff')
    sub1 = nested_props.get_child_subtree('ab_cd')
    assert_that(sub1.get_properties(), only_contains('ef'))
    sub1sub = sub1.get_child_subtree('ef')
    assert_that(sub1sub.get_properties(), only_contains('c', 'd'))
    sub2 = nested_props.get_child_subtree('aaa_bb')
    assert_that(sub2.get_properties(), only_contains('ccc_dd', 'ee_ff'))
def test_search_performance(self):
    """Run 10 concurrent searches; each must return >= 5 hits and respond
    within 1000 ms."""
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        number_of_threads = 10
        futures = [executor.submit(self.measure_search)
                   for i in range(number_of_threads)]
        # Both comprehensions iterate completed futures; .result() is
        # idempotent once a future is done, so reading twice is safe.
        response_times = [completed.result()["response time"]
                          for completed in as_completed(futures)]
        hit_numbers = [completed.result()["number of hits"]
                       for completed in as_completed(futures)]
        assert_that(hit_numbers,
                    only_contains(greater_than_or_equal_to(5)))
        assert_that(response_times,
                    only_contains(less_than_or_equal_to(1000)))
def test_put():
    """HA PUT lifecycle: no-op disable, enable as master (registrars pick
    up the remote address as backup), then disable (registrars reset)."""
    # Disabling HA already disabled does not fail
    body = {'node_type': 'disabled'}
    result = confd.ha.put(body)
    result.assert_status(204)
    # Enable HA
    body = {'node_type': 'master', 'remote_address': '10.10.10.10'}
    result = confd.ha.put(body)
    result.assert_status(204)
    # confd must have forwarded the config change to sysconfd.
    sysconfd.assert_request(
        '/update_ha_config',
        method='POST',
        json={
            'node_type': 'master',
            'remote_address': '10.10.10.10'
        },
    )
    assert_that(
        confd.registrars.get().json,
        has_entry(
            'items',
            only_contains(
                has_entries({
                    'backup_host': '10.10.10.10',
                    'proxy_backup_host': '10.10.10.10'
                })),
        ),
    )
    # Disable HA = reset sip/provd options to default
    body = {'node_type': 'disabled'}
    result = confd.ha.put(body)
    result.assert_status(204)
    sysconfd.assert_request(
        '/update_ha_config',
        method='POST',
        json={
            'node_type': 'disabled',
            'remote_address': ''
        },
    )
    assert_that(
        confd.registrars.get().json,
        has_entry(
            'items',
            only_contains(
                has_entries({
                    'backup_host': None,
                    'proxy_backup_host': None
                })),
        ),
    )
def test_ignore_values_in_comments_column(self):
    """parse_csv strips any column whose header is 'comment'."""
    csv = u"a,comment,b\nc,d,e"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(only_contains(
        ["a", "b"],
        ["c", "e"],
    )))
def test_ignore_comments(self):
    """parse_csv skips lines beginning with '#' wherever they appear."""
    csv = u"# top comment\na,b\n# any random comment\nc,d"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(only_contains(
        ["a", "b"],
        ["c", "d"],
    )))
def test_ignore_when_empty_row(self):
    """parse_csv drops rows whose cells are all empty."""
    csv = u"a,b\n,\nc,d"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(only_contains(
        ["a", "b"],
        ["c", "d"],
    )))
def test_accept_csv_with_CR_as_line_separator(self):
    """parse_csv accepts bare CR (\\r) line endings (classic Mac style)."""
    csv = u"prop1,prop2\rvalue 1,value 2"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(only_contains(
        ["prop1", "prop2"],
        ["value 1", "value 2"],
    )))
def test_accept_when_some_values_empty(self):
    """Rows with only some empty cells are kept (empty string preserved);
    fully empty rows are still dropped."""
    csv = u"a,b\n,\nc,d\nc,"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(only_contains(
        ["a", "b"],
        ["c", "d"],
        ["c", ""],
    )))
def test_preserve_newlines_in_quoted_values(self):
    """A newline inside a quoted cell stays part of the value
    rather than starting a new row."""
    csv = u"prop1,prop2\nvalue,\"value\nwith newline\""
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(only_contains(
        ["prop1", "prop2"],
        ["value", "value\nwith newline"],
    )))
def test_add_ro(self):
    """allow_ro/allow_rw move hosts out of no_access; ro and rw hosts
    both appear in the root list."""
    config = UnityNfsHostConfig(no_access=[host('Host_1'), host('Host_9')])
    config.allow_ro(host('Host_1'), host('Host_11'))
    config.allow_rw(host('Host_9'))
    assert_that(
        config.root,
        only_contains(host('Host_1'), host('Host_9'), host('Host_11')))
    assert_that(config.ro, only_contains(host('Host_1'), host('Host_11')))
    assert_that(config.rw, only_contains(host('Host_9')))
    assert_that(len(config.no_access), equal_to(0))
def event_received():
    """Assert accumulated events (including bus headers) target real_app;
    the parasite accumulator only saw events for other applications.

    Relies on ``accumulator``, ``parasite_accumulator`` and ``real_app``
    from the enclosing scope.
    """
    events = accumulator.accumulate(with_headers=True)
    assert_that(events, only_contains(
        has_entries(
            headers=has_entries(application_name=real_app,
                                category='stasis'),
            message=has_entries(data=has_entries(application=real_app)),
        ),
    ))
    assert_that(parasite_accumulator.accumulate(), only_contains(
        has_entries(data=has_entries(application=is_not(real_app))),
    ))
def test_parsing_numbers_in_cells(self):
    """Numeric-looking cells are converted to int/float; other cells stay
    as strings."""
    csv = u"int,float,string\n12,12.1,a string"
    csv_stream = _string_io(csv, "utf-8")
    data = parse_csv(csv_stream)
    print(list(data[0]))  # debug aid left by the author
    assert_that(data, only_contains(only_contains(
        ["int", "float", "string"],
        [12, 12.1, "a string"],
    )))
def test_daily_period_range():
    """'daily' frequency yields one (day, day) pair per calendar day,
    inclusive of both endpoints."""
    # NOTE(review): local name shadows the builtin `range`.
    range = period_range(date(2013, 4, 1), date(2013, 4, 1), 'daily')
    assert_that(range, only_contains(
        (date(2013, 4, 1), date(2013, 4, 1))
    ))
    another_range = period_range(date(2013, 4, 1), date(2013, 4, 3), 'daily')
    assert_that(another_range, only_contains(
        (date(2013, 4, 1), date(2013, 4, 1)),
        (date(2013, 4, 2), date(2013, 4, 2)),
        (date(2013, 4, 3), date(2013, 4, 3)),
    ))
def test_period_range():
    """Default frequency splits the interval into 7-day (weekly) periods."""
    # NOTE(review): local name shadows the builtin `range`.
    range = period_range(date(2013, 4, 1), date(2013, 4, 7))
    assert_that(range, only_contains(
        (date(2013, 4, 1), date(2013, 4, 7))
    ))
    another_range = period_range(date(2013, 4, 1), date(2013, 4, 21))
    assert_that(another_range, only_contains(
        (date(2013, 4, 1), date(2013, 4, 7)),
        (date(2013, 4, 8), date(2013, 4, 14)),
        (date(2013, 4, 15), date(2013, 4, 21)),
    ))
def test_daily_period_range():
    """'daily' frequency yields one (day, day) pair per calendar day,
    inclusive of both endpoints."""
    # NOTE(review): local name shadows the builtin `range`.
    range = period_range(date(2013, 4, 1), date(2013, 4, 1), 'daily')
    assert_that(range, only_contains((date(2013, 4, 1), date(2013, 4, 1))))
    another_range = period_range(date(2013, 4, 1), date(2013, 4, 3), 'daily')
    assert_that(
        another_range,
        only_contains(
            (date(2013, 4, 1), date(2013, 4, 1)),
            (date(2013, 4, 2), date(2013, 4, 2)),
            (date(2013, 4, 3), date(2013, 4, 3)),
        ))
def test_weekly_period_range():
    """'weekly' frequency splits the interval into consecutive 7-day
    (Mon-Sun) periods."""
    # NOTE(review): local name shadows the builtin `range`.
    range = period_range(date(2013, 4, 1), date(2013, 4, 7), 'weekly')
    assert_that(range, only_contains((date(2013, 4, 1), date(2013, 4, 7))))
    another_range = period_range(date(2013, 4, 1), date(2013, 4, 21),
                                 'weekly')
    assert_that(
        another_range,
        only_contains(
            (date(2013, 4, 1), date(2013, 4, 7)),
            (date(2013, 4, 8), date(2013, 4, 14)),
            (date(2013, 4, 15), date(2013, 4, 21)),
        ))
def test_nested_properties(self):
    """Nested host properties (FC/iSCSI initiators and their paths) are
    resolved through the REST client."""
    host = UnityHost(_id='Host_12', cli=t_rest())
    assert_that(
        host.fc_host_initiators.initiator_id,
        only_contains('20:00:00:00:C9:F3:AB:0C:10:00:00:00:C9:F3:AB:0C',
                      '20:00:00:00:C9:F3:AB:0D:10:00:00:00:C9:F3:AB:0D'))
    assert_that(host.iscsi_host_initiators.initiator_id, only_contains(
        'iqn.1998-01.com.vmware:esx239209-7e7a57a4'))
    # Deeply nested: initiator -> path -> fc_port.
    assert_that(host.fc_host_initiators[0].paths[0].is_logged_in,
                equal_to(True))
    assert_that(
        host.fc_host_initiators[1].paths[0].fc_port.wwn,
        equal_to('50:06:01:60:C7:E0:01:DA:50:06:01:6C:47:E0:01:DA'))
def test_add_ro(self):
    """allow_ro/allow_rw move hosts out of no_access; ro and rw hosts
    both appear in the root list."""
    config = UnityNfsHostConfig(
        no_access=[host('Host_1'), host('Host_9')])
    config.allow_ro(host('Host_1'), host('Host_11'))
    config.allow_rw(host('Host_9'))
    assert_that(config.root,
                only_contains(host('Host_1'), host('Host_9'),
                              host('Host_11')))
    assert_that(config.ro, only_contains(host('Host_1'), host('Host_11')))
    assert_that(config.rw, only_contains(host('Host_9')))
    assert_that(len(config.no_access), equal_to(0))
def test_index_some_directories():
    """create_index builds a nested directory index from os.walk-style
    tuples, with paths relative to the target root."""
    target = 'foo/'
    # Simulated os.walk output: root with two subdirs, one file each.
    scanner = [(target, ['one', 'two'], []),
               (target + 'one', [], ['zoidberg.txt']),
               (target + 'two', [], ['zoidberg.txt'])]
    index = dir2json.create_index(target, scanner)
    dirone = has_entry('index',
                       only_contains(has_entry('path', 'one/zoidberg.txt')))
    dirtwo = has_entry('index',
                       only_contains(has_entry('path', 'two/zoidberg.txt')))
    assert_that(index, has_entry('type', 'directory'))
    assert_that(index, has_entry('index', only_contains(dirone, dirtwo)))
async def test_exercises_list_content(create_app):
    """GET /exercises returns a JSON body whose every exercise has a
    string name."""
    app, url, client_session = await create_app()
    async with client_session.get('{}/exercises'.format(url)) as response:
        content = await response.json()
        assert_that(content, has_entries(
            exercises=only_contains(
                has_entries(name=instance_of(str)))))
def test_parse_csv(self):
    """parse_csv maps each data row onto the header row, producing one
    dict per row."""
    csv_stream = StringIO("a,b\nx,y\nq,w")
    data = parse_csv(csv_stream)
    assert_that(data,
                only_contains({"a": "x", "b": "y"},
                              {"a": "q", "b": "w"}))
def test_get_lun_sv2_simple_property(self):
    """Spot-check every simple property of LUN sv_2 against the canned
    REST fixture (sizes, WWNs, flags, and related resources)."""
    lun = UnityLun(_id='sv_2', cli=t_rest())
    assert_that(lun.existed, equal_to(True))
    assert_that(lun.id, equal_to('sv_2'))
    assert_that(lun.name, equal_to('openstack_lun'))
    assert_that(lun.description, equal_to('sample'))
    assert_that(lun.size_total, equal_to(107374182400))
    assert_that(lun.total_size_gb, equal_to(100))
    assert_that(lun.size_allocated, equal_to(0))
    # One entry per storage tier.
    assert_that(lun.per_tier_size_used, only_contains(2952790016, 0, 0))
    assert_that(lun.is_thin_enabled, equal_to(True))
    assert_that(
        lun.wwn,
        equal_to('60:06:01:60:17:50:3C:00:C2:0A:D5:56:92:D1:BA:12'))
    assert_that(lun.is_replication_destination, equal_to(False))
    assert_that(lun.is_snap_schedule_paused, equal_to(False))
    assert_that(lun.metadata_size, equal_to(5100273664))
    assert_that(lun.metadata_size_allocated, equal_to(2684354560))
    assert_that(
        lun.snap_wwn,
        equal_to('60:06:01:60:17:50:3C:00:C4:0A:D5:56:00:95:DE:11'))
    assert_that(lun.snaps_size, equal_to(0))
    assert_that(lun.snaps_size_allocated, equal_to(0))
    assert_that(lun.snap_count, equal_to(0))
    # Related resources resolve to typed objects (or None).
    assert_that(lun.storage_resource, instance_of(UnityStorageResource))
    assert_that(lun.pool, instance_of(UnityPool))
    assert_that(lun.io_limit_rule, none())
    assert_that(lun.is_compression_enabled, equal_to(False))
    assert_that(lun.is_data_reduction_enabled, equal_to(False))
def test_get_properties(self):
    """Spot-check every property of the DPE (disk processor enclosure)
    against the canned REST fixture."""
    dpe = UnityDpe('dpe', cli=t_rest())
    assert_that(dpe.existed, equal_to(True))
    assert_that(dpe.health, instance_of(UnityHealth))
    assert_that(dpe.needs_replacement, equal_to(False))
    assert_that(dpe.slot_number, equal_to(0))
    assert_that(dpe.name, equal_to('DPE'))
    assert_that(dpe.manufacturer, equal_to(''))
    assert_that(dpe.model, equal_to('OBERON 25 DRIVE CHASSIS'))
    assert_that(dpe.emc_part_number, equal_to('100-542-901-05'))
    assert_that(dpe.emc_serial_number, equal_to('CF2CV150500005'))
    assert_that(dpe.vendor_part_number, equal_to(''))
    assert_that(dpe.vendor_serial_number, equal_to(''))
    assert_that(dpe.bus_id, equal_to(0))
    # Power in watts, temperature in Celsius, speed in bits/s
    # (per fixture values) — TODO confirm units against API docs.
    assert_that(dpe.current_power, equal_to(429))
    assert_that(dpe.avg_power, equal_to(397))
    assert_that(dpe.max_power, equal_to(429))
    assert_that(dpe.current_temperature, equal_to(26))
    assert_that(dpe.avg_temperature, equal_to(26))
    assert_that(dpe.max_temperature, equal_to(26))
    assert_that(dpe.current_speed, equal_to(12000000000))
    assert_that(dpe.max_speed, equal_to(12000000000))
    assert_that(dpe.parent_system, instance_of(UnitySystem))
    assert_that(dpe.enclosure_type,
                equal_to(EnclosureTypeEnum.MIRANDA_12G_SAS_DPE))
    assert_that(dpe.drive_types,
                only_contains(DiskTypeEnum.SAS, DiskTypeEnum.SAS_FLASH_2))
def test_monthly_period_range_defaults_to_a_month_ago(): range = period_range(None, None, 'monthly') # from pprint import pprint # pprint(range) assert_that(range, only_contains( (date(2013, 3, 1), date(2013, 3, 31)) ))
def everything_is_up():
    """Assert every component reported by call-logd's status endpoint is
    'ok' (components without a 'status' key are skipped)."""
    status = integration_test.call_logd.status.get()
    component_statuses = [
        component['status']
        for component in status.values()
        if 'status' in component
    ]
    assert_that(component_statuses, only_contains('ok'))
def test_all_of_ignore_name():
    """all_of() must skip the 'name' attribute and yield the remaining
    class attributes as (name, value) pairs."""
    class A(object):
        name = "long_name"
        value = "val"
        t = "ololo"
    # NOTE(review): only ('t', 'ololo') is expected — presumably 'value'
    # is also filtered by all_of(); verify against its implementation.
    assert_that(all_of(A), only_contains(("t", "ololo"),))
def check_all_active_hosts_available(self, zone_name='nova'):
    """Checks that all active nova hosts for zone are available.

    Nova checks hosts status with some interval. To prevent checking
    on outdated hosts data this method wait for `updated_at` host's
    attribute to be changed.

    Args:
        zone_name (str): zone name

    Raises:
        AssertionError: if not all hosts are active
        TimeoutExpired: if there is no updates for hosts
    """
    def _get_hosts():
        # Re-fetch the zone each call so host data is current; retry on
        # transient client errors.
        zone = waiting.wait(
            lambda: self._client.find(zoneName=zone_name),
            timeout_seconds=config.NOVA_AVAILABILITY_TIMEOUT,
            expected_exceptions=nova_exceptions.ClientException)
        for hosts_dict in zone.hosts.values():
            for host in hosts_dict.values():
                host['updated_at'] = parser.parse(host['updated_at'])
                yield host

    # Snapshot the newest update time, then wait until every host has
    # been refreshed past it.
    last_updated = max([x['updated_at'] for x in _get_hosts()])

    def _predicate():
        return all(
            [x['updated_at'] > last_updated for x in _get_hosts()])

    waiting.wait(_predicate,
                 timeout_seconds=config.NOVA_AVAILABILITY_TIMEOUT)
    active_hosts = [x for x in _get_hosts() if x['active']]
    assert_that(active_hosts, only_contains(has_entries(available=True)))
def verify_dm_1(dm):
    """Verify all expected attributes of data mover 1: identity, the
    'el30' interface, dedup settings, the 'fxg-8-0' device, and the
    172.18.0.0 route."""
    assert_that(dm.status, equal_to('ok'))
    assert_that(dm.mover_id, equal_to(1))
    assert_that(dm.uptime, equal_to(7086723))
    assert_that(dm.i18n_mode, equal_to('UNICODE'))
    assert_that(dm.name, equal_to('server_2'))
    assert_that(dm.existed, equal_to(True))
    assert_that(dm.version, equal_to('T8.1.7.70'))
    assert_that(dm.standby_fors, none())
    assert_that(dm.dns_domain, none())
    assert_that(dm.failover_policy, equal_to('auto'))
    assert_that(dm.host_id, equal_to(1))
    assert_that(dm.role, equal_to('primary'))
    assert_that(dm.standbys, only_contains(2))
    assert_that(dm.timezone, equal_to('GMT'))
    # Network interface checks (el30).
    interface = next(i for i in dm.interfaces if i.name == 'el30')
    assert_that(interface.mover_id, equal_to(1))
    assert_that(interface.ip_addr, equal_to('172.18.70.2'))
    assert_that(interface.name, equal_to('el30'))
    assert_that(interface.existed, equal_to(True))
    assert_that(interface.broadcast_addr, equal_to('172.18.255.255'))
    assert_that(interface.net_mask, equal_to('255.255.0.0'))
    assert_that(interface.up, equal_to(True))
    assert_that(interface.mtu, equal_to(1500))
    assert_that(interface.ip_version, equal_to('IPv4'))
    assert_that(interface.mac_addr, equal_to('2:60:48:20:b:0'))
    assert_that(interface.device, equal_to('cge0'))
    assert_that(interface.vlan_id, equal_to(0))
    # Deduplication settings checks.
    dedup_settings = dm.dedup_settings
    assert_that(dedup_settings.cpu_high_watermark, equal_to(90))
    assert_that(dedup_settings.minimum_scan_interval, equal_to(7))
    assert_that(dedup_settings.duplicate_detection_method, equal_to('sha1'))
    assert_that(dedup_settings.mover_id, equal_to(1))
    assert_that(dedup_settings.minimum_size, equal_to(24))
    assert_that(dedup_settings.access_time, equal_to(15))
    assert_that(dedup_settings.file_extension_exclude_list, equal_to(''))
    assert_that(dedup_settings.case_sensitive, equal_to(False))
    assert_that(dedup_settings.cifs_compression_enabled, equal_to(True))
    assert_that(dedup_settings.modification_time, equal_to(15))
    assert_that(dedup_settings.sav_vol_high_watermark, equal_to(90))
    assert_that(dedup_settings.backup_data_high_watermark, equal_to(90))
    assert_that(dedup_settings.maximum_size, equal_to(8388608))
    assert_that(dedup_settings.cpu_low_watermark, equal_to(40))
    assert_that(dedup_settings.existed, equal_to(True))
    # Physical device checks (fxg-8-0).
    device = next(i for i in dm.devices if i.name == 'fxg-8-0')
    assert_that(device.mover_id, equal_to(1))
    assert_that(device.name, equal_to('fxg-8-0'))
    assert_that(device.existed, equal_to(True))
    assert_that(device.type, equal_to('physical-ethernet'))
    assert_that(device.interfaces, equal_to('10.110.42.83'))
    assert_that(device.speed, equal_to('FD10000'))
    # Route checks (172.18.0.0/16 via 172.18.70.2).
    route = next(i for i in dm.route if i.destination == '172.18.0.0')
    assert_that(route.mover_id, equal_to(1))
    assert_that(route.existed, equal_to(True))
    assert_that(route.destination, equal_to('172.18.0.0'))
    assert_that(route.net_mask, equal_to('255.255.0.0'))
    assert_that(route.ip_version, equal_to('IPv4'))
    assert_that(route.interface, equal_to('172.18.70.2'))
    assert_that(route.gateway, equal_to('172.18.70.2'))
def test_make_records_from_rows(self):
    """make_dicts zips the header row with each data row into one dict
    per record, preserving cell types."""
    rows = [
        ["name", "size"],
        ["bottle", 123],
        ["screen", 567],
        ["mug", 12],
    ]
    records = make_dicts(rows)
    assert_that(
        records,
        only_contains(
            {
                "name": "bottle",
                "size": 123
            },
            {
                "name": "screen",
                "size": 567
            },
            {
                "name": "mug",
                "size": 12
            },
        ))
def test_get_host_ipv6_with_mask(self):
    """An IPv6 address given in CIDR form is normalized to its expanded
    host address (prefix stripped) on the created host."""
    host = UnityHost.get_host(t_rest(), '2001:db8:a0b:12f0::/64',
                              tenant='tenant_1', force_create=True)
    assert_that(host, not_none())
    assert_that(host.ip_list, only_contains('2001:db8:a0b:12f0:0:0:0:0'))
def test_create_delete_dns_server(unity_gf):
    """Create a DNS server on the NAS server (plus the file interface it
    needs), verify its address, then clean both up."""
    ip = '1.1.1.1'
    fi = unity_gf.nas_server.create_file_interface(
        'spa_eth2', ip, role=FileInterfaceRoleEnum.PRODUCTION)
    dns = unity_gf.nas_server.create_dns_server('test.dev', ip)
    assert_that(dns.existed, equal_to(True))
    assert_that(dns.addresses, only_contains(ip))
    # Teardown: delete DNS first, then the interface it depends on.
    dns.delete()
    fi.delete()
def test_parse_utf8_data(self):
    """Non-ASCII UTF-8 cells are decoded back to unicode values."""
    csv = u"a,b\nà,ù"
    # NOTE(review): StringIO over encoded bytes is a Python 2 pattern;
    # under Python 3 this would need io.BytesIO or no encode at all.
    csv_stream = StringIO(csv.encode("utf-8"))
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(
        {"a": u"à", "b": u"ù"}
    ))
def test_ignore_values_in_comments_column(self):
    """parse_csv strips any column whose header is 'comment'
    (dict-per-row variant)."""
    csv = u"a,comment,b\nc,d,e"
    csv_stream = StringIO(csv.encode("utf-8"))
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(
        {"a": u"c", "b": u"e"}
    ))
def test_completion():
    """POST /completions on the basic.py fixture returns well-formed
    completions that include names 'a' and 'b'."""
    app = TestApp(handlers.app)
    filepath = fixture_filepath("basic.py")
    request_data = {"source": open(filepath).read(),
                    "line": 7,
                    "col": 2,
                    "source_path": filepath}
    completions = app.post_json("/completions",
                                request_data).json["completions"]
    assert_that(completions, only_contains(valid_completions()))
    assert_that(completions,
                has_items(CompletionEntry("a"), CompletionEntry("b")))
def test_ignore_when_empty_row(self):
    """parse_csv drops rows whose cells are all empty
    (dict-per-row variant)."""
    csv = u"a,b\n,\nc,d"
    csv_stream = StringIO(csv.encode("utf-8"))
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(
        {"a": u"c", "b": u"d"}
    ))
def test_ignore_comments(self):
    """parse_csv skips '#'-prefixed comment lines
    (dict-per-row variant)."""
    csv = u"# top comment\na,b\n# any random comment\nc,d"
    csv_stream = StringIO(csv.encode("utf-8"))
    data = parse_csv(csv_stream)
    assert_that(data, only_contains(
        {"a": u"c", "b": u"d"}
    ))
def test_create_mount_point(self):
    """Creating mount points registers them on the source LUN; a new
    mount point has no snapshot attached ('N/A')."""
    lun = VNXLun(name='l1', cli=t_cli())
    m1 = lun.create_mount_point(mount_point_name='m1')
    assert_that(m1.name, equal_to('m1'))
    assert_that(m1.lun_id, equal_to(4057))
    assert_that(m1.attached_snapshot, equal_to('s1'))
    m2 = lun.create_mount_point(mount_point_name='m2')
    assert_that(lun.snapshot_mount_points, only_contains(4056, 4057))
    assert_that(m2.attached_snapshot, equal_to('N/A'))
def test_it_should_search_stations_by_name(self):
    """Search is accent- and case-insensitive: a mangled query still
    matches stations named/addressed 'Lavapies', and finds at least one."""
    query = 'callEaVapiés'
    stations = list(self.stations.by_search(query))
    assert_that(stations, only_contains(any_of(
        has_property('nombre', contains_string('Lavapies')),
        has_property('address', contains_string('Lavapies')),
    )))
    assert_that(stations, has_length(greater_than(0)))
def check_control_recording_type_specific_step(self, type):
    """Drive the type-specific step_<type> recorder method for a few steps
    and verify the recorded control rows (type enum, dt, and per-UAV U
    vectors) match what was fed in.

    NOTE(review): the `type` parameter shadows the builtin; kept for
    caller compatibility.
    """
    dt = 0.1
    steps = 3
    dim = ctrl_signal_dimensions[type]
    # range() instead of the Python 2-only xrange(); iteration behavior
    # in a for-loop is identical.
    for i in range(steps):
        getattr(self.recorder, 'step_' + type)(
            dt, [range(dim), range(dim, 2 * dim)])
    self.fileh.flush()
    controls = self.recorder.controls
    assert_that(controls.cols.type, only_contains(
        getattr(ControlsRecorder.Controls.columns['type'].enum, type)))
    assert_that(controls.cols.dt[:], only_contains(0.1))
    # UAV 0 received 0..dim-1, UAV 1 received dim..2*dim-1, each step.
    assert_equal(
        [x['U'][:dim] for x in controls.where('uav == 0')],
        np.tile(range(dim), (steps, 1)))
    assert_equal(
        [x['U'][:dim] for x in controls.where('uav == 1')],
        np.tile(range(dim, 2 * dim), (steps, 1)))
def verify_dm_ref_1(dm):
    """Verify the reference (summary) view of data mover 1 — a subset of
    the attributes checked by verify_dm_1."""
    assert_that(dm.mover_id, equal_to(1))
    assert_that(dm.i18n_mode, equal_to('UNICODE'))
    assert_that(dm.name, equal_to('server_2'))
    assert_that(dm.existed, equal_to(True))
    assert_that(dm.standby_fors, none())
    assert_that(dm.failover_policy, equal_to('auto'))
    assert_that(dm.host_id, equal_to(1))
    assert_that(dm.role, equal_to('primary'))
    assert_that(dm.standbys, only_contains(2))
def test_index_nested():
    """create_index nests indexes for sub-sub-directories, keeping file
    paths relative to the target root."""
    target = 'foo/'
    # Simulated os.walk output: foo/one/two/zoidberg.txt.
    scanner = [(target, ['one'], []),
               (target + 'one', ['two'], []),
               (target + 'one/two', [], ['zoidberg.txt'])]
    index = dir2json.create_index(target, scanner)
    # Descend root -> 'one' -> 'two' -> file entries.
    deep = index['index'][0]['index'][0]['index']
    assert_that(deep,
                only_contains(has_entry('path', 'one/two/zoidberg.txt')))