def FlagsForFile_DoNotAddMacIncludePathsWithSysroot_test():
  """Mac include paths must not be added when any sysroot flag is present."""
  flags_object = flags.Flags()

  sysroot_variants = [
    [ '-isysroot', 'test1', '--test2=test' ],
    [ '-test', '--sysroot', 'test1' ],
    [ '-test', 'test1', '--sysroot=test' ],
  ]

  for flag_set in sysroot_variants:
    # Bind the current variant as a default arg so each FlagsForFile
    # returns its own flag list.
    def FlagsForFile( filename, flag_set = flag_set ):
      return { 'flags': flag_set }

    with MockExtraConfModule( FlagsForFile ):
      flags_list = flags_object.FlagsForFile( '/foo' )
      assert_that( flags_list,
                   not_( has_item( 'sentinel_value_for_testing' ) ) )
def test_removes_deprecated_frames_on_save(mp3):
    """Saving fresh metadata strips all deprecated TXXX frames from the file."""
    filename = mp3(TXXX_Tagger="TGiT v1.1",
                   TXXX_TAGGING_TIME="2014-03-26 14:18:55 EDT-0400",
                   TXXX_UPC="987654321111")

    container.save(filename, Metadata())

    tags = MP3(filename)
    deprecated = ("TXXX:Tagger", "TXXX:TAGGING_TIME", "TXXX:UPC")
    assert_that(tags,
                all_of(*[not_(has_key(frame)) for frame in deprecated]),
                "tags in file")
def test_asserts_text_edit_content(text_edit, driver):
    """The driver accepts matching plain text and rejects a failing matcher."""
    content = "some text\nspanning\nmultiple lines"
    text_edit.setPlainText(content)

    driver.has_plain_text(content)
    with raises(AssertionError):
        driver.has_plain_text(not_(equal_to(content)))
def test_should_not_raise_api_method_version_err_with_empty_params(self):
    """An empty parameter list never triggers ApiMethodVersionError."""
    service = ServiceBase(api_version=9.0)
    check = calling(service._check_param_versions).with_args("aMethod", [])
    assert_that(check, not_(raises(ApiMethodVersionError)))
def test_asserts_label_text(label, driver):
    """The driver accepts the label's text and rejects a non-matching matcher."""
    expected = "text"
    label.setText(expected)

    driver.has_text(expected)
    with raises(AssertionError):
        driver.has_text(not_(equal_to(expected)))
def test_should_not_raise_api_parameter_version_err_with_version_match(self):
    """A method version equal to the API version passes the check."""
    service = ServiceBase(api_version=9.0)
    check = calling(service._check_method_version).with_args("aMethod", 9.0)
    assert_that(check, not_(raises(ApiParameterVersionError)))
def _assert_idle_hints_state(prefix_exten):
    """Return True when every hint state for *prefix_exten* is 'Idle'.

    Fails the hamcrest assertion if no hint states are found at all.
    """
    hints_state = _get_hints_state(prefix_exten)
    assert_that(hints_state, not_(empty()))
    return all(state == 'Idle' for state in hints_state)
def test_removes_old_ipi_frames(mp3):
    """Saving keeps IPIs present in metadata and drops stale IPI frames."""
    filename = mp3(TXXX_IPI_Joel_Miller="00000123456789",
                   TXXX_IPI_Rebecca_Ann_Maloy="98765432100000")

    container.save(filename, Metadata(ipis={"Joel Miller": "00000123456789"}))

    tags = MP3(filename)
    assert_that(tags,
                all_of(has_key("TXXX:IPI:Joel Miller"),
                       not_(has_key("TXXX:IPI:Rebecca Ann Maloy"))),
                "tags in file")
def test_should_not_raise_api_parameter_version_err_with_matching_version(self):
    """A parameter whose version matches the API version passes the check."""
    service = ServiceBase(api_version=9.0)
    params = [("aParam", "aValue", 9.0, None)]
    assert_that(
        calling(service._check_param_versions).with_args("aMethod", params),
        not_(raises(ApiParameterVersionError)))
def test_other_application(self):
    """A stasis_start for an unknown application is ignored without error."""
    event = {'application': 'foobar'}

    assert_that(
        calling(self.stasis.stasis_start).with_args(Mock(), event),
        not_(raises(Exception)),
    )

    self.service.dial_all_contacts.assert_not_called()
    self.service.join_bridge.assert_not_called()
def test_search_or_distinct_missing(self):
    """Loading requires at least one of 'search' or 'distinct'."""
    load = self.schema().load

    assert_that(
        calling(load).with_args({}),
        raises(ValidationError, pattern='search or distinct'),
    )
    assert_that(
        calling(load).with_args({'search': 'ok'}),
        not_(raises(ValidationError, pattern='search or distinct')),
    )
def test_handle_death_on_own_death_does_nothing(self):
    """A self-kill must not be recorded in the frag log."""
    victim = fake_player(123, "Fragged Player", team="red")
    connected_players(victim)

    self.plugin.handle_death(victim, victim, {"MOD": "ROCKET"})

    assert_that(self.plugin.frag_log, not_(contains_exactly(victim.steam_id)))  # type: ignore
def test_that_unauthorized_tenants_on_GET_return_403(self):
    """GET with a foreign tenant returns 403; the owner can still read."""
    token = self._post_token('foo', 'bar')['token']

    self._get_token_with_expected_exception(
        token,
        tenant='55ee61f3-c4a5-427c-9f40-9d5c33466240',
        status_code=403)

    assert_that(
        calling(self.client.token.get).with_args(token),
        not_(raises(Exception)))

    with self.client_in_subtenant() as (_, __, sub_tenant):
        assert_that(
            calling(self.client.token.get).with_args(
                token, tenant=sub_tenant['uuid']),
            not_(raises(Exception)),
        )
def validateExpectedJson(expected, actual, jsonCompare):
    """Compare an actual JSON response against the expected data.

    When *jsonCompare* is 'yes' (case-insensitive), the whole expected
    document (read from file via readJson) is compared to *actual*.
    Otherwise each key in *actual* is compared field by field; the
    sentinel value 'customcheck' in *expected* only requires the actual
    value to be non-empty.
    """
    # BUG FIX: the original tested `jsonCompare.lower == 'yes'`, which
    # compares the bound *method object* to a string and is always False,
    # so the whole-document branch was unreachable.  The method must be
    # called: jsonCompare.lower().
    if jsonCompare.lower() == 'yes':
        assert_that(readJson(expected), equal_to(actual))
    else:
        for key in actual:
            if expected[key] == 'customcheck':
                assert_that(actual[key], is_(not_(empty())))
            else:
                assert_that(expected[key], equal_to(actual[key]))
def test_given_one_completed_relocate_when_list_then_relocate_not_found(self):
    """Completed relocates must not appear in a user's relocate list."""
    token = self.given_user_token(SOME_USER_UUID)
    ctid_ng = self.make_ctid_ng(token)
    relocate, user_uuid = self.given_completed_user_relocate()

    relocates = ctid_ng.relocates.list_from_user()

    assert_that(relocates['items'],
                not_(contains(has_entry('uuid', relocate['uuid']))))
def test_given_http_service_and_body_none_when_load_then_body_stripped(self):
    """A None body is removed from the config during deserialization."""
    subscription = dict(VALID_SUBSCRIPTION)
    subscription['config'] = dict(VALID_SUBSCRIPTION['config'])
    subscription['config']['body'] = None

    result = subscription_schema.load(subscription)

    assert_that(result.data['config'], not_(has_key('body')))
def test_given_http_service_and_verify_certificate_string_when_load_then_pass(self):
    """A CA-bundle path as verify_certificate is accepted by the schema."""
    subscription = dict(VALID_SUBSCRIPTION)
    subscription['config'] = dict(VALID_SUBSCRIPTION['config'])
    subscription['config']['verify_certificate'] = '/some/path'

    assert_that(
        calling(subscription_schema.load).with_args(subscription),
        not_(raises(ValidationError)))
def test_deliver_unconditional_message_no_user_found(self):
    """A missing user is swallowed and no forward update is performed."""
    self.user_service_manager.dao.user.get_by_uuid.side_effect = NoSuchUserException

    deliver = self.user_service_manager.deliver_unconditional_message
    assert_that(
        calling(deliver).with_args('7f523550-03cf-4dac-a858-cb8afdb34775', False, ''),
        not_(raises(NoSuchUserException)))

    assert_that(self.user_service_manager.dao.user.set_unconditional_fwd.called,
                equal_to(False))
    assert_that(self.user_service_notifier.unconditional_fwd_enabled.called,
                equal_to(False))
    assert_that(self.funckey_manager.update_all_unconditional_fwd.called,
                equal_to(False))
def test_contains_bytestring():
    """contains_bytestring matches byte substrings and describes itself."""
    matching = contains_bytestring(b"foo")
    non_matching = contains_bytestring(b"bar")

    assert_that(b"a foo b", matching)
    assert_that(b"a foo b", not_(non_matching))

    assert_that(matching, has_string("bytestring containing <b'foo'>"))
    assert_that(non_matching, mismatches_with(b" a foo b", "was <b' a foo b'>"))
def test_given_source_when_delete_then_ok(self):
    """Deleting an existing source does not raise."""
    source = self.client.backends.create_source(
        backend=self.BACKEND, body=self.config())

    assert_that(
        calling(self.client.backends.delete_source).with_args(
            backend=self.BACKEND,
            source_uuid=source['uuid'],
        ),
        not_(raises(requests.HTTPError)))
def test_given_http_service_and_url_with_no_dots_when_load_then_pass(self):
    """A dotless hostname in the URL is accepted by the schema."""
    subscription = dict(VALID_SUBSCRIPTION)
    subscription['config'] = dict(VALID_SUBSCRIPTION['config'])
    subscription['config']['url'] = 'http://third-party-http/test'

    assert_that(
        calling(subscription_schema.load).with_args(subscription),
        not_(raises(ValidationError)),
    )
def test_when_create_twice_without_authorize_then_not_created(self):
    """Creating the same external auth twice without authorizing is tolerated."""
    self.client.external.create(MICROSOFT, self.admin_user_uuid, {})

    assert_that(
        calling(self.client.external.create).with_args(
            MICROSOFT, self.admin_user_uuid, {}),
        not_(raises(requests.HTTPError)),
    )
def PrepareFlagsForClang_Sysroot_test(*args):
    """Sysroot flags must suppress the addition of extra include paths."""
    sysroot_variants = (
        ['-isysroot', 'test1', '--test2=test'],
        ['-test', '--sysroot', 'test1'],
        ['-test', 'test1', '--sysroot=test'],
    )
    for flag_set in sysroot_variants:
        prepared = list(flags.PrepareFlagsForClang(flag_set, 'test.cc', True))
        assert_that(prepared, not_(has_item('sentinel_value_for_testing')))
def test_response_json():
    """with_json matches present keys and rejects absent ones."""
    # When
    response = requests.get("https://httpbin.org/json")

    # Then
    assert_that(response, is_response().with_json(has_key("slideshow")))
    assert_that(response, not_(is_response().with_json(has_key("shitshow"))))
def test_app_unsubscribe(ari):
    """Test passes, but operation does not work for now;
    a tiny Asterisk patch is required.
    """
    ari.amqp.stasisSubscribe(**subscribe_args)

    assert_that(
        calling(ari.amqp.stasisUnsubscribe).with_args(**subscribe_args),
        not_(raises(Exception)))
def test_response_encoding():
    """with_encoding matches the actual encoding and rejects a wrong one."""
    # When
    response = requests.get("https://httpbin.org/encoding/utf8")

    # Then
    assert_that(response, is_response().with_encoding("utf-8"))
    assert_that(response, not_(is_response().with_encoding("ISO-8859-1")))
def test_that_asterisk_has_maintenance_mode(self):
    """Toggling maintenance mode on and off is reflected in consul."""
    maintenance = self._set_unset_asterisk_in_maintenance_mode("on")
    assert_that(maintenance, 'asterisk should be in maintenance state on consul')

    time.sleep(1)

    maintenance = self._set_unset_asterisk_in_maintenance_mode("off")
    # BUG FIX: the original wrote assert_that(not_(maintenance), ...).
    # not_() builds a hamcrest matcher *object*, which is always truthy,
    # so that assertion could never fail.  Plain boolean negation is the
    # correct way to assert falsiness here.
    assert_that(not maintenance,
                'asterisk should not be in maintenance state on consul')
def test_delete_funckey_template_when_user_and_funckey_template_associated(
        user, funckey_template):
    """Deleting a template also removes its association with the user."""
    with a.user_funckey_template(user, funckey_template, check=False):
        response = confd.users(user['id']).funckeys.templates.get()
        assert_that(response.items, not_(empty()))

        confd.funckeys.templates(funckey_template['id']).delete().assert_deleted()

        response = confd.users(user['id']).funckeys.templates.get()
        assert_that(response.items, empty())
def test_bake_without_travis_pypi_setup(cookies):
    """Opting out of PyPI deployment leaves no 'deploy' section in .travis.yml."""
    context = {'use_pypi_deployment_with_travis': 'n'}
    with bake_in_temp_dir(cookies, extra_context=context) as baked_result:
        travis_yml = pathlib.Path(baked_result.project).joinpath('.travis.yml')
        result_travis_config = yaml.full_load(travis_yml.open())

        assert_that(result_travis_config, not_(has_key('deploy')))
        assert_that(result_travis_config['language'], is_(equal_to('python')))
def test_removes_contributor_from_chain_of_title():
    """Contributors dropped from the update disappear from the chain of title."""
    chain_of_title = make_chain_of_title(
        authors_composers=[joel_miller(), john_roney(),
                           contributor("Yoko Ono"), contributor("John Lennon")],
        publishers=[effendi_records(), contributor("Effendi Records")])

    chain_of_title.update(lyricists=["Joel Miller"],
                          composers=["John Lennon"],
                          publishers=["Effendi Records"])

    assert_that(chain_of_title,
                not_(any_of(has_key("John Roney"),
                            has_key("Yoko Ono"),
                            has_key("Universals"))),
                "The chain of title")
def test_to_dict_with_no_data(self):
    """Fields without submitted data are omitted from to_dict()."""
    class MyForm(BaseForm):
        attribute1 = StringField()

    with app.test_request_context():
        form = MyForm()
        result = form.to_dict()
        assert_that(result, not_(has_key('attribute1')))
def test_when_create_authorize_get_then_does_not_raise(self):
    """After create + authorize, fetching the external auth succeeds."""
    self.client.external.create(GOOGLE, self.admin_user_uuid, {})
    self._simulate_user_authentication()

    assert_that(
        calling(self.client.external.get).with_args(GOOGLE, self.admin_user_uuid),
        not_(raises(requests.HTTPError)))
def PrepareFlagsForClang_Sysroot_test( *args ):
  """Sysroot flags must prevent extra include paths from being appended."""
  for flag_set in ( [ '-isysroot', 'test1', '--test2=test' ],
                    [ '-test', '--sysroot', 'test1' ],
                    [ '-test', 'test1', '--sysroot=test' ] ):
    prepared = list( flags.PrepareFlagsForClang( flag_set, 'test.cc', True ) )
    assert_that( prepared, not_( has_item( 'sentinel_value_for_testing' ) ) )
def test_that_two_policies_cannot_have_the_same_name_and_tenant(self, policy_uuid, tenant_uuid):
    """Policy names are unique per tenant, not globally."""
    create = self.create_and_delete_policy

    # Same name, different tenant: no exception
    assert_that(
        calling(create).with_args('foobar', '', tenant_uuid=tenant_uuid),
        not_(raises(exceptions.DuplicatePolicyException)),
    )
    # Same tenant, different name: no exception
    assert_that(
        calling(create).with_args('foobaz', ''),
        not_(raises(exceptions.DuplicatePolicyException)),
    )
    # Same name, same tenant: rejected
    assert_that(
        calling(create).with_args('foobar', ''),
        raises(exceptions.DuplicatePolicyException),
    )
def test_remove_user(self):
    """remove_user raises for unknown/system groups and users, else succeeds."""
    def given(nb_deleted, group_exists=True, user_exists=True, system_managed=False):
        # Arrange the DAO mocks for one scenario.
        self.group_dao.remove_user.return_value = nb_deleted
        self.group_dao.exists.return_value = group_exists
        self.group_dao.is_system_managed.return_value = system_managed
        self.user_dao.exists.return_value = user_exists

    def removing():
        return calling(self.service.remove_user).with_args(s.group_uuid, s.user_uuid)

    given(nb_deleted=0, group_exists=False)
    assert_that(removing(), raises(exceptions.UnknownGroupException))

    given(nb_deleted=0, user_exists=False)
    assert_that(removing(), raises(exceptions.UnknownUserException))

    given(nb_deleted=0, system_managed=True)
    assert_that(removing(), raises(exceptions.SystemGroupForbidden))

    given(nb_deleted=0)
    assert_that(removing(), not_(raises(Exception)))

    given(nb_deleted=1)
    assert_that(removing(), not_(raises(Exception)))
def test_collect(self):
    """Counter metrics only appear from the second collect cycle onward,
    and report the delta between the two aggregate calls.
    """
    phpfpm_metrics_collector = PHPFPMMetricsCollector(
        object=self.phpfpm_obj,
        interval=self.phpfpm_obj.intervals['metrics'])
    assert_that(phpfpm_metrics_collector, not_none())

    counted_vars = {
        'php.fpm.queue.req': 0,
        'php.fpm.slow_req': 0,
        'php.fpm.conn.accepted': 3
    }
    counted_vars_2 = {
        'php.fpm.queue.req': 5,
        'php.fpm.slow_req': 4,
        'php.fpm.conn.accepted': 5
    }

    # make direct aggregate call like a child collector would
    phpfpm_metrics_collector.aggregate_counters(counted_vars=counted_vars, stamp=1)

    # collect (runs increment)
    phpfpm_metrics_collector.collect()
    time.sleep(0.1)

    # first collect should not have counters
    assert_that(self.phpfpm_obj.statsd.current, not_(has_item('counter')))

    # make a second call
    phpfpm_metrics_collector.aggregate_counters(
        counted_vars=counted_vars_2, stamp=2)
    phpfpm_metrics_collector.collect()
    time.sleep(0.1)

    # now there should be counters
    assert_that(self.phpfpm_obj.statsd.current, has_item('counter'))
    counters = self.phpfpm_obj.statsd.current['counter']
    assert_that(counters, has_length(3))

    """
    counters:
    {
        'php.fpm.queue.req': [[2, 5]],
        'php.fpm.slow_req': [[2, 4]],
        'php.fpm.conn.accepted': [[2, 2]]
    }
    """
    assert_that(counters['php.fpm.queue.req'][0][1], equal_to(5))
    assert_that(counters['php.fpm.slow_req'][0][1], equal_to(4))
    assert_that(counters['php.fpm.conn.accepted'][0][1], equal_to(2))

    # BUG FIX: dict.itervalues() is Python 2 only and raises AttributeError
    # on Python 3 (this codebase already uses Python 3 syntax elsewhere);
    # values() behaves identically for this read-only iteration.
    for metric_records in counters.values():
        # get stamp from first recording in records
        stamp = metric_records[0][0]
        assert_that(stamp, equal_to(2))
def test_repush_without_staged_data(config_file, pushdir, lookasidedir,
                                    branch, name, version, release,
                                    default_config, capsys):
    """Push, clear stage, push again. The script should warn, but not error"""
    rpm = '-'.join([name, version, release]) + '.src.rpm'
    options = ['-v', '-c', config_file, '--push', branch,
               os.path.join(RPMS_PATH, rpm)]

    # push once
    assert_that(calling(main).with_args(options), exits(0))

    # clear the output/err
    capsys.readouterr()
    # remove the handlers from global logger alt-src
    remove_handlers()

    # clear stage
    stagedir = default_config['stagedir']
    for fn in os.listdir(stagedir):
        shutil.rmtree("%s/%s" % (stagedir, fn))

    # run same push again
    assert_that(calling(main).with_args(options), exits(0))

    out, err = capsys.readouterr()
    out_lines = out.splitlines()

    try:
        # It should not have logged any ERROR
        assert_that(len(err), equal_to(0))

        # There should be a debrand commit
        subject = git_subject('%s/%s.git' % (pushdir, name), branch)
        expected_subject = 'debrand %s-%s-%s' % (name, version, release)
        assert_that(subject, equal_to(expected_subject))
    except AssertionError:
        if rpm not in DEBRAND_XFAIL:
            raise
    else:
        assert_that(rpm not in DEBRAND_XFAIL,
                    'RPM was expected to fail debranding, but passed')

    # It should have logged a WARNING about duplicate content
    w_expect = '[WARNING] Skipping push for duplicate content'
    assert [l for l in out_lines if w_expect in l]

    # It should also warn about the existing tag
    # BUG FIX: the pattern must be a raw string -- '\[' in a plain string
    # literal is an invalid escape sequence (DeprecationWarning, and a
    # SyntaxWarning/error in newer Python versions).
    w_expect = re.compile(r'\[WARNING\] Tag imports/.* already present on remote')
    assert [l for l in out_lines if w_expect.search(l)]

    # lookaside dir should have content
    lookaside = '%s/%s/%s' % (lookasidedir, name, branch)
    files = os.listdir(lookaside)
    assert_that(files, not_(empty()))
def test_IncludeCache_Cached_NewMtime( self ):
  """A directory mtime change invalidates the cached include entries."""
  with TemporaryTestDir() as tmp_dir:
    include_cache = IncludeCache()
    assert_that( include_cache._cache, equal_to( {} ) )

    foo_path = os.path.join( tmp_dir, 'foo' )
    with open( foo_path, 'w' ) as foo_file:
      foo_file.write( 'foo' )

    old_includes = include_cache.GetIncludes( tmp_dir )
    old_mtime = os.path.getmtime( tmp_dir )
    assert_that( old_includes,
                 contains_exactly(
                   has_properties( { 'name': 'foo', 'entry_type': 1 } ) ) )
    assert_that( include_cache._cache,
                 has_entry( tmp_dir, has_entries( {
                   'mtime': old_mtime,
                   'includes': contains_exactly(
                     has_properties( { 'name': 'foo', 'entry_type': 1 } ) )
                 } ) ) )

    # Make sure the directory mtime actually changes, even on filesystems
    # with coarse timestamp resolution.
    sleep( 2 )

    bar_path = os.path.join( tmp_dir, 'bar' )
    with open( bar_path, 'w' ) as bar_file:
      bar_file.write( 'bar' )

    new_includes = include_cache.GetIncludes( tmp_dir )
    new_mtime = os.path.getmtime( tmp_dir )

    assert_that( old_mtime, not_( equal_to( new_mtime ) ) )
    assert_that( new_includes,
                 contains_inanyorder(
                   has_properties( { 'name': 'foo', 'entry_type': 1 } ),
                   has_properties( { 'name': 'bar', 'entry_type': 1 } ) ) )
    assert_that( include_cache._cache,
                 has_entry( tmp_dir, has_entries( {
                   'mtime': new_mtime,
                   'includes': contains_inanyorder(
                     has_properties( { 'name': 'foo', 'entry_type': 1 } ),
                     has_properties( { 'name': 'bar', 'entry_type': 1 } ) )
                 } ) ) )
def test_HasWeekday():
    """HasWeekday matches the date's weekday and reports mismatches."""
    sunday = datetime.date(1968, 7, 21)

    assert_that(sunday, HasWeekday(6))
    assert_that(sunday, not_(HasWeekday(2)))

    assert_that(HasWeekday(2), has_string("Date with weekday matching <2>"))
    assert_that(
        HasWeekday(2),
        mismatches_with(sunday, "was <1968-07-21> with weekday <6>, a Sunday"),
    )
def test_is_weekday():
    """is_weekday accepts weekdays, rejects weekends, and describes itself."""
    friday = datetime.date(1968, 7, 19)
    sunday = datetime.date(1968, 7, 21)

    assert_that(friday, is_weekday())
    assert_that(sunday, not_(is_weekday()))

    assert_that(is_weekday(), has_string("A weekday"))
    assert_that(
        is_weekday(),
        mismatches_with(sunday, "was <1968-07-21> with weekday <6>, a Sunday"),
    )
def test_that_load_with_no_personal_service_does_not_add_route(self):
    """Without the personal service, the Personal resource is not registered."""
    dependencies = {
        'config': {},
        'http_namespace': Mock(),
        'api': self.api,
        'services': {},
    }

    JsonViewPlugin().load(dependencies)

    assert_that(self.api.add_resource.call_args_list,
                not_(has_item(call(Personal, ANY))))
def test_that_asterisk_is_registered_and_deregistered(self):
    """Asterisk appears in consul while running and disappears when stopped."""
    registered = self._is_asterisk_registered_to_consul()
    assert_that(registered, 'asterisk should be registered on consul')

    self.stop_service('asterisk')

    registered = self._is_asterisk_registered_to_consul()
    # BUG FIX: the original wrote assert_that(not_(registered), ...).
    # not_() returns a hamcrest matcher *object*, which is always truthy,
    # so the assertion could never fail.  Plain boolean negation is the
    # correct way to assert falsiness here.
    assert_that(not registered, 'asterisk should not be registered on consul')
def when_i_reconfigure_the_phone_1_on_line_2_3(step, name, exten, context):
    """Re-register the named phone on the line matching exten@context."""
    step.scenario.phone_register.remove(name)

    line = line_sip_helper.find_with_exten_context(exten, context)
    phone_config = sip_config.create_config(
        world.config, step.scenario.phone_register, line)

    # Retry registration until it succeeds (or wait_until gives up).
    phone = common.wait_until(lambda: sip_phone.register_line(phone_config))

    assert_that(phone, is_(not_(None)))
    step.scenario.phone_register.add_registered_phone(phone, name)
def test_removes_linked_publisher_from_a_contributor_when_removing_a_publisher():
    """Removing a publisher clears it from every linked author/composer."""
    chain_of_title = make_chain_of_title(
        authors_composers=[joel_miller(), john_roney()],
        publishers=[effendi_records()])

    chain_of_title.update(lyricists=["Joel Miller"],
                          composers=["John Roney"],
                          publishers=[])

    assert_that(chain_of_title.contributors,
                all_of(has_author_composer("John Roney", has_entry("publisher", "")),
                       has_author_composer("Joel Miller", has_entry("publisher", "")),
                       has_entry("publishers", not_(has_key("Effendi Records")))),
                "The chain of title")
def test_scales_image_to_specified_size_preserving_image_attributes():
    """Scaling resizes pixel data but keeps mime/type/description intact."""
    original_image = image_file(resources.path("front-cover.jpg"))

    scaled_image = imager.scale(original_image, 50, 50)

    assert_that(scaled_image,
                image_with(data=not_(empty()),
                           mime=original_image.mime,
                           type=original_image.type,
                           desc=original_image.desc),
                "scaled image")

    picture = QImage.fromData(scaled_image.data,
                              imager.format_for(original_image.mime))
    assert_that(picture.width(), equal_to(50), "width")
    assert_that(picture.height(), equal_to(50), "height")
def test_omit_pytest_markers(executed_docstring_source):
    """
    >>> import pytest

    >>> @pytest.mark.usermark1
    ... @pytest.mark.usermark2
    ... @pytest.mark.parametrize("param", ["foo"])
    ... @pytest.mark.skipif(False, reason="reason2")
    ... @pytest.mark.skipif(False, reason="reason1")
    ... def test_omit_pytest_markers_example(param):
    ...     pass
    """
    report = executed_docstring_source.allure_report
    assert_that(report,
                has_test_case('test_omit_pytest_markers_example[foo]',
                              has_tag("usermark1"),
                              has_tag("usermark2"),
                              not_(has_tag("skipif(False, reason='reason2')")),
                              not_(has_tag("skipif(False, reason='reason1')")),
                              not_(has_tag("parametrize('param', ['foo'])"))))
def test_default_suite(executed_docstring_source):
    """
    >>> def test_default_suite_example():
    ...     pass
    """
    report = executed_docstring_source.allure_report
    assert_that(report,
                has_test_case("test_default_suite_example",
                              has_parent_suite(anything()),  # path to testdir
                              has_suite("test_default_suite"),  # created file name
                              not_(has_sub_suite(anything()))))
def step_then_reuse_prefix_inside_nested_functions(context):
    """Each result set shares one prefix, and no prefix repeats across sets."""
    used_prefixes = []
    for results in context.results:
        unique_prefixes = list({result.split(DELIMITER, 1)[0]
                                for result in results})
        assert_that(unique_prefixes, has_length(1))

        prefix = unique_prefixes[0]
        assert_that(used_prefixes, not_(has_item(prefix)))
        used_prefixes.append(prefix)
async def test_get_long_result_answer(create_app, long_answer):
    """A long-running check first reports progress, then a final result."""
    app, base_url, client_session = await create_app()
    exercise_name, answer_payload = long_answer
    task_uuid = await send_answer(client_session, base_url, long_answer)
    url = '{base_url}/results/{uuid}'.format(base_url=base_url, uuid=task_uuid)

    # While the check runs, the result endpoint reports progress as an error.
    async with client_session.get(url) as response:
        content = await response.json()
        assert_that(content, has_entries(
            error=has_entries(message='Checking in progress')))

    await asyncio.sleep(1)

    # Once finished, the error entry is gone.
    async with client_session.get(url) as response:
        content = await response.json()
        assert_that(content, not_(has_key('error')))
def test_call_decorated_as_step_function(executed_docstring_source):
    """
    >>> import allure

    >>> with allure.step("step outside"):
    ...     pass

    >>> def test_call_decorated_as_step_function_example():
    ...     pass
    """
    report = executed_docstring_source.allure_report
    assert_that(report,
                has_test_case("test_call_decorated_as_step_function_example",
                              not_(has_step("step outside"))))
def test_nested_fixture(allured_testdir, parent_scope, child_scope):
    """Fixture containers are attached per-test only for fixtures requested."""
    allured_testdir.testdir.makepyfile("""
    import pytest

    @pytest.fixture(scope="{parent_scope}")
    def parent_fixture():
        pass

    @pytest.fixture(scope="{child_scope}")
    def child_fixture(parent_fixture):
        pass

    def test_nested_fixture_example(child_fixture):
        pass

    def test_fixture_used_in_other_fixtures_example(parent_fixture):
        pass
    """.format(parent_scope=parent_scope, child_scope=child_scope))

    allured_testdir.run_with_allure()
    report = allured_testdir.allure_report

    # Requesting child_fixture pulls in both fixture containers.
    assert_that(report,
                has_test_case("test_nested_fixture_example",
                              has_container(report,
                                            has_before("parent_fixture")),
                              has_container(report,
                                            has_before("child_fixture"))))

    # Requesting only parent_fixture must not attach the child container.
    assert_that(report,
                has_test_case("test_fixture_used_in_other_fixtures_example",
                              has_container(report,
                                            has_before("parent_fixture")),
                              not_(has_container(report,
                                                 has_before("child_fixture")))))
def test_removes_interface_from_bond(self):
    """A removed interface no longer appears among the bond's members."""
    self.client.remove_interface_from_bond(self.test_port)

    bond = self.client.get_bond(42)
    assert_that(bond.members, not_(has_item(self.test_port)))
def doesnt_have_parameter(name):
    """Matcher: the spec's 'parameters' list has no entry named *name*."""
    named = has_entry('name', equal_to(name))
    return has_entry('parameters', not_(has_item(named)))
def assert_function():
    """No connect counter event for this app instance must be on the bus."""
    pattern = ('PUTVAL [^/]+/calls-{app}.{app_instance}/counter-connect .* N:1'
               .format(app=STASIS_APP_NAME,
                       app_instance=STASIS_APP_INSTANCE_NAME))
    assert_that(self.bus.events(), not_(has_item(matches_regexp(pattern))))
def test_not_ends_like():
    """'hellou' does not end like 'trello'."""
    matcher = not_(ends_like('trello'))
    assert_that('hellou', matcher)
def test_is_not_four():
    """5 must not satisfy the is_four matcher."""
    matcher = not_(is_four())
    assert_that(5, matcher)
def is_hungup(self):
    """Return a matcher asserting the channel id is no longer listed in ARI."""
    active_ids = [channel.id for channel in self._ari.channels.list()]
    return not_(is_in(active_ids))
def then_extension_is_in_context(step, extension, context):
    """Retry until the extension's presence check settles.

    NOTE(review): the step name says "is in context" while the assertion
    negates the check; the negation is kept as written — confirm the
    intended polarity against the feature files.
    """
    # BUG FIXES:
    # 1. not_(x) builds a hamcrest matcher object, which is always truthy,
    #    so assert_that(not_(x)) could never fail; use a plain `not`.
    # 2. the original computed _extension_in_context() once, *outside* the
    #    lambda, so wait_until_assert's retries re-checked a stale value;
    #    the check must run inside the retried callable.
    common.wait_until_assert(
        lambda: assert_that(not _extension_in_context(extension, context)),
        tries=3)
def test_removes_bond_from_get_bonds(self):
    """A removed bond disappears from the bonds listing."""
    self.client.remove_bond(42)

    remaining_bonds = self.client.get_bonds()
    assert_that(remaining_bonds, not_(has_item(42)))