def test_run_only(report_for, severities):
    """Checks that running for given severities runs only selected tests."""
    report = report_for("""
    import pytest

    @pytest.allure.CRITICAL
    def test_a():
        pass

    @pytest.allure.MINOR
    def test_b():
        pass

    def test_c():
        pass
    """, extra_run_args=['--allure_severities', ','.join(severities)])

    # Tests whose severity was selected pass; the rest are canceled.
    a_status, b_status, c_status = [
        Status.PASSED if severity in severities else Status.CANCELED
        for severity in [Severity.CRITICAL, Severity.MINOR, Severity.NORMAL]]

    assert_that(report.xpath(".//test-case"), contains(
        all_of(has_property('name', 'test_a'),
               has_property('attrib', has_entry('status', a_status))),
        all_of(has_property('name', 'test_b'),
               has_property('attrib', has_entry('status', b_status))),
        all_of(has_property('name', 'test_c'),
               has_property('attrib', has_entry('status', c_status)))))
def test_associate_secondary_user_with_line(self):
    """Associating a second user leaves the main user's flags untouched."""
    main_user = self.add_user()
    secondary_user = self.add_user()
    line = self.add_line()
    self.add_user_line(user_id=main_user.id,
                       line_id=line.id,
                       extension_id=None,
                       main_user=True,
                       main_line=True)

    user_line_dao.associate(secondary_user, line)

    rows = (self.session.query(UserLine)
            .filter(UserLine.line_id == line.id)
            .all())

    assert_that(rows, contains_inanyorder(
        all_of(has_property('user_id', main_user.id),
               has_property('line_id', line.id),
               has_property('extension_id', None),
               has_property('main_user', True),
               has_property('main_line', True)),
        all_of(has_property('user_id', secondary_user.id),
               has_property('line_id', line.id),
               has_property('extension_id', None),
               has_property('main_user', False),
               has_property('main_line', True))))
def GetCompletions_Basic_test( app ):
  """Semantic Go completions at two adjacent columns of the same line."""
  filepath = PathToTestFile( 'test.go' )

  def RequestCompletions( column ):
    completion_data = BuildRequest( filepath = filepath,
                                    filetype = 'go',
                                    contents = ReadFile( filepath ),
                                    force_semantic = True,
                                    line_num = 9,
                                    column_num = column )
    return app.post_json( '/completions',
                          completion_data ).json[ 'completions' ]

  results = RequestCompletions( 9 )
  assert_that( results,
               all_of(
                 has_items(
                   CompletionEntryMatcher( 'Llongfile', 'untyped int' ),
                   CompletionEntryMatcher( 'Logger', 'struct' ) ) ) )

  # Two columns later the query is narrower, but Logger is still offered.
  results = RequestCompletions( 11 )
  assert_that( results,
               all_of( has_item(
                 CompletionEntryMatcher( 'Logger', 'struct' ) ) ) )
def it_returns_random_point_within_the_given_data_boundaries():
    """A random centroid must fall inside the (xmin, ymin, xmax, ymax) box."""
    centroid = ml.random_centroid_in((-15, -3, 20, 10))
    cx, cy = centroid[0], centroid[1]
    assert_that(cx, all_of(greater_than_or_equal_to(-15),
                           less_than_or_equal_to(20)))
    assert_that(cy, all_of(greater_than_or_equal_to(-3),
                           less_than_or_equal_to(10)))
def test_list_multi_tenant(main, sub):
    """Listing applications is tenant-scoped; recurse=True includes sub-tenants."""
    response = confd.applications.get(wazo_tenant=MAIN_TENANT)
    # BUG FIX: not_(has_item(sub)) was previously passed as assert_that's
    # third (reason) argument, so the "sub is absent" check never ran.
    # It belongs inside all_of, matching the SUB_TENANT assertion below.
    assert_that(response.items, all_of(has_item(main), not_(has_item(sub))))

    response = confd.applications.get(wazo_tenant=SUB_TENANT)
    assert_that(response.items, all_of(has_item(sub), not_(has_item(main))))

    response = confd.applications.get(wazo_tenant=MAIN_TENANT, recurse=True)
    assert_that(response.items, has_items(main, sub))
def test_list_multi_tenant(main, sub):
    """Listing SIP templates is tenant-scoped; recurse=True includes sub-tenants."""
    response = confd.endpoints.sip.templates.get(wazo_tenant=MAIN_TENANT)
    # BUG FIX: not_(has_items(sub)) was previously passed as assert_that's
    # third (reason) argument, so the "sub is absent" check never ran.
    # It belongs inside all_of, matching the SUB_TENANT assertion below.
    assert_that(response.items, all_of(has_items(main), not_(has_items(sub))))

    response = confd.endpoints.sip.templates.get(wazo_tenant=SUB_TENANT)
    assert_that(response.items, all_of(has_items(sub), not_(has_items(main))))

    response = confd.endpoints.sip.templates.get(
        wazo_tenant=MAIN_TENANT, recurse=True)
    assert_that(response.items, has_items(main, sub))
def _matches(self, report):
    """True when some container's children include every named test case
    and the container satisfies all extra matchers."""
    children_matcher = has_entry('children', all_of(
        *[has_item(self._test_case_id_by_name(report, name))
          for name in self.test_case_names]))
    container_matcher = all_of(children_matcher, *self.matchers)
    return has_property('test_containers',
                        has_item(container_matcher)).matches(report)
def test__shouldReturnListOfTransitionApprovalMeta(self):
    """The list endpoint returns one serialized entry per approval meta."""
    initial_state = State.objects.create(label="state-1")
    state_2 = State.objects.create(label="state-2")
    state_3 = State.objects.create(label="state-3")
    content_type = ContentType.objects.first()
    workflow = Workflow.objects.create(initial_state=initial_state,
                                       content_type=content_type,
                                       field_name="test-field")
    transition_meta_1 = TransitionMeta.objects.create(
        workflow=workflow, source_state=initial_state,
        destination_state=state_2)
    transition_meta_2 = TransitionMeta.objects.create(
        workflow=workflow, source_state=initial_state,
        destination_state=state_3)
    transition_approval_meta_1 = TransitionApprovalMeta.objects.create(
        workflow=workflow, transition_meta=transition_meta_1, priority=0)
    transition_approval_meta_2 = TransitionApprovalMeta.objects.create(
        workflow=workflow, transition_meta=transition_meta_2, priority=0)

    response = self.client.get('/transition-approval-meta/list/')

    assert_that(response.status_code, equal_to(HTTP_200_OK))
    assert_that(response.data, has_length(2))

    def expected_entry(approval_meta, transition_meta):
        # Shared shape of a serialized TransitionApprovalMeta entry.
        return all_of(
            has_entry("id", equal_to(approval_meta.id)),
            has_entry("workflow", equal_to(workflow.id)),
            has_entry("transition_meta", equal_to(transition_meta.id)),
            has_entry("priority", equal_to(approval_meta.priority)),
            has_entry("permissions", has_length(0)),
            has_entry("groups", has_length(0)))

    assert_that(response.data, has_item(
        expected_entry(transition_approval_meta_1, transition_meta_1)))
    assert_that(response.data, has_item(
        expected_entry(transition_approval_meta_2, transition_meta_2)))
def test_group_with_test_fixtures(self):
    """Suite fixtures wrap the container that carries the test fixtures."""
    suite_fixtures = has_container(
        self.allure_report,
        all_of(has_before('Suite Setup Keyword'),
               has_after('Suite Teardown Keyword')))
    test_fixtures = has_container(
        self.allure_report,
        suite_fixtures,
        all_of(has_before('Test Setup Keyword'),
               has_after('Test Teardown Keyword')))
    assert_that(
        self.allure_report,
        has_test_case('Case With Test Fixtures', test_fixtures),
    )
def test_swagger(self):
    """The swagger doc distinguishes the two upload endpoints correctly."""
    response = self.client.get("/api/swagger")
    assert_that(response.status_code, is_(equal_to(200)))
    data = loads(response.data)

    upload = data["paths"]["/file"]["post"]
    upload_for = data["paths"]["/person/{person_id}/file"]["post"]

    # Both endpoints consume form data.
    for operation in (upload, upload_for):
        assert_that(operation["consumes"], contains("multipart/form-data"))

    # Only the plain upload endpoint takes the extra query string parameter.
    assert_that(upload["parameters"], has_item(has_entries(name="extra")))
    assert_that(upload_for["parameters"],
                has_item(is_not(has_entries(name="extra"))))

    # Only the plain upload endpoint gets the custom schemaless 204 response.
    assert_that(upload["responses"], all_of(
        has_key("204"),
        is_not(has_key("200")),
        has_entry("204", is_not(has_key("schema"))),
    ))
    assert_that(upload_for["responses"], all_of(
        has_key("200"),
        is_not(has_key("204")),
        has_entry("200", has_entry(
            "schema", has_entry("$ref", "#/definitions/FileResponse"))),
    ))
def test_one_failure(report_for):
    """A failing test is reported with FAILED status and a failure element."""
    report = report_for("""
    def test_fail():
        'fail test dosctring'
        assert 0
    """)
    assert_that(report.findall('.//test-case'), contains(all_of(
        has_property('name', 'test_fail'),
        has_property('description', 'fail test dosctring'),
        # FIX: the status is an XML attribute, so it must be matched through
        # the element's .attrib mapping; has_entry() applied directly to the
        # element does not inspect attributes (consistent with how the other
        # test-case assertions in this suite match 'status').
        has_property('attrib', has_entry('status', Status.FAILED)),
        has_property('failure', all_of(has_property('message'),
                                       has_property('stack-trace'))))))
def _has_voicemail(self, voicemail):
    """Matcher: a Voicemail instance whose FIELDS all equal `voicemail`'s."""
    field_matchers = [has_property(field, getattr(voicemail, field))
                      for field in Voicemail.FIELDS]
    return all_of(instance_of(Voicemail), *field_matchers)
def test_feature_and_stories_inheritance(report_for):
    """Checks that feature and stories markers can be inherited."""
    report = report_for("""
    import allure

    pytestmark = allure.feature('Feature1')

    @allure.feature('Feature2')
    class TestMy:

        @allure.story('Story1')
        def test_a(self):
            pass

        def test_b(self):
            pass
    """)
    assert_that(report, all_of(
        has_label('TestMy.test_a', 'feature', 'Feature1'),
        has_label('TestMy.test_a', 'feature', 'Feature2'),
        has_label('TestMy.test_a', 'story', 'Story1'),
        # NOTE(review): the two matchers below duplicate the test_a checks
        # above; presumably they were meant to target TestMy.test_b — confirm.
        has_label('TestMy.test_a', 'feature', 'Feature1'),
        has_label('TestMy.test_a', 'feature', 'Feature2')))
def test_labels_inheritance(report_for):
    """Checks that label markers can be inherited."""
    report = report_for("""
    import allure

    pytestmark = allure.label('label_name1', 'label_value1')

    @allure.label('label_name2', 'label_value2')
    class TestMy:

        @allure.label('label_name3', 'label_value3')
        @allure.label('label_name4', 'label_value4')
        def test_a(self):
            pass

        def test_b(self):
            pass
    """)
    assert_that(report, all_of(
        has_label('TestMy.test_a', 'label_name1', 'label_value1'),
        has_label('TestMy.test_a', 'label_name2', 'label_value2'),
        has_label('TestMy.test_a', 'label_name3', 'label_value3'),
        has_label('TestMy.test_a', 'label_name4', 'label_value4'),
        # NOTE(review): the two matchers below duplicate the test_a checks
        # above; presumably they were meant to target TestMy.test_b — confirm.
        has_label('TestMy.test_a', 'label_name1', 'label_value1'),
        has_label('TestMy.test_a', 'label_name2', 'label_value2')))
def test_testcases(report_for):
    """Checks that testcase markers on tests and classes appear as testId labels."""
    report = report_for("""
    import allure

    @allure.testcase('http://my.bugtracker.com/TESTCASE-1')
    def test_a(self):
        pass

    @allure.testcase('http://my.bugtracker.com/TESTCASE-2')
    class TestMy:

        @allure.testcase('http://my.bugtracker.com/TESTCASE-3')
        def test_b(self):
            pass

        def test_c(self):
            pass
    """)
    assert_that(report, all_of(
        has_label('test_a', 'testId', 'http://my.bugtracker.com/TESTCASE-1'),
        has_label('TestMy.test_b', 'testId', 'http://my.bugtracker.com/TESTCASE-2'),
        has_label('TestMy.test_b', 'testId', 'http://my.bugtracker.com/TESTCASE-3'),
        has_label('TestMy.test_c', 'testId', 'http://my.bugtracker.com/TESTCASE-2')))
def test_issues(report_for):
    """Checks that issue markers on tests and classes appear in the report."""
    report = report_for("""
    import allure

    @allure.issue('Issue1')
    def test_a(self):
        pass

    @allure.issue('Issue2')
    class TestMy:

        @allure.issue('Issue3')
        def test_b(self):
            pass

        def test_c(self):
            pass
    """)
    assert_that(report, all_of(
        has_label('test_a', 'issue', 'Issue1'),
        has_label('TestMy.test_b', 'issue', 'Issue2'),
        has_label('TestMy.test_b', 'issue', 'Issue3'),
        has_label('TestMy.test_c', 'issue', 'Issue2')))
def test_multiple_features_and_stories(report_for):
    """Checks that we can handle multiple feature and stories markers."""
    report = report_for("""
    import allure

    @allure.feature('Feature1', 'Feature2')
    @allure.feature('Feature3')
    def test_a():
        pass

    @allure.story('Story1', 'Story2')
    @allure.story('Story3')
    def test_b():
        pass
    """)
    assert_that(report, all_of(
        has_label('test_a', 'feature', 'Feature1'),
        has_label('test_a', 'feature', 'Feature2'),
        has_label('test_a', 'feature', 'Feature3'),
        has_label('test_b', 'story', 'Story1'),
        has_label('test_b', 'story', 'Story2'),
        has_label('test_b', 'story', 'Story3')))
def test_create(self):
    """context_dao.create persists a Context row with the expected defaults."""
    entity_name = 'testentity'
    context_name = 'contextname'
    context_type = ContextType.internal

    context = Context(name=context_name,
                      display_name=context_name,
                      type=context_type)
    self.add_entity(name=entity_name)

    created_context = context_dao.create(context)

    context_row = (self.session.query(ContextSchema)
                   .filter(ContextSchema.name == context_name)
                   .first())
    assert_that(created_context, instance_of(Context))
    assert_that(context_row, all_of(
        has_property('name', context_name),
        has_property('displayname', context_name),
        has_property('entity', entity_name),
        has_property('contexttype', context_type),
        has_property('commented', 0),
        has_property('description', '')))
def GetCompletions_MaxDetailedCompletion_test( app ):
  """All methods are offered, but without the detailed menu_text entries."""
  detailed_matchers = [
    CompletionEntryMatcher( 'methodA', extra_params = {
      'menu_text': 'methodA (method) Foo.methodA(): void' } ),
    CompletionEntryMatcher( 'methodB', extra_params = {
      'menu_text': 'methodB (method) Foo.methodB(): void' } ),
    CompletionEntryMatcher( 'methodC', extra_params = {
      'menu_text': ( 'methodC (method) Foo.methodC(a: '
                     '{ foo: string; bar: number; }): void' ) } ),
  ]
  RunTest( app, {
    'expect': {
      'data': has_entries( {
        'completions': all_of(
          contains_inanyorder(
            CompletionEntryMatcher( 'methodA' ),
            CompletionEntryMatcher( 'methodB' ),
            CompletionEntryMatcher( 'methodC' ),
          ),
          is_not( any_of(
            *[ has_item( matcher ) for matcher in detailed_matchers ] ) ) )
      } )
    }
  } )
def test_to_call_log(self):
    """to_call_log copies the raw fields and the cel_ids list verbatim."""
    copied_fields = ('date', 'date_answer', 'date_end',
                     'source_name', 'source_exten',
                     'destination_name', 'destination_exten',
                     'user_field')
    for field in copied_fields:
        setattr(self.raw_call_log, field, Mock())
    self.raw_call_log.cel_ids = [1, 2, 3]

    result = self.raw_call_log.to_call_log()

    assert_that(result, all_of(
        *[has_property(field, getattr(self.raw_call_log, field))
          for field in copied_fields]))
    assert_that(result.cel_ids, equal_to([1, 2, 3]))
def LanguageServerCompleter_GetCompletions_UnsupportedKinds_test():
  """An out-of-range LSP item kind is dropped instead of crashing."""
  completer = MockCompleter()
  request_data = RequestWrap( BuildRequest() )
  # Kind index one past the known ITEM_KIND table.
  completion_response = {
    'result': [ { 'label': 'test', 'kind': len( lsp.ITEM_KIND ) + 1 } ]
  }
  resolve_responses = [ { 'result': { 'label': 'test' } } ]
  with patch.object( completer, 'ServerIsReady', return_value = True ):
    with patch.object( completer.GetConnection(),
                       'GetResponse',
                       side_effect = [ completion_response ] +
                                     resolve_responses ):
      assert_that(
        completer.ComputeCandidatesInner( request_data ),
        has_items( all_of( has_entry( 'insertion_text', 'test' ),
                           is_not( has_key( 'kind' ) ) ) ) )
def test_unmarshal(self):
    """unmarshal restores conference_id and extension_id from the message."""
    event = ConcreteConferenceExtensionConfigEvent.unmarshal(self.msg)
    assert_that(event, all_of(
        has_property('conference_id', CONFERENCE_ID),
        has_property('extension_id', EXTENSION_ID)))
def _run_pubsub_bq_pipeline(self, method, triggering_frequency=None):
    """Publish numbers to Pub/Sub and stream them into BigQuery via `method`.

    The on_success matcher verifies both that the pipeline reaches RUNNING
    and that the streamed rows eventually appear in the output table.
    """
    # FIX: renamed `l` (ambiguous single-letter name, PEP 8 / E741).
    numbers = list(range(self._SIZE))
    matchers = [
        PipelineStateMatcher(PipelineState.RUNNING),
        BigqueryFullResultStreamingMatcher(
            project=self.project,
            query="SELECT number FROM %s" % self.output_table,
            data=[(i,) for i in numbers]),
    ]
    args = self.test_pipeline.get_full_options_as_args(
        on_success_matcher=hc.all_of(*matchers),
        wait_until_finish_duration=self.WAIT_UNTIL_FINISH_DURATION,
        experiments='use_beam_bq_sink',
        streaming=True)

    def add_schema_info(element):
        yield {'number': element}

    for number in numbers:
        self.pub_client.publish(self.input_topic.name,
                                str(number).encode('utf-8'))

    with beam.Pipeline(argv=args) as p:
        # FIX: renamed `mesages` (typo) to `messages`.
        messages = (p
                    | ReadFromPubSub(subscription=self.input_sub.name)
                    | beam.ParDo(add_schema_info))
        _ = messages | WriteToBigQuery(
            self.output_table,
            schema=self.SCHEMA,
            method=method,
            triggering_frequency=triggering_frequency)
def test_labels_inheritance(report_for):
    """Checks that label markers can be inherited."""
    report = report_for("""
    import allure

    pytestmark = allure.label('label_name1', 'label_value1')

    @allure.label('label_name2', 'label_value2')
    class TestMy:

        @allure.label('label_name3', 'label_value3')
        @allure.label('label_name4', 'label_value4')
        def test_a(self):
            pass

        def test_b(self):
            pass
    """)
    assert_that(report, all_of(
        has_label_length('TestMy.test_a', 4),
        has_label('TestMy.test_a', 'label_name1', 'label_value1'),
        has_label('TestMy.test_a', 'label_name2', 'label_value2'),
        has_label('TestMy.test_a', 'label_name3', 'label_value3'),
        has_label('TestMy.test_a', 'label_name4', 'label_value4'),
        has_label_length('TestMy.test_b', 2),
        # NOTE(review): the two matchers below repeat the test_a checks;
        # they presumably were meant to verify TestMy.test_b's inherited
        # labels — confirm.
        has_label('TestMy.test_a', 'label_name1', 'label_value1'),
        has_label('TestMy.test_a', 'label_name2', 'label_value2')))
def test_shouldReturnAnApprovalWhenUserIsAuthorizedWithAUserGroup(self):
    """A user authorized via group membership sees the pending approval."""
    authorized_user_group = GroupObjectFactory()
    authorized_user = UserObjectFactory(groups=[authorized_user_group])

    state1 = StateObjectFactory(label="state1")
    state2 = StateObjectFactory(label="state2")
    workflow = WorkflowFactory(initial_state=state1,
                               content_type=self.content_type,
                               field_name="my_field")
    transition_meta = TransitionMetaFactory.create(
        workflow=workflow,
        source_state=state1,
        destination_state=state2,
    )
    approval_meta = TransitionApprovalMetaFactory.create(
        workflow=workflow, transition_meta=transition_meta, priority=0)
    approval_meta.groups.add(authorized_user_group)

    workflow_object = BasicTestModelObjectFactory(workflow=workflow)

    available_approvals = (
        BasicTestModel.river.my_field.get_available_approvals(
            as_user=authorized_user))
    assert_that(available_approvals, has_length(1))
    assert_that(list(available_approvals), has_item(all_of(
        has_property("workflow_object", workflow_object.model),
        has_property("workflow", workflow),
        has_property("transition", transition_meta.transitions.first()))))
def test_bulk_with_report(self):
    """bulk() applies the index action and reports the failed delete."""
    results = self.store.bulk(
        actions=[
            ("index", self.kevin),
            ("delete", self.steph),
        ],
        batch_size=2,
    )

    assert_that(self.store.retrieve(self.kevin.id), all_of(
        has_property("id", self.kevin.id),
        has_property("first", "Kevin"),
        has_property("middle", none()),
        has_property("last", "Durant"),
    ))

    updated_count, errors = results[0]
    # One item was indexed.
    assert_that(updated_count, is_(equal_to(1)))
    # The delete of a missing document is reported, not raised.
    assert_that(errors, contains(has_key('delete')))
    assert_that(errors[0]['delete'], has_entry('result', 'not_found'))
def test_unmarshal(self):
    """unmarshal restores user_id, voicemail_id and the enabled flag."""
    event = ConcreteUserVoicemailConfigEvent.unmarshal(self.msg)
    assert_that(event, all_of(
        has_property('user_id', USER_ID),
        has_property('voicemail_id', VOICEMAIL_ID),
        has_property('enabled', ENABLED)))
def testLabelStar(self):
    """A label inside a starred section attaches to the preceding
    numbered section, and the star is recorded as the *modifier* argument."""
    s = TeX()
    s.input(r'\section{hi} text \section*{bye\label{two}}')
    output = s.parse()
    assert_that(output, has_length(2))

    section, section_star = output
    __traceback_info__ = (section, section.__dict__,
                          section_star, section_star.__dict__,
                          dict(type(section).__dict__))

    assert_that(section, has_property('arguments', has_item(
        all_of(has_property('index', 0),
               has_property('name', '*modifier*')))))
    assert_that(section, has_property('source', '\\section{hi} text \n\n'))
    assert_that(section, has_property('id', 'two'))
    assert_that(section, has_property('argSource', '{hi}'))

    assert_that(section_star,
                has_property('source', '\\section*{bye\\label{two}}'))
    assert_that(section_star,
                has_property('argSource', '*{bye\\label{two}}'))
    assert_that(section_star, has_property('id', is_not('two')))
    assert_that(dict(section_star.__dict__), has_entry('@hasgenid', True))
def assert_no_request(self, path, method='GET', query=None, body=None, json=None):
    """Assert that no received request matches all the given criteria.

    Criteria left as None match anything; if no request hit the
    path/method at all the assertion trivially holds.
    """
    try:
        results = self.requests_matching(path, method)
    except AssertionError:
        # Nothing recorded for this path/method: trivially satisfied.
        return

    query_matcher = has_entry('query', has_entries(query)) if query else anything()
    body_matcher = has_entry('body', equal_to(body)) if body else anything()
    json_matcher = has_entry('json', equal_to(json)) if json else anything()

    # BUG FIX: the original applied Python's `not` to a hamcrest matcher
    # (always False, degrading assert_that to a truthiness check on
    # `results`) and passed pformat(results) as a second argument to
    # has_item. Use hamcrest's is_not() and hand pformat(results) to
    # assert_that as the failure reason. (Assumes is_not is imported at
    # module level, as elsewhere in this suite.)
    assert_that(
        results,
        is_not(has_item(all_of(query_matcher, body_matcher, json_matcher))),
        pformat(results),
    )
def test_multiple_features_and_stories(report_for):
    """Checks that we can handle multiple feature and stories markers."""
    report = report_for("""
    import allure

    @allure.feature('Feature1', 'Feature2')
    @allure.feature('Feature3')
    def test_a():
        pass

    @allure.story('Story1', 'Story2')
    @allure.story('Story3')
    def test_b():
        pass
    """)
    feature_checks = [has_label('test_a', 'feature', 'Feature%d' % n)
                      for n in (1, 2, 3)]
    story_checks = [has_label('test_b', 'story', 'Story%d' % n)
                    for n in (1, 2, 3)]
    assert_that(report, all_of(*(feature_checks + story_checks)))
def test_unmarshal(self):
    """unmarshal restores the paging id and the member user uuids."""
    event = PagingMemberUsersAssociatedEvent.unmarshal(self.msg)
    # NOTE(review): paging_id is compared against a constant named GROUP_ID —
    # presumably deliberate constant reuse in this module; confirm.
    assert_that(event, all_of(
        has_property('paging_id', GROUP_ID),
        has_property('user_uuids', USER_UUIDS)))
def test_unmarshal(self):
    """unmarshal restores the call filter id and the recipient user uuids."""
    event = CallFilterRecipientUsersAssociatedEvent.unmarshal(self.msg)
    assert_that(event, all_of(
        has_property('call_filter_id', CALL_FILTER_ID),
        has_property('user_uuids', USER_UUIDS)))
def test_capture_stdout(allured_testdir, capture):
    """
    >>> import pytest
    >>> import allure

    >>> @pytest.fixture
    ... def fixture(request):
    ...     print ("Start fixture")
    ...     def finalizer():
    ...         print ("Stop fixture")
    ...     request.addfinalizer(finalizer)

    >>> def test_capture_stdout_example(fixture):
    ...     print ("Start test")
    ...     with allure.step("Step"):
    ...         print ("Start step")
    """
    allured_testdir.parse_docstring_source()
    allured_testdir.run_with_allure("--capture={capture}".format(capture=capture))

    # With capture enabled the prints become attachments; with --capture=no
    # they must be absent.
    if_pytest_capture_ = is_not if capture == "no" else is_

    expected_output = ["Start fixture", "Stop fixture",
                       "Start test", "Start step"]
    assert_that(allured_testdir.allure_report,
                has_property("attachments", all_of(
                    *[if_pytest_capture_(has_value(contains_string(text)))
                      for text in expected_output])))
def testProcessCommandEvent_final(self, plugin, attempt_metric):
    """A final invocation event completes the command and its request."""
    command = self._CreateCommand()
    command_manager.ScheduleTasks([command])
    _, request_id, _, command_id = command.key.flat()

    tasks = command_manager.GetActiveTasks(command)
    self.assertEqual(len(tasks), 1)
    command_task_store.LeaseTask(tasks[0].task_id)
    attempt = command_event_test_util.CreateCommandAttempt(
        command, "attempt0", common.CommandState.UNKNOWN, task=tasks[0])
    event = command_event_test_util.CreateTestCommandEvent(
        request_id,
        command_id,
        "attempt0",
        common.InvocationEventType.INVOCATION_COMPLETED,
        task=tasks[0],
        time=TIMESTAMP)

    commander.ProcessCommandEvent(event)

    # No active task remains and both command and request are COMPLETED.
    tasks = command_manager.GetActiveTasks(command)
    self.assertEqual(len(tasks), 0)
    command = command.key.get(use_cache=False)
    self.assertEqual(common.CommandState.COMPLETED, command.state)
    request = command.key.parent().get(use_cache=False)
    self.assertEqual(common.RequestState.COMPLETED, request.state)

    attempt_metric.assert_called_once_with(cluster_id=command.cluster,
                                           run_target=command.run_target,
                                           hostname="hostname",
                                           state="COMPLETED")

    attempt_matcher = hamcrest.match_equality(hamcrest.all_of(
        hamcrest.has_property("command_id", attempt.command_id),
        hamcrest.has_property("attempt_id", attempt.attempt_id),
        hamcrest.has_property("task_id", attempt.task_id),
    ))
    plugin.assert_has_calls([
        mock.call.OnCreateCommands(
            [plugin_base.CommandInfo(command_id=COMMAND_ID,
                                     command_line="command_line1",
                                     run_count=1,
                                     shard_count=1,
                                     shard_index=0)],
            {"ants_invocation_id": "i123",
             "command_ants_work_unit_id": "w123"},
            {}),
        mock.call.OnProcessCommandEvent(
            command,
            attempt_matcher,
            event_data={"total_test_count": 1000,
                        "device_lost_detected": 0,
                        "failed_test_run_count": 10,
                        "passed_test_count": 900,
                        "failed_test_count": 100,
                        "summary": "summary"}),
    ])
def test_big_query_write_new_types(self):
    """Write rows using the newer BQ types and verify them with a query."""
    table_name = 'python_new_types_table'
    table_id = '{}.{}'.format(self.dataset_id, table_name)

    row_data = {
        'float': 0.33,
        'numeric': Decimal('10'),
        'bytes': base64.b64encode(b'\xab\xac').decode('utf-8'),
        'date': '3000-12-31',
        'time': '23:59:59',
        'datetime': '2018-12-31T12:44:31',
        'timestamp': '2018-12-31 12:44:31.744957 UTC',
        'geo': 'POINT(30 10)'
    }

    # Full row first, then one row per single key/value pair with all
    # other keys left unset (None).
    input_data = [row_data]
    for key, value in iteritems(row_data):
        input_data.append({key: value})

    table_schema = {"fields": [
        {"name": "float", "type": "FLOAT"},
        {"name": "numeric", "type": "NUMERIC"},
        {"name": "bytes", "type": "BYTES"},
        {"name": "date", "type": "DATE"},
        {"name": "time", "type": "TIME"},
        {"name": "datetime", "type": "DATETIME"},
        {"name": "timestamp", "type": "TIMESTAMP"},
        {"name": "geo", "type": "GEOGRAPHY"}
    ]}

    expected_row = (0.33,
                    Decimal('10'),
                    b'\xab\xac',
                    datetime.date(3000, 12, 31),
                    datetime.time(23, 59, 59),
                    datetime.datetime(2018, 12, 31, 12, 44, 31),
                    datetime.datetime(2018, 12, 31, 12, 44, 31, 744957,
                                      tzinfo=pytz.utc),
                    'POINT(30 10)',
                    )

    # Mirror input_data: full row, then single-value rows padded with None.
    expected_data = [expected_row]
    for i, value in enumerate(expected_row):
        row = [None] * len(expected_row)
        row[i] = value
        expected_data.append(tuple(row))

    pipeline_verifiers = [
        BigqueryFullResultMatcher(
            project=self.project,
            query='SELECT float, numeric, bytes, date, time, datetime,'
                  'timestamp, geo FROM %s' % table_id,
            data=expected_data)]
    args = self.test_pipeline.get_full_options_as_args(
        on_success_matcher=hc.all_of(*pipeline_verifiers))

    with beam.Pipeline(argv=args) as p:
        # pylint: disable=expression-not-assigned
        (p
         | 'create' >> beam.Create(input_data)
         | 'write' >> beam.io.WriteToBigQuery(
             table_id,
             schema=table_schema,
             create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
             write_disposition=beam.io.BigQueryDisposition.WRITE_EMPTY))
def test_big_query_write(self):
    """End-to-end write of simple rows, verified by querying the table."""
    table_name = 'python_write_table'
    table_id = '{}.{}'.format(self.dataset_id, table_name)

    input_data = [
        {'number': 1, 'str': 'abc'},
        {'number': 2, 'str': 'def'},
        {'number': 3, 'str': u'你好'},
        {'number': 4, 'str': u'привет'},
    ]
    table_schema = {"fields": [
        {"name": "number", "type": "INTEGER"},
        {"name": "str", "type": "STRING"}]}

    pipeline_verifiers = [
        BigqueryFullResultMatcher(
            project=self.project,
            query="SELECT number, str FROM %s" % table_id,
            data=[(1, 'abc',), (2, 'def',), (3, u'你好',), (4, u'привет',)])]
    args = self.test_pipeline.get_full_options_as_args(
        on_success_matcher=hc.all_of(*pipeline_verifiers))

    with beam.Pipeline(argv=args) as p:
        # pylint: disable=expression-not-assigned
        (p
         | 'create' >> beam.Create(input_data)
         | 'write' >> beam.io.WriteToBigQuery(
             table_id,
             schema=table_schema,
             create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
             write_disposition=beam.io.BigQueryDisposition.WRITE_EMPTY))
def test_big_query_write_schema_autodetect(self):
    """FILE_LOADS with SCHEMA_AUTODETECT writes rows without an explicit schema."""
    if self.runner_name == 'TestDataflowRunner':
        self.skipTest('DataflowRunner does not support schema autodetection')

    table_name = 'python_write_table'
    table_id = '{}.{}'.format(self.dataset_id, table_name)

    input_data = [
        {'number': 1, 'str': 'abc'},
        {'number': 2, 'str': 'def'},
    ]

    pipeline_verifiers = [
        BigqueryFullResultMatcher(
            project=self.project,
            query="SELECT number, str FROM %s" % table_id,
            data=[(1, 'abc',), (2, 'def',)])]
    args = self.test_pipeline.get_full_options_as_args(
        on_success_matcher=hc.all_of(*pipeline_verifiers),
        experiments='use_beam_bq_sink')

    with beam.Pipeline(argv=args) as p:
        # pylint: disable=expression-not-assigned
        (p
         | 'create' >> beam.Create(input_data)
         | 'write' >> beam.io.WriteToBigQuery(
             table_id,
             method=beam.io.WriteToBigQuery.Method.FILE_LOADS,
             schema=beam.io.gcp.bigquery.SCHEMA_AUTODETECT,
             create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
             write_disposition=beam.io.BigQueryDisposition.WRITE_EMPTY))
def test_unmarshal(self):
    """unmarshal restores line_id and extension_id from the message."""
    event = ConcreteLineExtensionConfigEvent.unmarshal(self.msg)
    assert_that(event, all_of(
        has_property('line_id', LINE_ID),
        has_property('extension_id', EXTENSION_ID)))
def test_feature_and_stories_inheritance(report_for):
    """Checks that feature and stories markers can be inherited."""
    report = report_for("""
    import allure

    pytestmark = allure.feature('Feature1')

    @allure.feature('Feature2')
    class TestMy:

        @allure.story('Story1')
        def test_a(self):
            pass

        def test_b(self):
            pass
    """)
    assert_that(report, all_of(
        has_label_length('TestMy.test_a', 3),
        has_label('TestMy.test_a', 'feature', 'Feature1'),
        has_label('TestMy.test_a', 'feature', 'Feature2'),
        has_label('TestMy.test_a', 'story', 'Story1'),
        has_label_length('TestMy.test_b', 2),
        # NOTE(review): the two matchers below repeat the test_a checks;
        # they presumably were meant to verify TestMy.test_b's inherited
        # features — confirm.
        has_label('TestMy.test_a', 'feature', 'Feature1'),
        has_label('TestMy.test_a', 'feature', 'Feature2')))
def has_test_case(name, *matchers):
    """Matcher: the report has a test case whose fullName ends with `name`
    (or whose name starts with it) and which satisfies every extra matcher."""
    name_matcher = any_of(has_entry('fullName', ends_with(name)),
                          has_entry('name', starts_with(name)))
    return has_property('test_cases',
                        has_item(all_of(name_matcher, *matchers)))
def test_unmarshal(self):
    """unmarshal restores user_id, cti_profile_id and enabled=True."""
    event = ConcreteUserCtiProfileConfigEvent.unmarshal(self.msg)
    assert_that(event, all_of(
        has_property('user_id', USER_ID),
        has_property('cti_profile_id', CTI_PROFILE_ID),
        has_property('enabled', True)))
def test_issues(report_for):
    """Checks that issue markers on tests and classes appear in the report."""
    report = report_for("""
    import allure

    @allure.issue('Issue1')
    def test_a(self):
        pass

    @allure.issue('Issue2')
    class TestMy:

        @allure.issue('Issue3')
        def test_b(self):
            pass

        def test_c(self):
            pass
    """)
    expected = [
        ('test_a', 'Issue1'),
        ('TestMy.test_b', 'Issue2'),
        ('TestMy.test_b', 'Issue3'),
        ('TestMy.test_c', 'Issue2'),
    ]
    assert_that(report, all_of(
        *[has_label(test_name, 'issue', issue)
          for test_name, issue in expected]))
def test_capture_stdout(allured_testdir, capture):
    """
    >>> import pytest
    >>> import allure

    >>> @pytest.fixture
    ... def fixture(request):
    ...     print ("Start fixture")
    ...     def finalizer():
    ...         print ("Stop fixture")
    ...     request.addfinalizer(finalizer)

    >>> def test_capture_stdout_example(fixture):
    ...     print ("Start test")
    ...     with allure.step("Step"):
    ...         print ("Start step")
    """
    allured_testdir.parse_docstring_source()
    allured_testdir.run_with_allure(
        "--capture={capture}".format(capture=capture))

    # Attachments exist only when pytest's capture is enabled.
    if_pytest_capture_ = is_not if capture == "no" else is_

    assert_that(
        allured_testdir.allure_report,
        has_property(
            "attachments",
            all_of(
                if_pytest_capture_(has_value(
                    contains_string("Start fixture"))),
                if_pytest_capture_(has_value(contains_string("Stop fixture"))),
                if_pytest_capture_(has_value(contains_string("Start test"))),
                if_pytest_capture_(has_value(contains_string("Start step"))))))
def GetCompletions_Basic_test( app ):
  """Python completions at two adjacent columns of the same line."""
  filepath = PathToTestFile( 'basic.py' )

  def RequestCompletions( column ):
    completion_data = BuildRequest( filepath = filepath,
                                    filetype = 'python',
                                    contents = ReadFile( filepath ),
                                    line_num = 7,
                                    column_num = column )
    return app.post_json( '/completions',
                          completion_data ).json[ 'completions' ]

  def LocationParams( line, column ):
    return { 'extra_data': has_entry(
               'location', has_entries( { 'line_num': line,
                                          'column_num': column,
                                          'filepath': filepath } ) ) }

  # Both attributes are offered, each with its definition location.
  results = RequestCompletions( 3 )
  assert_that( results,
               has_items(
                 CompletionEntryMatcher( 'a', 'self.a = 1',
                                         LocationParams( 3, 10 ) ),
                 CompletionEntryMatcher( 'b', 'self.b = 2',
                                         LocationParams( 4, 10 ) ) ) )

  # One column later the query filters the list down to 'a'.
  results = RequestCompletions( 4 )
  assert_that( results,
               all_of(
                 has_item(
                   CompletionEntryMatcher( 'a', 'self.a = 1',
                                           LocationParams( 3, 10 ) ) ),
                 is_not( has_item( CompletionEntryMatcher( 'b' ) ) ) ) )
def test_publish_two_messages_non_strict(self):
    """Both messages are published; ordering is not significant."""
    self.graph.sns_producer.produce(created("foo"), uri="http://localhost")
    self.graph.sns_producer.produce(created("bar"), uri="http://localhost")
    # NOTE(review): the "foo" matcher uses bare has_uri() while "bar" pins
    # the exact URI — presumably intentional asymmetry; confirm.
    assert_that(self.graph.sns_producer, published_inanyorder(
        all_of(has_media_type(created("bar")),
               has_uri("http://localhost")),
        all_of(has_media_type(created("foo")),
               has_uri()),
    ))
def test_get_by_extension_id_with_line(self):
    """get_by_extension_id returns the line/extension pair for the extension."""
    user_line_row = self.add_user_line_with_exten()
    line_extension = dao.get_by_extension_id(user_line_row.extension_id)
    assert_that(line_extension, all_of(
        has_property('line_id', user_line_row.line_id),
        has_property('extension_id', user_line_row.extension_id)))
def test_not_decorated_sub_class_decorated_method(executed_docstring_path):
    """The decorated method keeps its own severity, not another one."""
    assert_that(
        executed_docstring_path.allure_report,
        has_test_case(
            "TestNotDecoratedSubClass#test_decorated_method",
            all_of(has_severity("critical"),
                   is_not(has_severity("trivial")))))
def then_i_see_an_ami_message_on_the_queue(step, event_name, queue):
    """Assert the bus queue holds an AMI event whose data matches the table."""
    events = bus_helper.get_messages_from_bus(queue)
    # Each table row contributes one regexp matcher on the event data.
    matcher_dict = {event_line['header']: matches_regexp(event_line['value'])
                    for event_line in step.hashes}
    assert_that(events, has_item(all_of(
        has_entry('name', event_name),
        has_entry('data', has_entries(matcher_dict)))))
def test_removes_old_ipi_frames(mp3):
    """Saving metadata drops IPI frames absent from the new metadata."""
    filename = mp3(TXXX_IPI_Joel_Miller="00000123456789",
                   TXXX_IPI_Rebecca_Ann_Maloy="98765432100000")

    container.save(filename, Metadata(ipis={"Joel Miller": "00000123456789"}))

    tags = MP3(filename)
    assert_that(tags,
                all_of(has_key("TXXX:IPI:Joel Miller"),
                       not_(has_key("TXXX:IPI:Rebecca Ann Maloy"))),
                "tags in file")
def has_parameter(name, value):
    """Matcher: the 'parameters' list holds an entry with `name` and `value`."""
    parameter_matcher = all_of(has_entry('name', equal_to(name)),
                               has_entry('value', equal_to(value)))
    return has_entry('parameters', has_item(parameter_matcher))
def test_it_should_create_an_index_using_station_data(self):
    """The index filter builds the index from the named station fields."""
    result = self.filter(index('nombre', 'address'),
                         (NAME_AND_ADDRESS_STATION,))
    assert_that(result, contains(has_property('index', all_of(
        contains_string('matadero'),
        contains_string('chopera')))))
def test_find(self):
    """find returns the extension row matching the given id."""
    extension_row = self.add_extension(exten='1234', context='default')
    result = extension_dao.find(extension_row.id)
    assert_that(result, all_of(
        has_property('exten', extension_row.exten),
        has_property('context', extension_row.context)))
def has_link(url, link_type=None, name=None):
    """Matcher: 'links' holds an item with the given url and, when supplied,
    the given type and name (None values are not matched)."""
    candidates = zip(('url', 'type', 'name'), (url, link_type, name))
    entry_matchers = [has_entry(key, value)
                      for key, value in candidates
                      if value is not None]
    return has_entry('links', has_item(all_of(*entry_matchers)))
def has_error(message='', trace='', status=Status.FAILED):
    """Matcher: the suite holds a test-case with the given status whose
    failure carries `message` and a stack trace matching `trace`."""
    failure_matcher = has_property(
        'failure',
        has_properties({'message': message,
                        'stack-trace': has_string(trace)}))
    case_matcher = all_of(
        has_property('attrib', has_entry('status', status)),
        failure_matcher)
    return has_property('{}test-cases',
                        has_property('test-case', case_matcher))
def has_step(name, *matchers):
    """Matcher: 'steps' holds a step named `name` satisfying `matchers`."""
    step_matcher = all_of(has_entry('name', equal_to(name)), *matchers)
    return has_entry('steps', has_item(step_matcher))