def setUp(self):
    super(HallSetupMixin, self).setUp()

    self.hall_100 = Hall.objects.create(
        name='Hall 100',
        rows_number=10,
        seats_per_row=10,
    )
    self.hall_100_url = make_url_of_model(self.hall_100)
    self.hall_100_url_match = ends_with(self.hall_100_url)
    self.hall_100_match = has_entries(
        url=self.hall_100_url_match,
        name='Hall 100',
        rows_number=10,
        seats_per_row=10,
        capacity=100,
    )

    self.hall_400 = Hall.objects.create(
        name='Hall 400',
        rows_number=20,
        seats_per_row=20,
    )
    self.hall_400_url = make_url_of_model(self.hall_400)
    self.hall_400_url_match = ends_with(self.hall_400_url)
    self.hall_400_match = has_entries(
        url=self.hall_400_url_match,
        name='Hall 400',
        rows_number=20,
        seats_per_row=20,
        capacity=400,
    )

def has_test_case(name, *matchers):
    return has_property(
        'test_cases',
        has_item(
            all_of(
                any_of(
                    has_entry('fullName', ends_with(name)),
                    has_entry('name', ends_with(name))
                ),
                *matchers
            )
        )
    )

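# Usage sketch for the matcher factory above (illustrative only): FakeReport
# and the test name are hypothetical stand-ins for an allure report object
# that exposes a `test_cases` list of dicts; assert_that/has_entry are the
# usual hamcrest imports, assumed to already be in scope in the real module.
def _example_has_test_case_usage():
    class FakeReport(object):
        test_cases = [
            {'fullName': 'tests.test_sample#test_number_one',
             'status': 'passed'},
        ]

    assert_that(
        FakeReport(),
        has_test_case('test_number_one', has_entry('status', 'passed')),
    )
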
def setUp(self):
    super(MovieSetupMixin, self).setUp()

    self.movie_unused = Movie.objects.create(
        name='The Disaster Artist',
        duration=104,
    )
    self.movie_unused_url = make_url_of_model(self.movie_unused)
    self.movie_unused_url_match = ends_with(self.movie_unused_url)
    self.movie_unused_match = has_entries(
        url=self.movie_unused_url_match,
        name='The Disaster Artist',
        duration=104,
    )

    self.movie_90 = Movie.objects.create(name='Movie 90', duration=90)
    self.movie_90_url = make_url_of_model(self.movie_90)
    self.movie_90_url_match = ends_with(self.movie_90_url)
    self.movie_90_match = has_entries(
        url=self.movie_90_url_match,
        name='Movie 90',
        duration=90,
    )

    self.movie_120 = Movie.objects.create(name='Movie 120', duration=120)
    self.movie_120_url = make_url_of_model(self.movie_120)
    self.movie_120_url_match = ends_with(self.movie_120_url)
    self.movie_120_match = has_entries(
        url=self.movie_120_url_match,
        name='Movie 120',
        duration=120,
    )

def has_only_n_test_cases(name, num, *matchers):
    return has_property(
        'test_cases',
        ContainsExactly(
            num,
            all_of(
                any_of(
                    has_entry('fullName', ends_with(name)),
                    has_entry('name', ends_with(name))
                ),
                *matchers
            )
        )
    )

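# Usage sketch for has_only_n_test_cases (illustrative only): it depends on
# the custom ContainsExactly matcher, which is not shown here and is assumed
# to count how many test cases satisfy the combined matcher. The report
# object and names below are hypothetical.
#
#     assert_that(
#         allure_report,
#         has_only_n_test_cases('test_parametrized', 3,
#                               has_entry('status', 'passed')),
#     )
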
def setUp(self):
    super(MovieSessionSetupMixin, self).setUp()

    self.movie_session_past = MovieSession.objects.create(
        hall=self.hall_100,
        movie=self.movie_90,
        date=(timezone.now() - timedelta(days=1)).date(),
        starts_at=time(8),
        ticket_cost=100.0,
    )
    self.movie_session_past_cost_str = '100.00'
    self.movie_session_past_url = make_url_of_model(
        self.movie_session_past)
    self.movie_session_past_url_match = ends_with(
        self.movie_session_past_url,
    )
    self.movie_session_past_match = has_entries(
        url=self.movie_session_past_url_match,
        hall=self.hall_100_url_match,
        movie=self.movie_90_url_match,
    )

    self.movie_session_100_90 = MovieSession.objects.create(
        hall=self.hall_100,
        movie=self.movie_90,
        date=(timezone.now() + timedelta(days=1)).date(),
        starts_at=time(8),
        ticket_cost=100.0,
    )
    self.movie_session_100_90_cost_str = '100.00'
    self.movie_session_100_90_url = make_url_of_model(
        self.movie_session_100_90,
    )
    self.movie_session_100_90_url_match = ends_with(
        self.movie_session_100_90_url,
    )
    self.movie_session_100_90_match = has_entries(
        url=self.movie_session_100_90_url_match,
        hall=self.hall_100_url_match,
        movie=self.movie_90_url_match,
    )

    self.movie_session_400_120 = MovieSession.objects.create(
        hall=self.hall_400,
        movie=self.movie_120,
        date=(timezone.now() + timedelta(days=1)).date(),
        starts_at=time(12),
        ticket_cost=150,
    )
    self.movie_session_400_120_cost_str = '150.00'
    self.movie_session_400_120_url = make_url_of_model(
        self.movie_session_400_120,
    )
    self.movie_session_400_120_url_match = ends_with(
        self.movie_session_400_120_url,
    )
    self.movie_session_400_120_match = has_entries(
        url=self.movie_session_400_120_url_match,
        hall=self.hall_400_url_match,
        movie=self.movie_120_url_match,
    )

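# Usage sketch (hypothetical test case): assumes a DRF-style APITestCase with
# `self.client`, the mixins above providing the `*_match` matchers, and the
# usual hamcrest imports; the '/api/halls/' endpoint is an illustrative guess.
#
#     class HallListTest(HallSetupMixin, APITestCase):
#         def test_list_contains_created_halls(self):
#             response = self.client.get('/api/halls/')
#             assert_that(
#                 response.json(),
#                 has_items(self.hall_100_match, self.hall_400_match),
#             )
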
def test_that_a_token_has_a_remote_address_and_user_agent(self):
    ua = 'My Test Runner'
    post_result = self.client.token.new(expiration=1, user_agent=ua)
    # Docker host addresses are always X.X.X.1
    assert_that(post_result,
                has_entries(user_agent=ua, remote_addr=ends_with('.1')))

    get_result = self.client.token.get(post_result['token'])
    assert_that(get_result,
                has_entries(user_agent=ua, remote_addr=ends_with('.1')))

def test_main():
    with patch('builtins.print') as mocked_print:
        arguments = ['--hashes', '--output', '/dev/null',
                     path.join(here, 'files/empty'),
                     path.join(here, 'files/1234')]

        main(arguments)

        # assert a few of the prints that should have been made
        mocked_print.assert_any_call(
            '{} ({})'.format(path.join(here, 'files/empty'), '0 bytes'),
            flush=True)
        mocked_print.assert_any_call(
            ' sha256 e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
        mocked_print.assert_any_call(
            '{} ({})'.format(path.join(here, 'files/1234'), '4 bytes'),
            flush=True)
        mocked_print.assert_any_call(
            ' sha256 9f64a747e1b97f131fabb6b447296c9b6f0201e79fb3c5356e6c77e89b6a806a')

    four_bytes_output = {
        'source': eq(ends_with('files/1234')),
        'size': 4,
        'completed': ANY,
        'md5': '08d6c05a21512a79a1dfeb9d2a8f262f',
        'sha1': '12dada1fff4d4787ade3333147202c3b443e376f',
        'sha256': '9f64a747e1b97f131fabb6b447296c9b6f0201e79fb3c5356e6c77e89b6a806a',
        'sha512': 'a7c976db1723adb41274178dc82e9b777941ab201c69de61d0f2bc6d27a3598f594fa748e50d88d3c2bf1e2c2e72c3cfef78c3c6d4afa90391f7e33ababca48e',
    }
    random_dd_output = {
        'source': eq(ends_with('files/random.dd')),
        'size': 1048576,
        'completed': eq(instance_of(datetime)),
        'md5': '257f5c2913ea856cb0a2313f167452d4',
        'sha1': '2f8a9e749cc8e46bebe602827228e76611346f54',
        'sha256': '810ec5f2086379f0e8000456dbf2aede8538fbc9d9898835f114c8771ed834b5',
        'sha512': '24dbb6cb56757a621fb8e6a8c8733f1cfc3c77bd23ac325e672eaaf856eac602307541ac434f598afb62448e90b3608344cfeb2e64778d3f7024bc69f5bb46ef',
    }
    if sha3_available:
        four_bytes_output['sha3-256'] = '966dbdcbd0e0348faa1ccbce5a62b8e73b0d08955d666db82243b303d9bd9502'
        random_dd_output['sha3-256'] = 'f704f27aaf0d689f02917be02c1e873abefab54b9b517bcdf3d868569d6b2e65'

    with patch('builtins.print'), patch('digestive.main.output_to_file') as output:
        output_generator = MagicMock()
        output.return_value = output_generator

        # force raw format to treat random.E01 as a regular file (courtesy of Travis…)
        arguments = ['--hashes', '--recursive', '--output', '/dev/null',
                     path.join(here, 'files')]

        main(arguments)

        # assert recursing into files and processing the test files, posting results to output
        output.assert_called_with('/dev/null')
        output_generator.send.assert_has_calls([
            # initial info call
            call({'digestive': '0.1', 'started': ANY}),
            # hashes of tests/files/1234
            call(four_bytes_output),
            # hashes of tests/files/random.dd
            call(random_dd_output)
        ], any_order=True)

def test_configure_engine():
    """
    Engine factory should work with zero configuration.

    """
    graph = create_object_graph(name="example", testing=True)
    engine = graph.postgres
    assert_that(engine, is_(instance_of(Engine)))

    # engine has expected configuration
    assert_that(
        str(engine.url),
        starts_with("postgresql://example:@"),
    )
    assert_that(
        str(engine.url),
        ends_with(":5432/example_test_db"),
    )

    # engine supports connections
    with engine.connect() as connection:
        row = connection.execute("SELECT 1;").fetchone()
        assert_that(row[0], is_(equal_to(1)))

def LanguageServerCompleter_Diagnostics_PercentEncodeCannonical_test():
  completer = MockCompleter()
  filepath = os.path.realpath( '/foo?' )
  uri = lsp.FilePathToUri( filepath )
  assert_that( uri, ends_with( '%3F' ) )
  request_data = RequestWrap( BuildRequest( line_num = 1,
                                            column_num = 1,
                                            filepath = filepath,
                                            contents = '' ) )
  notification = {
    'jsonrpc': '2.0',
    'method': 'textDocument/publishDiagnostics',
    'params': {
      'uri': uri.replace( '%3F', '%3f' ),
      'diagnostics': [
        {
          'range': {
            'start': { 'line': 3, 'character': 10 },
            'end': { 'line': 3, 'character': 11 }
          },
          'severity': 1,
          'message': 'First error'
        }
      ]
    }
  }
  completer.GetConnection()._notifications.put( notification )
  completer.HandleNotificationInPollThread( notification )

  with patch.object( completer, 'ServerIsReady', return_value = True ):
    completer.SendInitialize(
      request_data,
      completer._GetSettingsFromExtraConf( request_data ) )
    # Simulate receipt of response and initialization complete
    initialize_response = { 'result': { 'capabilities': {} } }
    completer._HandleInitializeInPollThread( initialize_response )

    diagnostics = contains(
      has_entries( {
        'kind': equal_to( 'ERROR' ),
        'location': LocationMatcher( filepath, 4, 11 ),
        'location_extent': RangeMatcher( filepath, ( 4, 11 ), ( 4, 12 ) ),
        'ranges': contains( RangeMatcher( filepath, ( 4, 11 ), ( 4, 12 ) ) ),
        'text': equal_to( 'First error' ),
        'fixit_available': False
      } )
    )

    assert_that( completer.OnFileReadyToParse( request_data ), diagnostics )

    assert_that(
      completer.PollForMessages( request_data ),
      contains( has_entries( {
        'diagnostics': diagnostics,
        'filepath': filepath
      } ) )
    )

def test_select_by_testcase_id_test(ids, expected_tests, allured_testdir):
    """
    >>> import allure

    >>> @allure.id("1")
    ... def test_number_one():
    ...     pass

    >>> @allure.id("2")
    ... def test_number_two():
    ...     pass

    >>> @allure.id("3")
    ... @allure.id("3+")
    ... def test_number_three():
    ...     pass

    >>> def test_without_number():
    ...     pass
    """
    allured_testdir.parse_docstring_source()

    if ids:
        py_path = allured_testdir.testdir.makefile(".json", json.dumps(ids))
        os.environ["AS_TESTPLAN_PATH"] = py_path.strpath
    else:
        del os.environ["AS_TESTPLAN_PATH"]

    allured_testdir.run_with_allure()
    test_cases = [test_case["fullName"] for test_case
                  in allured_testdir.allure_report.test_cases]

    assert_that(test_cases,
                only_contains(
                    any_of(*[ends_with(name) for name in expected_tests])
                ))

def test_http_error_from_backdrop_flashes_message(self,
                                                  data_set_post_patch,
                                                  is_virus_patch,
                                                  get_data_set_patch,
                                                  client):
    is_virus_patch.return_value = False
    get_data_set_patch.return_value = {
        'data_group': 'carers-allowance',
        'data_type': 'volumetrics',
        'bearer_token': 'abc123',
        'foo': 'bar'
    }
    data_set_post_patch.side_effect = backdrop_response(401, {})
    post_data = {
        'file': (StringIO('_timestamp,foo\n2014-08-05T00:00:00Z,40'),
                 'MYSPECIALFILE.csv')
    }
    response = client.post('/upload-data/carers-allowance/volumetrics',
                           data=post_data)

    assert_that(
        self.get_from_session('upload_data')['payload'],
        equal_to(['[401] {}']))
    assert_that(response.headers['Location'], ends_with('/upload-data'))
    assert_that(response.status_code, equal_to(302))

def test_should_raise_exception_when_trying_to_use_matcher_as_second_keyword_argument_in_native_verification(
        self):
    test_object = Mock(targetpackage.TheClass())
    test_object.some_method(1, 2, 3, hello='world')

    exception_raised = False
    try:
        verify(test_object).some_method(1, 2, 3, hello='world',
                                        world=ANY_VALUE)
    except VerificationError as error:
        exception_raised = True
        assert_that(str(error), ends_with("""
Please configure your mock using fluentmock.when in order to be able to use matchers!
"""))

    assert_that(exception_raised)

def test_redirect_to_error_if_problems_and_prevent_post(
        self,
        data_set_post_patch,
        validate_patch,
        get_data_set_patch,
        client):
    validate_patch.return_value = ["99"]
    get_data_set_patch.return_value = {
        'data_group': 'carers-allowance',
        'data_type': 'volumetrics',
        'bearer_token': 'abc123',
        'foo': 'bar'
    }
    post_data = {
        'file': (StringIO('_timestamp,foo\n2014-08-05T00:00:00Z,40'),
                 'MYSPECIALFILE.csv')
    }
    response = client.post(
        '/upload-data/carers-allowance/volumetrics',
        data=post_data)

    assert_that(
        self.get_from_session('upload_data')['payload'],
        equal_to(['99']))
    assert_that(response.headers['Location'], ends_with('/upload-data'))
    assert_that(response.status_code, equal_to(302))
    assert_that(data_set_post_patch.called, equal_to(False))

def test_redirects_to_channel_options_page(self, mock_get_dashboard):
    response = self.client.post(
        '/dashboard/dashboard-uuid/digital-take-up/upload-options',
        data=self.params())

    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(response.headers['Location'], ends_with('/channel-options'))

def test_save_and_continue_button_for_update(self,
                                             update_mock,
                                             mock_list_organisations,
                                             mock_list_data_sets,
                                             mock_list_module_types):
    with self.client.session_transaction() as session:
        session['oauth_token'] = {'access_token': 'token'}
        session['oauth_user'] = {
            'permissions': ['signin', 'admin']
        }

    data = valid_dashboard_data({
        'save_and_continue': '',
    })

    resp = self.client.post(
        '/admin/dashboards/uuid', data=data)

    assert_that(resp.status_code, equal_to(302))
    assert_that(
        resp.headers['Location'],
        ends_with('/admin/dashboards/uuid'))

    expected_flash = 'Updated the <a href="http://spotlight.development' +\
        '.performance.service.gov.uk/performance/valid-slug">' + \
        'My valid title</a> dashboard'
    self.assert_flashes(expected_flash, expected_category='success')

def test_continues_if_add_module_fails(
        self,
        add_module_to_dashboard_patch,
        list_module_types_patch,
        create_data_group_patch,
        get_data_group_patch,
        get_module_config_patch,
        create_data_set_patch,
        get_data_set_patch,
        get_dashboard_patch,
        client):
    get_module_config_patch.return_value = {
        'title': 'Cost per transaction'
    }
    get_dashboard_patch.return_value = {'slug': 'visas'}
    get_data_set_patch.return_value = {
        'name': 'apply_uk_visa_transactions_by_channel',
        'data_type': 'transactions-by-channel',
        'data_group': 'apply-uk-visa',
        'bearer_token': 'abc123',
        'upload_format': 'csv',
        'auto_ids': '_timestamp, period, channel',
        'max_age_expected': 1300000
    }
    get_data_group_patch.return_value = {'name': 'visas'}
    list_module_types_patch.return_value = [{
        'name': 'single_timeseries',
        'id': 'uuid'
    }]
    # ===
    response = Mock()
    response.status_code = 400
    response.text = 'Module with this Dashboard and Slug already exists'
    error = HTTPError('Error message', response=response)
    # ===
    add_module_to_dashboard_patch.side_effect = error

    response = client.post(self.upload_url, data=self.file_data)

    # see line 117 of application/controllers/upload.py
    # to see why this is expected.
    assert_that(
        self.get_from_session('upload_data')['payload'],
        equal_to([]))
    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(
        response.headers['Location'],
        ends_with("/dashboard/{}"
                  "/cost-per-transaction/upload/success".format(
                      self.dash_id)))

    get_data_set_patch.assert_called_once_with("visas",
                                               "cost-per-transaction")
    assert_that(create_data_set_patch.called, equal_to(False))
    get_data_group_patch.assert_called_once_with("visas")
    assert_that(create_data_group_patch.called, equal_to(False))
    assert_that(list_module_types_patch.call_count, equal_to(1))
    add_module_to_dashboard_patch.assert_called_once_with(
        'visas',
        {
            'type_id': 'uuid',
            'data_group': 'visas',
            'data_type': 'cost-per-transaction',
            'title': 'Cost per transaction'
        })

def test_redirects_if_module_exists(self, mock_get_dashboard):
    mock_get_dashboard.return_value = {
        'id': 'dashboard-uuid',
        'title': 'A dashboard',
        'description': 'All about this dashboard',
        'slug': 'valid-slug',
        'owning_organisation': 'organisation-uuid',
        'dashboard_type': 'transaction',
        'customer_type': 'Business',
        'strapline': 'Dashboard',
        'business_model': 'Department budget',
        'published': False,
        'status': 'unpublished',
        'modules': [
            {'data_type': 'user-satisfaction-score'},
            {'slug': 'slug'}
        ]
    }
    response = self.client.get(
        '/dashboard/dashboard-uuid/user-satisfaction/add',
        data=self.params())

    assert_that(response.headers['Location'],
                ends_with('/dashboards/dashboard-uuid'))

def test_delete_works_and_moves_user_to_setup():
    """'Delete dashboard' button works and redirects user to setup page."""
    splinter_tests.visit("#/advanced")
    do_sleep()

    button = find_delete_button()
    assert not button.first['disabled']

    # Click the delete button
    button.click()

    # Wait a moment while the deletion happens
    do_sleep()

    # Are we at the setup page now?
    assert_that(browser.url, ends_with("#/setup"))

    # If we go back to Advanced Options, is the button disabled?
    splinter_tests.visit("#/advanced")
    do_sleep()
    button = find_delete_button()
    assert button.first['disabled']

def test_updating_existing_dashboard(self, update_mock):
    with self.client.session_transaction() as session:
        session['oauth_token'] = {'access_token': 'token'}
        session['oauth_user'] = {
            'permissions': ['signin', 'dashboard']
        }
    data = {
        'slug': 'my-valid-slug',
        'title': 'My valid title',
        'modules-0-slug': 'carers-realtime',
        'modules-0-data_group': 'carers-allowance',
        'modules-0-data_type': 'realtime',
        'modules-0-options': '{}',
        'modules-0-query_parameters': '{}',
        'modules-0-id': 'module-uuid',
    }
    resp = self.client.post(
        '/administer-dashboards/uuid',
        data=data)

    post_json = update_mock.call_args[0][1]

    assert_that(post_json['modules'][0], has_entries({
        'slug': 'carers-realtime',
        'data_group': 'carers-allowance',
        'data_type': 'realtime',
        'options': {},
        'query_parameters': {},
        'id': 'module-uuid',
    }))
    assert_that(update_mock.call_args[0][0], equal_to('uuid'))
    assert_that(resp.status_code, equal_to(302))
    assert_that(
        resp.headers['Location'],
        ends_with('/administer-dashboards'))
    self.assert_flashes(
        'Updated the my-valid-slug dashboard',
        expected_category='success')

def test_failing_updating_existing_dashboard_flashes_error(
        self, update_mock):
    with self.client.session_transaction() as session:
        session['oauth_token'] = {'access_token': 'token'}
        session['oauth_user'] = {
            'permissions': ['signin', 'dashboard']
        }
    data = {
        'slug': 'my-valid-slug',
        'title': 'My valid title',
        'modules-0-slug': 'carers-realtime',
        'modules-0-data_group': 'carers-allowance',
        'modules-0-data_type': 'realtime',
        'modules-0-options': '{}',
        'modules-0-query_parameters': '{}',
        'modules-0-id': 'module-uuid',
    }
    response_json_mock = Mock()
    response_json_mock.return_value = {'message': 'Error message'}
    response = requests.Response()
    response.status_code = 400
    response.json = response_json_mock
    error = requests.HTTPError('Error message', response=response)
    update_mock.side_effect = error

    resp = self.client.post(
        '/administer-dashboards/uuid',
        data=data)

    assert_that(resp.status_code, equal_to(302))
    assert_that(
        resp.headers['Location'],
        ends_with('/administer-dashboards/uuid'))
    self.assert_flashes(
        'Error updating the my-valid-slug dashboard: Error message',
        expected_category='danger')

def test_user_can_post_to_upload_data(self,
                                      data_set_post_patch,
                                      is_virus_patch,
                                      get_data_set_patch,
                                      client):
    is_virus_patch.return_value = False
    get_data_set_patch.return_value = {
        'data_group': 'carers-allowance',
        'data_type': 'volumetrics',
        'bearer_token': 'abc123',
        'foo': 'bar'
    }
    post_data = {
        'file': (StringIO('_timestamp,foo\n2014-08-05T00:00:00Z,40'),
                 'MYSPECIALFILE.csv')
    }
    response = client.post('/upload-data/carers-allowance/volumetrics',
                           data=post_data)

    expected_post = [{u'_timestamp': u'2014-08-05T00:00:00Z', u'foo': 40}]
    data_set_post_patch.assert_called_once_with(expected_post)

    upload_done_path = '/upload-data'
    assert_that(response.headers['Location'], ends_with(upload_done_path))
    assert_that(response.status_code, equal_to(302))
    assert_that(
        self.get_from_session('upload_data'),
        has_entries({
            u'data_type': u'volumetrics',
            u'data_group': u'carers-allowance'
        }))

def assert_content_disposition(self, filename):
    headers = {
        key.lower(): value
        for key, value in self.response.headers.items()
    }
    expected = has_entry('content-disposition',
                         ends_with('filename={}'.format(filename)))
    assert_that(headers, expected, 'Content-Disposition header not found')

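# Usage sketch for the helper above (illustrative only): the endpoint and
# `self.client` are hypothetical, and `self.response` is assumed to be set by
# the request made in the test before the helper is called.
#
#     def test_report_download_sets_filename(self):
#         self.response = self.client.get('/reports/latest/download')
#         self.assert_content_disposition('report.csv')
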
def test_signout_redirects_properly_and_clears_session(
        self,
        oauth_authorization_url_patch,
        oauth_get_patch,
        oauth_fetch_token_patch,
        client):
    response = client.get("/sign-out")

    assert_that(response.status_code, equal_to(302))
    assert_that(response.headers['Location'], ends_with('/users/sign_out'))
    with client.session_transaction() as session:
        assert_that(session, equal_to({}))

def test_published_dashboards_cannot_be_edited(
        self, mock_get_dashboard):
    response = self.client.get('/dashboards/dashboard-uuid')

    self.assert_flashes(
        'In review or published dashboards cannot be edited', 'info')
    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(response.headers['Location'], ends_with('/dashboards'))

def test_renders_a_contact_us_page_if_unable_to_extract_done_page(
        self, mock_get_dashboard):
    params = self.params(
        {'done_page_url': 'http://www.gov.uk/some-transaction'})
    response = self.client.post(
        '/dashboard/dashboard-uuid/user-satisfaction/add',
        data=params)

    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(response.headers['Location'], ends_with('/get-in-touch'))

def test_ping_sp_cmd(self):
    hb = NodeHeartBeat(interval=20)
    ip = '1.1.1.1'
    hb.add('spa', ip)
    hb.update_by_ip(ip, latency=5.123)
    assert_that(' '.join(hb.get_agent(ip)),
                ends_with('-h 1.1.1.1 -t 35 -np getagent'))
    hb.stop()

def clean_slate():
    # Delete all the dashboards
    delete_all_dashboards("test_user", "e2e_password", "login.etherios.com")
    # Log back in
    log_in_clean("test_user", "e2e_password", "login.etherios.com")
    # With no dashboards, logging in should lead to the dashboard setup page.
    assert_that(browser.url, ends_with("#/setup"))

def LanguageServerCompleter_Diagnostics_PercentEncodeCannonical_test( app ):
  completer = MockCompleter()
  filepath = os.path.realpath( '/foo?' )
  uri = lsp.FilePathToUri( filepath )
  assert_that( uri, ends_with( '%3F' ) )
  request_data = RequestWrap( BuildRequest( line_num = 1,
                                            column_num = 1,
                                            filepath = filepath,
                                            contents = '' ) )
  notification = {
    'jsonrpc': '2.0',
    'method': 'textDocument/publishDiagnostics',
    'params': {
      'uri': uri.replace( '%3F', '%3f' ),
      'diagnostics': [
        {
          'range': {
            'start': { 'line': 3, 'character': 10 },
            'end': { 'line': 3, 'character': 11 }
          },
          'severity': 1,
          'message': 'First error'
        }
      ]
    }
  }
  completer.GetConnection()._notifications.put( notification )
  completer.HandleNotificationInPollThread( notification )

  with patch.object( completer, '_ServerIsInitialized', return_value = True ):
    completer.OnFileReadyToParse( request_data )
    # Simulate receipt of response and initialization complete
    initialize_response = { 'result': { 'capabilities': {} } }
    completer._HandleInitializeInPollThread( initialize_response )

    diagnostics = contains_exactly(
      has_entries( {
        'kind': equal_to( 'ERROR' ),
        'location': LocationMatcher( filepath, 4, 11 ),
        'location_extent': RangeMatcher( filepath, ( 4, 11 ), ( 4, 12 ) ),
        'ranges': contains_exactly(
          RangeMatcher( filepath, ( 4, 11 ), ( 4, 12 ) ) ),
        'text': equal_to( 'First error' ),
        'fixit_available': False
      } )
    )

    assert_that( completer.OnFileReadyToParse( request_data ), diagnostics )

    assert_that(
      completer.PollForMessages( request_data ),
      contains_exactly( has_entries( {
        'diagnostics': diagnostics,
        'filepath': filepath
      } ) )
    )

def test_renders_a_contact_us_page_if_no_user_satisfaction_data_set(
        self,
        mock_list_module_types,
        mock_get_data_set,
        mock_get_dashboard):
    mock_get_data_set.return_value = None
    response = self.client.post(
        '/dashboard/dashboard-uuid/user-satisfaction/add',
        data=self.params())

    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(response.headers['Location'], ends_with('/get-in-touch'))

def test_removes_files_from_folder_if_they_match_given_condition(tmpdir):
    for filename in "track1.mp3", "track2.flac", "track3.mp3":
        touch(tmpdir.join(filename).strpath)

    fs.remove_files(folder=tmpdir.strpath,
                    matching=lambda entry: entry.endswith(".mp3"))
    assert_that(fs.list_dir(tmpdir.strpath),
                contains(ends_with("track2.flac")),
                "files left after removing mp3 files")

    fs.remove_files(folder=tmpdir.strpath)
    assert_that(fs.list_dir(tmpdir.strpath), empty(),
                "files left after removing remaining files")

def test_redirects_to_about_your_service_page(
        self, mock_update_dashboard, mock_get_dashboard):
    response = self.client.post(
        '/dashboards/dashboard-uuid/publish',
        data={})

    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(response.headers['Location'], ends_with('/dashboards'))

def test_api_choice_redirects_to_api_get_in_touch_page(
        self, mock_get_dashboard):
    data = self.params({'upload_option': 'api'})
    response = self.client.post(
        '/dashboard/dashboard-uuid/digital-take-up/upload-options',
        data=data)

    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(response.headers['Location'],
                ends_with('/api-get-in-touch'))

def test_redirects_to_about_your_service_page(self,
                                              mock_ses_connection,
                                              mock_update_dashboard,
                                              mock_get_dashboard):
    response = self.client.post(
        '/dashboards/dashboard-uuid/send-for-review',
        data={})

    self.assert_flashes('Your dashboard has been sent for review', 'success')
    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(response.headers['Location'], ends_with('/dashboards'))

def fn():
    # Attempt to go to the given page
    splinter_tests.visit(visited_uri)
    # Sleep the test runner for a moment, to allow the browser to catch up.
    splinter_tests.utils.do_sleep()
    # Check that we ended up on the login page
    assert_that(splinter_tests.browser.url, ends_with("/#/login"))

def test_page_contents():
    """The setup page should appear correctly."""
    assert_that(browser.url, ends_with("#/setup"))
    assert_that(browser.title, starts_with("Dashboard Creation |"))

    header = browser.find_by_css("body > .content-holder > .container > h1")
    assert_that(header.first.text, is_("Dashboard Creation"))

    assert browser.is_text_present(STEP_TITLES[1])

def test_GIVEN_variables_from_different_files_WHEN_calculate_THEN_history_added(self):
    self._make_two_gridded()
    expr = 'var1 + var2'
    res = self.calc.evaluate(self.data, expr)
    expected_history = "Evaluated using CIS version " + __version__ + \
                       "\nExpression evaluated: 'var1 + var2'" + \
                       "\nwith variables: 'var1' from files ['filename1']," + \
                       "\n'var2' from files ['filename2']."
    # Do an ends_with comparison because the history starts with a timestamp
    assert_that(res.history, ends_with(expected_history))

def setup():
    splinter_tests.start_browser()
    # First, logout to ensure that we are in fact logged out.
    splinter_tests.visit("/logout")
    # Wait a few seconds for redirect.
    splinter_tests.utils.do_sleep()
    assert_that(splinter_tests.browser.url, ends_with("/#/login"))

def verify_results(self, loc):
    """Testing for expected results."""
    results = sorted(loc.results, key=lambda entry: Adapter(entry).file)
    assert_that(len(results), equal_to(len(self.expected_results())))
    for idx, expected in enumerate(self.expected_results()):
        assert_that(Adapter(results[idx]).file, ends_with(expected.filename))
        assert_that(Adapter(results[idx]).loc, equal_to(expected.loc))
        assert_that(Adapter(results[idx]).com, equal_to(expected.com))
        assert_that(float(Adapter(results[idx]).ratio),
                    equal_to(expected.ratio))

def click_add_widget():
    '''Find and click the "Add Widget" button, if it is present.'''
    # Find the 'Add Widget' button
    btn = browser.find_by_xpath("//button[contains(.,'Add Widget')]")
    assert not btn.is_empty()

    # Click the button, make sure we landed at the Add Widget page
    btn.first.click()
    assert_that(browser.url, ends_with("/#/add_widget"))
    assert browser.is_text_present("Create a new Widget")

def test_switch_to_eu_then_back():
    """
    When the user clicks 'Europe Cloud', they end up on the page that will
    have them log in against the EU Cloud. Then, if they click on 'US Cloud',
    they end up back on #/login
    """
    browser = splinter_tests.browser

    assert_that(browser.url, ends_with("#/login"))

    browser.find_link_by_text("Europe Cloud").click()
    assert_that(browser.url, ends_with("#/login/login.etherios.co.uk"))

    browser.find_link_by_text("US Cloud").click()
    assert_that(browser.url, ends_with("#/login"))

    # Check that the grayed-out text indicating the login server URL is not
    # present.
    assert browser.is_element_not_present_by_css("h1.title > span.text-muted")

def test_handles_invalid_spreadsheet(
        self,
        add_module_to_dashboard_patch,
        list_module_types_patch,
        create_data_group_patch,
        get_data_group_patch,
        get_module_config_patch,
        create_data_set_patch,
        get_data_set_patch,
        get_dashboard_patch,
        client):
    get_module_config_patch.return_value = {
        'title': 'Cost per transaction'}
    get_dashboard_patch.return_value = {
        'slug': 'visas'}
    get_data_set_patch.return_value = {
        'name': 'apply_uk_visa_transactions_by_channel',
        'data_type': 'transactions-by-channel',
        'data_group': 'apply-uk-visa',
        'bearer_token': 'abc123',
        'upload_format': 'csv',
        'auto_ids': '_timestamp, period, channel',
        'max_age_expected': 1300000
    }
    get_data_group_patch.return_value = {
        'name': 'visas'
    }
    list_module_types_patch.return_value = [{'name': 'single_timeseries',
                                             'id': 'uuid'}]
    add_module_to_dashboard_patch.return_value = {}
    self.upload_spreadsheet_mock.return_value = \
        (['Message 1', 'Message 2'], False)

    response = client.post(self.upload_url, data=self.file_data)

    # see line 117 of application/controllers/upload.py
    # to see why this is expected.
    assert_that(
        self.get_from_session('upload_data')['payload'],
        equal_to(['Message 1', 'Message 2']))
    assert_that(response.status, equal_to('302 FOUND'))
    assert_that(
        response.headers['Location'],
        ends_with("/dashboard/{}"
                  "/cost-per-transaction/upload".format(
                      self.dash_id)))

    with client.session_transaction() as session:
        assert_that(
            '_flashes' in session,
            equal_to(False))

def test_GIVEN_variables_from_same_file_WHEN_calculate_THEN_history_added(self):
    self._make_two_ungridded_data()
    self.data[1].alias = 'alias2'
    self.data[1].filenames = self.data[0].filenames
    expr = 'var1 + alias2'
    res = self.calc.evaluate(self.data, expr)
    expected_history = "Evaluated using CIS version " + __version__ + \
                       "\nExpression evaluated: 'var1 + alias2'" + \
                       "\nwith variables: 'var1' from files ['filename1']," + \
                       "\n'var2' (as 'alias2') from files ['filename1']."
    # Do an ends_with comparison because the history starts with a timestamp
    assert_that(res.history, ends_with(expected_history))

def has_test_case(name, *matchers):
    return has_property(
        'test_cases',
        has_item(
            all_of(
                any_of(
                    has_entry('fullName', ends_with(name)),
                    has_entry('name', starts_with(name))
                ),
                *matchers
            )
        )
    )
