import unittest

import responses

import pillarsdk
import pillarsdk.exceptions as sdk_exceptions

mock = responses.RequestsMock(assert_all_requests_are_fired=True)


class ProjectsTests(unittest.TestCase):
    """Tests for project lookups against a mocked Pillar API."""

    def setUp(self):
        # Point the SDK at a dummy local endpoint; authentication is token-based.
        self.endpoint = 'http://localhost:12345'
        self.api = pillarsdk.Api(
            endpoint=self.endpoint,
            username='',
            password='',
            token='jemoeder',
        )

    @mock.activate
    def test_find_project_happy(self):
        project_id = 24 * 'a'

        # Finding the existing project
        # NOTE(review): the excerpt ends after registering this mock; the
        # SDK call that fires it is presumably outside the visible chunk.
        mock.add(
            responses.GET,
            '%s/projects' % self.endpoint,
            json={'_items': [{
                '_id': project_id,
                '_etag': 'awesome-etag',
                'name': 'test-project',
            }]},
        )
def test_duckling_entity_extractor_with_multiple_extracted_dates(
    create_duckling: Callable[[Dict[Text, Any]], DucklingEntityExtractorComponent]
):
    """A message containing several time expressions yields one entity per Duckling match."""
    duckling = create_duckling({"dimensions": ["time"], "timezone": "UTC"})
    with responses.RequestsMock() as rsps:
        # Canned Duckling /parse payload: four "time" matches; "the 5th" and
        # "5th of May" are ambiguous and carry several candidate values each.
        rsps.add(
            responses.POST,
            "http://localhost:8000/parse",
            json=[
                {
                    "body": "Today",
                    "start": 0,
                    "value": {
                        "values": [
                            {
                                "value": "2018-11-13T00:00:00.000-08:00",
                                "grain": "day",
                                "type": "value",
                            }
                        ],
                        "value": "2018-11-13T00:00:00.000-08:00",
                        "grain": "day",
                        "type": "value",
                    },
                    "end": 5,
                    "dim": "time",
                    "latent": False,
                },
                {
                    "body": "the 5th",
                    "start": 9,
                    "value": {
                        "values": [
                            {
                                "value": "2018-12-05T00:00:00.000-08:00",
                                "grain": "day",
                                "type": "value",
                            },
                            {
                                "value": "2019-01-05T00:00:00.000-08:00",
                                "grain": "day",
                                "type": "value",
                            },
                            {
                                "value": "2019-02-05T00:00:00.000-08:00",
                                "grain": "day",
                                "type": "value",
                            },
                        ],
                        "value": "2018-12-05T00:00:00.000-08:00",
                        "grain": "day",
                        "type": "value",
                    },
                    "end": 16,
                    "dim": "time",
                    "latent": False,
                },
                {
                    "body": "5th of May",
                    "start": 13,
                    "value": {
                        "values": [
                            {
                                "value": "2019-05-05T00:00:00.000-07:00",
                                "grain": "day",
                                "type": "value",
                            },
                            {
                                "value": "2020-05-05T00:00:00.000-07:00",
                                "grain": "day",
                                "type": "value",
                            },
                            {
                                "value": "2021-05-05T00:00:00.000-07:00",
                                "grain": "day",
                                "type": "value",
                            },
                        ],
                        "value": "2019-05-05T00:00:00.000-07:00",
                        "grain": "day",
                        "type": "value",
                    },
                    "end": 23,
                    "dim": "time",
                    "latent": False,
                },
                {
                    "body": "tomorrow",
                    "start": 37,
                    "value": {
                        "values": [
                            {
                                "value": "2018-11-14T00:00:00.000-08:00",
                                "grain": "day",
                                "type": "value",
                            }
                        ],
                        "value": "2018-11-14T00:00:00.000-08:00",
                        "grain": "day",
                        "type": "value",
                    },
                    "end": 45,
                    "dim": "time",
                    "latent": False,
                },
            ],
        )

        messages = [
            Message(data={TEXT: "Today is the 5th of May. Let us meet tomorrow."})
        ]
        parsed_messages = duckling.process(messages)

        assert len(parsed_messages) == 1
        entities = parsed_messages[0].get("entities")
        # One entity per extracted date expression.
        assert len(entities) == 4
def mocked_responses():
    """Yield an active RequestsMock; unfired registrations are tolerated."""
    with responses.RequestsMock(assert_all_requests_are_fired=False) as mocked:
        yield mocked
def test_wikidata_cache(cache_test_normal, basket_ball_wiki_es, monkeypatch):
    """Verify the Wikidata ES cache: repeated POI requests must not hit wikidata."""
    client = TestClient(app)

    with responses.RequestsMock(assert_all_requests_are_fired=False) as mocked:
        # All wikipedia HTTP traffic is mocked — the information is expected
        # to come from the Wiki ES instead.
        mocked.add('GET', re.compile(r'^https://.*\.wikipedia.org/'), status=200)

        first = client.get(
            url=f'http://localhost/v1/pois/osm:way:7777777?lang=fr',
        )
        assert first.status_code == 200
        payload = first.json()

        # The answer must contain a "wikipedia" block.
        assert any(b['type'] == "wikipedia" for b in payload['blocks'][2].get('blocks'))

        with monkeypatch.context() as m:
            # The "basket ball" entry should now be cached, so the
            # WikidataConnector must not be invoked again: swap its fetch
            # method for one that always fails.
            from idunn.blocks.wikipedia import WikidataConnector

            @wraps(WikidataConnector.get_wiki_info)
            def fake_get_wiki_info():
                # Sentinel: reaching this means the cache was bypassed.
                raise Exception

            m.setattr(WikidataConnector, "get_wiki_info", fake_get_wiki_info)

            # Ten more identical requests: each must still carry the
            # wikipedia block, with no call to wikidata nor wikipedia.
            # Without the cache the request would fail inside get_wiki_info().
            for _ in range(10):
                again = client.get(
                    url=f'http://localhost/v1/pois/osm:way:7777777?lang=fr',
                )
                payload = again.json()
                # we still have the wikipedia block
                assert any(
                    b['type'] == "wikipedia"
                    for b in payload['blocks'][2].get('blocks')
                )

            assert len(mocked.calls) == 0  # Wikipedia API has never been called
def mocked_responses():
    """Activate a RequestsMock for the duration of the fixture and yield it."""
    with responses.RequestsMock() as mock_ctx:
        yield mock_ctx
def __enter__(self):
    """Start the HTTP mock and register this object's canned API response."""
    # Call __enter__ directly (not via `with`) — otherwise the mock's exit
    # handling swallows errors raised from within the test.
    mock_ctx = responses.RequestsMock(assert_all_requests_are_fired=False)
    self._requests = mock_ctx.__enter__()
    self._requests.add(APIResponse(self))
    return self
def test_anidb_get_animes(self):
    """Fetch two works from AniDB fixtures and check tags, staff and metadata."""
    # Fake an artist entry with no AniDB creator ID that will be filled by retrieving Sangatsu
    artist = Artist(name="Shinbou Akiyuki").save()

    # Sangatsu appears twice: get_or_update_work and get_tags each consume one response.
    filenames = [
        'anidb/sangatsu_no_lion.xml',
        'anidb/sangatsu_no_lion.xml',
        'anidb/hibike_euphonium.xml'
    ]

    with responses.RequestsMock(
            assert_all_requests_are_fired=True) as rsps:
        for filename in filenames:
            rsps.add(responses.GET,
                     AniDB.BASE_URL,
                     body=self.read_fixture(filename),
                     status=200,
                     content_type='application/xml')

        sangatsu = self.anidb.get_or_update_work(11606)
        tags_sangatsu_from_anidb = self.anidb.get_tags(11606)
        tags_diff_sangatsu = diff_between_anidb_and_local_tags(
            sangatsu, tags_sangatsu_from_anidb)
        hibike = self.anidb.get_or_update_work(10889)

        # Retrieve tags
        tags_sangatsu = set(
            Work.objects.get(pk=sangatsu.pk).taggedwork_set.all().values_list(
                'tag__title', flat=True))
        tags_hibike = set(
            Work.objects.get(pk=hibike.pk).taggedwork_set.all().values_list(
                'tag__title', flat=True))
        shared_tags = tags_sangatsu.intersection(tags_hibike)

        # Checks on tags
        self.assertEqual(len(tags_sangatsu), 30)
        self.assertEqual(len(tags_hibike), 38)
        self.assertEqual(len(shared_tags), 18)

        # Check for Sangatsu's informations
        self.assertEqual(sangatsu.title, 'Sangatsu no Lion')
        self.assertEqual(sangatsu.nb_episodes, 22)
        self.assertEqual(sangatsu.studio.title, 'Shaft')
        self.assertEqual(sangatsu.date, datetime(2016, 10, 8, 0, 0))
        self.assertEqual(sangatsu.end_date, datetime(2017, 3, 18, 0, 0))

        # Check for Sangatsu's staff
        staff_sangatsu = Work.objects.get(
            pk=sangatsu.pk).staff_set.all().values_list('artist__name',
                                                        flat=True)
        self.assertCountEqual(staff_sangatsu, [
            'Umino Chika', 'Hashimoto Yukari', 'Shinbou Akiyuki',
            'Okada Kenjirou'
        ])

        # Check retrieved tags from AniDB
        self.assertEqual(len(tags_diff_sangatsu["deleted_tags"]), 0)
        self.assertEqual(len(tags_diff_sangatsu["added_tags"]), 0)
        self.assertEqual(len(tags_diff_sangatsu["updated_tags"]), 0)
        self.assertEqual(len(tags_diff_sangatsu["kept_tags"]),
                         len(tags_sangatsu))

        # Check for no artist duplication
        artist = Artist.objects.filter(name="Shinbou Akiyuki")
        self.assertEqual(artist.count(), 1)
        self.assertEqual(artist.first().anidb_creator_id, 59)
def test_401(self):
    """An HTTP 401 from the API must surface as NotAuthenticatedError."""
    generic_client = GenericClient(url=MOCK_API_URL)
    expected_error = generic_client.NotAuthenticatedError
    with responses.RequestsMock() as mocked:
        mocked.add(responses.GET, MOCK_API_URL + '/users', status=401)
        with self.assertRaises(expected_error):
            generic_client.users.all()
def test_commits_ahead_behind(self, fixture_working_dir_lfs_disabled):
    """Track commitsAhead/commitsBehind through publish, remote edits, local edits and sync."""
    with responses.RequestsMock() as rsps:
        # Auth key endpoint consulted when talking to the remote.
        rsps.add(responses.GET, 'https://usersrv.gigantum.io/key',
                 json={'key': 'afaketoken'}, status=200)

        config_file, client = fixture_working_dir_lfs_disabled[0], \
            fixture_working_dir_lfs_disabled[2]
        im = InventoryManager(config_file)
        lb = im.create_labbook(UT_USERNAME, UT_USERNAME, UT_LBNAME,
                               description="tester")
        bm = BranchManager(lb, username=UT_USERNAME)
        bm.create_branch('new-branch-1')
        bm.create_branch('new-branch-2')
        bm.workon_branch('master')

        q = f"""
        {{
            labbook(name: "{UT_LBNAME}", owner: "{UT_USERNAME}") {{
                branches {{
                    branchName
                    isLocal
                    isRemote
                    isActive
                    commitsAhead
                    commitsBehind
                }}
            }}
        }}
        """

        # Fresh, unpublished labbook: local-only master with no drift.
        r = client.execute(q)
        assert 'errors' not in r
        assert len(r['data']['labbook']['branches']) == 3
        assert r['data']['labbook']['branches'][0][
            'branchName'] == 'master'
        assert r['data']['labbook']['branches'][0][
            'isLocal'] is True, "Should be local"
        assert r['data']['labbook']['branches'][0][
            'isRemote'] is False, "not published yet"
        assert r['data']['labbook']['branches'][0]['isActive'] is True
        assert r['data']['labbook']['branches'][0]['commitsAhead'] == 0
        assert r['data']['labbook']['branches'][0]['commitsBehind'] == 0

        # Make a remote change!
        username = '******'
        wf = LabbookWorkflow(lb)
        wf.publish(username=username)

        # A second user imports the labbook, commits a file and syncs it back.
        other_user = '******'
        wf_other = LabbookWorkflow.import_from_remote(
            remote_url=wf.remote,
            username=other_user,
            config_file=lb.client_config.config_file)
        with open(os.path.join(wf_other.repository.root_dir, 'testfile'),
                  'w') as f:
            f.write('filedata')
        wf_other.repository.sweep_uncommitted_changes()
        wf_other.sync(username=other_user)

        # Remote is now one commit ahead of our local master.
        r = client.execute(q)
        assert 'errors' not in r
        assert len(r['data']['labbook']['branches']) == 3
        assert r['data']['labbook']['branches'][0][
            'branchName'] == 'master'
        assert r['data']['labbook']['branches'][0][
            'isLocal'] is True, "Should be local"
        assert r['data']['labbook']['branches'][0][
            'isRemote'] is True, "There should be a remote"
        assert r['data']['labbook']['branches'][0]['isActive'] is True
        assert r['data']['labbook']['branches'][0]['commitsAhead'] == 0
        assert r['data']['labbook']['branches'][0]['commitsBehind'] == 1

        # Make a local change!
        lb.write_readme("blah")

        # Branches have diverged: one ahead locally, one behind the remote.
        r = client.execute(q)
        assert 'errors' not in r
        assert len(r['data']['labbook']['branches']) == 3
        assert r['data']['labbook']['branches'][0][
            'branchName'] == 'master'
        assert r['data']['labbook']['branches'][0][
            'isLocal'] is True, "Should be local"
        assert r['data']['labbook']['branches'][0][
            'isRemote'] is True, "There should be a remote"
        assert r['data']['labbook']['branches'][0]['isActive'] is True
        assert r['data']['labbook']['branches'][0]['commitsAhead'] == 1
        assert r['data']['labbook']['branches'][0]['commitsBehind'] == 1

        # Sync
        wf.sync(username=username)

        # After syncing, local and remote agree again.
        r = client.execute(q)
        assert 'errors' not in r
        assert len(r['data']['labbook']['branches']) == 3
        assert r['data']['labbook']['branches'][0][
            'branchName'] == 'master'
        assert r['data']['labbook']['branches'][0][
            'isLocal'] is True, "Should be local"
        assert r['data']['labbook']['branches'][0][
            'isRemote'] is True, "There should be a remote"
        assert r['data']['labbook']['branches'][0]['isActive'] is True
        assert r['data']['labbook']['branches'][0]['commitsAhead'] == 0
        assert r['data']['labbook']['branches'][0]['commitsBehind'] == 0
def test_delete_db(server):
    """Deleting a database issues a DELETE against the server URL."""
    with responses.RequestsMock() as mocked:
        mocked.add(responses.DELETE, "http://example.com/testing1")
        server.delete("testing1")
def untermstrich_response():
    """Yield a RequestsMock pre-loaded with the untermstrich POST endpoint."""
    with responses.RequestsMock() as mocked:
        mocked.add(responses.POST, UNTERMSTRICH_URL)
        yield mocked
def test_create_db(server):
    """Creating a database issues PUT (and a HEAD probe) against the server URL."""
    with responses.RequestsMock() as mocked:
        mocked.add(responses.PUT, "http://example.com/testing1")
        mocked.add(responses.HEAD, "http://example.com/testing1")
        server.create("testing1")
def test_result_shape(self):
    """
    Tests that the results from the different TSDB methods have the
    expected format.
    """
    now = parse_datetime('2018-03-09T01:00:00Z')
    project_id = 194503
    dts = [now + timedelta(hours=i) for i in range(4)]

    with responses.RequestsMock() as rsps:
        def snuba_response(request):
            # Echo back a minimal snuba result: a single row in which every
            # requested groupby/aggregate column is set to 1 (topK aggregates
            # get a list), so only the *shape* of the result is meaningful.
            body = json.loads(request.body)
            aggs = body.get('aggregations', [])
            meta = [{
                'name': col
            } for col in body['groupby'] + [a[2] for a in aggs]]
            datum = {col['name']: 1 for col in meta}
            datum['project_id'] = project_id
            if 'time' in datum:
                datum['time'] = '2018-03-09T01:00:00Z'
            for agg in aggs:
                if agg[0].startswith('topK'):
                    datum[agg[2]] = [99]
            return (200, {}, json.dumps({'data': [datum], 'meta': meta}))

        rsps.add_callback(responses.POST,
                          settings.SENTRY_SNUBA + '/query',
                          callback=snuba_response)

        results = self.db.get_most_frequent(
            TSDBModel.frequent_issues_by_project, [project_id], dts[0],
            dts[0])
        assert has_shape(results, {1: [(1, 1.0)]})

        results = self.db.get_most_frequent_series(
            TSDBModel.frequent_issues_by_project, [project_id], dts[0],
            dts[0])
        assert has_shape(results, {1: [(1, {1: 1.0})]})

        items = {
            project_id: (0, 1, 2)  # {project_id: (issue_id, issue_id, ...)}
        }
        results = self.db.get_frequency_series(
            TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1])
        assert has_shape(results, {1: [(1, {1: 1})]})

        results = self.db.get_frequency_totals(
            TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1])
        assert has_shape(results, {1: {1: 1}})

        results = self.db.get_range(TSDBModel.project, [project_id], dts[0],
                                    dts[-1])
        assert has_shape(results, {1: [(1, 1)]})

        results = self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_project, [project_id], dts[0],
            dts[-1])
        assert has_shape(results, {1: [(1, 1)]})

        results = self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project, [project_id], dts[0],
            dts[-1])
        assert has_shape(results, {1: 1})

        results = self.db.get_distinct_counts_union(
            TSDBModel.users_affected_by_project, [project_id], dts[0],
            dts[-1])
        assert has_shape(results, 1)
def mocked_requests():
    """Block outbound HTTP during tests by yielding an active RequestsMock."""
    # With default verified secrets, we don't want to be making API calls during tests.
    with responses.RequestsMock() as guard:
        yield guard
def test_capturable_payment_that_should_be_cancelled(self):
    """
    Test that if the govuk payment is in 'capturable' state and the payment
    should be cancelled:
    - the MTP payment record is patched with the card details attributes if necessary
    - the method cancels the payment
    - no email is sent
    - the method returns GovUkPaymentStatus.cancelled
    """
    client = PaymentClient()

    # Local payment record: security check rejected and actioned, so the
    # capturable payment must be cancelled rather than captured.
    payment = {
        'uuid': 'some-id',
        'recipient_name': 'Alice Re',
        'prisoner_number': 'AAB0A00',
        'prisoner_name': 'John Doe',
        'amount': 1700,
        'security_check': {
            'status': 'rejected',
            'user_actioned': True,
        },
    }
    # Fields expected to be PATCHed onto the MTP payment record.
    payment_extra_details = {
        'email': '*****@*****.**',
        'worldpay_id': '123456789',
        'cardholder_name': 'John Doe',
        'card_number_first_digits': '1234',
        'card_number_last_digits': '987',
        'card_expiry_date': '01/20',
        'card_brand': 'visa',
        'billing_address': {
            'line1': '102 Petty France',
            'line2': '',
            'postcode': 'SW1H9AJ',
            'city': 'London',
            'country': 'GB',
        },
    }
    # Corresponding GOV.UK Pay payment, currently in 'capturable' state.
    govuk_payment = {
        'payment_id': 'payment-id',
        'state': {
            'status': GovUkPaymentStatus.capturable.name,
        },
        'email': '*****@*****.**',
        'provider_id': '123456789',
        'card_details': {
            'cardholder_name': 'John Doe',
            'first_digits_card_number': '1234',
            'last_digits_card_number': '987',
            'expiry_date': '01/20',
            'card_brand': 'visa',
            'billing_address': {
                'line1': '102 Petty France',
                'line2': '',
                'postcode': 'SW1H9AJ',
                'city': 'London',
                'country': 'GB',
            },
        },
    }

    with responses.RequestsMock() as rsps:
        mock_auth(rsps)
        # API call related to updating the email address and card details
        rsps.add(
            rsps.PATCH,
            api_url(f'/payments/{payment["uuid"]}/'),
            json={
                **payment,
                **payment_extra_details,
            },
            status=200,
        )
        # GOV.UK Pay cancellation endpoint.
        rsps.add(
            rsps.POST,
            govuk_url(f'/payments/{govuk_payment["payment_id"]}/cancel/'),
            status=204,
        )
        status = client.complete_payment_if_necessary(payment, govuk_payment)

        # calls[-2] is the PATCH; the final call is the cancel POST.
        payment_patch_body = json.loads(rsps.calls[-2].request.body.decode())
        self.assertDictEqual(
            payment_patch_body,
            payment_extra_details,
        )

        self.assertEqual(status, GovUkPaymentStatus.cancelled)
        self.assertEqual(len(mail.outbox), 0)
def test_admin_account_compatibility(self):
    """A user holding both clerk and security roles gets the incompatible-admin page."""
    self.mocked_login(roles=['prison-clerk', 'security'])
    with responses.RequestsMock():
        response = self.client.get(reverse('new-user'))
        self.assertEqual(
            response.templates[0].name,
            'mtp_common/user_admin/incompatible-admin.html',
        )
def init_instance_fixtures(self):
    """Install an active RequestsMock for the lifetime of this test instance."""
    super(WithResponses, self).init_instance_fixtures()
    mock_ctx = responses.RequestsMock()
    self.responses = self.enter_instance_context(mock_ctx)
def test_user_account_compatibility(self):
    """Editing a user with clerk+security roles renders the incompatible-user page."""
    self.mocked_login()
    with responses.RequestsMock() as mocked:
        self._init_existing_user(mocked, roles=['prison-clerk', 'security'])
        response = self.client.get(
            reverse('edit-user', kwargs={'username': '******'})
        )
        self.assertEqual(
            response.templates[0].name,
            'mtp_common/user_admin/incompatible-user.html',
        )
def test_anidb_related_animes(self):
    """Retrieve several related works from AniDB fixtures and verify their relations."""
    animes = {}
    related_animes = {}
    # Fixture file -> AniDB anime id.
    animes_sources = {
        'anidb/hibike_euphonium.xml': 10889,
        'anidb/hibike_euphonium2.xml': 11746,
        'anidb/hibike_euphonium_movie1.xml': 11747,
        'anidb/hibike_euphonium_movie2.xml': 12962,
        'anidb/hibike_euphonium_original_movies.xml': 13207,
        'anidb/sangatsu_no_lion.xml': 11606
    }

    with responses.RequestsMock(
            assert_all_requests_are_fired=True) as rsps:
        # Each anime is fetched twice: once by get_or_update_work and once
        # by get_related_animes.
        for filename, _ in animes_sources.items():
            for _ in range(2):
                rsps.add(responses.GET,
                         AniDB.BASE_URL,
                         body=self.read_fixture(filename),
                         status=200,
                         content_type='application/xml')

        for filename, anidb_aid in animes_sources.items():
            animes[filename] = self.anidb.get_or_update_work(anidb_aid)
            related_animes[filename] = self.anidb.get_related_animes(
                anidb_aid=anidb_aid)

        # Ran once in get_or_update_work but ran again to check that it does not cause errors
        for filename in animes_sources:
            self.anidb._build_related_animes(animes[filename],
                                             related_animes[filename])

        relations = RelatedWork.objects.filter(
            child_work__anidb_aid__in=animes_sources.values(),
            parent_work__anidb_aid__in=animes_sources.values())

        # Checks that anime are created if missing but not all data is retrieved from AniDB
        self.assertEqual(
            Work.objects.get(
                title='Sangatsu no Lion meets Bump of Chicken').ext_synopsis,
            '')
        self.assertNotEqual(
            Work.objects.get(title='Sangatsu no Lion').ext_synopsis, '')

        # Checks on relations
        self.assertTrue(
            relations.filter(child_work__anidb_aid=11746,
                             parent_work__anidb_aid=10889,
                             type='sequel').exists())
        self.assertTrue(
            relations.filter(child_work__anidb_aid=10889,
                             parent_work__anidb_aid=11746,
                             type='prequel').exists())
        self.assertTrue(
            relations.filter(child_work__anidb_aid=11747,
                             parent_work__anidb_aid=10889,
                             type='summary').exists())
        self.assertTrue(
            relations.filter(child_work__anidb_aid=10889,
                             parent_work__anidb_aid=11747,
                             type='full_story').exists())
        self.assertTrue(
            relations.filter(child_work__anidb_aid=13207,
                             parent_work__anidb_aid=11746,
                             type='sequel').exists())
        self.assertTrue(
            relations.filter(child_work__anidb_aid=11746,
                             parent_work__anidb_aid=13207,
                             type='prequel').exists())
        self.assertTrue(
            relations.filter(child_work__anidb_aid=12962,
                             parent_work__anidb_aid=11746,
                             type='summary').exists())
        self.assertTrue(
            relations.filter(child_work__anidb_aid=11746,
                             parent_work__anidb_aid=12962,
                             type='full_story').exists())
def mocked_responses(request):
    """Yield a RequestsMock, also exposing it on the requesting test class (if any)."""
    with responses.RequestsMock() as mocked:
        if request.cls is not None:
            request.cls.mocked_responses = mocked
        yield mocked
def responses():
    """Yield an active RequestsMock (the library is imported as `responses_`)."""
    with responses_.RequestsMock() as mocked:
        yield mocked
def setup_responses():
    """Yield a RequestsMock pre-loaded with the GitHub payload for user 'renzon'."""
    with responses.RequestsMock() as mocked:
        mocked.add(responses.GET,
                   'https://api.github.com/users/renzon',
                   json=_resp_renzon)
        yield mocked
# NOTE(review): the two lines below are the tail of a statement whose `if`
# lies outside this excerpt (restoring a saved os.environ entry).
else:
    del os.environ[k]

# Every HTTP verb constant the `responses` library exposes.
RESPONSES_METHODS = [
    responses.GET,
    responses.DELETE,
    responses.HEAD,
    responses.OPTIONS,
    responses.PATCH,
    responses.POST,
    responses.PUT,
]

# Mock aimed at botocore's vendored requests stack; unfired registrations allowed.
botocore_mock = responses.RequestsMock(
    assert_all_requests_are_fired=False,
    target="botocore.vendored.requests.adapters.HTTPAdapter.send",
)

responses_mock = get_response_mock()

BOTOCORE_HTTP_METHODS = [
    "GET", "DELETE", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"
]


class MockRawResponse(BytesIO):
    # Wraps a canned payload as a binary stream; str input is UTF-8 encoded.
    def __init__(self, response_input):
        if isinstance(response_input, str):
            response_input = response_input.encode("utf-8")
        super().__init__(response_input)
def __enter__(self):
    """Enter the context: activate a RequestsMock and install the canned responses."""
    mock_ctx = responses.RequestsMock()
    self.rsps = mock_ctx
    mock_ctx.__enter__()
    self.__add_responses()
    return self
# NOTE(review): this chunk opens mid-dict — the literal these keys belong to
# (a fake auth0 profile, by the look of the keys) starts outside the excerpt.
    'multifactor': ['duo'],
    'sub': 'ad|Mozilla-LDAP|lmoran'
}


def get_app_config(extra_config):
    """Return a minimal test app config merged with *extra_config*."""
    config = {
        'TESTING': True,
        'SECRET_KEY': os.urandom(24)
    }
    config.update(extra_config)
    return config


# TODO: move this to cli_common and mock taskcluster module
requests_mock = responses.RequestsMock(assert_all_requests_are_fired=False)


def build_header(client_id, ext_data=None):
    '''Build a fake Hawk header to share client id & scopes.
    '''
    out = collections.OrderedDict({
        'id': client_id,
        'ts': int(time.time()),
        'nonce': random.randint(0, 100000),
    })
    if ext_data is not None:
        # Optional extension payload, JSON-encoded then base64'd as Hawk expects.
        json_data = json.dumps(ext_data, sort_keys=True).encode('utf-8')
        out['ext'] = base64.b64encode(json_data).decode('utf-8')
    # NOTE(review): the function continues past the end of this excerpt
    # (no return statement is visible here).
def test_success_status(self):
    """
    Test that if the govuk payment is in 'success' state and the MTP payment record
    doesn't have all the card details and email field filled in:
    - the MTP payment record is patched with the extra payment details
    - the method returns GovUkPaymentStatus.success
    - no email is sent
    """
    client = PaymentClient()

    # Bare payment record: no email or card details yet.
    payment = {
        'uuid': 'some-id',
    }
    # Fields expected to be PATCHed onto the payment record.
    payment_extra_details = {
        'email': '*****@*****.**',
        'cardholder_name': 'John Doe',
        'card_number_first_digits': '1234',
        'card_number_last_digits': '987',
        'card_expiry_date': '01/20',
        'card_brand': 'visa',
        'billing_address': {
            'line1': '102 Petty France',
            'line2': '',
            'postcode': 'SW1H9AJ',
            'city': 'London',
            'country': 'GB',
        },
    }
    # Corresponding GOV.UK Pay payment, already in 'success' state.
    govuk_payment = {
        'payment_id': 'payment-id',
        'state': {
            'status': GovUkPaymentStatus.success.name,
        },
        'email': '*****@*****.**',
        'card_details': {
            'cardholder_name': 'John Doe',
            'first_digits_card_number': '1234',
            'last_digits_card_number': '987',
            'expiry_date': '01/20',
            'card_brand': 'visa',
            'billing_address': {
                'line1': '102 Petty France',
                'line2': '',
                'postcode': 'SW1H9AJ',
                'city': 'London',
                'country': 'GB',
            },
        },
    }

    with responses.RequestsMock() as rsps:
        mock_auth(rsps)
        # API call related to updating the email address and other details on the payment record
        rsps.add(
            rsps.PATCH,
            api_url(f'/payments/{payment["uuid"]}/'),
            json={
                **payment,
                **payment_extra_details,
            },
            status=200,
        )

        status = client.complete_payment_if_necessary(payment, govuk_payment)

        # The last (and only non-auth) call is the PATCH with the extra details.
        self.assertDictEqual(
            json.loads(rsps.calls[-1].request.body.decode()),
            payment_extra_details,
        )
        self.assertEqual(status, GovUkPaymentStatus.success)
        self.assertEqual(len(mail.outbox), 0)
def mocked_responses():
    """Provide an active RequestsMock for the duration of the fixture."""
    with responses.RequestsMock() as http_mock:
        yield http_mock
def test_capturable_payment_that_shouldnt_be_captured_yet(self):
    """
    Test that if the govuk payment is in 'capturable' state, the MTP payment record
    doesn't have the email field filled in and the payment should not be
    captured yet:
    - the MTP payment record is patched with the card details attributes
    - the method returns GovUkPaymentStatus.capturable
    - an email is sent to the sender
    """
    client = PaymentClient()

    # Local payment record: its security check is still pending and
    # unactioned, so the payment must not be captured yet.
    payment = {
        'uuid': 'b74a0eb6-0437-4b22-bce8-e6f11bd43802',
        'recipient_name': 'Alice Re',
        'prisoner_name': 'John Doe',
        'prisoner_number': 'AAB0A00',
        'amount': 1700,
        'security_check': {
            'status': 'pending',
            'user_actioned': False,
        },
    }
    # Fields expected to be PATCHed onto the MTP payment record.
    payment_extra_details = {
        'email': '*****@*****.**',
        'worldpay_id': '123456789',
        'cardholder_name': 'John Doe',
        'card_number_first_digits': '1234',
        'card_number_last_digits': '987',
        'card_expiry_date': '01/20',
        'card_brand': 'visa',
        'billing_address': {
            'line1': '102 Petty France',
            'line2': '',
            'postcode': 'SW1H9AJ',
            'city': 'London',
            'country': 'GB',
        },
    }
    # Corresponding GOV.UK Pay payment, currently in 'capturable' state.
    govuk_payment = {
        'payment_id': 'payment-id',
        'state': {
            'status': GovUkPaymentStatus.capturable.name,
        },
        'email': '*****@*****.**',
        'provider_id': '123456789',
        'card_details': {
            'cardholder_name': 'John Doe',
            'first_digits_card_number': '1234',
            'last_digits_card_number': '987',
            'expiry_date': '01/20',
            'card_brand': 'visa',
            'billing_address': {
                'line1': '102 Petty France',
                'line2': '',
                'postcode': 'SW1H9AJ',
                'city': 'London',
                'country': 'GB',
            },
        },
    }

    with responses.RequestsMock() as rsps:
        mock_auth(rsps)
        # API call related to updating the email address and card details
        rsps.add(
            rsps.PATCH,
            api_url(f'/payments/{payment["uuid"]}/'),
            json={
                **payment,
                **payment_extra_details,
            },
            status=200,
        )

        status = client.complete_payment_if_necessary(payment, govuk_payment)

        payment_patch_body = json.loads(rsps.calls[-1].request.body.decode())
        self.assertDictEqual(
            payment_patch_body,
            payment_extra_details,
        )
        self.assertEqual(status, GovUkPaymentStatus.capturable)

        # A "payment being processed" email is sent to the sender's address.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(payment['email'], '*****@*****.**')
        self.assertEqual(mail.outbox[0].subject,
                         'Send money to someone in prison: your payment is being processed')
def setUp(self):
    """Build an isolated app, fresh database and mocked external services per test."""
    # avoid using a ton of CPU for hashing passwords in testing
    pwd_context.update(pbkdf2_sha512__default_rounds=1)

    # Block real outbound HTTP for the whole test; unfired registrations allowed.
    self.requests_mock = responses.RequestsMock(assert_all_requests_are_fired=False)
    self.requests_mock.start()

    # Stub analytics and disable payments so no external service is touched.
    mock_mp = Mixpanel('dummy_token', MockMixpanelConsumer())
    self.mp_patcher = mock.patch('quilt_server.views.mp', mock_mp)
    self.mp_patcher.start()

    self.payments_patcher = mock.patch('quilt_server.views.HAVE_PAYMENTS', False)
    self.payments_patcher.start()

    self.s3_stubber = Stubber(s3_client)
    self.s3_stubber.activate()

    # Fresh, randomly named database for every test run.
    random_name = ''.join(random.sample(string.ascii_lowercase, 10))
    self.db_url = 'postgresql://postgres@localhost/test_%s' % random_name

    def mock_verify(username_or_token):
        # instead of checking token, just use username
        user = User.query.filter_by(name=username_or_token).one_or_none()
        if user:
            return user
        else:
            return verify_token_string(username_or_token)

    self.token_verify_mock = mock.patch('quilt_server.views.verify_token_string',
                                        mock_verify)
    self.token_verify_mock.start()

    # disable 8 character restriction for passwords
    self.validate_password_mock = mock.patch('quilt_server.auth.validate_password',
                                             lambda x: True)
    self.validate_password_mock.start()

    self.app = quilt_server.app.test_client()
    quilt_server.app.config['TESTING'] = True
    quilt_server.app.config['SQLALCHEMY_ECHO'] = False
    quilt_server.app.config['SQLALCHEMY_DATABASE_URI'] = self.db_url

    sqlalchemy_utils.create_database(self.db_url)
    quilt_server.db.create_all()

    self.email_suffix = '@example.com'

    # Canned account credentials used throughout the suite.
    self.TEST_USER = '******'
    self.TEST_USER_EMAIL = '*****@*****.**'
    self.TEST_USER_PASSWORD = '******'
    self.OTHER_USER = '******'
    self.OTHER_USER_EMAIL = '*****@*****.**'
    self.OTHER_USER_PASSWORD = '******'
    self.TEST_ADMIN = 'admin'
    self.TEST_ADMIN_EMAIL = '*****@*****.**'
    self.TEST_ADMIN_PASSWORD = '******'

    _create_user(self.TEST_USER, email=self.TEST_USER_EMAIL,
                 password=self.TEST_USER_PASSWORD, requires_activation=False)
    _create_user(self.TEST_ADMIN, email=self.TEST_ADMIN_EMAIL,
                 password=self.TEST_ADMIN_PASSWORD, is_admin=True,
                 requires_activation=False)
    _create_user('bad_user', email='*****@*****.**',
                 requires_activation=False)
    _create_user(self.OTHER_USER, email=self.OTHER_USER_EMAIL,
                 password=self.OTHER_USER_PASSWORD, requires_activation=False)
    _create_user('user1', email='*****@*****.**',
                 password='******', requires_activation=False)
    _create_user('user2', email='*****@*****.**',
                 password='******', requires_activation=False)

    db.session.commit()
def mock_resp():
    """Yield an active RequestsMock."""
    with responses.RequestsMock() as http_mock:
        yield http_mock