def test_handle_s3_sns_unfetchable_cert(self, mock_handle_subscription_confirmation, base_body_dict):
    with self.mocked_app_logger_log() as mock_app_log:
        with requests_mock.Mocker() as rmock:
            rmock.register_uri("GET", "https://nowhere.amazonaws.com/cert.pem", status_code=404)
            client = self.get_authorized_client()
            res = client.post(
                "/callbacks/sns/s3/uploaded",
                data=json.dumps({
                    **base_body_dict,
                    "Signature": "should_be_irrelevant",
                    "SigningCertURL": "https://nowhere.amazonaws.com/cert.pem",
                }),
                content_type="application/json",
            )
            assert res.status_code == 400
            assert mock_app_log.call_args_list == [
                (
                    (logging.INFO, AnyStringMatching(r"Failed to fetch certificate .*404"), ()),
                    AnySupersetOf({"extra": AnySupersetOf({
                        "target_url": "https://nowhere.amazonaws.com/cert.pem",
                    })}),
                ),
                (
                    (logging.WARNING, AnyStringMatching(r"SNS request body failed "), ()),
                    AnySupersetOf({"extra": AnySupersetOf({
                        "validation_error": RestrictedAny(
                            lambda x: isinstance(x, requests.exceptions.HTTPError)
                        ),
                    })}),
                ),
                (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 400})})),
            ]
            assert rmock.request_history == [
                RestrictedAny(lambda r: r.url == "https://nowhere.amazonaws.com/cert.pem")
            ]
            assert mock_handle_subscription_confirmation.called is False
def test_handle_s3_sns_bad_signature(self, mock_handle_subscription_confirmation, mock_validate, base_body_dict):
    mock_validate.side_effect = validatesns.ValidationError

    with self.mocked_app_logger_log() as mock_app_log:
        with requests_mock.Mocker() as rmock:
            client = self.get_authorized_client()
            res = client.post(
                "/callbacks/sns/s3/uploaded",
                data=json.dumps(base_body_dict),
                content_type="application/json",
            )
            assert res.status_code == 400
            assert mock_app_log.call_args_list == [
                (
                    (logging.WARNING, AnyStringMatching(r".*failed signature validation"), ()),
                    AnySupersetOf({"extra": AnySupersetOf({
                        "validation_error": RestrictedAny(
                            lambda x: isinstance(x, validatesns.ValidationError)
                        ),
                    })}),
                ),
                (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 400})})),
            ]
            assert not rmock.request_history
            assert mock_validate.call_args_list == [((base_body_dict,), AnySupersetOf({}))]
            assert mock_handle_subscription_confirmation.called is False
def test_reset_password_request_redirects_to_same_page_and_shows_flash_message(self, send_email, user_role):
    self.data_api_client.get_user.return_value = self.user(
        123, "*****@*****.**", 1234, "Ahoy", name="Bob", role=user_role,
    )
    res = self.client.post(
        "/user/reset-password",
        data={'email_address': '*****@*****.**'},
        follow_redirects=True,
    )
    assert res.status_code == 200

    content = self.strip_all_whitespace(res.get_data(as_text=True))
    assert self.strip_all_whitespace("we'll send a link to reset the password") in content

    assert send_email.call_args_list == [
        mock.call(
            '*****@*****.**',
            personalisation={
                'url': AnyStringMatching(r"http://localhost/user/reset-password/*"),
            },
            reference="reset-password-{}".format(self.expected_email_hash),
            template_name_or_id=self.app.config['NOTIFY_TEMPLATES']['reset_password'],
        )
    ]
def test_handle_s3_sns_weird_request_type(self, mock_handle_subscription_confirmation, mock_validate):
    with self.mocked_app_logger_log() as mock_app_log:
        with requests_mock.Mocker() as rmock:
            client = self.get_authorized_client()
            weird_body_dict = {
                "MessageId": "1234321",
                "Type": "EuropeanConflagration",
            }
            res = client.post(
                "/callbacks/sns/s3/uploaded",
                data=json.dumps(weird_body_dict),
                content_type="application/json",
            )
            assert res.status_code == 400
            assert mock_app_log.call_args_list == [
                (
                    (logging.WARNING, AnyStringMatching(r"Unrecognized request type "), ()),
                    AnySupersetOf({"extra": AnySupersetOf({
                        "request_type": "EuropeanConflagration",
                    })}),
                ),
                (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 400})})),
            ]
            assert not rmock.request_history
            assert mock_validate.call_args_list == [((weird_body_dict,), AnySupersetOf({}))]
            assert mock_handle_subscription_confirmation.called is False
def test_404(self):
    response = self.get_authorized_client().get('/foo/bar/baz')
    assert response.status_code == 404
    assert response.mimetype == self.app.config["JSONIFY_MIMETYPE"]
    assert json.loads(response.get_data()) == {
        "error": AnyStringMatching(r".*found.*", flags=re.I),
    }
def test_index(self):
    response = self.get_authorized_client().get('/')
    assert response.status_code == 200
    assert response.mimetype == self.app.config["JSONIFY_MIMETYPE"]
    assert json.loads(response.get_data()) == {
        "this": AnyStringMatching(r".*pointless.*", flags=re.I),
    }
def test_template_failure_does_not_continue(self):
    self.mock_notify_client.send_email.side_effect = EmailTemplateError("foo")

    with pytest.raises(EmailTemplateError):
        notify_suppliers_of_framework_application_event(
            data_api_client=self.mock_data_api_client,
            notify_client=self.mock_notify_client,
            notify_template_id="8877eeff",
            framework_slug="g-cloud-99",
            dry_run=False,
            stage="production",
            logger=self.mock_logger,
            run_id=uuid.UUID("12345678-1234-5678-1234-567812345678"),
        )

    assert mock.call.error(
        "Failed sending to {email_hash}: {e}",
        extra={
            "email_hash": mock.ANY,
            "e": AnyStringMatching("foo"),
        },
    ) in self.mock_logger.mock_calls

    # check that we do not end up trying to send all emails instead of giving up after the error
    assert tuple(
        call[0][0] for call in self.mock_notify_client.send_email.call_args_list
    ) == ("*****@*****.**",)
def test_string_matching(self):
    assert {
        "a": "Metempsychosis",
        "b": "c",
    } == {
        "a": AnyStringMatching(r"m+.+psycho.*", flags=re.I),
        "b": "c",
    }
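# For reference, a minimal sketch of how a matcher like AnyStringMatching can be implemented. This is an
# illustrative stand-in, not the real helper: the class name is invented here, and whether the real helper
# uses fullmatch, match or search (and how it caches compiled patterns) is an assumption - only the
# "compares equal to any matching string" behaviour is what the tests above rely on.
class AnyStringMatchingSketch:
    """Hypothetical matcher: compares equal to any string matching the given pattern."""

    def __init__(self, pattern, flags=0):
        self._regex = re.compile(pattern, flags)

    def __eq__(self, other):
        # equality hook is what lets this sit inside an expected dict/list in an assertion
        return isinstance(other, str) and bool(self._regex.fullmatch(other))


assert {"a": "Metempsychosis"} == {"a": AnyStringMatchingSketch(r"m+.+psycho.*", flags=re.I)}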
def test_basic_auth_is_required(self):
    response = self.app.test_client().get('/callbacks')
    assert response.status_code == 401
    assert response.headers["WWW-Authenticate"] == "Basic realm=callbacks"
    assert response.mimetype == self.app.config["JSONIFY_MIMETYPE"]
    assert json.loads(response.get_data()) == {
        "error": AnyStringMatching(r".*authoriz.*", flags=re.I),
    }
def test_get_certificate(self, call_params_seq, single_flask_request):
    """
    :param call_params_seq: a sequence of tuples, each representing a call to make to _get_certificate and
        including information about the context it should be called in and the call's expected result. each
        tuple consists of the parameters: (
            url,                    # the url to set up with register_uri() and then pass as
                                    # _get_certificate's argument
            rmock_response_kwargs,  # kwargs to pass to register_uri specifying how requests_mock should
                                    # respond to such a request
            expected_output,        # either an Exception class to expect _get_certificate's invocation to
                                    # raise or the value to compare _get_certificate's return value with
            expect_request_made,    # whether an actual (intercepted) http request should have been made to
                                    # the url
        )
    :param single_flask_request: whether all requests of ``call_params_seq`` should be performed in a single
        flask test request. otherwise a separate flask test request is used per call
    """
    with self.mocked_app_logger_log() as mock_app_log:
        with self.app.test_request_context() if single_flask_request else null_context_manager():
            for url, rmock_response_kwargs, expected_output, expect_request_made in call_params_seq:
                mock_app_log.reset_mock()
                with null_context_manager() if single_flask_request else self.app.test_request_context():
                    with requests_mock.Mocker() as rmock:
                        rmock.register_uri("GET", url, **rmock_response_kwargs)
                        expect_exception = (
                            isinstance(expected_output, type) and issubclass(expected_output, Exception)
                        )
                        with pytest.raises(expected_output) if expect_exception else null_context_manager():
                            out = _get_certificate(url)
                        if not expect_exception:
                            assert out == expected_output
                        assert rmock.called is expect_request_made
                        # TODO more complete logging testing
                        assert mock_app_log.call_args_list == ([] if not expect_request_made else [
                            (
                                (logging.INFO, AnyStringMatching("Failed" if expect_exception else "Fetched"), ()),
                                AnySupersetOf({"extra": AnySupersetOf({"target_url": url})}),
                            ),
                        ])
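# A hypothetical parametrization illustrating the tuple shape the docstring above describes. The URLs and
# response kwargs here are invented for illustration; that a 404 surfaces as requests.exceptions.HTTPError
# matches the unfetchable-cert test earlier in this suite, but the exact value _get_certificate returns on
# success is an assumption.
example_call_params_seq = (
    # successful fetch: respond 200 with a body, expect it returned, expect a real request to have been made
    ("https://nowhere.amazonaws.com/good-cert.pem", {"text": "cert-contents"}, "cert-contents", True),
    # failed fetch: respond 404 and expect an HTTPError to be raised
    ("https://nowhere.amazonaws.com/bad-cert.pem", {"status_code": 404}, requests.exceptions.HTTPError, True),
)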
def test_invalid_bearer_token_is_rejected(self):
    response = self.app.test_client().get(
        '/',
        headers={'Authorization': 'Bearer some-invalid-token'},
    )
    assert response.status_code == 403
    assert response.mimetype == self.app.config["JSONIFY_MIMETYPE"]
    assert json.loads(response.get_data()) == {
        "error": AnyStringMatching(r".*forbidden.*some-invalid-token.*", flags=re.I),
    }
def test_invalid_basic_auth_is_rejected(self):
    response = self.app.test_client().get(
        "/callbacks",
        headers={'Authorization': 'Basic some-invalid-credentials'},
    )
    assert response.status_code == 403
    assert response.mimetype == self.app.config["JSONIFY_MIMETYPE"]
    assert json.loads(response.get_data()) == {
        "error": AnyStringMatching(r".*forbidden.*some-invalid-credentials.*", flags=re.I),
    }
def test_should_strip_whitespace_surrounding_reset_password_email_address_field(self, send_email):
    self.client.post(
        "/user/reset-password",
        data={'email_address': ' [email protected]'},
    )
    self.data_api_client.get_user.assert_called_with(email_address='*****@*****.**')
    assert send_email.call_args_list == [
        mock.call(
            '*****@*****.**',
            personalisation={
                'url': AnyStringMatching(r"http://localhost/user/reset-password/*"),
            },
            reference="reset-password-{}".format(self.expected_email_hash),
            template_name_or_id=self.app.config['NOTIFY_TEMPLATES']['reset_password'],
        )
    ]
def assert_external_service_log_entry(service=r'\w+', description='.+', successful_call=True,
                                      extra_modules=None, count=1):
    """
    An extension of assert_log_entry specialised to inspect for the standardised message that is logged when
    making calls to backing services (Notify, S3, etc). `service` and `description` both take regex patterns
    for matching values.
    """
    if successful_call:
        expected_message = r'Call to {service} \({description}\) executed in {{duration_real}}s'
    else:
        expected_message = r'Exception from call to {service} \({description}\) after {{duration_real}}s'

    expected_message = expected_message.format(service=service, description=description)

    modules = ['dmutils.timing']
    if extra_modules:
        modules += extra_modules

    return assert_log_entry(modules=tuple(modules), message=AnyStringMatching(expected_message), count=count)
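# A quick demonstration of the message pattern the helper constructs - this mirrors the formatting in the
# function body above exactly, so it carries no assumptions beyond the example service/description values.
# Note the doubled braces survive .format() to match the literal "{duration_real}" placeholder in the log.
assert r'Call to {service} \({description}\) executed in {{duration_real}}s'.format(
    service="Notify", description="send email",
) == r'Call to Notify \(send email\) executed in {duration_real}s'

# a hedged usage sketch, assuming assert_log_entry returns a context manager wrapping the checked code:
#
#     with assert_external_service_log_entry(service="Notify", description="send email"):
#         notify_client.send_email(...)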
def test_handle_s3_sns_test_event(
    self,
    mock_handle_subscription_confirmation,
    mock_validate,
):
    with self.mocked_app_logger_log() as mock_app_log:
        with requests_mock.Mocker() as rmock:
            client = self.get_authorized_client()
            body_dict = {
                "MessageId": "1234321",
                "Type": "Notification",
                "Message": '{"Event":"s3:TestEvent","nut":"shell"}',
            }
            res = client.post(
                "/callbacks/sns/s3/uploaded",
                data=json.dumps(body_dict),
                content_type="application/json",
                headers={"X-Amz-Sns-Subscription-Arn": "kcirtaP"},
            )
            assert res.status_code == 200
            assert mock_app_log.call_args_list == [
                (
                    (logging.INFO, AnyStringMatching(r"Processing message "), ()),
                    AnySupersetOf({"extra": AnySupersetOf({
                        "message_id": "1234321",
                        "subscription_arn": "kcirtaP",
                    })}),
                ),
                (
                    (logging.INFO, "Received S3 test event", ()),
                    {},
                ),
                (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 200})})),
            ]
            assert not rmock.request_history
            assert mock_validate.call_args_list == [((body_dict,), AnySupersetOf({}))]
            assert mock_handle_subscription_confirmation.called is False
def test_sending_failure_continues(self):
    def _send_email_side_effect(email_address, *args, **kwargs):
        if email_address == "*****@*****.**":
            raise EmailError("foo")
        return {}

    self.mock_notify_client.send_email.side_effect = _send_email_side_effect

    assert notify_suppliers_of_framework_application_event(
        data_api_client=self.mock_data_api_client,
        notify_client=self.mock_notify_client,
        notify_template_id="8877eeff",
        framework_slug="g-cloud-99",
        dry_run=False,
        stage="production",
        logger=self.mock_logger,
        run_id=uuid.UUID("12345678-1234-5678-1234-567812345678"),
    ) == 1

    assert mock.call.error(
        "Failed sending to {email_hash}: {e}",
        extra={
            "email_hash": mock.ANY,
            "e": AnyStringMatching("foo"),
        },
    ) in self.mock_logger.mock_calls

    # check that we do actually end up trying to send all emails instead of giving up after the error
    assert tuple(
        call[0][0] for call in self.mock_notify_client.send_email.call_args_list
    ) == (
        "*****@*****.**",
        "*****@*****.**",
        "*****@*****.**",
        "*****@*****.**",
    )
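# Taken together, the two tests above pin down the error-handling contract of
# notify_suppliers_of_framework_application_event: an EmailTemplateError aborts the run immediately (a bad
# template would fail identically for every recipient), while a plain EmailError is logged, counted and
# skipped so the remaining recipients still get their email. A minimal sketch of that loop - the function
# and helper names other than the two exception types are hypothetical:
def _send_all_sketch(email_addresses, send_email, hash_email, logger):
    error_count = 0
    for email_address in email_addresses:
        try:
            send_email(email_address)
        except EmailTemplateError:
            # the more specific failure: no point continuing, every send would fail the same way
            raise
        except EmailError as e:
            logger.error(
                "Failed sending to {email_hash}: {e}",
                extra={"email_hash": hash_email(email_address), "e": str(e)},
            )
            error_count += 1  # keep going: other recipients may still succeed
    return error_count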
def test_pattern_caching(self):
    # not actually testing that it *is* definitely caching, just checking that it's not broken due to
    # attempted caching
    pattern_a = AnyStringMatching(r"transmigration", flags=re.I)
    pattern_b = AnyStringMatching(r"transmigration")
    pattern_c = AnyStringMatching(r"transmigration", flags=re.I)
    pattern_d = AnyStringMatching(r"Transmigration", flags=re.I)
    pattern_e = AnyStringMatching(r"Transmigration")
    pattern_f = AnyStringMatching(r"transmigration")

    assert {
        "u": "transMigration",
        "v": "transmigration",
        "w": "Transmigration",
        "x": "transmigratioN",
        "y": "Transmigration",
        "z": "transmigration",
    } == {
        "u": pattern_a,
        "v": pattern_b,
        "w": pattern_c,
        "x": pattern_d,
        "y": pattern_e,
        "z": pattern_f,
    }

    assert {
        "u": "transMigration",
        "v": "transmigration",
        "w": "Transmigration",
        "x": "transmigratioN",
        "y": "Transmigration",
        "z": "Transmigration",  # <-- only difference here
    } != {
        "u": pattern_a,
        "v": pattern_b,
        "w": pattern_c,
        "x": pattern_d,
        "y": pattern_e,
        "z": pattern_f,
    }
def _logging_call_site(app):
    app.logger.info(
        "Charming day {ankles}, {underleaves}, {parent_span_id}",
        extra={"underleaves": "ample"},
    )

_set_request_class_is_sampled(app_with_stream_logger, is_sampled)

with app.test_request_context('/'):
    test_extra_log_context = {
        "ankles": "thinsocked",
        "span_id": "beesWaxed",
    }
    request.get_extra_log_context = mock.Mock(spec_set=[])
    request.get_extra_log_context.return_value = test_extra_log_context

    # we perform the log call in a specifically designated & named function to exercise & be able to reliably
    # assert the behaviour of the introspective aspects of our logging
    _logging_call_site(app)

all_lines = tuple(json.loads(line) for line in stream.read().splitlines())

assert all_lines == (
    AnySupersetOf({
        "message": "Missing keys when formatting log message: ('parent_span_id',)",
        # it may seem foolish and a bit fragile to include the following parameters in our assertions but
        # properly testing introspective behaviour is always going to get a bit weird and meta in that regard.
        "app_funcName": "_logging_call_site",
        "app_pathname": os.path.normcase(_logging_call_site.__code__.co_filename),
        "app_lineno": RestrictedAny(lambda value: isinstance(value, int)),
        "lineno": RestrictedAny(lambda value: isinstance(value, int)),
        "pathname": AnyStringMatching(r".+\/dmutils\/logging\.pyc?"),
    }),
    AnySupersetOf({
        "time": mock.ANY,
        "application": mock.ANY,
        "message": "Charming day thinsocked, ample, {parent_span_id: missing key}",
        "underleaves": "ample",
        "ankles": "thinsocked",
        "spanId": "beesWaxed",
        "parentSpanId": None,
        "requestId": None,
        "debugFlag": None,
        "isSampled": None,
        # as above, these parameters are included in the assertion to ensure our modifications haven't
        # affected the regular logging introspection features
        "lineno": RestrictedAny(lambda value: isinstance(value, int)),
        "pathname": os.path.normcase(_logging_call_site.__code__.co_filename),
        **({
            "app_funcName": "_logging_call_site",
            "app_pathname": os.path.normcase(_logging_call_site.__code__.co_filename),
            "app_lineno": RestrictedAny(lambda value: isinstance(value, int)),
        } if is_sampled else {}),
    }),
)

if not is_sampled:
    # AppStackLocationFilter shouldn't have included information in this low-urgency message
    for unexpected_key in ("app_funcName", "app_lineno", "app_pathname",):
        assert unexpected_key not in all_lines[1]

for unexpected_key in (
    "span_id",
    "trace_id",
    "traceId",
    "request_id",
    "debug_flag",
    "is_sampled",
    "parent_span_id",  # also ensuring "missing key" functionality didn't add a value for this
):
    assert not any(unexpected_key in line for line in all_lines)
class TestUpdateOutcome(BaseApplicationTest, FixtureMixin):
    _test_update_outcome_base_scenarios = (
        (
            # other_oc_data
            {},
            # initial_data
            {},
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "00314.1500",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "314.15",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }})},
        ),
        (
            # other_oc_data
            None,
            # initial_data
            {"completed_at": None, "result": "none-suitable"},
            # put_values
            {"completed": True},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": True})},
        ),
        (
            # other_oc_data
            {"completed_at": None, "result": "cancelled"},
            # initial_data
            {"completed_at": None, "result": "none-suitable"},
            # put_values
            {"completed": True},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": True})},
        ),
        (
            # other_oc_data
            None,
            # initial_data
            {},
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "00314.1500",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "314.15",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }})},
        ),
        (
            # other_oc_data
            {
                "completed_at": datetime.datetime(2010, 10, 10, 10, 10, 10),
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 54321,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # initial_data
            {},
            # put_values
            {"award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "00314.1500",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": False, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "314.15",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }})},
        ),
        (
            # other_oc_data
            {
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 54321,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # initial_data
            {},
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "00314.1500",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "314.15",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }})},
        ),
        (
            # other_oc_data
            {},
            # initial_data
            {
                "completed_at": datetime.datetime(2010, 10, 10, 10, 10, 10),
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 54321,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Incubator", "awardValue": "271271.2",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"award": {
                "awardingOrganisationName": "Incubator", "awardValue": "271271.20",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }})},
        ),
        (
            # other_oc_data
            None,
            # initial_data
            {
                "completed_at": None,
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 54321,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # put_values
            {"completed": False, "award": {"startDate": None}},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": False, "award": {
                "awardingOrganisationName": "Lambay Freehold", "awardValue": "54321.00",
                "startDate": None, "endDate": "2011-12-12",
            }})},
        ),
        (
            # other_oc_data
            {},
            # initial_data
            {
                "completed_at": None,
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 5432.1,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # put_values
            {"completed": True},
            # expected_status_code
            200,
            # expected_response_data
            {"outcome": AnySupersetOf({"completed": True, "award": {
                "awardingOrganisationName": "Lambay Freehold", "awardValue": "5432.10",
                "startDate": "2010-12-12", "endDate": "2011-12-12",
            }})},
        ),
        (
            # other_oc_data
            {},
            # initial_data
            {"completed_at": None, "result": "none-suitable"},
            # put_values
            {"award": {"awardingOrganisationName": "Talbot de Malahide"}},
            # expected_status_code
            400,
            # expected_response_data
            {"error": (
                "awarding_organisation_name cannot be set for Outcomes with result='none-suitable'."
                " Attempted to set value 'Talbot de Malahide'"
            )},
        ),
        (
            # other_oc_data
            {"completed_at": None, "result": "cancelled"},
            # initial_data
            {"completed_at": datetime.datetime(2010, 3, 3, 3, 3, 3), "result": "none-suitable"},
            # put_values
            {"completed": False},
            # expected_status_code
            400,
            # expected_response_data
            {"error": "Can't un-complete outcome"},
        ),
        (
            # other_oc_data
            {"completed_at": datetime.datetime(2010, 3, 3, 3, 3, 3), "result": "cancelled"},
            # initial_data
            {"completed_at": None, "result": "none-suitable"},
            # put_values
            {"completed": True},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".+ \d+ already has a complete outcome: \d+")},
        ),
        (
            # other_oc_data
            None,
            # initial_data
            {},
            # put_values
            {"result": "cancelled"},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".*json was not a valid format.*", flags=re.I)},
        ),
        (
            # other_oc_data
            {},
            # initial_data
            {},
            # put_values
            {"resultOfDirectAward": {"projectId": 321}},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".*json was not a valid format.*", flags=re.I)},
        ),
        (
            # other_oc_data
            None,
            # initial_data
            {},
            # put_values
            {
                "completed": True,
                # note "award" section flattened here
                "awardingOrganisationName": "Omphalos",
                "awardValue": "00314.1500",
                "startDate": "2020-10-10",
                "endDate": "2020-11-20",
            },
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".*json was not a valid format.*", flags=re.I)},
        ),
        (
            # other_oc_data
            {
                "completed_at": datetime.datetime(2010, 10, 10, 10, 10, 10),
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 54321,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # initial_data
            {},
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "00314.1500",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".+ \d+ already has a complete outcome: \d+")},
        ),
        (
            # other_oc_data
            {"completed_at": datetime.datetime(2010, 10, 10, 10, 10, 10), "result": "cancelled"},
            # initial_data
            {},
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Omphalos", "awardValue": "00314.1500",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".+ \d+ already has a complete outcome: \d+")},
        ),
        (
            # other_oc_data
            {},
            # initial_data
            {
                "completed_at": datetime.datetime(2010, 10, 10, 10, 10, 10),
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 54321,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # put_values
            {"completed": False, "award": {
                "awardingOrganisationName": "Incubator", "awardValue": "271271.2",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            400,
            # expected_response_data
            {"error": "Can't un-complete outcome"},
        ),
        (
            # other_oc_data
            None,
            # initial_data
            {
                "completed_at": datetime.datetime(2010, 10, 10, 10, 10, 10),
                "awarding_organisation_name": "Lambay Freehold",
                "award_value": 54321,
                "start_date": datetime.date(2010, 12, 12),
                "end_date": datetime.date(2011, 12, 12),
            },
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "", "awardValue": "271271.2",
                "startDate": "2020-10-10", "endDate": "2020-11-20",
            }},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".*\bawarding_organisation_name\b.*")},
        ),
        (
            # other_oc_data
            {},
            # initial_data
            {},
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Billy Pitt", "awardValue": "314.15",
                "startDate": "2020-10-10", "endDate": "2020-20-20",
            }},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".*\bendDate\b.*")},
        ),
        (
            # other_oc_data
            None,
            # initial_data
            {},
            # put_values
            {"completed": True, "award": {
                "awardingOrganisationName": "Martello", "awardValue": "Twelve quid",
                "startDate": "2020-01-01", "endDate": "2021-12-21",
            }},
            # expected_status_code
            400,
            # expected_response_data
            {"error": AnyStringMatching(r".*\bawardValue\b.*")},
        ),
    )

    @pytest.mark.parametrize(
        (
            "other_oc_brief_based",
            "initial_brief_based",
            "other_oc_data",
            "initial_data",
            "put_values",
            "expected_status_code",
            "expected_response_data",
        ),
        tuple(chain(
            (
                # we reproduce here the variants in _test_update_outcome_base_scenarios, once for Briefs,
                # once for Projects
                (f_t, f_t,) + variant_params
                for f_t, variant_params in product((False, True,), _test_update_outcome_base_scenarios)
            ),
            (
                # and also include some with mixed target-types
                (
                    # other_oc_brief_based
                    False,
                    # initial_brief_based
                    True,
                    # other_oc_data
                    {"completed_at": datetime.datetime(2007, 7, 7, 7, 7, 7), "result": "none-suitable"},
                    # initial_data
                    {"completed_at": None, "result": "cancelled"},
                    # put_values
                    {"completed": True},
                    # expected_status_code
                    200,
                    # expected_response_data
                    {"outcome": AnySupersetOf({"completed": True})},
                ),
                (
                    # other_oc_brief_based
                    True,
                    # initial_brief_based
                    False,
                    # other_oc_data
                    {
                        "completed_at": datetime.datetime(2007, 7, 7, 7, 7, 7),
                        "awarding_organisation_name": "Lambay Freehold",
                        "award_value": 54321,
                        "start_date": datetime.date(2010, 12, 12),
                        "end_date": datetime.date(2011, 12, 12),
                    },
                    # initial_data
                    {},
                    # put_values
                    {"completed": True, "award": {
                        "awardingOrganisationName": "Omphalos", "awardValue": "00314.1500",
                        "startDate": "2020-10-10", "endDate": "2020-11-20",
                    }},
                    # expected_status_code
                    200,
                    # expected_response_data
                    {"outcome": AnySupersetOf({"completed": True, "award": {
                        "awardingOrganisationName": "Omphalos", "awardValue": "314.15",
                        "startDate": "2020-10-10", "endDate": "2020-11-20",
                    }})},
                ),
            ),
        )),
        # help pytest make its printed representation of the parameter set a little more readable
        ids=(lambda val: "EMPTYDCT" if val == {} else None),
    )
    def test_update_outcome_scenarios(
        self,
        other_oc_brief_based,
        initial_brief_based,
        other_oc_data,
        initial_data,
        put_values,
        expected_status_code,
        expected_response_data,
    ):
        """
        A number of arguments control the background context this test is run in and the parameters PUT to
        the endpoint. Not all of the combinations make sense together, and a caller should not expect a test
        to pass with a nonsensical combination of arguments.

        :param other_oc_brief_based: whether the "other", existing Outcome should be Brief-based as opposed
            to Direct Award-based
        :param initial_brief_based: whether the target Outcome should initially be set up to be Brief-based
            as opposed to Direct Award-based
        :param other_oc_data: field values to set up the "other" Outcome with, ``None`` for no "other"
            Outcome to be created
        :param initial_data: field values to initially set up the target Outcome with
        :param put_values: payload dictionary to be PUT to the target endpoint (without the ``outcome``
            wrapper)
        :param expected_status_code:
        :param expected_response_data:
        """
        user_id = self.setup_dummy_user(id=1, role='buyer')
        self.setup_dummy_suppliers(3)

        project = None
        search = None
        chosen_archived_service = other_archived_service = None
        if not (other_oc_brief_based and initial_brief_based):
            # create required objects for direct award-based Outcome
            self.setup_dummy_services(3, model=ArchivedService)
            project = DirectAwardProject(
                name="Lambay Island",
                users=[User.query.get(user_id)],
            )
            db.session.add(project)
            search = DirectAwardSearch(
                project=project,
                created_by=user_id,
                active=True,
                search_url="http://nothing.nowhere",
            )
            db.session.add(search)
            for archived_service in db.session.query(ArchivedService).filter(
                ArchivedService.service_id.in_(("2000000000", "2000000001",))
            ).all():
                search.archived_services.append(archived_service)
            chosen_archived_service, other_archived_service = search.archived_services[:2]
        # else skip creating these to save time

        brief = None
        chosen_brief_response = other_brief_response = None
        if other_oc_brief_based or initial_brief_based:
            # create required objects for brief-based Outcome
            brief = self.setup_dummy_brief(status="closed", user_id=user_id, data={})
            chosen_brief_response, other_brief_response = (
                BriefResponse(
                    brief=brief,
                    supplier_id=i,
                    submitted_at=datetime.datetime.utcnow(),
                    data={},
                ) for i in (1, 2,)
            )
            db.session.add(chosen_brief_response)
            db.session.add(other_brief_response)
        # else skip creating these to save time

        other_outcome = None
        if other_oc_data is not None:
            # create "other" Outcome for our target one to potentially clash with
            other_outcome = Outcome(
                **({"brief": brief} if other_oc_brief_based else {"direct_award_project": project}),
                **(
                    {
                        "result": other_oc_data.get("result", "awarded"),
                        **(
                            {"brief_response": other_brief_response}
                            if other_oc_brief_based else
                            {
                                "direct_award_search": search,
                                "direct_award_archived_service": other_archived_service,
                            }
                        ),
                    }
                    if other_oc_data.get("result", "awarded") == "awarded" else
                    {"result": other_oc_data["result"]}
                ),
                **{k: v for k, v in (other_oc_data or {}).items() if k not in ("completed_at", "result",)},
            )
            if "completed_at" in other_oc_data:
                other_outcome.completed_at = other_oc_data["completed_at"]
            db.session.add(other_outcome)

        # create our target Outcome in its initial state
        outcome = Outcome(
            **({"brief": brief} if initial_brief_based else {"direct_award_project": project}),
            **(
                {
                    "result": initial_data.get("result", "awarded"),
                    **(
                        {"brief_response": chosen_brief_response}
                        if initial_brief_based else
                        {
                            "direct_award_search": search,
                            "direct_award_archived_service": chosen_archived_service,
                        }
                    ),
                }
                if initial_data.get("result", "awarded") == "awarded" else
                {"result": initial_data["result"]}
            ),
            **{k: v for k, v in (initial_data or {}).items() if k not in ("completed_at", "result",)},
        )
        if "completed_at" in initial_data:
            # can only set completed_at after other fields have been set
            outcome.completed_at = initial_data["completed_at"]
        db.session.add(outcome)

        # must assign ids before we can lock project
        db.session.flush()
        if project:
            project.locked_at = datetime.datetime.now()

        # make a concrete note of these so we don't have to fetch them back from the database after the
        # request, potentially getting back values which have been inadvertently changed
        outcome_external_id = outcome.external_id
        project_external_id = project and project.external_id
        search_id = search and search.id
        chosen_archived_service_id = chosen_archived_service and chosen_archived_service.id
        chosen_archived_service_service_id = chosen_archived_service and chosen_archived_service.service_id
        brief_id = brief and brief.id
        chosen_brief_response_id = chosen_brief_response and chosen_brief_response.id
        audit_event_count = AuditEvent.query.count()

        db.session.commit()
        # keep a nice concrete representation for later comparison
        outcome_serialization_before = outcome.serialize()

        res = self.client.put(
            f"/outcomes/{outcome.external_id}",
            data=json.dumps({
                "updated_by": "*****@*****.**",
                "outcome": put_values,
            }),
            content_type="application/json",
        )
        assert res.status_code == expected_status_code
        response_data = json.loads(res.get_data())
        assert response_data == expected_response_data

        # allow these to be re-used in this session, "refreshed"
        db.session.add_all(
            x for x in (outcome, project, search, chosen_archived_service,) if x is not None
        )
        db.session.expire_all()

        if res.status_code != 200:
            # assert change wasn't made, audit event wasn't added
            assert outcome.serialize() == outcome_serialization_before
            assert AuditEvent.query.count() == audit_event_count
        else:
            # an additional check of values we should be able to figure out the "correct" values for
            assert response_data == {
                "outcome": {
                    "id": outcome_external_id,
                    "result": initial_data.get("result", "awarded"),
                    "completed": (
                        bool(outcome_serialization_before.get("completedAt"))
                        or put_values.get("completed") is True
                    ),
                    "completedAt": (
                        outcome_serialization_before.get("completedAt") or (
                            AnyStringMatching(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z")
                            if put_values.get("completed") else None
                        )
                    ),
                    **({
                        "resultOfFurtherCompetition": {
                            "brief": {"id": brief_id},
                            **({
                                "briefResponse": {"id": chosen_brief_response_id},
                            } if initial_data.get("result", "awarded") == "awarded" else {}),
                        },
                    } if initial_brief_based else {
                        "resultOfDirectAward": {
                            "project": {"id": project_external_id},
                            **({
                                "search": {"id": search_id},
                                "archivedService": {
                                    "id": chosen_archived_service_id,
                                    "service": {"id": chosen_archived_service_service_id},
                                },
                            } if initial_data.get("result", "awarded") == "awarded" else {}),
                        },
                    }),
                    **({"award": AnySupersetOf({})}
                       if initial_data.get("result", "awarded") == "awarded" else {}),
                }
            }
            # check changes actually got committed
            assert response_data == {"outcome": outcome.serialize()}

            # check audit event(s) were saved
            expect_complete_audit_event = (
                put_values.get("completed") is True and not initial_data.get("completed_at")
            )
            n_expected_new_audit_events = 2 if expect_complete_audit_event else 1
            assert AuditEvent.query.count() == audit_event_count + n_expected_new_audit_events

            # grab those most recent (1 or) 2 audit events from the db, re-sorting them to be in a
            # predictable order - we don't care whether the complete_outcome or update_outcome comes out of
            # the db first
            audit_events = sorted(
                db.session.query(AuditEvent).order_by(
                    desc(AuditEvent.created_at),
                    desc(AuditEvent.id),
                )[:n_expected_new_audit_events],
                key=lambda ae: ae.type,
                reverse=True,
            )

            assert audit_events[0].type == "update_outcome"
            assert audit_events[0].object is outcome
            assert audit_events[0].acknowledged is False
            assert audit_events[0].acknowledged_at is None
            assert not audit_events[0].acknowledged_by
            assert audit_events[0].user == "*****@*****.**"
            assert audit_events[0].data == put_values

            if expect_complete_audit_event:
                assert audit_events[1].type == "complete_outcome"
                assert audit_events[1].created_at == audit_events[0].created_at == outcome.completed_at
                assert audit_events[1].object is outcome
                assert audit_events[1].acknowledged is False
                assert audit_events[1].acknowledged_at is None
                assert not audit_events[1].acknowledged_by
                assert audit_events[1].user == "*****@*****.**"
                assert audit_events[1].data == {}

    def test_nonexistent_outcome(self):
        res = self.client.put(
            "/outcomes/314159",
            data=json.dumps({
                "updated_by": "*****@*****.**",
                "outcome": {"completed": True},
            }),
            content_type="application/json",
        )
        assert res.status_code == 404
        assert json.loads(res.get_data()) == {
            "error": "Outcome 314159 not found",
        }
class TestScanAndTagS3Object(BaseApplicationTest): @pytest.mark.parametrize( ( "initial_tagset", "concurrent_new_tagset", "clamd_instream_retval", "expected_retval", "expected_log_calls", "expected_notify_calls", "expected_tagset", ), ( ( # initial_tagset { "existing": "tag123", "avStatus.irrelevant": "who is here", }, # concurrent_new_tagset {"surprise": "tag234"}, # clamd_instream_retval {"stream": ("OK", "dénouement sufficient",)}, # expected_retval ( {}, True, { "avStatus.clamdVerStr": "ClamAV 567; first watch", "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:05.040302", }, ), # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version .* has no .avStatus\.result. tag "), ()), AnySupersetOf({"extra": AnySupersetOf({ "av_status": {"avStatus.irrelevant": "who is here"}, "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(initiate object download"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Scanned "), ()), AnySupersetOf({"extra": AnySupersetOf({ "file_length": 12, "file_name": "too ducky.puddeny-pie.pdf", "clamd_result": ("OK", "dénouement sufficient"), })}), ), ( (logging.DEBUG, AnyStringMatching(r"Fetched clamd version "), ()), AnySupersetOf({"extra": AnySupersetOf({ "clamd_version": "ClamAV 567; first watch", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(put object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Handled bucket "), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls (), # expected_tagset { "avStatus.result": "pass", "avStatus.clamdVerStr": "ClamAV 567_ first watch", "avStatus.ts": "2010-09-08T07:06:05.040302", "surprise": "tag234", }, ), ( # initial_tagset {"existing": "tag123"}, # concurrent_new_tagset {"surprise": "tag234"}, # clamd_instream_retval {"stream": ("FOUND", "After him, Garry!",)}, # expected_retval ( {}, True, { "avStatus.clamdVerStr": "ClamAV 567; first watch", "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:05.040302", }, ), # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version .* has no .avStatus\.result. 
tag "), ()), AnySupersetOf({"extra": AnySupersetOf({ "av_status": {}, "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(initiate object download"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Scanned "), ()), AnySupersetOf({"extra": AnySupersetOf({ "file_length": 12, "file_name": "too ducky.puddeny-pie.pdf", "clamd_result": ("FOUND", "After him, Garry!"), })}), ), ( (logging.DEBUG, AnyStringMatching(r"Fetched clamd version "), ()), AnySupersetOf({"extra": AnySupersetOf({ "clamd_version": "ClamAV 567; first watch", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(put object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Handled bucket "), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls ( mock.call("not_a_real_key-00000000-fake-uuid-0000-000000000000"), mock.call().send_email( to_email_address="*****@*****.**", personalisation={ "bucket_name": "spade", "clamd_output": "FOUND, After him, Garry!", "dm_trace_id": mock.ANY, "file_name": "too ducky.puddeny-pie.pdf", "object_key": "sandman/+4321 billy-winks☾.pdf", "object_version": "0", "sns_message_id": "<N/A>", }, template_name_or_id="developer_virus_alert", ), ), # expected_tagset { "avStatus.result": "fail", "avStatus.clamdVerStr": "ClamAV 567_ first watch", "avStatus.ts": "2010-09-08T07:06:05.040302", "surprise": "tag234", } ), ( # initial_tagset {"existing": "tag123"}, # concurrent_new_tagset None, # clamd_instream_retval {"stream": ("FOUND", "eicar-test-signature",)}, # expected_retval ( {}, True, { "avStatus.clamdVerStr": "ClamAV 567; first watch", "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:05.040302", }, ), # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version .* has no .avStatus\.result. 
tag "), ()), AnySupersetOf({"extra": AnySupersetOf({ "av_status": {}, "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(initiate object download"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Scanned "), ()), AnySupersetOf({"extra": AnySupersetOf({ "file_length": 12, "file_name": "too ducky.puddeny-pie.pdf", "clamd_result": ("FOUND", "eicar-test-signature"), })}), ), ( (logging.DEBUG, AnyStringMatching(r"Fetched clamd version "), ()), AnySupersetOf({"extra": AnySupersetOf({ "clamd_version": "ClamAV 567; first watch", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(put object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Handled bucket "), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls ( mock.call("not_a_real_key-00000000-fake-uuid-0000-000000000000"), mock.call().send_email( to_email_address="*****@*****.**", personalisation={ "bucket_name": "spade", "clamd_output": "FOUND, eicar-test-signature", "dm_trace_id": mock.ANY, "file_name": "too ducky.puddeny-pie.pdf", "object_key": "sandman/+4321 billy-winks☾.pdf", "object_version": "0", "sns_message_id": "<N/A>", }, template_name_or_id="developer_virus_alert", reference="eicar-found-4d3daeeb3ea3d90d4d6e7a20a5b483a9-development", ), ), # expected_tagset { "avStatus.result": "fail", "avStatus.clamdVerStr": "ClamAV 567_ first watch", "avStatus.ts": "2010-09-08T07:06:05.040302", "existing": "tag123", } ), ( # initial_tagset {"existing": "tag123"}, # concurrent_new_tagset { "surprise": "tag234", "avStatus.ts": "2010-09-08T07:06:05.040302", }, # clamd_instream_retval {"stream": ("ERROR", " Some trouble is on here",)}, # expected_retval UnknownClamdError, # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version .* has no .avStatus\.result. 
tag "), ()), AnySupersetOf({"extra": AnySupersetOf({ "av_status": {}, "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(initiate object download"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Scanned "), ()), AnySupersetOf({"extra": AnySupersetOf({ "file_length": 12, "file_name": "too ducky.puddeny-pie.pdf", "clamd_result": ("ERROR", " Some trouble is on here",), })}), ), ( (logging.INFO, AnyStringMatching(r"Failed handling.*UnknownClamdError.*"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls (), # expected_tagset { "surprise": "tag234", "avStatus.ts": "2010-09-08T07:06:05.040302", }, ), ( # initial_tagset {"existing": "tag123"}, # concurrent_new_tagset { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.irrelevant": "who is here", }, # clamd_instream_retval {"stream": ("OK", "Egg two demolished",)}, # expected_retval ( { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.irrelevant": "who is here", }, False, { "avStatus.clamdVerStr": "ClamAV 567; first watch", "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:05.040302", }, ), # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version .* has no .avStatus\.result. 
tag "), ()), AnySupersetOf({"extra": AnySupersetOf({ "av_status": {}, "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(initiate object download"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Scanned "), ()), AnySupersetOf({"extra": AnySupersetOf({ "file_length": 12, "file_name": "too ducky.puddeny-pie.pdf", "clamd_result": ("OK", "Egg two demolished"), })}), ), ( (logging.DEBUG, AnyStringMatching(r"Fetched clamd version "), ()), AnySupersetOf({"extra": AnySupersetOf({ "clamd_version": "ClamAV 567; first watch", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( ( logging.WARNING, AnyStringMatching(r"Object was tagged.*existing.*unapplied.*"), (), ), AnySupersetOf({"extra": AnySupersetOf({ "existing_av_status": { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.irrelevant": "who is here", }, "existing_av_status_result": "fail", "unapplied_av_status": { "avStatus.result": "pass", "avStatus.clamdVerStr": "ClamAV 567; first watch", "avStatus.ts": "2010-09-08T07:06:05.040302", }, "unapplied_av_status_result": "pass", })}), ), ( (logging.INFO, AnyStringMatching(r"Handled bucket "), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls (), # expected_tagset { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.irrelevant": "who is here", }, ), ( # initial_tagset {"existing": "tag123"}, # concurrent_new_tagset { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.clamdVerStr": "4321_ 7654", "surprise": "789+789", }, # clamd_instream_retval {"stream": ("FOUND", "After him, boy!",)}, # expected_retval ( { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.clamdVerStr": "4321_ 7654", }, False, { "avStatus.clamdVerStr": "ClamAV 567; first watch", "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:05.040302", }, ), # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version .* has no .avStatus\.result. 
tag "), ()), AnySupersetOf({"extra": AnySupersetOf({ "av_status": {}, "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(initiate object download"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Scanned "), ()), AnySupersetOf({"extra": AnySupersetOf({ "file_length": 12, "file_name": "too ducky.puddeny-pie.pdf", "clamd_result": ("FOUND", "After him, boy!"), })}), ), ( (logging.DEBUG, AnyStringMatching(r"Fetched clamd version "), ()), AnySupersetOf({"extra": AnySupersetOf({ "clamd_version": "ClamAV 567; first watch", })}), ), ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( ( logging.WARNING, AnyStringMatching(r"Object was tagged.*existing.*unapplied.*"), (), ), AnySupersetOf({"extra": AnySupersetOf({ "existing_av_status": { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.clamdVerStr": "4321_ 7654", }, "existing_av_status_result": "pass", "unapplied_av_status": { "avStatus.result": "fail", "avStatus.clamdVerStr": "ClamAV 567; first watch", "avStatus.ts": "2010-09-08T07:06:05.040302", }, "unapplied_av_status_result": "fail", })}), ), ( (logging.INFO, AnyStringMatching(r"Handled bucket "), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls (), # expected_tagset { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", "avStatus.clamdVerStr": "4321_ 7654", "surprise": "789+789", }, ), ( # initial_tagset { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", }, # concurrent_new_tagset None, # clamd_instream_retval None, # expected_retval ( { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", }, False, None, ), # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version.*already.*avStatus\.result.*tag.+"), ()), AnySupersetOf({"extra": AnySupersetOf({ "existing_av_status": { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", }, "existing_av_status_result": "pass", "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Handled bucket "), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls (), # expected_tagset { "avStatus.result": "pass", "avStatus.ts": "2010-09-08T07:06:04.010101", }, ), ( # initial_tagset { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", }, # concurrent_new_tagset None, # clamd_instream_retval None, # expected_retval ( { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", }, False, None, ), # expected_log_calls ( ( (logging.DEBUG, AnyStringMatching(r"Call to S3 \(get object tagging"), ()), 
AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Object version.*already.*avStatus\.result.*tag.+"), ()), AnySupersetOf({"extra": AnySupersetOf({ "existing_av_status": { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", }, "existing_av_status_result": "fail", "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ( (logging.INFO, AnyStringMatching(r"Handled bucket "), ()), AnySupersetOf({"extra": AnySupersetOf({ "s3_bucket_name": "spade", "s3_object_key": "sandman/+4321 billy-winks☾.pdf", "s3_object_version": "0", })}), ), ), # expected_notify_calls (), # expected_tagset { "avStatus.result": "fail", "avStatus.ts": "2010-09-08T07:06:04.010101", }, ), ), ) @freeze_time("2010-09-08T07:06:05.040302") @mock.patch("app.s3.DMNotifyClient", autospec=True) def test_scan_and_tag_s3_object( self, mock_notify_client, bucket_with_file, mock_clamd, initial_tagset, concurrent_new_tagset, clamd_instream_retval, expected_retval, expected_log_calls, expected_notify_calls, expected_tagset, ): """ :param initial_tagset: tagset (dict) that file in bucket will appear to have initially :param concurrent_new_tagset: a tagset (dict) that coincidentally gets set "while" the clam instream process is running, None to skip this update :param clamd_instream_retval: value to return from mock clamd instream(...) call, None to expect no call to take place :param expected_retval: return value to expect from call, or, if a subclass of Exception, expect to raise this exception type :param expected_log_calls: sequence of expected mock.call()s to have been made to app logger :param expected_notify_calls: sequence of expected mock.call()s to have been made to mock DMNotifyClient :param expected_tagset: tagset (dict) to expect file to have after the request processing has finished """ bucket, objver = bucket_with_file s3_client = boto3.client("s3", region_name="howth-west-2") if initial_tagset is not None: s3_client.put_object_tagging( Bucket=bucket.name, Key=objver.Object().key, VersionId=objver.id, Tagging={"TagSet": tagset_from_dict(initial_tagset)}, ) def clamd_instream_func(*args, **kwargs): # if clamd_instream_retval *is* None, we'd be expecting "instream" not to be called at all assert clamd_instream_retval is not None assert args == (RestrictedAny(lambda x: x.read() == b"123412341234"),) assert kwargs == {} if concurrent_new_tagset is not None: # a very literal "side effect" here - simulating a modification to the object's tags while scanning... 
s3_client.put_object_tagging( Bucket=bucket.name, Key=objver.Object().key, VersionId=objver.id, Tagging={"TagSet": tagset_from_dict(concurrent_new_tagset)}, ) return clamd_instream_retval mock_clamd.instream.side_effect = clamd_instream_func mock_clamd.version.return_value = "ClamAV 567; first watch" with self.mocked_app_logger_log() as mock_app_log: with pytest.raises(expected_retval) if ( isinstance(expected_retval, type) and issubclass(expected_retval, Exception) ) else null_context_manager(): with self.app.test_request_context(): retval = scan_and_tag_s3_object( s3_client, bucket.name, objver.Object().key, objver.id, ) if not (isinstance(expected_retval, type) and issubclass(expected_retval, Exception)): assert retval == expected_retval assert mock_app_log.call_args_list == list(expected_log_calls) assert mock_notify_client.mock_calls == list(expected_notify_calls) assert dict_from_tagset( s3_client.get_object_tagging( Bucket=bucket.name, Key=objver.Object().key, VersionId=objver.id, )["TagSet"] ) == expected_tagset
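# The test above leans on a few helpers that are not shown in this excerpt:
# tagset_from_dict/dict_from_tagset to round-trip object tags, and
# null_context_manager to conditionally skip pytest.raises. Minimal sketches
# follow, assuming the [{"Key": ..., "Value": ...}] TagSet shape used by S3's
# get_object_tagging/put_object_tagging APIs -- the bodies are inferred, not
# taken from the source under test.
from contextlib import contextmanager
from typing import Dict, List


def tagset_from_dict(d: Dict[str, str]) -> List[Dict[str, str]]:
    # S3 tagging APIs expect a list of {"Key": ..., "Value": ...} mappings
    return [{"Key": key, "Value": value} for key, value in d.items()]


def dict_from_tagset(tagset: List[Dict[str, str]]) -> Dict[str, str]:
    # inverse of tagset_from_dict: collapse a TagSet list back to a plain dict
    return {tag["Key"]: tag["Value"] for tag in tagset}


@contextmanager
def null_context_manager():
    # no-op stand-in used where pytest.raises(...) only applies conditionally
    # (contextlib.nullcontext is an equivalent on Python 3.7+)
    yield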
def _default_and_no_exception(log_context): return timing.logged_duration.default_condition( log_context) and sys.exc_info()[0] is None # a dictionary mapping each message to be passed to logged_duration to a pair of expected values: the first # applies before log message formatting has taken place, the second after it has. _messages_expected = OrderedDict(( ( timing.logged_duration.default_message, ( { 'success': "Block executed in {duration_real}s of real-time", 'error': AnyStringMatching( r"Block raised \w+ in \{duration_real\}s of real-time"), }, { 'success': AnyStringMatching( r"Block executed in [0-9eE.-]+s of real-time"), 'error': AnyStringMatching( r"Block raised \w+ in [0-9eE.-]+s of real-time"), }, ), ), ( timing.different_message_for_success_or_error( success_message='Block succeeded in {duration_real}s', error_message='Block raised {exc_info[0]} in {duration_real}s',
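# The _messages_expected table above exercises both message styles accepted by
# timing.logged_duration. A minimal usage sketch of the pattern under test,
# assuming -- as the snippet suggests -- that logged_duration works as a
# context manager taking logger, message and condition keyword arguments; the
# import path and the workload below are illustrative assumptions only.
import logging

from dmutils import timing  # assumed location of the module under test

logger = logging.getLogger("example")

with timing.logged_duration(
    logger=logger,
    message=timing.different_message_for_success_or_error(
        success_message="Block succeeded in {duration_real}s",
        error_message="Block raised {exc_info[0]} in {duration_real}s",
    ),
    # as with _default_and_no_exception above: only log when the default
    # condition holds and no exception is currently propagating
    condition=_default_and_no_exception,
):
    total = sum(range(10_000))  # arbitrary block whose duration is logged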
class TestHandleS3Sns(BaseCallbackApplicationTest): _basic_subscription_confirmation_body = { "Type": "SubscriptionConfirmation", "TopicArn": "54321:cattleTrade", "Token": "314159b", "Timestamp": "2018-05-05T11:00:01.12345Z", "SubscribeURL": "https://laissez.faire/doctrine", "MessageId": "abc123", } _basic_notification_body = { "Type": "Notification", "TopicArn": "65432:oldIndustries", "Timestamp": "2018-05-05T11:00:01.12345Z", "MessageId": "424344def", "Message": "The way thereof", } @pytest.mark.parametrize("base_body_dict", ( _basic_subscription_confirmation_body, _basic_notification_body, )) @freeze_time("2018-05-05T10:00") @mock.patch("app.callbacks.views.sns._handle_subscription_confirmation", autospec=True) def test_handle_s3_sns_unfetchable_cert( self, mock_handle_subscription_confirmation, base_body_dict): with self.mocked_app_logger_log() as mock_app_log: with requests_mock.Mocker() as rmock: rmock.register_uri("GET", "https://nowhere.amazonaws.com/cert.pem", status_code=404) client = self.get_authorized_client() res = client.post("/callbacks/sns/s3/uploaded", data=json.dumps({ **base_body_dict, "Signature": "should_be_irrelevant", "SigningCertURL": "https://nowhere.amazonaws.com/cert.pem", }), content_type="application/json") assert res.status_code == 400 assert mock_app_log.call_args_list == [ ( (logging.INFO, AnyStringMatching( r"Failed to fetch certificate .*404"), ()), AnySupersetOf({ "extra": AnySupersetOf({ "target_url": "https://nowhere.amazonaws.com/cert.pem", }) }), ), ( (logging.WARNING, AnyStringMatching(r"SNS request body failed "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "validation_error": RestrictedAny(lambda x: isinstance( x, requests.exceptions.HTTPError)), }) }), ), (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 400})})) ] assert rmock.request_history == [ RestrictedAny(lambda r: r.url == "https://nowhere.amazonaws.com/cert.pem") ] assert mock_handle_subscription_confirmation.called is False @pytest.mark.parametrize("base_body_dict", ( _basic_subscription_confirmation_body, _basic_notification_body, )) @mock.patch("validatesns.validate", autospec=True) @mock.patch("app.callbacks.views.sns._handle_subscription_confirmation", autospec=True) def test_handle_s3_sns_bad_signature(self, mock_handle_subscription_confirmation, mock_validate, base_body_dict): mock_validate.side_effect = validatesns.ValidationError with self.mocked_app_logger_log() as mock_app_log: with requests_mock.Mocker() as rmock: client = self.get_authorized_client() res = client.post( "/callbacks/sns/s3/uploaded", data=json.dumps(base_body_dict), content_type="application/json", ) assert res.status_code == 400 assert mock_app_log.call_args_list == [ ( (logging.WARNING, AnyStringMatching(r".*failed signature validation"), ()), AnySupersetOf({ "extra": AnySupersetOf({ "validation_error": RestrictedAny(lambda x: isinstance( x, validatesns.ValidationError)), }) }), ), (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 400})})) ] assert not rmock.request_history assert mock_validate.call_args_list == [((base_body_dict, ), AnySupersetOf({}))] assert mock_handle_subscription_confirmation.called is False @mock.patch("validatesns.validate", autospec=True) @mock.patch("app.callbacks.views.sns._handle_subscription_confirmation", autospec=True) def test_handle_s3_sns_weird_request_type( self, mock_handle_subscription_confirmation, mock_validate): with self.mocked_app_logger_log() as mock_app_log: with requests_mock.Mocker() as rmock: client = self.get_authorized_client() 
weird_body_dict = { "MessageId": "1234321", "Type": "EuropeanConflagration", } res = client.post( "/callbacks/sns/s3/uploaded", data=json.dumps(weird_body_dict), content_type="application/json", ) assert res.status_code == 400 assert mock_app_log.call_args_list == [ ( (logging.WARNING, AnyStringMatching(r"Unrecognized request type "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "request_type": "EuropeanConflagration", }) }), ), (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 400})})) ] assert not rmock.request_history assert mock_validate.call_args_list == [((weird_body_dict, ), AnySupersetOf({}))] assert mock_handle_subscription_confirmation.called is False _test_handle_s3_sns_unexpected_message_contents_didnt_match_log_args = lambda message: ( # noqa (logging.WARNING, AnyStringMatching(r"Message contents didn't match "), ()), AnySupersetOf( {"extra": AnySupersetOf({ "message_contents": message, })}), ) _test_handle_s3_sns_unexpected_message_unrecognized_message_format_log_args = lambda message: ( # noqa (logging.WARNING, AnyStringMatching(r"Unrecognized message format "), ()), AnySupersetOf({"extra": AnySupersetOf({ "body_message": message, })}), ) @pytest.mark.parametrize( "message,expected_warning_log_call", ( ( "mangiD", _test_handle_s3_sns_unexpected_message_contents_didnt_match_log_args( "mangiD"), ), ( 123, _test_handle_s3_sns_unexpected_message_contents_didnt_match_log_args( 123), ), ( None, _test_handle_s3_sns_unexpected_message_contents_didnt_match_log_args( None), ), ( "", _test_handle_s3_sns_unexpected_message_contents_didnt_match_log_args( ""), ), ( '{"a":"b"}', _test_handle_s3_sns_unexpected_message_unrecognized_message_format_log_args( {"a": "b"}), ), ), ) @mock.patch("validatesns.validate", autospec=True) @mock.patch("app.callbacks.views.sns._handle_subscription_confirmation", autospec=True) def test_handle_s3_sns_unexpected_message( self, mock_handle_subscription_confirmation, mock_validate, message, expected_warning_log_call, ): with self.mocked_app_logger_log() as mock_app_log: with requests_mock.Mocker() as rmock: client = self.get_authorized_client() body_dict = { "MessageId": "1234321", "Type": "Notification", "Message": message, } res = client.post( "/callbacks/sns/s3/uploaded", data=json.dumps(body_dict), content_type="application/json", headers={"X-Amz-Sns-Subscription-Arn": "kcirtaP"}, ) assert res.status_code == 400 assert mock_app_log.call_args_list == [ ( (logging.INFO, AnyStringMatching(r"Processing message "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "message_id": "1234321", "subscription_arn": "kcirtaP", }) }), ), expected_warning_log_call, (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 400})})) ] assert not rmock.request_history assert mock_validate.call_args_list == [((body_dict, ), AnySupersetOf({}))] assert mock_handle_subscription_confirmation.called is False @mock.patch("validatesns.validate", autospec=True) @mock.patch("app.callbacks.views.sns._handle_subscription_confirmation", autospec=True) def test_handle_s3_sns_test_event( self, mock_handle_subscription_confirmation, mock_validate, ): with self.mocked_app_logger_log() as mock_app_log: with requests_mock.Mocker() as rmock: client = self.get_authorized_client() body_dict = { "MessageId": "1234321", "Type": "Notification", "Message": '{"Event":"s3:TestEvent","nut":"shell"}', } res = client.post( "/callbacks/sns/s3/uploaded", data=json.dumps(body_dict), content_type="application/json", headers={"X-Amz-Sns-Subscription-Arn": "kcirtaP"}, ) assert res.status_code == 200 
assert mock_app_log.call_args_list == [ ( (logging.INFO, AnyStringMatching(r"Processing message "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "message_id": "1234321", "subscription_arn": "kcirtaP", }) }), ), ( (logging.INFO, "Received S3 test event", ()), {}, ), (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 200})})) ] assert not rmock.request_history assert mock_validate.call_args_list == [((body_dict, ), AnySupersetOf({}))] assert mock_handle_subscription_confirmation.called is False @pytest.mark.parametrize("content_type", ( "application/json", "text/plain", )) @mock.patch("validatesns.validate", autospec=True) @mock.patch("app.callbacks.views.sns._handle_subscription_confirmation", autospec=True) def test_handle_s3_sns_subscription_confirmation( self, mock_handle_subscription_confirmation, mock_validate, content_type, ): # arbitrary sentinel Response mock_handle_subscription_confirmation.return_value = Response( "Grain supplies"), 200 with self.mocked_app_logger_log() as mock_app_log: with requests_mock.Mocker() as rmock: client = self.get_authorized_client() res = client.post( "/callbacks/sns/s3/uploaded", data=json.dumps( self._basic_subscription_confirmation_body), content_type=content_type, ) assert res.status_code == 200 assert res.get_data() == b"Grain supplies" assert mock_app_log.call_args_list == [ (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 200})})) ] assert not rmock.request_history assert mock_validate.call_args_list == [ ((self._basic_subscription_confirmation_body, ), AnySupersetOf({})) ] assert mock_handle_subscription_confirmation.call_args_list == [ (( self._basic_subscription_confirmation_body, "s3_file_upload_notification_development", ), {}) ] @pytest.mark.parametrize("content_type", ( "application/json", "text/plain", )) @mock.patch("app.callbacks.views.sns.scan_and_tag_s3_object", autospec=True) @mock.patch("validatesns.validate", autospec=True) def test_handle_s3_sns_notification( self, mock_validate, mock_scan_and_tag_s3_object, content_type, bucket_with_file, ): bucket, objver = bucket_with_file with self.mocked_app_logger_log() as mock_app_log: client = self.get_authorized_client() body_dict = { **self._basic_notification_body, "TopicArn": "bull:by:the:horns:123:s3_file_upload_notification_development", "Subject": "Someone uploaded a file, yeah?", "Message": json.dumps({ "Records": [ { "s3": { "bucket": { "name": bucket.name, }, "object": { "key": quote_plus(objver.Object().key), "versionId": objver.id, }, }, "awsRegion": "howth-west-2", }, ], }), } subscription_arn = f"{body_dict['TopicArn']}:314159" res = client.post( "/callbacks/sns/s3/uploaded", data=json.dumps(body_dict), content_type=content_type, headers={"X-Amz-Sns-Subscription-Arn": subscription_arn}, ) assert res.status_code == 200 assert mock_app_log.call_args_list == [ ( (logging.INFO, AnyStringMatching(r"Processing message "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "message_id": "424344def", "subscription_arn": "bull:by:the:horns:123:s3_file_upload_notification_development:314159", }) }), ), (mock.ANY, AnySupersetOf({"extra": AnySupersetOf({"status": 200})})), ] assert mock_validate.call_args_list == [((body_dict, ), AnySupersetOf({}))] assert mock_scan_and_tag_s3_object.call_args_list == [ mock.call( s3_client=mock.ANY, s3_bucket_name=bucket.name, s3_object_key=objver.Object().key, s3_object_version=objver.id, sns_message_id="424344def", ), ]
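# The notification test above hands scan_and_tag_s3_object a Message carrying
# standard S3 event records. A minimal sketch of the extraction the view
# presumably performs before dispatching; the record layout is taken from the
# body_dict built in the test, but the function itself is an assumption.
import json
from urllib.parse import unquote_plus


def extract_s3_object_refs(message: str):
    # yield (bucket_name, key, version_id) for each S3 record embedded in an
    # SNS notification's Message field
    for record in json.loads(message)["Records"]:
        yield (
            record["s3"]["bucket"]["name"],
            # object keys arrive percent-encoded (the test applies quote_plus)
            unquote_plus(record["s3"]["object"]["key"]),
            record["s3"]["object"]["versionId"],
        )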
class TestHandleSubscriptionConfirmation(BaseCallbackApplicationTest): @pytest.mark.parametrize( ( "body_dict", "supported_topic_name", "rmock_response_kwargs", "expected_output", "expect_request_made", "expected_log_calls", ), tuple( chain.from_iterable(( ( ( # body_dict { "SubscribeURL": "https://amz.net", "TopicArn": f"arn:aws:sns:howth-west-2:123456789012:Drawers" }, # supported_topic_name "Drawers", # rmock_response_kwargs { "text": f"""<ConfirmSubscriptionResponse xmlns="http://brazenfaced.things"> <ConfirmSubscriptionResult><SubscriptionArn> arn:aws:sns:howth-west-2:123456789012:Drawers:bicycles </SubscriptionArn></ConfirmSubscriptionResult> <ResponseMetadata><RequestId> always-skeezing </RequestId></ResponseMetadata> </ConfirmSubscriptionResponse>""" }, # expected_output (RestrictedAny(lambda x: isinstance(x, Response)), 200 ), # expect_request_made True, # expected_log_calls ( ( (logging.INFO, AnyStringMatching("Made GET "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "target_url": "https://amz.net", "topic_arn": f"arn:aws:sns:howth-west-2:123456789012:Drawers", }), }), ), ( (logging.INFO, AnyStringMatching( "SubscriptionConfirmation succeeded "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "subscription_arn": f"arn:aws:sns:howth-west-2:123456789012:Drawers:bicycles", "confirmation_request_id": "always-skeezing", }), }), ), ), ), ( # body_dict { "SubscribeURL": "https://butt.bridge", "TopicArn": "premature:decay" }, # supported_topic_name "BrilliantFellows", # rmock_response_kwargs { "text": "dummy" }, # expected_output BadRequest, # expect_request_made False, # expected_log_calls ( ( (logging.WARNING, AnyStringMatching(r".*unrecognized topic.*"), ()), AnySupersetOf({ "extra": AnySupersetOf({ "topic_name": "decay", "topic_arn": "premature:decay", }), }), ), ), ), ( # body_dict { "SubscribeURL": "https://sister.island.co.uk", "TopicArn": "100M:Played:Out:CoalSeams" }, # supported_topic_name "CoalSeams", # rmock_response_kwargs { "text": """<ConfirmSubscriptionResponse xmlns="http://neighbours-across.the/channel"> <ConfirmSubscriptionResult><SubscriptionArn> unrelated:naming:scheme </SubscriptionArn></ConfirmSubscriptionResult> </ConfirmSubscriptionResponse>""", "status_code": 210 }, # expected_output (RestrictedAny(lambda x: isinstance(x, Response)), 200 ), # expect_request_made True, # expected_log_calls ( ( (logging.INFO, AnyStringMatching("Made GET "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "target_url": "https://sister.island.co.uk", "topic_arn": "100M:Played:Out:CoalSeams", }), }), ), ( (logging.INFO, AnyStringMatching( "SubscriptionConfirmation succeeded "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "subscription_arn": "unrelated:naming:scheme", }), }), ), ), ), ( # body_dict { "SubscribeURL": "https://disorder.ly.hous.es", "TopicArn": "nice:mixup" }, # supported_topic_name "mixup", # rmock_response_kwargs { "text": "<Second drink<does it<" }, # expected_output BadRequest, # expect_request_made True, # expected_log_calls ( ( (logging.INFO, AnyStringMatching("Made GET "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "target_url": "https://disorder.ly.hous.es", "topic_arn": "nice:mixup", }), }), ), ( (logging.WARNING, RestrictedAny( lambda x: isinstance(x, ParseError)), ()), AnySupersetOf({}), ), ( (logging.WARNING, "SubscriptionConfirmation response parsing failed", ()), AnySupersetOf({}), ), ), ), ), ( # the following case should basically have the same results for all request errors, so testing many # of these cases ( # body_dict { "SubscribeURL": 
"https://wildgoose.chase/this", "TopicArn": "first-class:third:ticket" }, # supported_topic_name "ticket", # rmock_response_kwargs rmock_response_kwargs, # expected_output BadRequest, # expect_request_made True, # expected_log_calls ( ( (logging.INFO, AnyStringMatching("Failed to make GET "), ()), AnySupersetOf({ "extra": AnySupersetOf({ "target_url": "https://wildgoose.chase/this", "topic_arn": "first-class:third:ticket", }), }), ), ), ) for rmock_response_kwargs in ( { "status_code": 404, "text": "where?" }, { "status_code": 500, "text": "what?" }, { "status_code": 403, "text": "who?" }, { "status_code": 400, "text": "no" }, { "exc": requests.exceptions.ConnectTimeout }, { "exc": requests.exceptions.SSLError }, )), ))), ) def test_handle_subscription_confirmation( self, body_dict, supported_topic_name, rmock_response_kwargs, expected_output, expect_request_made, expected_log_calls, ): """ :param body_dict: request body_dict to pass directly to _handle_subscription_confirmation :param supported_topic_name: supported_topic_name to pass directly to _handle_subscription_confirmation :param rmock_response_kwargs: kwargs to pass to register_uri specifying how requests_mock should respond to a request to the "subscribe" url :param expected_output: either an Exception subclass to expect to be raised or the expected return value of _handle_subscription_confirmation :param expect_request_made: whether to expect a request to have been made to the "subscribe" url :param expected_log_calls: sequence of expected mock.call()s to have been made to app logger """ with self.mocked_app_logger_log() as mock_app_log: with self.app.test_request_context(): with requests_mock.Mocker() as rmock: rmock.register_uri("GET", body_dict["SubscribeURL"], **rmock_response_kwargs) expect_exception = isinstance( expected_output, type) and issubclass( expected_output, Exception) with pytest.raises( expected_output ) if expect_exception else null_context_manager(): out = _handle_subscription_confirmation( body_dict, supported_topic_name) if not expect_exception: assert out == expected_output assert rmock.called is expect_request_made assert mock_app_log.call_args_list == list( expected_log_calls)