def test_delete_tags(self):
    """delete_tags on the stubbed instance resource returns the canned empty dict."""
    stub = Stubber(self.instance_resource.meta.client)
    stub.add_response('delete_tags', {})
    stub.activate()
    result = self.instance_resource.delete_tags(Tags=[{'Key': 'foo'}])
    # The single queued response must have been consumed by the call above.
    stub.assert_no_pending_responses()
    self.assertEqual(result, {})
    stub.deactivate()
class TestMturk(BaseSessionTest):
    """The mturk client must answer to both spellings of the HITs operation."""

    def setUp(self):
        super(TestMturk, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client('mturk', self.region)
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    def test_list_hits_aliased(self):
        # Queue one canned response per alias we are about to exercise.
        for _ in range(2):
            self.stubber.add_response('list_hits_for_qualification_type', {})
        params = {'QualificationTypeId': 'foo'}
        # Historical spelling ("hi_ts") and the corrected spelling must both
        # resolve to the same underlying operation.
        self.client.list_hi_ts_for_qualification_type(**params)
        self.client.list_hits_for_qualification_type(**params)
        self.stubber.assert_no_pending_responses()
class TestS3ObjectSummary(unittest.TestCase):
    """ObjectSummary.load() should populate only ListObjects-shaped attributes,
    even though the data comes back from a HeadObject call."""

    def setUp(self):
        self.session = boto3.session.Session(
            aws_access_key_id='foo',
            aws_secret_access_key='bar',
            region_name='us-west-2')
        self.s3 = self.session.resource('s3')
        self.obj_summary = self.s3.ObjectSummary('my_bucket', 'my_key')
        self.obj_summary_size = 12
        self.stubber = Stubber(self.s3.meta.client)
        self.stubber.activate()
        head_response = {
            'ContentLength': self.obj_summary_size,
            'ETag': 'my-etag',
            'ContentType': 'binary',
        }
        self.stubber.add_response(
            method='head_object',
            service_response=head_response,
            expected_params={'Bucket': 'my_bucket', 'Key': 'my_key'},
        )

    def tearDown(self):
        self.stubber.deactivate()

    def test_has_load(self):
        self.assertTrue(hasattr(self.obj_summary, 'load'),
                        'load() was not injected onto ObjectSummary resource.')

    def test_autoloads_correctly(self):
        # HeadObject returns ContentLength; the resource maps it onto the
        # ListObjects shape's Size attribute.
        self.assertEqual(self.obj_summary.size, self.obj_summary_size)

    def test_cannot_access_other_non_related_parameters(self):
        # Loading via HeadObject must not leak HeadObject-only attributes;
        # only the ListObjects shape is exposed.
        self.assertFalse(hasattr(self.obj_summary, 'content_length'))
class TestSagemaker(BaseSessionTest):
    """Registering under either the legacy or the 'api.'-prefixed event name
    must fire the hook under the legacy sagemaker event name."""

    def setUp(self):
        super(TestSagemaker, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client('sagemaker', self.region)
        self.stubber = Stubber(self.client)
        self.stubber.activate()
        self.hook_calls = []

    def _hook(self, **kwargs):
        self.hook_calls.append(kwargs['event_name'])

    def tearDown(self):
        self.stubber.deactivate()

    def _register_and_list(self, event_name):
        # Shared plumbing: register the hook, stub one response, make the call.
        self.client.meta.events.register(event_name, self._hook)
        self.stubber.add_response('list_endpoints', {'Endpoints': []})
        self.client.list_endpoints()

    def test_event_with_old_prefix(self):
        self._register_and_list('provide-client-params.sagemaker.ListEndpoints')
        self.assertEqual(
            self.hook_calls,
            ['provide-client-params.sagemaker.ListEndpoints'])

    def test_event_with_new_prefix(self):
        self._register_and_list(
            'provide-client-params.api.sagemaker.ListEndpoints')
        # The hook still observes the legacy (un-prefixed) event name.
        self.assertEqual(
            self.hook_calls,
            ['provide-client-params.sagemaker.ListEndpoints'])
class StubbedClientTest(unittest.TestCase):
    """Base test case providing an S3 client with an activated Stubber."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            's3', self.region,
            aws_access_key_id='foo',
            aws_secret_access_key='bar')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    def reset_stubber_with_new_client(self, override_client_kwargs):
        """Rebuild the client with overridden kwargs and attach a fresh stubber."""
        base_kwargs = {
            'service_name': 's3',
            'region_name': self.region,
            'aws_access_key_id': 'foo',
            'aws_secret_access_key': 'bar',
        }
        # Overrides win over the defaults above.
        merged_kwargs = {**base_kwargs, **override_client_kwargs}
        self.client = self.session.create_client(**merged_kwargs)
        self.stubber = Stubber(self.client)
        self.stubber.activate()
class AWSTests(TestCase):
    """Tests for the SES-backed email helpers: ``get_ses_client``,
    ``send_email``, ``send_email_ses`` and ``notify_admin``.

    A real botocore SES client is created but its ``send_email`` operation is
    stubbed, so no network traffic occurs.
    """

    def setUp(self):
        self.ses_client = botocore.session.get_session().create_client(
            'ses', region_name='us-west-2')
        self.stubber = Stubber(self.ses_client)
        # Canned response the stub returns for send_email.
        self.ses_response = {'MessageId': '*****@*****.**'}
        # ANY: we only assert the parameters are present, not their values.
        expected_params = {'Source': ANY, 'Destination': ANY, 'Message': ANY}
        self.stubber.add_response('send_email', self.ses_response,
                                  expected_params)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    def test_get_ses_client(self):
        # Test get_ses_client returns the global client object.
        original_client = app.ses
        assert original_client is None
        # Now, when get_ses_client is invoked, the global ses client is
        # initialized and is same as the one returned by the method.
        new_client = get_ses_client()
        # The client should be an instance of BaseClient object.
        assert isinstance(new_client, BaseClient)
        # The global app.ses client should be now set to the value returned by
        # get_ses_client.
        assert app.ses is not None
        # The global value should be same as the one returned by get_ses_client.
        assert new_client is app.ses
        # If the global is already initialized, do not create another one.
        another_client = get_ses_client()
        assert another_client is new_client

    @mock.patch('app.ses')
    @mock.patch('app.send_email_ses')
    def test_send_email(self, mock_method, mock_client):
        # Test send_email invokes send_email_ses (and never calls SES directly).
        args = ['value1', 'value2']
        kwargs = dict(key='value')
        val = send_email(*args, **kwargs)
        assert not mock_client.send_email.called
        assert mock_method.called
        mock_method.assert_called_with(*args, **kwargs, ses_client=mock_client)
        assert val is True
        # Test that send_email prints out error when an exception is raised.
        mock_method.reset_mock()
        # Raise an exception when this method is called.
        mock_method.side_effect = Exception('I am a super exception!')
        with captured_output() as (out, err):
            val = send_email(*args, **kwargs)
        # Failure path: returns False and prints the exception message.
        assert val is False
        assert 'I am a super exception!' == out.getvalue().strip()
        assert not mock_client.send_email.called

    def test_send_email_ses(self):
        """ Test send email sends out emails. """
        response = send_email_ses(self.ses_client,
                                  '*****@*****.**',
                                  "Please send this to test.",
                                  "This is the sample email body.\n")
        # The stubbed canned response is passed straight through.
        assert response == self.ses_response

    @pytest.mark.xfail
    def test_send_email_ses_with_bad_email(self):
        # Test bad email address fails gracefully.
        response = send_email_ses(self.ses_client,
                                  'notanemail',
                                  'Please send this to someone.',
                                  'This is the sample email body.\n')
        assert response != self.ses_response
        assert response is None

    @mock.patch('app.ses')
    def test_send_email_ses_uses_sender_env(self, mock_client):
        # setting DEFAULT_FROM_EMAIL env should use that as sender.
        recipient = 'notanemail'
        subject = 'Please send this to someone.'
        body = 'This is the sample email body.\n'
        send_email_ses(mock_client, recipient, subject, body)
        mock_client.send_email.assert_called()
        mock_client.send_email.assert_called_with(
            Source=os.getenv('DEFAULT_FROM_EMAIL'),
            Destination=prepare_destination([recipient]),
            Message=prepare_email(subject, body))

    @mock.patch('app.ses')
    @mock.patch.dict(os.environ, {'DEFAULT_FROM_EMAIL': '*****@*****.**'})
    def test_send_email_ses_new_sender(self, mock_client):
        # Test send_email_ses changes sender when environment variable
        # 'DEFAULT_FROM_EMAIL' is changed.
        recipient = 'notanemail'
        subject = 'Please send this to someone.'
        body = 'This is the sample email body.\n'
        send_email_ses(mock_client, recipient, subject, body)
        mock_client.send_email.assert_called()
        mock_client.send_email.assert_called_with(
            Source=os.getenv('DEFAULT_FROM_EMAIL'),
            Destination=prepare_destination([recipient]),
            Message=prepare_email(subject, body))

    @mock.patch.dict(os.environ, {"ADMIN_EMAIL": "*****@*****.**"})
    @mock.patch('app.ses')
    def test_notify_admin(self, mock_client):
        # An admin email is sent when an error traceback is reported.
        error_trace = 'Error Traceback for emails.'
        notify_admin(error_trace)
        mock_client.send_email.assert_called()

    @pytest.mark.xfail
    @mock.patch.dict(os.environ, {
        "ADMIN_EMAIL": '*****@*****.**',
        "ALERT_ADMIN": "No"
    })
    @mock.patch('app.ses')
    def test_notify_admin_silenced(self, mock_client):
        # If alert admin is set to "No", do not send out emails on errors and
        # failures.
        error_trace = 'Error traceback for emails'
        notify_admin(error_trace)
        assert not mock_client.send_email.called
def codeartifact_stubber():
    """Yield an activated Stubber wrapping the module-level ``codeartifact`` client."""
    stub = Stubber(codeartifact)
    stub.activate()
    yield stub
    stub.deactivate()
def stubber(client):
    """Yield an activated Stubber for *client*, deactivating it afterwards."""
    stub = Stubber(client)
    stub.activate()
    yield stub
    stub.deactivate()
def iam_stubber():
    """Yield an activated Stubber wrapping the module-level ``iam`` client."""
    stub = Stubber(iam)
    stub.activate()
    yield stub
    stub.deactivate()
def globalaccelerator_stubber():
    """Yield an activated Stubber wrapping the module-level ``globalaccelerator`` client."""
    stub = Stubber(globalaccelerator)
    stub.activate()
    yield stub
    stub.deactivate()
class PricingClientStubber:
    """Context manager that wraps a botocore Stubber around a Pricing client.

    Fixes over the previous version:
    - ``__enter__`` now returns ``self`` so ``with PricingClientStubber(c) as s``
      binds the stubber object instead of ``None`` (previously the ``as``
      target was useless).
    - ``__exit__`` parameter renamed so it no longer shadows the ``type``
      builtin.
    """

    def __init__(self, client):
        self.client = client
        self.stubber = Stubber(client)

    def __enter__(self):
        self.stubber.activate()
        # Return self so the `with ... as` target is usable.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.stubber.deactivate()

    def get_client(self):
        """Return the wrapped pricing client."""
        return self.client

    def stub_describe_services_error(self, service_error_code='',
                                     service_message='',
                                     http_status_code=400):
        """Queue a client error for the next describe_services call."""
        self.stubber.add_client_error('describe_services', service_error_code,
                                      service_message, http_status_code)

    def stub_describe_services_response(self, service_codes):
        """Queue a describe_services response listing *service_codes*."""
        expected_params = {'FormatVersion': 'aws_v1'}
        describe_services_response = {
            'Services': [{'ServiceCode': code} for code in service_codes]
        }
        self.stubber.add_response('describe_services',
                                  describe_services_response,
                                  expected_params)

    def stub_get_products_response(self, rate_data):
        """Queue one get_products response per rate datum.

        Each datum is a dict with keys ``service_code``, ``term_code``,
        ``rate_code``, ``unit`` and ``price``. An empty *rate_data* queues a
        single response with an empty price list.
        """
        if not rate_data:
            self.stubber.add_response('get_products', {'PriceList': []})
            return
        for rate_datum in rate_data:
            service_code = rate_datum['service_code']
            term_code = rate_datum['term_code']
            rate_code = rate_datum['rate_code']
            unit = rate_datum['unit']
            price = rate_datum['price']
            expected_params = {
                'FormatVersion': 'aws_v1',
                'ServiceCode': service_code,
                'Filters': [{
                    'Type': 'TERM_MATCH',
                    'Field': 'RateCode',
                    'Value': rate_code
                }],
                'MaxResults': 1,
            }
            # PriceList entries are JSON documents serialized as strings;
            # doubled braces are literal braces in str.format.
            get_products_response = {
                'PriceList': [
                    '''
                    {{
                        "terms": {{
                            "OnDemand": {{
                                "{0}": {{
                                    "priceDimensions": {{
                                        "{1}": {{
                                            "rateCode": "{1}",
                                            "unit": "{2}",
                                            "pricePerUnit": {{"USD": "{3}"}}
                                        }}
                                    }}
                                }}
                            }}
                        }}
                    }}
                    '''.format(term_code, rate_code, unit, price)
                ]
            }
            self.stubber.add_response('get_products', get_products_response,
                                      expected_params)

    def stub_get_products_error(self, service_error_code='',
                                service_message='', http_status_code=400):
        """Queue a client error for the next get_products call."""
        self.stubber.add_client_error('get_products', service_error_code,
                                      service_message, http_status_code)
def imagebuilder_stubber():
    """Yield an activated Stubber wrapping the module-level ``imagebuilder`` client."""
    stub = Stubber(imagebuilder)
    stub.activate()
    yield stub
    stub.deactivate()
class FakeAthenaResource(AthenaResource):
    """AthenaResource fake that stubs the Athena API and serves query results
    from a local S3 bucket instead of a real Athena workgroup."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # No sleeping between polls in tests.
        self.polling_interval = 0
        self.stubber = Stubber(self.client)

        s3 = boto3.resource("s3", region_name="us-east-1")
        self.bucket = s3.Bucket("fake-athena-results-bucket")
        self.bucket.create()

    def execute_query(self, query, fetch_results=False, expected_states=None,
                      expected_results=None):  # pylint: disable=arguments-differ
        """Fake for execute_query; stubs the expected Athena endpoints, polls
        against the provided expected query execution states, and returns the
        provided results as a list of tuples.

        Args:
            query (str): The query to execute.
            fetch_results (Optional[bool]): Whether to return the results of
                executing the query. Defaults to False, in which case the query
                will be executed without retrieving the results.
            expected_states (list[str]): The expected query execution states.
                Defaults to successfully passing through QUEUED, RUNNING, and
                SUCCEEDED.
            expected_results ([List[Tuple[Any, ...]]]): The expected results.
                All non-None items are cast to strings. Defaults to [(1,)].

        Returns:
            Optional[List[Tuple[Optional[str], ...]]]: The expected_results
            when fetch_results is set. Otherwise, return None. All items in
            the tuple are represented as strings except for empty columns
            which are represented as None.
        """
        if not expected_states:
            expected_states = ["QUEUED", "RUNNING", "SUCCEEDED"]
        if not expected_results:
            expected_results = [("1", )]

        self.stubber.activate()

        execution_id = str(uuid.uuid4())
        self._stub_start_query_execution(execution_id, query)
        self._stub_get_query_execution(execution_id, expected_states)
        # Results are only fetched when the final state is success.
        if expected_states[-1] == "SUCCEEDED" and fetch_results:
            self._fake_results(execution_id, expected_results)

        result = super().execute_query(query, fetch_results=fetch_results)

        self.stubber.deactivate()
        self.stubber.assert_no_pending_responses()

        return result

    def _stub_start_query_execution(self, execution_id, query):
        """Queue a start_query_execution response returning *execution_id*."""
        self.stubber.add_response(
            method="start_query_execution",
            service_response={"QueryExecutionId": execution_id},
            expected_params={
                "QueryString": query,
                "WorkGroup": self.workgroup
            },
        )

    def _stub_get_query_execution(self, execution_id, states):
        """Queue one get_query_execution response per expected state."""
        for state in states:
            self.stubber.add_response(
                method="get_query_execution",
                service_response={
                    "QueryExecution": {
                        "Status": {
                            "State": state,
                            "StateChangeReason": "state change reason"
                        },
                    }
                },
                expected_params={"QueryExecutionId": execution_id},
            )

    def _fake_results(self, execution_id, expected_results):
        """Write *expected_results* as a CSV object and stub the final
        get_query_execution call pointing at it."""
        with io.StringIO() as results:
            writer = csv.writer(results)

            # Athena adds a header row to its CSV output
            writer.writerow([])

            for row in expected_results:
                # Athena writes all non-null columns as strings in its CSV
                # output. Filter on `is not None` so falsy-but-valid values
                # such as 0 or "" are kept (the old `if item` dropped them).
                stringified = tuple(
                    str(item) for item in row if item is not None)
                writer.writerow(stringified)

            results.seek(0)
            self.bucket.Object(execution_id + ".csv").put(Body=results.read())

        self.stubber.add_response(
            method="get_query_execution",
            service_response={
                "QueryExecution": {
                    "ResultConfiguration": {
                        "OutputLocation": os.path.join(
                            "s3://", self.bucket.name, execution_id + ".csv")
                    }
                }
            },
            expected_params={"QueryExecutionId": execution_id},
        )
class TestADICommandS3(TransactionTestCase):
    """Exercise the *_from_file management commands with stats sourced from a
    stubbed S3 client instead of local files."""

    fixtures = ('base/addon_3615', 'base/featured', 'addons/persona',
                'base/appversion.json')
    # Date the hive fixtures were generated for.
    date = '2014-07-10'
    stats_source = 's3'

    def add_response(self, stat):
        """Queue one stubbed get_object response serving the hive fixture
        for *stat* under the expected S3 key layout."""
        stat_path = os.path.join(hive_folder, 'src', '%s.hive' % stat)
        data = get_stats_data(stat_path)
        response = {
            'Body': data,
        }
        expected_params = {
            'Bucket': 'test-bucket',
            'Key': os.path.join('amo_stats', stat, self.date, '000000_0'),
            # Range varies with chunked reads; only its presence is checked.
            'Range': ANY
        }
        self.stubber.add_response('get_object', response, expected_params)

    def setUp(self):
        self.client = boto3.client('s3')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_update_counts_from_s3(self, mock_boto3):
        stats = ['app', 'locale', 'os', 'status', 'version']
        # Each stat file is read twice by the command, so queue two rounds.
        for x in range(2):
            for stat in stats:
                self.add_response('update_counts_by_%s' % stat)
        mock_boto3.client.return_value = self.client

        management.call_command('update_counts_from_file', date=self.date,
                                stats_source=self.stats_source)

        assert UpdateCount.objects.all().count() == 1
        update_count = UpdateCount.objects.last()
        # should be identical to `statuses.userEnabled`
        assert update_count.count == 4
        assert update_count.date == date(2014, 7, 10)
        assert update_count.versions == {u'3.8': 2, u'3.7': 3}
        assert update_count.statuses == {u'userDisabled': 1,
                                         u'userEnabled': 4}
        application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        assert update_count.applications[application] == {u'3.6': 18}
        assert update_count.oses == {u'WINNT': 5}
        assert update_count.locales == {u'en-us': 1, u'en-US': 4}

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_download_counts_from_s3(self, mock_boto3):
        for x in range(2):
            self.add_response('download_counts')
        mock_boto3.client.return_value = self.client

        management.call_command('download_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)

        assert DownloadCount.objects.all().count() == 2
        download_count = DownloadCount.objects.get(addon_id=3615)
        assert download_count.count == 3
        assert download_count.date == date(2014, 7, 10)
        assert download_count.sources == {u'search': 2, u'cb-dl-bob': 1}

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_theme_update_counts_from_s3(self, mock_boto3):
        for x in range(2):
            self.add_response('theme_update_counts')
        mock_boto3.client.return_value = self.client

        management.call_command('theme_update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)

        assert ThemeUpdateCount.objects.all().count() == 1
        # Persona 813 has addon id 15663: we need the count to be the sum of
        # the "old" request on the persona_id 813 (only the one with the source
        # "gp") and the "new" request on the addon_id 15663.
        tuc2 = ThemeUpdateCount.objects.get(addon_id=15663)
        assert tuc2.count == 15

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_lwt_stats_go_to_migrated_static_theme(self, mock_boto3):
        # Stats for a deleted lightweight theme must be credited to the
        # static theme it migrated to.
        lwt = Addon.objects.get(id=15663)
        lwt.delete()
        static_theme = addon_factory(type=amo.ADDON_STATICTHEME)
        MigratedLWT.objects.create(lightweight_theme=lwt,
                                   static_theme=static_theme)
        for x in range(2):
            self.add_response('theme_update_counts')
        mock_boto3.client.return_value = self.client

        management.call_command('theme_update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)

        assert ThemeUpdateCount.objects.all().count() == 0
        assert UpdateCount.objects.all().count() == 1
        assert UpdateCount.objects.get(addon_id=static_theme.id).count == 15

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_lwt_stats_go_to_migrated_with_stats_already(self, mock_boto3):
        # If the static theme already has an UpdateCount for that day, the
        # migrated stats are added to it (123 + 15 = 138).
        lwt = Addon.objects.get(id=15663)
        lwt.delete()
        static_theme = addon_factory(type=amo.ADDON_STATICTHEME)
        MigratedLWT.objects.create(lightweight_theme=lwt,
                                   static_theme=static_theme)
        UpdateCount.objects.create(addon=static_theme, count=123,
                                   date=date(2014, 7, 10))
        for x in range(2):
            self.add_response('theme_update_counts')
        mock_boto3.client.return_value = self.client

        management.call_command('theme_update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)

        assert ThemeUpdateCount.objects.all().count() == 0
        assert UpdateCount.objects.all().count() == 1
        assert UpdateCount.objects.get(addon_id=static_theme.id).count == 138
def test_query_cloudwatch_handles_empty_return_values(self, mock_datetime):
    """Metrics whose 'Values' list comes back empty are reported as the
    sentinel string 'no value returned' rather than a number."""
    # Pin utcnow so StartTime/EndTime in the stubbed request are deterministic.
    endtime = datetime.datetime(2018, 10, 8, 23, 12, 48, 663351)
    starttime = datetime.datetime(2018, 10, 7, 23, 12, 48, 663351)
    mock_datetime.utcnow = Mock(return_value=endtime)
    # NOTE(review): the f-string prefixes below have no placeholders and are
    # redundant — plain string literals would do.
    expected_deadletter_query = [{
        'Id': 'visible_messages',
        'MetricStat': {
            'Metric': {
                'Namespace': 'AWS/SQS',
                'MetricName': 'ApproximateNumberOfMessagesVisible',
                'Dimensions': [{
                    'Name': 'QueueName',
                    'Value': f'dcp-upload-pre-csum-deadletter-queue-test'
                }]
            },
            'Period': 90000,
            'Stat': 'Average'
        }
    }, {
        'Id': 'received_messages',
        'MetricStat': {
            'Metric': {
                'Namespace': 'AWS/SQS',
                'MetricName': 'NumberOfMessagesReceived',
                'Dimensions': [{
                    'Name': 'QueueName',
                    'Value': f'dcp-upload-pre-csum-deadletter-queue-test'
                }]
            },
            'Period': 90000,
            'Stat': 'Average'
        }
    }]
    # CloudWatch response with timestamps but EMPTY Values lists — the case
    # under test.
    mock_deadletter_metric_data = {
        'MetricDataResults': [{
            'Id': 'visible_messages',
            'Label': 'ApproximateNumberOfMessagesVisible',
            'Timestamps': [datetime.datetime(2018, 10, 4, 23, 32,
                                             tzinfo=tzutc())],
            'Values': [],
            'StatusCode': 'Complete'
        }, {
            'Id': 'received_messages',
            'Label': 'NumberOfMessagesReceived',
            'Timestamps': [datetime.datetime(2018, 10, 4, 23, 32,
                                             tzinfo=tzutc())],
            'Values': [],
            'StatusCode': 'Complete'
        }],
        'ResponseMetadata': {
            'RequestId': '1506bb49-c8f7-11e8-b5b9-5135a8265cdd',
            'HTTPStatusCode': 200,
            'HTTPHeaders': {
                'x-amzn-requestid': '1506bb49-c8f7-11e8-b5b9-5135a8265cdd',
                'content-type': 'text/xml',
                'content-length': '945',
                'date': 'Fri, 05 Oct 2018 23:33:36 GMT'
            },
            'RetryAttempts': 0
        }
    }
    # Import here so the stub wraps the exact client instance the lambda uses.
    from upload.lambdas.health_check.health_check import client
    stubber = Stubber(client)
    stubber.add_response(
        'get_metric_data',
        mock_deadletter_metric_data,
        {
            'MetricDataQueries': expected_deadletter_query,
            'StartTime': starttime,
            'EndTime': endtime
        })
    stubber.activate()
    deadletter_dict = self.health_check._query_cloudwatch_metrics_for_past_day(
        expected_deadletter_query)
    assert deadletter_dict == {
        'visible_messages': 'no value returned',
        'received_messages': 'no value returned'
    }
    stubber.deactivate()
class TestIndex(TestCase):
    """Tests for the S3-event-driven search indexer: event handling, content
    extraction per file type, and the ElasticSearch bulk request it emits."""

    def setUp(self):
        # Mock outgoing HTTP (ElasticSearch); not all mocks fire in every test.
        self.requests_mock = responses.RequestsMock(
            assert_all_requests_are_fired=False)
        self.requests_mock.start()

        # Create a dummy S3 client that (hopefully) can't do anything.
        self.s3_client = boto3.client(
            's3', config=Config(signature_version=UNSIGNED))

        self.s3_client_patcher = patch(
            __name__ + '.index.make_s3_client', return_value=self.s3_client)
        self.s3_client_patcher.start()

        self.s3_stubber = Stubber(self.s3_client)
        self.s3_stubber.activate()

        self.env_patcher = patch.dict(os.environ, {
            'ES_HOST': 'example.com',
            'AWS_ACCESS_KEY_ID': 'test_key',
            'AWS_SECRET_ACCESS_KEY': 'test_secret',
            'AWS_DEFAULT_REGION': 'ng-north-1',
        })
        self.env_patcher.start()

    def tearDown(self):
        self.env_patcher.stop()
        # Every queued stub must have been consumed by the test.
        self.s3_stubber.assert_no_pending_responses()
        self.s3_stubber.deactivate()
        self.s3_client_patcher.stop()
        self.requests_mock.stop()

    def test_delete_event(self):
        """
        Check that the indexer doesn't blow up on delete events.
        """
        # don't mock head or get; they should never be called for deleted objects
        self._test_index_event("ObjectRemoved:Delete",
                               mock_head=False, mock_object=False)

    def test_delete_marker_event(self):
        """
        Common event in versioned; buckets, should no-op
        """
        # don't mock head or get; this event should never call them
        self._test_index_event(
            "ObjectRemoved:DeleteMarkerCreated",
            # we should never call Elastic in this case
            mock_elastic=False,
            mock_head=False,
            mock_object=False)

    def test_test_event(self):
        """
        Check that the indexer doesn't do anything when it gets S3 test
        notification.
        """
        event = {
            "Records": [{
                "body": json.dumps({
                    "Message": json.dumps({"Event": "s3:TestEvent"})
                })
            }]
        }
        index.handler(event, None)

    def test_index_file(self):
        """test indexing a single file"""
        self._test_index_event("ObjectCreated:Put")

    @patch(__name__ + '.index.get_contents')
    def test_index_exception(self, get_mock):
        """test indexing a single file that throws an exception"""
        class ContentException(Exception):
            pass
        get_mock.side_effect = ContentException("Unable to get contents")
        with pytest.raises(ContentException):
            # get_mock already mocks get_object, so don't mock it in _test_index_event
            self._test_index_event("ObjectCreated:Put", mock_object=False)

    def _test_index_event(self, event_name, mock_elastic=True,
                          mock_head=True, mock_object=True):
        """
        Reusable helper function to test indexing a single text file.
        """
        # SQS-wrapped SNS-wrapped S3 notification, as the handler receives it.
        # Note the URL-encoded key "hello+world.txt" vs the decoded
        # "hello world.txt" expected in the S3 API params below.
        event = {
            "Records": [{
                "body": json.dumps({
                    "Message": json.dumps({
                        "Records": [{
                            "eventName": event_name,
                            "s3": {
                                "bucket": {
                                    "name": "test-bucket"
                                },
                                "object": {
                                    "key": "hello+world.txt",
                                    "eTag": "123456"
                                }
                            }
                        }]
                    })
                })
            }]
        }
        now = index.now_like_boto3()
        metadata = {
            'helium': json.dumps({
                'comment': 'blah',
                'user_meta': {
                    'foo': 'bar'
                },
                'x': 'y'
            })
        }
        if mock_head:
            self.s3_stubber.add_response(
                method='head_object',
                service_response={
                    'Metadata': metadata,
                    'ContentLength': 100,
                    'LastModified': now,
                },
                expected_params={
                    'Bucket': 'test-bucket',
                    'Key': 'hello world.txt',
                    'IfMatch': '123456',
                })
        if mock_object:
            self.s3_stubber.add_response(
                method='get_object',
                service_response={
                    'Metadata': metadata,
                    'ContentLength': 100,
                    'LastModified': now,
                    'Body': BytesIO(b'Hello World!'),
                },
                expected_params={
                    'Bucket': 'test-bucket',
                    'Key': 'hello world.txt',
                    'IfMatch': '123456',
                    'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
                })

        def es_callback(request):
            # Validate the bulk request body the indexer sends to ES.
            response_key = ('delete' if event_name == index.OBJECT_DELETE
                            else 'index')
            actions = [json.loads(line) for line in request.body.splitlines()]
            expected = [{
                response_key: {
                    '_index': 'test-bucket',
                    '_type': '_doc',
                    '_id': 'hello world.txt:None'
                }
            }, {
                'comment': 'blah',
                'content': '' if not mock_object else 'Hello World!',
                'etag': '123456',
                'event': event_name,
                'ext': '.txt',
                'key': 'hello world.txt',
                'last_modified': now.isoformat(),
                'meta_text': 'blah {"x": "y"} {"foo": "bar"}',
                'size': 100,
                'target': '',
                'updated': ANY,
                'version_id': None
            }]
            if response_key == 'delete':
                # delete events do not include request body
                expected.pop()
            assert actions == expected, "Unexpected request to ElasticSearch"

            response = {'items': [{response_key: {'status': 200}}]}
            return (200, {}, json.dumps(response))

        if mock_elastic:
            self.requests_mock.add_callback(
                responses.POST,
                'https://example.com:443/_bulk',
                callback=es_callback,
                content_type='application/json')

        index.handler(event, MockContext())

    def test_unsupported_contents(self):
        # Unsupported extensions yield empty contents, plain or gzipped.
        contents = index.get_contents('test-bucket', 'foo.exe', '.exe',
                                      etag='etag', version_id=None,
                                      s3_client=self.s3_client, size=123)
        assert contents == ""
        contents = index.get_contents('test-bucket', 'foo.exe.gz', '.exe.gz',
                                      etag='etag', version_id=None,
                                      s3_client=self.s3_client, size=123)
        assert contents == ""

    def test_get_plain_text(self):
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(b'Hello World!\nThere is more to know.'),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.txt',
                'IfMatch': 'etag',
                'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
            })
        contents = index.get_plain_text('test-bucket', 'foo.txt',
                                        compression=None, etag='etag',
                                        version_id=None,
                                        s3_client=self.s3_client, size=123)
        assert contents == "Hello World!\nThere is more to know."

    def test_text_contents(self):
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(b'Hello World!'),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.txt',
                'IfMatch': 'etag',
                'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
            })
        contents = index.get_contents('test-bucket', 'foo.txt', '.txt',
                                      etag='etag', version_id=None,
                                      s3_client=self.s3_client, size=123)
        assert contents == "Hello World!"

    def test_gzipped_text_contents(self):
        # .gz payload must be transparently decompressed.
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(compress(b'Hello World!')),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.txt.gz',
                'IfMatch': 'etag',
                'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
            })
        contents = index.get_contents('test-bucket', 'foo.txt.gz', '.txt.gz',
                                      etag='etag', version_id=None,
                                      s3_client=self.s3_client, size=123)
        assert contents == "Hello World!"

    def test_notebook_contents(self):
        notebook = (BASE_DIR / 'normal.ipynb').read_bytes()
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(notebook),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.ipynb',
                'IfMatch': 'etag',
            })
        contents = index.get_contents('test-bucket', 'foo.ipynb', '.ipynb',
                                      etag='etag', version_id=None,
                                      s3_client=self.s3_client, size=123)
        assert "model.fit" in contents

    def test_gzipped_notebook_contents(self):
        notebook = compress((BASE_DIR / 'normal.ipynb').read_bytes())
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(notebook),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.ipynb.gz',
                'IfMatch': 'etag',
            })
        contents = index.get_contents('test-bucket', 'foo.ipynb.gz',
                                      '.ipynb.gz', etag='etag',
                                      version_id=None,
                                      s3_client=self.s3_client, size=123)
        assert "Model results visualization" in contents

    def test_parquet_contents(self):
        parquet = (BASE_DIR /
                   'amazon-reviews-1000.snappy.parquet').read_bytes()
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(parquet),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.parquet',
                'IfMatch': 'etag',
            })
        contents = index.get_contents('test-bucket', 'foo.parquet',
                                      '.parquet', etag='etag',
                                      version_id=None,
                                      s3_client=self.s3_client, size=123)
        size = len(contents.encode('utf-8', 'ignore'))
        assert size <= index.ELASTIC_LIMIT_BYTES
        # spot check for contents
        assert "This is not even worth the money." in contents
        assert "As for results; I felt relief almost immediately." in contents
        assert "R2LO11IPLTDQDX" in contents

    # see PRE conditions in conftest.py
    @pytest.mark.extended
    def test_parquet_extended(self):
        directory = (BASE_DIR / 'amazon-reviews-pds')
        files = directory.glob('**/*.parquet')
        for f in files:
            print(f"Testing {f}")
            parquet = f.read_bytes()
            # NOTE(review): this loop only queues stubs and never consumes
            # them (no get_contents call visible) — looks truncated; verify
            # against the original source.
            self.s3_stubber.add_response(
                method='get_object',
                service_response={
                    'Metadata': {},
                    'ContentLength': 123,
                    'Body': BytesIO(parquet),
                },
                expected_params={
                    'Bucket': 'test-bucket',
                    'Key': 'foo.parquet',
                    'IfMatch': 'etag',
                })
def ec2_stubber():
    """Yield an activated Stubber wrapping the module-level ``ec2`` client."""
    stub = Stubber(ec2)
    stub.activate()
    yield stub
    stub.deactivate()
class FakeECSClient(ECSClient):
    """ECSClient fake that stubs the ECS endpoints so task lifecycle methods
    can be exercised without AWS."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # No sleeping between polls in tests.
        self.polling_interval = 0
        self.stubber = Stubber(self.client)

    def start_task(self, task_definition):
        """Fake for start_task; stubs run_task and delegates to the real
        implementation, asserting all stubs were consumed."""
        self.stubber.activate()
        self._stub_start_task(task_definition)
        result = super().start_task(task_definition)
        self.stubber.deactivate()
        self.stubber.assert_no_pending_responses()
        return result

    def run_task(
        self,
        task_definition,
        expected_statuses=None,
        expected_stop_code="EssentialContainerExited",
        **kwargs,
    ):  # pylint: disable=arguments-differ
        """Fake for run_task; stubs the expected ECS endpoints and polls
        against the provided expected container statuses until all containers
        are STOPPED.

        Args:
            task_definition (str): The family and revision (family:revision)
                or full ARN of the task definition to run. If a revision is
                not specified, the latest ACTIVE revision is used.
            expected_statuses (list[str]): The expected container statuses:
                https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-lifecycle.html
                Defaults to successfully passing through PROVISIONING,
                RUNNING, and STOPPED.
            expected_stop_code (str): The expected Stopped Task Error Code:
                https://docs.aws.amazon.com/AmazonECS/latest/userguide/stopped-task-error-codes.html
                Defaults to EssentialContainerExited; the fake container
                successfully runs to completion before reaching the STOPPED
                status.

        Returns:
            None
        """
        if not expected_statuses:
            expected_statuses = ["PROVISIONING", "RUNNING", "STOPPED"]

        self.stubber.activate()
        task_arn = self._stub_start_task(task_definition)
        self._stub_describe_tasks(expected_statuses, expected_stop_code,
                                  task_arn)
        super().run_task(task_definition, **kwargs)
        self.stubber.deactivate()
        self.stubber.assert_no_pending_responses()

    def stop_task(self, task_arn, expected_statuses=None):  # pylint: disable=arguments-differ
        """Fake for stop; stubs the expected ECS endpoints and polls against
        the provided expected container statuses until all containers are
        STOPPED.

        Args:
            task_arn (str): The Task ARN.
            expected_statuses (list[str]): The expected container statuses:
                https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-lifecycle.html
                Defaults to initially RUNNING and then STOPPED.

        Returns:
            bool: True if the task stops; False if the task was already
            stopped.
        """
        if not expected_statuses:
            expected_statuses = ["RUNNING", "STOPPED"]

        self.stubber.activate()
        # The first expected status is returned by stop_task itself; the
        # remaining ones are served by subsequent describe_tasks polls.
        self.stubber.add_response(
            method="stop_task",
            service_response={
                "task": {
                    "lastStatus": expected_statuses.pop(0)
                }
            },
            expected_params={
                "task": task_arn,
                "cluster": self.cluster
            },
        )
        for status in expected_statuses:
            self.stubber.add_response(
                method="describe_tasks",
                service_response={"tasks": [{
                    "lastStatus": status
                }]},
                expected_params={
                    "cluster": self.cluster,
                    "tasks": [task_arn]
                },
            )
        result = super().stop_task(task_arn)
        self.stubber.deactivate()
        self.stubber.assert_no_pending_responses()
        return result

    def _stub_start_task(self, task_definition):
        """Queue a run_task response with a random task ARN; return the ARN."""
        task = f"arn:aws:ecs:us-east-2:0123456789:task/{uuid.uuid4()}"
        self.stubber.add_response(
            method="run_task",
            service_response={"tasks": [{
                "taskArn": task
            }]},
            expected_params={
                "count": 1,
                "launchType": "FARGATE",
                "taskDefinition": task_definition,
                "cluster": self.cluster,
                "networkConfiguration": {
                    "awsvpcConfiguration": {
                        "subnets": self.subnets,
                        "assignPublicIp": "ENABLED"
                    }
                },
            },
        )
        return task

    def _stub_describe_tasks(self, expected_statuses, expected_stop_code,
                             task_arn):
        """Queue one describe_tasks response per expected status."""
        for status in expected_statuses:
            self.stubber.add_response(
                method="describe_tasks",
                service_response={
                    "tasks": [{
                        "lastStatus": status,
                        "stopCode": expected_stop_code
                    }]
                },
                expected_params={
                    "cluster": self.cluster,
                    "tasks": [task_arn]
                },
            )
def cloudfront_stubber():
    """Yield an activated Stubber for the shared CloudFront client.

    The stubber is deactivated again after the consuming test finishes.
    """
    stub = Stubber(cloudfront)
    stub.activate()
    yield stub
    stub.deactivate()
def test_anon_access(self):
    """
    Test anonymous call w/ ALLOW_ANONYMOUS_ACCESS
    """
    bucket = "bucket"
    key = ".quilt/packages/manifest_hash"
    params = dict(
        bucket=bucket,
        manifest=key,
    )
    # NOTE(review): expected_args is never handed to a stubber/mock below
    # (select_object_content is patched with side_effect instead) — it
    # documents the S3 Select call shape but is not asserted; confirm
    # whether it should be wired into an assertion.
    expected_args = {
        'Bucket': bucket,
        'Key': key,
        'Expression': "SELECT SUBSTRING(s.logical_key, 1) AS logical_key FROM s3object s",
        'ExpressionType': 'SQL',
        'InputSerialization': {
            'CompressionType': 'NONE',
            'JSON': {
                'Type': 'LINES'
            }
        },
        'OutputSerialization': {
            'JSON': {
                'RecordDelimiter': '\n'
            }
        },
    }
    # Force anonymous-access mode plus dummy credentials for the handler.
    env_patcher = patch.dict(
        os.environ, {
            'AWS_ACCESS_KEY_ID': 'test_key',
            'AWS_SECRET_ACCESS_KEY': 'test_secret',
            'ALLOW_ANONYMOUS_ACCESS': '1'
        })
    env_patcher.start()
    mock_s3 = boto3.client('s3')
    # The two canned S3 Select responses are consumed in order: first the
    # package contents, then the package metadata.
    client_patch = patch.object(
        mock_s3, 'select_object_content',
        side_effect=[self.s3response, self.s3response_meta])
    client_patch.start()
    # Canned head_object response for the manifest existence check.
    response = {
        'ETag': '12345',
        'VersionId': '1.0',
        'ContentLength': 123,
    }
    expected_params = {
        'Bucket': bucket,
        'Key': key,
    }
    s3_stubber = Stubber(mock_s3)
    s3_stubber.activate()
    s3_stubber.add_response('head_object', response, expected_params)
    # Any client the handler creates via boto3.Session.client is replaced
    # by our stubbed/patched client.
    with patch('boto3.Session.client', return_value=mock_s3):
        response = lambda_handler(self._make_event(params), None)
    print(response)
    assert response['statusCode'] == 200
    folder = json.loads(read_body(response))['contents']
    print(folder)
    assert len(folder['prefixes']) == 1
    assert len(folder['objects']) == 1
    assert folder['objects'][0]['logical_key'] == 'foo.csv'
    assert folder['prefixes'][0]['logical_key'] == 'bar/'
    # Manual cleanup in reverse order of setup.
    s3_stubber.deactivate()
    client_patch.stop()
    env_patcher.stop()
def ddb_stubber():
    """Yield an activated Stubber wrapping the DynamoDB table's client.

    Deactivates the stubber once the consuming test is done.
    """
    # noinspection PyProtectedMember
    stub = Stubber(interface._model.get_table().meta.client)
    stub.activate()
    yield stub
    stub.deactivate()
def cloudhsm_stubber():
    """Yield an activated Stubber for the shared CloudHSM client.

    The stubber is deactivated again after the consuming test finishes.
    """
    stub = Stubber(cloudhsm)
    stub.activate()
    yield stub
    stub.deactivate()
def kms_stubber():
    """Yield an activated Stubber for the shared KMS client.

    The stubber is deactivated again after the consuming test finishes.
    """
    stub = Stubber(kms)
    stub.activate()
    yield stub
    stub.deactivate()
class QuiltTestCase(TestCase):
    """
    Base class for unittests.

    - Creates a test client
    - Creates and drops a test database
    - Mocks requests
    """
    def setUp(self):
        # avoid using a ton of CPU for hashing passwords in testing
        pwd_context.update(pbkdf2_sha512__default_rounds=1)

        # Intercept all outbound HTTP made via `requests`.
        self.requests_mock = responses.RequestsMock(assert_all_requests_are_fired=False)
        self.requests_mock.start()

        # Replace the Mixpanel analytics client with a no-op consumer.
        mock_mp = Mixpanel('dummy_token', MockMixpanelConsumer())
        self.mp_patcher = mock.patch('quilt_server.views.mp', mock_mp)
        self.mp_patcher.start()

        # Disable the payments integration for the duration of the test.
        self.payments_patcher = mock.patch('quilt_server.views.HAVE_PAYMENTS', False)
        self.payments_patcher.start()

        # Stub the shared S3 client; responses are queued per-test
        # (see _mock_object below).
        self.s3_stubber = Stubber(s3_client)
        self.s3_stubber.activate()

        # Random database name so parallel test runs don't collide.
        random_name = ''.join(random.sample(string.ascii_lowercase, 10))
        self.db_url = 'postgresql://postgres@localhost/test_%s' % random_name

        def mock_verify(username_or_token):
            # Accept a plain username as a token; fall back to real
            # token verification for anything that isn't a known user.
            user = User.query.filter_by(name=username_or_token).one_or_none()
            if user:
                return user
            else:
                return verify_token_string(username_or_token)

        # instead of checking token, just use username
        self.token_verify_mock = mock.patch('quilt_server.views.verify_token_string',
                                            mock_verify)
        self.token_verify_mock.start()

        # disable 8 character restriction for passwords
        self.validate_password_mock = mock.patch('quilt_server.auth.validate_password',
                                                 lambda x: True)
        self.validate_password_mock.start()

        self.app = quilt_server.app.test_client()
        quilt_server.app.config['TESTING'] = True
        quilt_server.app.config['SQLALCHEMY_ECHO'] = False
        quilt_server.app.config['SQLALCHEMY_DATABASE_URI'] = self.db_url

        sqlalchemy_utils.create_database(self.db_url)
        quilt_server.db.create_all()

        self.email_suffix = '@example.com'

        # Fixture accounts used throughout the test suite.
        self.TEST_USER = '******'
        self.TEST_USER_EMAIL = '*****@*****.**'
        self.TEST_USER_PASSWORD = '******'
        self.OTHER_USER = '******'
        self.OTHER_USER_EMAIL = '*****@*****.**'
        self.OTHER_USER_PASSWORD = '******'
        self.TEST_ADMIN = 'admin'
        self.TEST_ADMIN_EMAIL = '*****@*****.**'
        self.TEST_ADMIN_PASSWORD = '******'
        _create_user(self.TEST_USER, email=self.TEST_USER_EMAIL,
                     password=self.TEST_USER_PASSWORD, requires_activation=False)
        _create_user(self.TEST_ADMIN, email=self.TEST_ADMIN_EMAIL,
                     password=self.TEST_ADMIN_PASSWORD, is_admin=True,
                     requires_activation=False)
        _create_user('bad_user', email='*****@*****.**',
                     requires_activation=False)
        _create_user(self.OTHER_USER, email=self.OTHER_USER_EMAIL,
                     password=self.OTHER_USER_PASSWORD, requires_activation=False)
        _create_user('user1', email='*****@*****.**', password='******',
                     requires_activation=False)
        _create_user('user2', email='*****@*****.**', password='******',
                     requires_activation=False)
        db.session.commit()

    def tearDown(self):
        # restore PW hash rounds
        pwd_context.update(pbkdf2_sha512__default_rounds=500000)

        # Tear down in reverse order of setUp.
        quilt_server.db.session.remove()
        quilt_server.db.drop_all()
        sqlalchemy_utils.drop_database(self.db_url)

        self.s3_stubber.deactivate()
        self.payments_patcher.stop()
        self.mp_patcher.stop()
        self.requests_mock.stop()

    def _mock_email(self):
        """Mocks the auth API call and just returns the value of the
        Authorization header"""
        invite_url = quilt_server.app.config['INVITE_SEND_URL']
        self.requests_mock.add(responses.POST, invite_url, json.dumps(dict()))

    def put_package(self, owner, package, contents, is_public=False, is_team=False,
                    tag_latest=False):
        """Upload a package via the API; optionally tag it 'latest'.

        Returns the package URL used for the PUT.
        """
        contents_hash = hash_contents(contents)
        pkgurl = '/api/package/{usr}/{pkg}/{hash}'.format(
            usr=owner,
            pkg=package,
            hash=contents_hash
        )
        resp = self.app.put(
            pkgurl,
            data=json.dumps(dict(
                description="",
                contents=contents,
                is_public=is_public,
                is_team=is_team,
            ), default=encode_node),
            content_type='application/json',
            headers={
                # The mocked verify_token_string accepts a username as a token.
                'Authorization': owner
            }
        )
        assert resp.status_code == requests.codes.ok

        if tag_latest:
            resp = self.app.put(
                '/api/tag/{usr}/{pkg}/{tag}'.format(
                    usr=owner,
                    pkg=package,
                    tag='latest'
                ),
                data=json.dumps(dict(
                    hash=contents_hash
                )),
                content_type='application/json',
                headers={
                    'Authorization': owner
                }
            )
            assert resp.status_code == requests.codes.ok

        return pkgurl

    def _share_package(self, owner, pkg, other_user):
        """Grant `other_user` access to owner's package; returns the response."""
        return self.app.put(
            '/api/access/{owner}/{pkg}/{usr}'.format(
                owner=owner, usr=other_user, pkg=pkg
            ),
            headers={
                'Authorization': owner
            }
        )

    def _unshare_package(self, owner, pkg, other_user):
        """Revoke `other_user`'s access to owner's package; returns the response."""
        return self.app.delete(
            '/api/access/{owner}/{pkg}/{usr}'.format(
                owner=owner, usr=other_user, pkg=pkg
            ),
            headers={
                'Authorization': owner
            }
        )

    def _mock_object(self, owner, blob_hash, contents):
        """Queue a stubbed S3 get_object response serving `contents` gzipped."""
        contents_gzipped = gzip.compress(contents)
        self.s3_stubber.add_response('get_object', dict(
            Body=BytesIO(contents_gzipped),
            ContentEncoding='gzip'
        ), dict(
            Bucket=quilt_server.app.config['PACKAGE_BUCKET_NAME'],
            Key='objs/%s/%s' % (owner, blob_hash),
            # Only the tail of the object is fetched, up to the preview size.
            Range='bytes=-%d' % MAX_PREVIEW_SIZE
        ))
class TestADICommandS3(TransactionTestCase):
    """Exercise the ADI stats management commands with S3 as the stats
    source, serving local hive fixtures through a stubbed S3 client."""

    fixtures = ('base/addon_3615', 'base/featured', 'addons/persona',
                'base/appversion.json')
    # Date the fixture hive files correspond to.
    date = '2014-07-10'
    stats_source = 's3'

    def add_response(self, stat):
        """Queue one stubbed get_object response whose body is the local
        hive fixture for `stat`."""
        stat_path = os.path.join(hive_folder, 'src', '%s.hive' % stat)
        data = get_stats_data(stat_path)
        response = {
            'Body': data,
        }
        expected_params = {'Bucket': 'test-bucket',
                           'Key': os.path.join('amo_stats', stat, self.date,
                                               '000000_0'),
                           'Range': ANY}
        self.stubber.add_response('get_object', response, expected_params)

    def setUp(self):
        self.client = boto3.client('s3')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch(
        'olympia.stats.management.commands.update_counts_from_file.'
        'save_stats_to_file')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_update_counts_from_s3(self, mock_boto3, mock_save_stats_to_file):
        """update_counts_from_file reads all per-stat files from S3 and
        aggregates them into a single UpdateCount row."""
        stats = ['app', 'locale', 'os', 'status', 'version']
        # Each stat file is read twice by the command; queue two rounds.
        for x in range(2):
            for stat in stats:
                self.add_response('update_counts_by_%s' % stat)

        # The command builds its own client; hand it our stubbed one.
        mock_boto3.client.return_value = self.client

        management.call_command('update_counts_from_file', date=self.date,
                                stats_source=self.stats_source)

        assert UpdateCount.objects.all().count() == 1
        update_count = UpdateCount.objects.last()
        # should be identical to `statuses.userEnabled`
        assert update_count.count == 4
        assert update_count.date == date(2014, 7, 10)
        assert update_count.versions == {u'3.8': 2, u'3.7': 3}
        assert update_count.statuses == {u'userDisabled': 1, u'userEnabled': 4}
        application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        assert update_count.applications[application] == {u'3.6': 18}
        assert update_count.oses == {u'WINNT': 5}
        assert update_count.locales == {u'en-us': 1, u'en-US': 4}
        # save_stats_to_file is called with a non-saved model.
        assert isinstance(mock_save_stats_to_file.call_args[0][0],
                          UpdateCount)

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch(
        'olympia.stats.management.commands.download_counts_from_file.'
        'save_stats_to_file')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_download_counts_from_s3(self, mock_boto3,
                                     mock_save_stats_to_file):
        """download_counts_from_file imports download counts from S3,
        resolving sources against the DownloadSource table."""
        for x in range(2):
            self.add_response('download_counts')

        # Create the necessary "valid download sources" entries.
        DownloadSource.objects.create(name='search', type='full')
        DownloadSource.objects.create(name='coll', type='prefix')

        mock_boto3.client.return_value = self.client

        management.call_command('download_counts_from_file', date=self.date,
                                stats_source=self.stats_source)

        assert DownloadCount.objects.all().count() == 2
        download_count = DownloadCount.objects.get(addon_id=3615)
        assert download_count.count == 3
        assert download_count.date == date(2014, 7, 10)
        assert download_count.sources == {u'search': 2, u'collection': 1}

        # save_stats_to_file is called with a non-saved model.
        assert isinstance(
            mock_save_stats_to_file.call_args[0][0], DownloadCount)

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch(
        'olympia.stats.management.commands.theme_update_counts_from_file.'
        'save_stats_to_file')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_theme_update_counts_from_s3(self, mock_boto3,
                                         mock_save_stats_to_file):
        """theme_update_counts_from_file merges old persona-id counts with
        new addon-id counts into one ThemeUpdateCount row."""
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('theme_update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 1
        # Persona 813 has addon id 15663: we need the count to be the sum of
        # the "old" request on the persona_id 813 (only the one with the source
        # "gp") and the "new" request on the addon_id 15663.
        tuc2 = ThemeUpdateCount.objects.get(addon_id=15663)
        assert tuc2.count == 15

        assert mock_save_stats_to_file.call_count == 1

        # save_stats_to_file is called with a non-saved model.
        assert isinstance(
            mock_save_stats_to_file.call_args[0][0], ThemeUpdateCount)
class TestADICommandS3(TransactionTestCase):
    """Exercise the ADI stats management commands with S3 as the stats
    source, serving local hive fixtures through a stubbed S3 client.

    NOTE(review): a class with the same name appears earlier in this file;
    if both live in the same module the second definition shadows the
    first — confirm they belong to different test modules.
    """

    fixtures = ('base/addon_3615', 'base/featured', 'addons/persona',
                'base/appversion.json')
    # Date the fixture hive files correspond to.
    date = '2014-07-10'
    stats_source = 's3'

    def add_response(self, stat):
        """Queue one stubbed get_object response whose body is the local
        hive fixture for `stat`."""
        stat_path = os.path.join(hive_folder, 'src', '%s.hive' % stat)
        data = get_stats_data(stat_path)
        response = {
            'Body': data,
        }
        expected_params = {'Bucket': 'test-bucket',
                           'Key': os.path.join('amo_stats', stat, self.date,
                                               '000000_0'),
                           'Range': ANY}
        self.stubber.add_response('get_object', response, expected_params)

    def setUp(self):
        self.client = boto3.client('s3')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_update_counts_from_s3(self, mock_boto3):
        """update_counts_from_file reads all per-stat files from S3 and
        aggregates them into a single UpdateCount row."""
        stats = ['app', 'locale', 'os', 'status', 'version']
        # Each stat file is read twice by the command; queue two rounds.
        for x in range(2):
            for stat in stats:
                self.add_response('update_counts_by_%s' % stat)

        # The command builds its own client; hand it our stubbed one.
        mock_boto3.client.return_value = self.client

        management.call_command('update_counts_from_file', date=self.date,
                                stats_source=self.stats_source)

        assert UpdateCount.objects.all().count() == 1
        update_count = UpdateCount.objects.last()
        # should be identical to `statuses.userEnabled`
        assert update_count.count == 4
        assert update_count.date == date(2014, 7, 10)
        assert update_count.versions == {u'3.8': 2, u'3.7': 3}
        assert update_count.statuses == {u'userDisabled': 1, u'userEnabled': 4}
        application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        assert update_count.applications[application] == {u'3.6': 18}
        assert update_count.oses == {u'WINNT': 5}
        assert update_count.locales == {u'en-us': 1, u'en-US': 4}

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_download_counts_from_s3(self, mock_boto3):
        """download_counts_from_file imports download counts from S3."""
        for x in range(2):
            self.add_response('download_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('download_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)
        assert DownloadCount.objects.all().count() == 2
        download_count = DownloadCount.objects.get(addon_id=3615)
        assert download_count.count == 3
        assert download_count.date == date(2014, 7, 10)
        assert download_count.sources == {u'search': 2, u'cb-dl-bob': 1}

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_theme_update_counts_from_s3(self, mock_boto3):
        """theme_update_counts_from_file merges old persona-id counts with
        new addon-id counts into one ThemeUpdateCount row."""
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('theme_update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 1
        # Persona 813 has addon id 15663: we need the count to be the sum of
        # the "old" request on the persona_id 813 (only the one with the source
        # "gp") and the "new" request on the addon_id 15663.
        tuc2 = ThemeUpdateCount.objects.get(addon_id=15663)
        assert tuc2.count == 15

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_lwt_stats_go_to_migrated_static_theme(self, mock_boto3):
        """Stats for a deleted lightweight theme are credited to the static
        theme it was migrated to (as UpdateCount, not ThemeUpdateCount)."""
        lwt = Addon.objects.get(id=15663)
        lwt.delete()
        static_theme = addon_factory(type=amo.ADDON_STATICTHEME)
        MigratedLWT.objects.create(
            lightweight_theme=lwt, static_theme=static_theme)
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('theme_update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 0
        assert UpdateCount.objects.all().count() == 1
        assert UpdateCount.objects.get(addon_id=static_theme.id).count == 15

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_lwt_stats_go_to_migrated_with_stats_already(self, mock_boto3):
        """Migrated-theme stats are added to an existing UpdateCount row
        for the same date rather than creating a second row."""
        lwt = Addon.objects.get(id=15663)
        lwt.delete()
        static_theme = addon_factory(type=amo.ADDON_STATICTHEME)
        MigratedLWT.objects.create(
            lightweight_theme=lwt, static_theme=static_theme)
        UpdateCount.objects.create(
            addon=static_theme, count=123, date=date(2014, 7, 10))
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('theme_update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 0
        assert UpdateCount.objects.all().count() == 1
        # 123 existing + 15 imported.
        assert UpdateCount.objects.get(addon_id=static_theme.id).count == 138
class TestStubber(unittest.TestCase):
    """Unit tests for botocore's Stubber against a fully mocked client:
    verifies event (de)registration, response queueing/validation, and
    the responses emitted through the before-call event."""

    def setUp(self):
        self.event_emitter = hooks.HierarchicalEmitter()
        self.client = mock.Mock()
        self.client.meta.events = self.event_emitter
        # Every client method maps to operation name 'foo'.
        self.client.meta.method_to_api_mapping.get.return_value = 'foo'
        self.stubber = Stubber(self.client)
        # Patch out parameter validation so tests can queue responses
        # without a real service model.
        self.validate_parameters_mock = mock.Mock()
        self.validate_parameters_patch = mock.patch(
            'botocore.stub.validate_parameters', self.validate_parameters_mock)
        self.validate_parameters_patch.start()

    def tearDown(self):
        self.validate_parameters_patch.stop()

    def emit_get_response_event(self, model=None, request_dict=None,
                                signer=None, context=None):
        """Fire the before-call event the stubber hooks and return the
        (http_response, parsed_response) tuple it produced."""
        if model is None:
            model = mock.Mock()
            model.name = 'foo'

        handler, response = self.event_emitter.emit_until_response(
            event_name='before-call.myservice.foo', model=model,
            params=request_dict, request_signer=signer, context=context)
        return response

    def test_stubber_registers_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        # This just ensures that we register at the correct event
        # and nothing more
        self.event_emitter.register_first.assert_called_with(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.register.assert_called_with('before-call.*.*',
                                                       mock.ANY,
                                                       unique_id=mock.ANY)

    def test_stubber_unregisters_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        self.stubber.deactivate()
        self.event_emitter.unregister.assert_any_call(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.unregister.assert_any_call('before-call.*.*',
                                                      mock.ANY,
                                                      unique_id=mock.ANY)

    def test_context_manager(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter

        with self.stubber:
            # Ensure events are registered in context
            self.event_emitter.register_first.assert_called_with(
                'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
            self.event_emitter.register.assert_called_with('before-call.*.*',
                                                           mock.ANY,
                                                           unique_id=mock.ANY)

        # Ensure events are no longer registered once we leave the context
        self.event_emitter.unregister.assert_any_call(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.unregister.assert_any_call('before-call.*.*',
                                                      mock.ANY,
                                                      unique_id=mock.ANY)

    def test_add_response(self):
        response = {'foo': 'bar'}
        self.stubber.add_response('foo', response)
        # Queued-but-unconsumed responses trigger the assertion.
        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()

    def test_add_response_fails_when_missing_client_method(self):
        del self.client.foo
        with self.assertRaises(ValueError):
            self.stubber.add_response('foo', {})

    def test_validates_service_response(self):
        self.stubber.add_response('foo', {})
        self.assertTrue(self.validate_parameters_mock.called)

    def test_validate_ignores_response_metadata(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo',
                    'input': {
                        'shape': 'StringShape'
                    },
                    'output': {
                        'shape': 'StringShape'
                    }
                }
            },
            'shapes': {
                'StringShape': {
                    'type': 'string'
                }
            }
        })
        op_name = service_model.operation_names[0]
        output_shape = service_model.operation_model(op_name).output_shape

        self.client.meta.service_model = service_model
        self.stubber.add_response('TestOperation', service_response)
        # ResponseMetadata must be stripped before validation ({}).
        self.validate_parameters_mock.assert_called_with({}, output_shape)

        # Make sure service response hasn't been mutated
        self.assertEqual(service_response, {'ResponseMetadata': {
            'foo': 'bar'
        }})

    def test_validates_on_empty_output_shape(self):
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo'
                }
            }
        })
        self.client.meta.service_model = service_model

        # No output shape -> any non-empty response body is invalid.
        with self.assertRaises(ParamValidationError):
            self.stubber.add_response('TestOperation', {'foo': 'bar'})

    def test_get_response(self):
        service_response = {'bar': 'baz'}
        self.stubber.add_response('foo', service_response)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1], service_response)
        self.assertEqual(response[0].status_code, 200)

    def test_get_client_error_response(self):
        error_code = "foo"
        service_message = "bar"
        self.stubber.add_client_error('foo', error_code, service_message)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1]['Error']['Message'], service_message)
        self.assertEqual(response[1]['Error']['Code'], error_code)

    def test_get_client_error_with_extra_error_meta(self):
        error_code = "foo"
        error_message = "bar"
        error_meta = {
            "Endpoint": "https://foo.bar.baz",
        }
        self.stubber.add_client_error('foo', error_code, error_message,
                                      http_status_code=301,
                                      service_error_meta=error_meta)
        with self.stubber:
            response = self.emit_get_response_event()
        # service_error_meta entries are merged into the Error dict.
        error = response[1]['Error']
        self.assertIn('Endpoint', error)
        self.assertEqual(error['Endpoint'], "https://foo.bar.baz")

    def test_get_client_error_with_extra_response_meta(self):
        error_code = "foo"
        error_message = "bar"
        stub_response_meta = {
            "RequestId": "79104EXAMPLEB723",
        }
        self.stubber.add_client_error('foo', error_code, error_message,
                                      http_status_code=301,
                                      response_meta=stub_response_meta)
        with self.stubber:
            response = self.emit_get_response_event()
        # response_meta entries land in ResponseMetadata.
        actual_response_meta = response[1]['ResponseMetadata']
        self.assertIn('RequestId', actual_response_meta)
        self.assertEqual(actual_response_meta['RequestId'],
                         "79104EXAMPLEB723")

    def test_get_response_errors_with_no_stubs(self):
        self.stubber.activate()
        with self.assertRaises(UnStubbedResponseError):
            self.emit_get_response_event()

    def test_assert_no_responses_remaining(self):
        self.stubber.add_response('foo', {})
        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()
class TestStubber(unittest.TestCase):
    """Unit tests for botocore's Stubber against a fully mocked client.

    NOTE(review): a near-identical class with the same name appears earlier
    in this file (this older variant expects StubResponseError rather than
    UnStubbedResponseError); if both live in the same module the second
    definition shadows the first — confirm they belong to different files.
    """

    def setUp(self):
        self.event_emitter = hooks.HierarchicalEmitter()
        self.client = mock.Mock()
        self.client.meta.events = self.event_emitter
        # Every client method maps to operation name 'foo'.
        self.client.meta.method_to_api_mapping.get.return_value = 'foo'
        self.stubber = Stubber(self.client)
        # Patch out parameter validation so tests can queue responses
        # without a real service model.
        self.validate_parameters_mock = mock.Mock()
        self.validate_parameters_patch = mock.patch(
            'botocore.stub.validate_parameters', self.validate_parameters_mock)
        self.validate_parameters_patch.start()

    def tearDown(self):
        self.validate_parameters_patch.stop()

    def emit_get_response_event(self, model=None, request_dict=None,
                                signer=None, context=None):
        """Fire the before-call event the stubber hooks and return the
        (http_response, parsed_response) tuple it produced."""
        if model is None:
            model = mock.Mock()
            model.name = 'foo'

        handler, response = self.event_emitter.emit_until_response(
            event_name='before-call.myservice.foo', model=model,
            params=request_dict, request_signer=signer, context=context)
        return response

    def test_stubber_registers_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        # This just ensures that we register at the correct event
        # and nothing more
        self.event_emitter.register_first.assert_called_with(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.register.assert_called_with(
            'before-call.*.*', mock.ANY, unique_id=mock.ANY)

    def test_stubber_unregisters_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        self.stubber.deactivate()
        self.event_emitter.unregister.assert_any_call(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.unregister.assert_any_call(
            'before-call.*.*', mock.ANY, unique_id=mock.ANY)

    def test_add_response(self):
        response = {'foo': 'bar'}
        self.stubber.add_response('foo', response)
        # Queued-but-unconsumed responses trigger the assertion.
        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()

    def test_add_response_fails_when_missing_client_method(self):
        del self.client.foo
        with self.assertRaises(ValueError):
            self.stubber.add_response('foo', {})

    def test_validates_service_response(self):
        self.stubber.add_response('foo', {})
        self.assertTrue(self.validate_parameters_mock.called)

    def test_validate_ignores_response_metadata(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo',
                    'input': {'shape': 'StringShape'},
                    'output': {'shape': 'StringShape'}
                }
            },
            'shapes': {
                'StringShape': {'type': 'string'}
            }
        })
        op_name = service_model.operation_names[0]
        output_shape = service_model.operation_model(op_name).output_shape

        self.client.meta.service_model = service_model
        self.stubber.add_response('TestOperation', service_response)
        # ResponseMetadata must be stripped before validation ({}).
        self.validate_parameters_mock.assert_called_with(
            {}, output_shape)

        # Make sure service response hasn't been mutated
        self.assertEqual(
            service_response, {'ResponseMetadata': {'foo': 'bar'}})

    def test_validates_on_empty_output_shape(self):
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo'
                }
            }
        })
        self.client.meta.service_model = service_model

        # No output shape -> any non-empty response body is invalid.
        with self.assertRaises(ParamValidationError):
            self.stubber.add_response('TestOperation', {'foo': 'bar'})

    def test_get_response(self):
        service_response = {'bar': 'baz'}
        self.stubber.add_response('foo', service_response)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1], service_response)
        self.assertEqual(response[0].status_code, 200)

    def test_get_client_error_response(self):
        error_code = "foo"
        service_message = "bar"
        self.stubber.add_client_error('foo', error_code, service_message)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1]['Error']['Message'], service_message)
        self.assertEqual(response[1]['Error']['Code'], error_code)

    def test_get_response_errors_with_no_stubs(self):
        self.stubber.activate()
        with self.assertRaises(StubResponseError):
            self.emit_get_response_event()

    def test_assert_no_responses_remaining(self):
        self.stubber.add_response('foo', {})
        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()
class TestStatefulVolume(unittest.TestCase):
    """Tests for ``sebs.ec2.StatefulVolume``.

    AWS access is faked two ways: a botocore ``Stubber`` supplies canned
    ``describe_volumes``/``create_volume`` responses for the low-level
    client, while MagicMock objects stand in for the boto3 session,
    resource, Volume and Snapshot objects.
    """

    def setUp(self):
        # Disable logging for testing
        logging.disable(logging.CRITICAL)
        self.instance_id = 'i-1234567890abcdef0'
        self.tag_name = 'sebs'
        self.device_name = '/dev/xdf'
        # Set up our ec2 client stub
        ec2 = botocore.session.get_session().create_client(
            'ec2', region_name='us-west-2')
        self.ec2_client = ec2
        self.stub_client = Stubber(ec2)
        # Set up our ec2 resource stub
        ec2_resource = boto3.resource('ec2', region_name='us-west-2')
        self.stub_resource = Stubber(ec2_resource.meta.client)
        # Use mocks to pass our client stub to the code under test
        self.boto3 = MagicMock(name='module_mock')
        self.mock_client = MagicMock(name='client_mock', return_value=ec2)
        self.boto3.client = self.mock_client
        # Set up resource and volume mocks
        self.mock_snapshot = MagicMock(
            name='snapshot_mock', snapshot_id='sn-12345')
        self.first_volume = Mock(name='first_volume',
                                 attachments=[{'InstanceId': ''}],
                                 volume_type='GP2',
                                 volume_id='vol-1111')
        self.second_volume = Mock(name='second_volume',
                                  attachments=[{'InstanceId': ''}],
                                  volume_type='GP2',
                                  volume_id='vol-2222')
        self.first_volume.create_snapshot = Mock(
            return_value=self.mock_snapshot)
        self.mock_resource = MagicMock(
            name='mock_resource_object')
        self.mock_resource.Volume = MagicMock(
            name='mock_volume_constructor')
        # First Volume() construction yields first_volume, the second
        # yields second_volume.
        self.mock_resource.Volume.side_effect = [
            self.first_volume, self.second_volume]
        self.boto3.resource = MagicMock(
            name='mock_resource_contructor', return_value=self.mock_resource)
        self.mock_session = MagicMock(name='mock_session')
        self.mock_session.client.return_value = self.ec2_client
        self.mock_session.resource.return_value = self.mock_resource
        self.boto3.session.Session = self.mock_session
        modules = {
            'boto3': self.boto3,
            'ec2_metadata': MagicMock()
        }
        # Canned DescribeVolumes payload holding a single available volume.
        self.default_response = {
            'Volumes': [
                {
                    'Attachments': [],
                    'AvailabilityZone': 'string',
                    'CreateTime': datetime.datetime(2015, 1, 1),
                    'Encrypted': False,
                    'KmsKeyId': 'string',
                    'OutpostArn': 'string',
                    'Size': 123,
                    'SnapshotId': 'string',
                    'State': 'available',
                    'VolumeId': 'vol-XXXXXX',
                    'Iops': 123,
                    'Tags': [
                        {
                            'Key': 'string',
                            'Value': 'string'
                        },
                    ],
                    'VolumeType': 'gp2',
                    'FastRestored': False,
                    'MultiAttachEnabled': False
                }
            ]
        }
        self.default_params = {'Filters': ANY}
        self.stub_client.activate()
        # Import the module under test only after boto3/ec2_metadata are
        # replaced in sys.modules, so it binds to the mocks above.
        self.module_patcher = patch.dict('sys.modules', modules)
        self.module_patcher.start()
        from sebs.ec2 import StatefulVolume
        self.StatefulVolume = StatefulVolume

    def tearDown(self):
        # Turn logging back on
        logging.disable(logging.NOTSET)
        self.module_patcher.stop()
        self.stub_client.deactivate()

    def test_class_properties(self):
        """The constructor wires up ids, clients and default state."""
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        self.stub_client.assert_no_pending_responses()
        self.assertEqual(sv.instance_id, self.instance_id,
                         'Should set the instance_id we pass in.')
        self.assertEqual(sv.device_name, self.device_name,
                         'Should set the deviceName')
        self.assertEqual(
            sv.ready, False, 'Should set the volume to not ready.')
        self.assertEqual(sv.status, 'Unknown', 'Should set the status')
        # Message fixed: this assertion checks the volume, not the tag name.
        self.assertEqual(sv.volume, None,
                         'Should initialize the volume to None')
        self.assertEqual(sv.tag_name, self.tag_name,
                         'Should set the tag name')
        self.assertEqual(sv.ec2_client, self.ec2_client,
                         'Should set an ec2 client')
        self.assertEqual(sv.ec2_resource, self.mock_resource,
                         'Should set our mock volume.')

    def test_status_new(self):
        """No tagged volume, but a device already attached: 'Attached'."""
        response = self.default_response.copy()
        response['Volumes'] = []
        self.stub_client.add_response(
            'describe_volumes', response, self.default_params)
        self.stub_client.add_response(
            'describe_volumes', self.default_response,
            {'Filters': [
                {
                    'Name': 'attachment.instance-id',
                    'Values': [self.instance_id]
                },
                {
                    'Name': 'attachment.device',
                    'Values': [
                        self.device_name,
                    ]
                }
            ]})
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.get_status()
        self.stub_client.assert_no_pending_responses()
        self.assertEqual(sv.status, 'Attached',
                         'Volume should be mounted already.')
        self.assertIsInstance(sv.volume, Mock,
                              'Should have a boto3 Volume resource')
        self.assertEqual(sv.ready, True, 'Should be ready.')

    def test_status_missing(self):
        """Neither lookup finds a volume: status is 'Missing'."""
        response = self.default_response.copy()
        response['Volumes'] = []
        self.stub_client.add_response(
            'describe_volumes', response, self.default_params)
        self.stub_client.add_response(
            'describe_volumes', response,
            {'Filters': [
                {
                    'Name': 'attachment.instance-id',
                    'Values': [self.instance_id]
                },
                {
                    'Name': 'attachment.device',
                    'Values': [
                        self.device_name,
                    ]
                }
            ]})
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.get_status()
        self.stub_client.assert_no_pending_responses()
        self.assertEqual(sv.status, 'Missing',
                         'We should not find a tagged volume.')
        self.assertEqual(sv.volume, None,
                         'Should not have set a volume resource.')
        self.assertEqual(sv.ready, False, 'Should not be ready.')

    def test_status_duplicate(self):
        """Two tagged volumes for one device: status is 'Duplicate'."""
        response = self.default_response.copy()
        response['Volumes'] = [
            {'VolumeId': 'vol-1111'}, {'VolumeId': 'vol-2222'}]
        self.stub_client.add_response(
            'describe_volumes', response, self.default_params)
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.get_status()
        self.stub_client.assert_no_pending_responses()
        self.assertEqual(sv.status, 'Duplicate',
                         'Should be a duplicate volume')
        self.assertEqual(sv.volume, None,
                         'Should not have set a volume resource.')
        self.assertEqual(sv.ready, False, 'Volume should not be Ready')

    def test_status_not_attached(self):
        """A tagged but unattached volume: status 'Not Attached'."""
        self.stub_client.add_response(
            'describe_volumes', self.default_response, self.default_params)
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.get_status()
        self.stub_client.assert_no_pending_responses()
        self.assertEqual(sv.status, 'Not Attached',
                         'Should find an existing volume')
        self.assertEqual(sv.volume, self.first_volume,
                         'Should be our volume mock')
        self.assertEqual(sv.ready, False, 'Volume should not be Ready')

    def test_volume_tagging(self):
        """tag_volume() tags the attached volume with device name."""
        response = self.default_response.copy()
        response['Volumes'] = []
        self.stub_client.add_response(
            'describe_volumes', response,
            {'Filters': [
                {
                    'Name': f'tag:{self.tag_name}',
                    'Values': [self.device_name]
                }
            ]})
        self.stub_client.add_response(
            'describe_volumes', self.default_response,
            {'Filters': [
                {
                    'Name': 'attachment.instance-id',
                    'Values': [self.instance_id]
                },
                {
                    'Name': 'attachment.device',
                    'Values': [
                        self.device_name,
                    ]
                }
            ]})
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.get_status()
        sv.tag_volume()
        self.first_volume.create_tags.assert_called_with(
            Tags=[{'Key': self.tag_name, 'Value': self.device_name}])

    def test_copy_new(self):
        """copy() is a no-op unless the status is 'Not Attached'."""
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.status = 'New'
        response = sv.copy('fakeAZ')
        self.assertEqual(response, 'New',
                         "Should do nothing if status is not 'Not Attached'.")

    def test_copy_same_az(self):
        """copy() into the volume's own AZ does nothing."""
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        self.first_volume.availability_zone = 'fakeAZ'
        sv.status = 'Not Attached'
        sv.volume = self.first_volume
        response = sv.copy('fakeAZ')
        self.assertEqual(response, 'Not Attached',
                         'Should not change the status if in the same AZ.')
        self.first_volume.copy.assert_not_called()

    def test_copy_different_az(self):
        """copy() to another AZ snapshots, recreates and cleans up."""
        self.stub_client.add_response(
            'create_volume',
            {'VolumeId': 'vol-2222', 'AvailabilityZone': 'newAZ',
             'Encrypted': True, 'Size': 50, 'SnapshotId': 'sn-2323',
             'VolumeType': 'gp2'},
            {'AvailabilityZone': 'newAZ', 'SnapshotId': ANY,
             'VolumeType': ANY, 'TagSpecifications': ANY})
        self.stub_client.add_response(
            'describe_volumes',
            {'Volumes': [{'VolumeId': 'vol-2222', 'State': 'available'}]},
            {'VolumeIds': ['vol-2222']})
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        self.first_volume.availability_zone = 'fakeAZ'
        sv.status = 'Not Attached'
        # Consumes the first Volume.side_effect entry (first_volume).
        sv.volume = self.mock_resource.Volume()
        response = sv.copy('newAZ')
        # Last test we need here is that we are actually creating a
        # second volume
        self.assertEqual(response, 'Not Attached',
                         'Should not change the status after a copy')
        self.assertFalse(sv.ready, 'We should not be ready after copying.')
        self.assertEqual(sv.volume, self.second_volume,
                         'Should have our second volume.')
        self.first_volume.delete.assert_called_once()
        self.mock_snapshot.delete.assert_called_once()

    def test_attach_new(self):
        """attach() is a no-op unless the status is 'Not Attached'."""
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.status = 'New'
        response = sv.attach()
        self.assertEqual(response, 'New',
                         "Should do nothing if status is not 'Not Attached'.")

    def test_attach(self):
        """attach() waits for the volume, attaches it and drops the old one."""
        self.stub_client.add_response(
            'describe_volumes',
            {'Volumes': [{'VolumeId': 'vol-1111', 'State': 'in-use'}]},
            {'Filters': [
                {
                    'Name': 'attachment.instance-id',
                    'Values': [
                        self.instance_id,
                    ]
                },
                {
                    'Name': 'attachment.device',
                    'Values': [
                        self.device_name,
                    ]
                },
            ]})
        self.stub_client.add_response(
            'describe_volumes',
            {'Volumes': [{'VolumeId': 'vol-2222', 'State': 'available'}]},
            {'VolumeIds': ['vol-1111']})
        self.stub_client.add_response(
            'describe_volumes',
            {'Volumes': [{'VolumeId': 'vol-2222', 'State': 'in-use'}]},
            {'VolumeIds': ['vol-2222']})
        sv = self.StatefulVolume(self.mock_session, self.instance_id,
                                 self.device_name, self.tag_name)
        sv.status = 'Not Attached'
        sv.volume = self.second_volume
        response = sv.attach()
        self.assertEqual(response, 'Attached',
                         'Should be Attached to the instance.')
        self.assertFalse(sv.ready, 'Should not be Ready')
        # We should delete the previous volume
        self.first_volume.delete.assert_called_once()