def test_cluster_info(cluster_provisioner):
    """info() flattens the EMR describe_cluster response into a plain dict."""
    cluster_id = 'foo-bar-spam-egs'
    stub = Stubber(cluster_provisioner.emr)
    timeline = {
        'CreationDateTime': datetime(2015, 1, 1),
        'ReadyDateTime': datetime(2015, 1, 2),
        'EndDateTime': datetime(2015, 1, 3),
    }
    api_response = {
        'Cluster': {
            'MasterPublicDnsName': '1.2.3.4',
            'Status': {
                'State': 'RUNNING',
                'StateChangeReason': {
                    'Code': 'ALL_STEPS_COMPLETED',
                    'Message': 'All steps completed.',
                },
                'Timeline': timeline,
            },
        },
    }
    stub.add_response('describe_cluster', api_response,
                      {'ClusterId': cluster_id})
    with stub:
        info = cluster_provisioner.info(cluster_id)
    assert info == {
        'creation_datetime': datetime(2015, 1, 1),
        'ready_datetime': datetime(2015, 1, 2),
        'end_datetime': datetime(2015, 1, 3),
        'state_change_reason_code': 'ALL_STEPS_COMPLETED',
        'state_change_reason_message': 'All steps completed.',
        'state': 'RUNNING',
        'public_dns': '1.2.3.4',
    }
class TestIdempotencyToken(unittest.TestCase):
    """Idempotency-token handling for EC2 purchase_scheduled_instances."""

    def setUp(self):
        self.function_name = "purchase_scheduled_instances"
        self.region = "us-west-2"
        self.session = botocore.session.get_session()
        self.client = self.session.create_client("ec2", self.region)
        self.stubber = Stubber(self.client)
        self.service_response = {}
        self.params_seen = []
        # Record every serialized request parameter that goes out.
        self.client.meta.events.register_first(
            "before-call.*.*", self.collect_params,
            unique_id="TestIdempotencyToken")

    def collect_params(self, model, params, *args, **kwargs):
        self.params_seen.extend(params["body"].keys())

    def test_provided_idempotency_token(self):
        """An explicit ClientToken is passed through to the request."""
        purchase_requests = [{"PurchaseToken": "foo", "InstanceCount": 123}]
        self.stubber.add_response(
            self.function_name, self.service_response,
            {"PurchaseRequests": purchase_requests, "ClientToken": ANY})
        with self.stubber:
            self.client.purchase_scheduled_instances(
                PurchaseRequests=purchase_requests, ClientToken="foobar")
        self.assertIn("ClientToken", self.params_seen)

    def test_insert_idempotency_token(self):
        """A ClientToken is auto-generated when the caller omits one."""
        purchase_requests = [{"PurchaseToken": "foo", "InstanceCount": 123}]
        self.stubber.add_response(
            self.function_name, self.service_response,
            {"PurchaseRequests": purchase_requests})
        with self.stubber:
            self.client.purchase_scheduled_instances(
                PurchaseRequests=purchase_requests)
        self.assertIn("ClientToken", self.params_seen)
def test_mutating_filters(self):
    """A filtered collection snapshots its Filters at creation time."""
    stub = Stubber(self.service_resource.meta.client)
    caller_filters = [
        {'Name': 'instance-state-name', 'Values': ['running']}
    ]
    running = self.service_resource.instances.filter(Filters=caller_filters)
    # Mutating the caller's list after the collection was built must not
    # leak into the request that the collection eventually makes.
    caller_filters.append({'Name': 'instance-type', 'Values': ['c4.large']})
    stub.add_response(
        method='describe_instances',
        service_response={'Reservations': []},
        expected_params={
            'Filters': [{'Name': 'instance-state-name',
                         'Values': ['running']}]
        },
    )
    with stub:
        list(running)
    stub.assert_no_pending_responses()
class TestRoute53Pagination(unittest.TestCase):
    """Route53 list_hosted_zones pagination with its string MaxItems type."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.client = self.session.create_client('route53', 'us-west-2')
        self.stubber = Stubber(self.client)
        # Minimal response carrying every required output field.
        self.response = {
            'HostedZones': [],
            'Marker': '',
            'IsTruncated': True,
            'MaxItems': '1',
        }
        self.operation_name = 'list_hosted_zones'

    def test_paginate_with_max_items_int(self):
        """An integer PageSize still works even though MaxItems is a string
        in the Route53 model (the CLI converts page size to int)."""
        self.stubber.add_response(self.operation_name, self.response)
        paginator = self.client.get_paginator('list_hosted_zones')
        with self.stubber:
            pages = list(paginator.paginate(
                PaginationConfig={'PageSize': 1}))
        self.assertGreaterEqual(len(pages), 0)

    def test_paginate_with_max_items_str(self):
        """A string PageSize works; that is the type the model expects."""
        self.stubber.add_response(self.operation_name, self.response)
        paginator = self.client.get_paginator('list_hosted_zones')
        with self.stubber:
            pages = list(paginator.paginate(
                PaginationConfig={'PageSize': '1'}))
        self.assertGreaterEqual(len(pages), 0)
def test_ensure_cfn_bucket_doesnt_exist_us_west(self):
    """A missing bucket in us-west-1 is created with a LocationConstraint."""
    session = get_session("us-west-1")
    provider = Provider(session)
    action = BaseAction(
        context=mock_context("mynamespace"),
        provider_builder=MockProviderBuilder(provider, region="us-west-1"),
    )
    stub = Stubber(action.s3_conn)
    # head_bucket reports the bucket does not exist ...
    stub.add_client_error(
        "head_bucket",
        service_error_code="NoSuchBucket",
        service_message="Not Found",
        http_status_code=404,
    )
    # ... so create_bucket must be called with the regional constraint.
    stub.add_response(
        "create_bucket",
        service_response={},
        expected_params={
            "Bucket": ANY,
            "CreateBucketConfiguration": {
                "LocationConstraint": "us-west-1",
            },
        },
    )
    with stub:
        action.ensure_cfn_bucket()
def test_delete_tags(self):
    """delete_tags() proxies to the client and returns its raw response.

    The stubber is used as a context manager so it is always deactivated,
    even when an assertion fails; the original activate()/deactivate()
    pair left the stubber active on failure, leaking into later tests.
    """
    stubber = Stubber(self.instance_resource.meta.client)
    stubber.add_response('delete_tags', {})
    with stubber:
        response = self.instance_resource.delete_tags(Tags=[{'Key': 'foo'}])
        stubber.assert_no_pending_responses()
        self.assertEqual(response, {})
def create_client_sts_stub(service, *args, **kwargs):
    """Create a real client, then stub assume_role with canned credentials.

    Also records the region the client resolved to (closure side effect)
    so the enclosing test can assert on it. The stubber is intentionally
    left active for the returned client.
    """
    client = _original_create_client(service, *args, **kwargs)
    stub = Stubber(client)
    canned = self.create_assume_role_response(expected_creds)
    self.actual_client_region = client.meta.region_name
    stub.add_response('assume_role', canned)
    stub.activate()
    return client
def test_stop_cluster(cluster_provisioner):
    """stop() terminates the job flow with the given id."""
    stub = Stubber(cluster_provisioner.emr)
    stub.add_response(
        'terminate_job_flows',
        {},
        {'JobFlowIds': ['12345']},
    )
    with stub:
        cluster_provisioner.stop(jobflow_id='12345')
class TestLogsCommandContext_get_resource_id_from_stack(TestCase):
    """Tests for LogsCommandContext._get_resource_id_from_stack."""

    def setUp(self):
        self.real_client = botocore.session.get_session().create_client(
            'cloudformation', region_name="us-east-1")
        self.cfn_client_stubber = Stubber(self.real_client)
        self.logical_id = "name"
        self.stack_name = "stackname"
        self.physical_id = "myid"

    def test_must_get_from_cfn(self):
        """The physical id is resolved via DescribeStackResource."""
        expected_params = {
            "StackName": self.stack_name,
            "LogicalResourceId": self.logical_id
        }
        mock_response = {
            "StackResourceDetail": {
                "PhysicalResourceId": self.physical_id,
                "LogicalResourceId": self.logical_id,
                "ResourceType": "AWS::Lambda::Function",
                "ResourceStatus": "UPDATE_COMPLETE",
                "LastUpdatedTimestamp": "2017-07-28T23:34:13.435Z"
            }
        }
        self.cfn_client_stubber.add_response(
            "describe_stack_resource", mock_response, expected_params)
        with self.cfn_client_stubber:
            result = LogsCommandContext._get_resource_id_from_stack(
                self.real_client, self.stack_name, self.logical_id)
        # assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual.
        self.assertEqual(result, self.physical_id)

    def test_must_handle_resource_not_found(self):
        """CFN client errors are surfaced as UserException with the
        botocore-formatted message."""
        errmsg = "Something went wrong"
        errcode = "SomeException"
        self.cfn_client_stubber.add_client_error(
            "describe_stack_resource",
            service_error_code=errcode,
            service_message=errmsg)
        expected_error_msg = (
            "An error occurred ({}) when calling the DescribeStackResource "
            "operation: {}".format(errcode, errmsg))
        with self.cfn_client_stubber:
            with self.assertRaises(UserException) as context:
                LogsCommandContext._get_resource_id_from_stack(
                    self.real_client, self.stack_name, self.logical_id)
        self.assertEqual(expected_error_msg, str(context.exception))
def set_cloudformation_stubber_for_client(self, redshift_client):
    """Preload describe_clusters responses for the source and target
    clusters, then leave the stubber active on the given client."""
    stub = Stubber(redshift_client)
    fixtures = (
        ('/DescribeSourceClusterResponse.json',
         'rscopyunloadtest3-redshiftclustersource-1so4t2ip0ei3a'),
        ('/DescribeTargetClusterResponse.json',
         'rscopyunloadtest3-redshiftclustertarget-oaw35zvu02h'),
    )
    for fixture_file, cluster_identifier in fixtures:
        with open(self.resource_path + fixture_file) as fh:
            stub.add_response(
                'describe_clusters',
                json.load(fh),
                {'ClusterIdentifier': cluster_identifier},
            )
    stub.activate()
class TestIdempotencyToken(unittest.TestCase):
    """Idempotency-token behavior of purchase_scheduled_instances."""

    def setUp(self):
        self.function_name = 'purchase_scheduled_instances'
        self.region = 'us-west-2'
        self.session = botocore.session.get_session()
        self.client = self.session.create_client('ec2', self.region)
        self.stubber = Stubber(self.client)
        self.service_response = {}
        self.params_seen = []
        # Capture every parameter serialized into the outgoing request.
        self.client.meta.events.register_first(
            'before-call.*.*',
            self.collect_params,
            unique_id='TestIdempotencyToken')

    def collect_params(self, model, params, *args, **kwargs):
        self.params_seen.extend(params['body'].keys())

    def test_provided_idempotency_token(self):
        """A caller-supplied ClientToken reaches the wire unchanged."""
        self.stubber.add_response(
            self.function_name,
            self.service_response,
            {
                'PurchaseRequests': [
                    {'PurchaseToken': 'foo', 'InstanceCount': 123}],
                'ClientToken': ANY,
            })
        with self.stubber:
            self.client.purchase_scheduled_instances(
                PurchaseRequests=[
                    {'PurchaseToken': 'foo', 'InstanceCount': 123}],
                ClientToken='foobar')
        self.assertIn('ClientToken', self.params_seen)

    def test_insert_idempotency_token(self):
        """A ClientToken is injected automatically when omitted."""
        self.stubber.add_response(
            self.function_name,
            self.service_response,
            {
                'PurchaseRequests': [
                    {'PurchaseToken': 'foo', 'InstanceCount': 123}],
            })
        with self.stubber:
            self.client.purchase_scheduled_instances(
                PurchaseRequests=[
                    {'PurchaseToken': 'foo', 'InstanceCount': 123}])
        self.assertIn('ClientToken', self.params_seen)
def test_operation_without_output(self):
    """An operation with no modeled output can still be stubbed with {}."""
    table = self.resource.Table('mytable')
    stub = Stubber(table.meta.client)
    stub.add_response('tag_resource', {})
    table_arn = 'arn:aws:dynamodb:us-west-2:123456789:table/mytable'
    with stub:
        table.meta.client.tag_resource(
            ResourceArn=table_arn,
            Tags=[{'Key': 'project', 'Value': 'val'}],
        )
    stub.assert_no_pending_responses()
def test_create_cluster_valid_parameters():
    """Test that the parameters passed down to run_job_flow are valid"""
    stub = Stubber(emr)
    canned = {'JobFlowId': 'job-flow-id'}
    stub.add_response('run_job_flow', canned)
    emr_release = settings.AWS_CONFIG['EMR_RELEASES'][0]
    start_args = ['*****@*****.**', 'cluster', 3, 'public-key', emr_release]
    with stub:
        job_flow_id = provisioning.cluster_start(*start_args)
    assert job_flow_id == canned['JobFlowId']
def test_spark_job_remove(spark_job_provisioner):
    """remove() deletes the notebook object from the code bucket."""
    key = 's3://test/test-notebook.ipynb'
    stub = Stubber(spark_job_provisioner.s3)
    stub.add_response(
        'delete_object',
        {'DeleteMarker': False},
        {
            'Bucket': settings.AWS_CONFIG['CODE_BUCKET'],
            'Key': key,
        },
    )
    with stub:
        spark_job_provisioner.remove(key)
def test_spark_job_get(spark_job_provisioner):
    """get() fetches the notebook object and returns the raw response."""
    key = 's3://test/test-notebook.ipynb'
    stub = Stubber(spark_job_provisioner.s3)
    canned = {'Body': 'content'}
    stub.add_response(
        'get_object',
        canned,
        {
            'Bucket': settings.AWS_CONFIG['CODE_BUCKET'],
            'Key': key,
        },
    )
    with stub:
        assert spark_job_provisioner.get(key) == canned
def test_multipart_download_with_multiple_parts_and_extra_args(self):
    """Extra args (RequestPayer) are forwarded on each ranged GetObject.

    The stubber is used as a context manager so it is deactivated after
    the download; the original called activate() and never deactivated,
    leaving the client stubbed for anything that ran afterwards.
    """
    client = Session().create_client('s3')
    stubber = Stubber(client)
    response_body = b'foobarbaz'
    response = {'Body': six.BytesIO(response_body)}
    expected_params = {
        'Range': mock.ANY, 'Bucket': mock.ANY, 'Key': mock.ANY,
        'RequestPayer': 'requester'}
    stubber.add_response('get_object', response, expected_params)
    with stubber:
        downloader = MultipartDownloader(
            client, TransferConfig(), InMemoryOSLayer({}),
            SequentialExecutor)
        downloader.download_file(
            'bucket', 'key', 'filename', len(response_body),
            {'RequestPayer': 'requester'})
        stubber.assert_no_pending_responses()
def test_table_scan_can_be_stubbed_with_expressions(self):
    """Condition-expression objects can be used as expected_params."""
    table = self.resource.Table('mytable')
    filter_expr = Attr('myattr').eq('foo') & (
        Attr('myattr2').lte('buzz') | Attr('myattr2').gte('fizz')
    )
    stub = Stubber(table.meta.client)
    stub.add_response(
        'scan',
        {'Items': []},
        expected_params={
            'TableName': 'mytable',
            'FilterExpression': filter_expr,
        },
    )
    with stub:
        response = table.scan(FilterExpression=filter_expr)
    self.assertEqual([], response['Items'])
    stub.assert_no_pending_responses()
def test_ensure_cfn_bucket_exists(self):
    """When head_bucket succeeds, no create_bucket call is made."""
    session = get_session("us-east-1")
    provider = Provider(session)
    action = BaseAction(
        context=mock_context("mynamespace"),
        provider_builder=MockProviderBuilder(provider),
    )
    stub = Stubber(action.s3_conn)
    stub.add_response(
        "head_bucket",
        service_response={},
        expected_params={"Bucket": ANY},
    )
    with stub:
        action.ensure_cfn_bucket()
def _boto3_stubber(service, mocked_requests):
    """Build a boto3 client whose API calls are served by a Stubber."""
    client = boto3.client(service, region)
    stub = Stubber(client)
    # Keep a reference so the test teardown can deactivate it later.
    created_stubbers.append(stub)
    # Accept a single request or a list of them.
    requests = (mocked_requests if isinstance(mocked_requests, list)
                else [mocked_requests])
    for request in requests:
        stub.add_response(
            request.method,
            request.response,
            expected_params=request.expected_params,
        )
    stub.activate()
    # Track the mocked client per service; mocking the same service twice
    # replaces the previous client.
    mocked_clients[service] = client
    return client
def test_create_cluster_valid_parameters(cluster_provisioner):
    """Test that the parameters passed down to run_job_flow are valid"""
    stub = Stubber(cluster_provisioner.emr)
    canned = {'JobFlowId': 'job-flow-id'}
    stub.add_response('run_job_flow', canned)
    with stub:
        jobflow_id = cluster_provisioner.start(
            user_username='******',
            user_email='*****@*****.**',
            identifier='cluster',
            emr_release='5.0.0',
            size=3,
            public_key='public-key',
        )
    assert jobflow_id == canned['JobFlowId']
class TestRDSPagination(BaseSessionTest):
    """Pagination tests for RDS download_db_log_file_portion."""

    def setUp(self):
        super(TestRDSPagination, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            'rds', self.region)
        self.stubber = Stubber(self.client)

    def test_can_specify_zero_marker(self):
        """A StartingToken of '0' must be sent to the service as
        Marker='0' rather than being dropped as falsy."""
        service_response = {
            'LogFileData': 'foo',
            'Marker': '2',
            'AdditionalDataPending': True
        }
        expected_params = {
            'DBInstanceIdentifier': 'foo',
            'LogFileName': 'bar',
            'NumberOfLines': 2,
            'Marker': '0'
        }
        function_name = 'download_db_log_file_portion'
        # The stubber will assert that the function is called with the
        # expected parameters.
        self.stubber.add_response(
            function_name, service_response, expected_params)
        # Context manager instead of bare activate(): the original never
        # deactivated the stubber, leaking it into subsequent tests.
        with self.stubber:
            try:
                paginator = self.client.get_paginator(function_name)
                result = paginator.paginate(
                    DBInstanceIdentifier='foo',
                    LogFileName='bar',
                    NumberOfLines=2,
                    PaginationConfig={
                        'StartingToken': '0',
                        'MaxItems': 3
                    }).build_full_result()
                self.assertEqual(result['LogFileData'], 'foo')
                self.assertIn('NextToken', result)
            except StubAssertionError as e:
                self.fail(str(e))
class TestS3ObjectSummary(unittest.TestCase):
    """ObjectSummary loads via HeadObject but exposes ListObjects attrs."""

    def setUp(self):
        self.session = boto3.session.Session(
            aws_access_key_id='foo',
            aws_secret_access_key='bar',
            region_name='us-west-2')
        self.s3 = self.session.resource('s3')
        self.obj_summary = self.s3.ObjectSummary('my_bucket', 'my_key')
        self.obj_summary_size = 12
        self.stubber = Stubber(self.s3.meta.client)
        self.stubber.activate()
        self.stubber.add_response(
            method='head_object',
            service_response={
                'ContentLength': self.obj_summary_size,
                'ETag': 'my-etag',
                'ContentType': 'binary',
            },
            expected_params={
                'Bucket': 'my_bucket',
                'Key': 'my_key',
            })

    def tearDown(self):
        self.stubber.deactivate()

    def test_has_load(self):
        self.assertTrue(
            hasattr(self.obj_summary, 'load'),
            'load() was not injected onto ObjectSummary resource.')

    def test_autoloads_correctly(self):
        # HeadObject returns ContentLength; the resource maps this onto
        # Size from the ListObjects shape it is modeled after.
        self.assertEqual(self.obj_summary.size, self.obj_summary_size)

    def test_cannot_access_other_non_related_parameters(self):
        # Even though HeadObject performed the load, only attributes from
        # the ListObjects shape should be exposed.
        self.assertFalse(hasattr(self.obj_summary, 'content_length'))
class TestMturk(BaseSessionTest):
    """Aliased MTurk operation names remain callable."""

    def setUp(self):
        super(TestMturk, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client('mturk', self.region)
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    def test_list_hits_aliased(self):
        """Both the legacy alias and the current method name hit the
        same list_hits_for_qualification_type operation."""
        for _ in range(2):
            self.stubber.add_response('list_hits_for_qualification_type', {})
        params = {'QualificationTypeId': 'foo'}
        self.client.list_hi_ts_for_qualification_type(**params)
        self.client.list_hits_for_qualification_type(**params)
        self.stubber.assert_no_pending_responses()
class TestSagemaker(BaseSessionTest):
    """Event prefix aliasing for the sagemaker service."""

    def setUp(self):
        super(TestSagemaker, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client('sagemaker', self.region)
        self.stubber = Stubber(self.client)
        self.stubber.activate()
        self.hook_calls = []

    def _hook(self, **kwargs):
        self.hook_calls.append(kwargs['event_name'])

    def tearDown(self):
        self.stubber.deactivate()

    def test_event_with_old_prefix(self):
        """Handlers registered under the old event prefix still fire."""
        self.client.meta.events.register(
            'provide-client-params.sagemaker.ListEndpoints', self._hook)
        self.stubber.add_response('list_endpoints', {'Endpoints': []})
        self.client.list_endpoints()
        self.assertEqual(
            self.hook_calls,
            ['provide-client-params.sagemaker.ListEndpoints'])

    def test_event_with_new_prefix(self):
        """Handlers registered under the new 'api.' prefix also fire, and
        are invoked with the old-style event name."""
        self.client.meta.events.register(
            'provide-client-params.api.sagemaker.ListEndpoints', self._hook)
        self.stubber.add_response('list_endpoints', {'Endpoints': []})
        self.client.list_endpoints()
        self.assertEqual(
            self.hook_calls,
            ['provide-client-params.sagemaker.ListEndpoints'])
def test_put_persistent_graph(self, mocker: MockerFixture) -> None:
    """Test put_persistent_graph."""
    graph_location = {"Bucket": "test-bucket", "Key": "something.json"}
    mocker.patch.object(
        CfnginContext, "persistent_graph_location", graph_location
    )
    mocker.patch.object(CfnginContext, "persistent_graph_locked", True)
    mocker.patch.object(CfnginContext, "persistent_graph_lock_code", "123")
    ctx = CfnginContext()
    ctx.persistent_graph = Graph.from_dict(self.persist_graph_raw, context=ctx)
    serialized = json.dumps(
        self.persist_graph_raw, default=json_serial, indent=4
    ).encode()
    stub = Stubber(ctx.s3_client)
    stub.add_response(
        "put_object",
        {},
        {
            "Body": serialized,
            "ServerSideEncryption": "AES256",
            "ACL": "bucket-owner-full-control",
            "ContentType": "application/json",
            "Tagging": "cfngin_lock_code=123",
            **ctx.persistent_graph_location,
        },
    )
    with stub:
        assert not ctx.put_persistent_graph("123")
def test_authenticate_user(self, _):
    """Full SRP auth handshake: initiate_auth then the challenge reply."""
    stub = Stubber(self.aws.client)
    # The stubber replays responses in order, so queue the whole
    # SRP-auth call sequence up front.
    stub.add_response(
        method='initiate_auth',
        service_response={
            'ChallengeName': 'PASSWORD_VERIFIER',
            'ChallengeParameters': {},
        },
        expected_params={
            'AuthFlow': 'USER_SRP_AUTH',
            'AuthParameters': _mock_get_params(None),
            'ClientId': self.app_id,
        })
    stub.add_response(
        method='respond_to_auth_challenge',
        service_response={
            'AuthenticationResult': {
                'IdToken': 'dummy_token',
                'AccessToken': 'dummy_token',
                'RefreshToken': 'dummy_token',
            }
        },
        expected_params={
            'ClientId': self.app_id,
            'ChallengeName': 'PASSWORD_VERIFIER',
            'ChallengeResponses': {},
        })
    with stub:
        tokens = self.aws.authenticate_user()
        result = tokens['AuthenticationResult']
        for token_name in ('IdToken', 'AccessToken', 'RefreshToken'):
            self.assertTrue(token_name in result)
        stub.assert_no_pending_responses()
def _get_stubber(resp_trails, resp_trail_status, resp_event_selectors):
    """Return a (client, stubber) pair preloaded with the three CloudTrail
    responses in the order the code under test requests them."""
    client = boto3.client("cloudtrail")
    stub = Stubber(client)
    for method, canned in (
        ("describe_trails", resp_trails),
        ("get_trail_status", resp_trail_status),
        ("get_event_selectors", resp_event_selectors),
    ):
        stub.add_response(method, canned)
    return client, stub
def test_bucket(self, mocked_bucket, mocked_info, mocked_client):
    """ls_s3 with bucket=True formats the bucket path per output flag."""
    ls_s3(bucket=True)
    mocked_info.assert_called_with(ANY, True, False, [])

    # Stub get_bucket_location for the url case; the stubber stays active
    # because the mocked client is used by the calls below.
    s3 = boto3.client("s3")
    stub = Stubber(s3)
    stub.add_response(
        "get_bucket_location", {"LocationConstraint": "us-east-1"})
    stub.activate()
    mocked_client.return_value = s3

    cases = (
        ({"url": True}, "https://s3-us-east-1.amazonaws.com/kazhala-lol/\n"),
        ({"uri": True}, "s3://kazhala-lol/\n"),
        ({"name": True}, "kazhala-lol\n"),
        ({"arn": True}, "arn:aws:s3:::kazhala-lol/\n"),
    )
    for flag, expected_output in cases:
        self.capturedOutput.truncate(0)
        self.capturedOutput.seek(0)
        ls_s3(bucket=True, bucketpath="kazhala-lol/", **flag)
        self.assertEqual(self.capturedOutput.getvalue(), expected_output)
def mock_s3_get_object(bucket_name, granted_prefixes, key, success_response):
    """Stub S3 get_object: succeed when the key matches a granted prefix,
    otherwise raise a client error. Returns the active stubber."""
    _keep_it_real()
    client = boto3.real_client("s3")
    stub = Stubber(client)
    granted = any(key.startswith(prefix) for prefix in granted_prefixes)
    if granted:
        stub.add_response(
            "get_object",
            success_response,
            {"Bucket": bucket_name, "Key": key},
        )
    else:
        stub.add_client_error(
            "get_object", expected_params={"Bucket": bucket_name, "Key": key}
        )
    # Replace generate_presigned_url so it runs without AWS credentials.
    client.generate_presigned_url = lambda op, Params, ExpiresIn, HttpMethod: fake_url(
        Params["Bucket"], Params["Key"]
    )
    stub.activate()
    # Redirect boto3.client to hand out the stubbed client.
    boto3.client = lambda service, region_name=None, config=None: client
    return stub
def test_get_elb_targets(self):
    """_get_elb_targets() records one instance per target description."""
    stub = Stubber(self.rolling_update.elb_client)
    canned = {
        'TargetHealthDescriptions': [
            {
                'Target': {
                    'Id': 'i-121121212',
                    'Port': 80,
                    'AvailabilityZone': 'us-east-1',
                },
                'HealthCheckPort': '80',
                'TargetHealth': {'State': 'healthy'},
            },
        ]
    }
    target_group_arn = self.config.get("target-group").get("arn")
    stub.add_response(
        'describe_target_health',
        canned,
        {'TargetGroupArn': target_group_arn},
    )
    with stub:
        self.rolling_update._get_elb_targets()
    self.assertEqual(len(self.rolling_update.instances), 1)
def test_table_query_can_be_stubbed_with_expressions(self):
    """Key and filter expression objects work as expected_params."""
    table = self.resource.Table('mytable')
    key_expr = Key('mykey').eq('testkey')
    filter_expr = Attr('myattr').eq('foo') & (
        Attr('myattr2').lte('buzz') | Attr('myattr2').gte('fizz')
    )
    stub = Stubber(table.meta.client)
    stub.add_response(
        'query',
        {'Items': []},
        expected_params={
            'TableName': 'mytable',
            'KeyConditionExpression': key_expr,
            'FilterExpression': filter_expr,
        },
    )
    with stub:
        response = table.query(
            KeyConditionExpression=key_expr,
            FilterExpression=filter_expr,
        )
    assert response['Items'] == []
    stub.assert_no_pending_responses()
def test_get_asg_scheduled_actions():
    """get_asg_scheduled_actions returns the raw scheduled-action dicts.

    The stubber is used as a context manager so it is always deactivated;
    the original called activate() and never deactivated, leaking the
    stubbed client into later tests.
    """
    asg_boto3_client = boto3.client('autoscaling')
    stubber = Stubber(asg_boto3_client)
    action = {
        'AutoScalingGroupName': 'foo',
        'ScheduledActionName': 'foo',
        'ScheduledActionARN': 'foo',
        'Time': datetime(2020, 1, 1),
        'StartTime': datetime(2020, 1, 1),
        'EndTime': datetime(2020, 1, 1),
        'Recurrence': 'foo',
        'MinSize': 123,
        'MaxSize': 123,
        'DesiredCapacity': 123
    }
    response = {'ScheduledUpdateGroupActions': [action]}
    stubber.add_response('describe_scheduled_actions', response,
                         {'AutoScalingGroupName': 'foo'})
    with stubber:
        client = AutoScalingService(asg_boto3_client)
        actions = client.get_asg_scheduled_actions('foo')
    assert actions == [action]
def test_on_done_delete_request_payer(self) -> None:
    """Test on_done."""
    client = boto3.client("s3")
    stub = Stubber(client)
    stub.add_response(
        "delete_object",
        {},
        {
            "Bucket": self.bucket,
            "Key": self.key,
            "RequestPayer": "requester",
        },
    )
    call_args = FakeTransferFutureCallArgs(
        copy_source={"Bucket": self.bucket, "Key": self.key},
        extra_args={"RequestPayer": "requester"},
    )
    future = Mock(meta=FakeTransferFutureMeta(call_args=call_args))
    with stub:
        subscriber = DeleteCopySourceObjectSubscriber(client)
        assert not subscriber.on_done(future)
    future.set_exception.assert_not_called()
def test_spark_job_add(notebook_maker, spark_job_provisioner):
    """add() uploads the notebook under jobs/<identifier>/ and returns the key."""
    notebook = notebook_maker()
    identifier = 'test-identifier'
    key = 'jobs/%s/%s' % (identifier, notebook.name)
    stub = Stubber(spark_job_provisioner.s3)
    stub.add_response(
        'put_object',
        {
            'Expiration': 'whatever',
            'ETag': '12345',
            'VersionId': '1.0',
        },
        {
            'Body': notebook,
            'Bucket': settings.AWS_CONFIG['CODE_BUCKET'],
            'Key': key,
        },
    )
    with stub:
        result = spark_job_provisioner.add(
            identifier=identifier,
            notebook_file=notebook,
        )
    assert result == key
def test_not_remediated(mocker):
    """CIS 2.9 remediation flags the finding but does not resolve it when
    describe_flow_logs comes back empty (remediation not verified).

    The patch order matters: BotoSession.client is redirected to
    mock_select only after both stubbers are active.
    """
    #--------------------------
    # Test data
    #
    test_event = open(test_data + 'cis29.json')
    event = json.loads(test_event.read())
    test_event.close()
    # Fake credentials/role so the lambda does not need real AWS access.
    os.environ['AWS_SESSION_TOKEN'] = 'FAKETOKEN'
    os.environ['FLOW_LOG_ROLE_ARN'] = 'FAKELOGROLEARN'
    #--------------------------
    # Mock/stub
    #
    # Mock the constructor. We don't need the session created
    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)
    mocker.patch('lib.awsapi_helpers.AWSClient.connect', return_value=None)
    # Real (but stubbed) clients: index 0 = logs, index 1 = ec2.
    awsc = [boto3.client('logs'), boto3.client('ec2')]
    def mock_select(thing1, thing2):
        if thing2 == 'logs':
            return awsc[0]
        else:
            return awsc[1]
    awsc_s = Stubber(awsc[0])
    awsc_s.add_response('create_log_group', {})
    awsc_s.activate()
    aws2c_s = Stubber(awsc[1])
    aws2c_s.add_response('create_flow_logs', {})
    # Empty FlowLogs => the lambda cannot confirm remediation succeeded.
    aws2c_s.add_response('describe_flow_logs', {'FlowLogs': []})
    aws2c_s.activate()
    # redirect to mock_select above to return the proper stub
    mocker.patch('lib.awsapi_helpers.BotoSession.client', new=mock_select)
    # Mock notifications
    init = mocker.patch('lib.sechub_findings.Finding.flag')
    resolve = mocker.patch('lib.sechub_findings.Finding.resolve')
    update = mocker.patch('lib.sechub_findings.Finding.update_text')
    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)
    #--------------------------
    # Run the lambda
    #
    cis29.lambda_handler(event, None)
    init.assert_called_once_with(
        'INITIAL: "Enable VPC flow logging in all VPCs" remediation started')
    update.assert_called_once_with(
        'FAILED: "Enable VPC flow logging in all VPCs" remediation failed. Please remediate manually'
    )
    resolve.assert_not_called()
def test_set_tags(self, mocked_input, mocked_client):
    """set_tags() stores user input and, with original=True, prints the
    object's existing tags fetched via get_object_tagging."""
    mocked_input.return_value = "hello=world"
    self.s3_args.set_tags()
    self.assertEqual(self.s3_args._extra_args["Tagging"], "hello=world")

    data_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "../data/s3_tags.json"
    )
    with open(data_path, "r") as file:
        response = json.load(file)

    # original=True, no version: fetch and display existing tags.
    self.capturedOutput.truncate(0)
    self.capturedOutput.seek(0)
    s3 = boto3.client("s3")
    stub = Stubber(s3)
    stub.add_response("get_object_tagging", response)
    stub.activate()
    mocked_client.return_value = s3
    mocked_input.return_value = "foo=boo"
    self.s3_args.set_tags(original=True)
    self.assertEqual(self.s3_args._extra_args["Tagging"], "foo=boo")
    self.assertRegex(self.capturedOutput.getvalue(), r"Orignal: name=yes")

    # original=True with an explicit version id: same behavior.
    self.capturedOutput.truncate(0)
    self.capturedOutput.seek(0)
    s3 = boto3.client("s3")
    stub = Stubber(s3)
    stub.add_response("get_object_tagging", response)
    stub.activate()
    mocked_client.return_value = s3
    mocked_input.return_value = "foo=boo"
    self.s3_args.set_tags(
        original=True, version=[{"Key": "hello", "VersionId": "11111111"}]
    )
    self.assertEqual(self.s3_args._extra_args["Tagging"], "foo=boo")
    self.assertRegex(self.capturedOutput.getvalue(), r"Orignal: name=yes")
def test_can_clean_snapshots(self, mock_sleep):
    """clean_snapshots deletes every matching snapshot except ones managed
    by Data Lifecycle Manager (aws:dlm:... tag).

    The stubber is used as a context manager so it is deactivated on the
    way out; the original called activate() and never deactivated.
    """
    client = boto3.client('ec2')
    stubber = Stubber(client)
    tags = [
        {
            "Key": "UUID",
            "Value": "01c6b711-a7d4-4bdf-bb2b-10b4b60594bc"
        },
        {
            "Key": "Name",
            "Value": "myvolume"
        },
    ]
    snapshots = [
        {
            "StartTime": datetime.datetime.now(),
            "State": "available",
            "SnapshotId": "old",
            "Tags": tags
        },
        {
            "StartTime": datetime.datetime.now() + datetime.timedelta(days=1),
            "State": "available",
            "SnapshotId": "new",
            "Tags": tags
        },
        {
            "StartTime": datetime.datetime.now() + datetime.timedelta(days=2),
            "State": "pending",
            "SnapshotId": "newest",
            "Tags": tags
        },
        {
            # DLM-managed snapshot: must NOT be deleted.
            "StartTime": datetime.datetime.now() + datetime.timedelta(days=2),
            "State": "pending",
            "SnapshotId": "backup",
            "Tags": tags + [{
                "Key": "aws:dlm:lifecycle:schedule-name",
                "Value": "Default Schedule"
            }]
        },
    ]
    stubber.add_response('describe_snapshots', {"Snapshots": snapshots})
    stubber.add_response('delete_snapshot', [], {"SnapshotId": "old"})
    stubber.add_response('delete_snapshot', [], {"SnapshotId": "new"})
    stubber.add_response('delete_snapshot', [], {"SnapshotId": "newest"})
    with stubber:
        ebspin_ec2 = ec2.Ec2(client)
        ebspin_ec2.clean_snapshots("01c6b711-a7d4-4bdf-bb2b-10b4b60594bc")
        stubber.assert_no_pending_responses()
def mock_sns_publish(sns_event, topic_arn, subject, response_id):
    """Stub SNS publish with a canned MessageId and redirect boto3.client
    to the stubbed client. Returns the active stubber."""
    _keep_it_real()
    client = boto3.real_client("sns")
    stub = Stubber(client)
    # Canned publish response.
    stub.add_response(
        "publish",
        {"MessageId": response_id},
        {
            "TopicArn": topic_arn,
            "Message": sns_event,
            "Subject": subject,
            "MessageStructure": "json",
        },
    )
    stub.activate()
    # Hand the stubbed client to any code that calls boto3.client.
    boto3.client = lambda service: client
    return stub
def test_modify_workspace_properties_Auto_stop(mocker):
    """modify_workspace_properties('123qwer', 'AUTO_STOP') sends the right
    WorkspaceProperties and returns the '-H-' marker.

    The stubber is used as a context manager so it is deactivated even if
    the assertion fails; the original only called deactivate() after the
    assert, leaking an active stubber on failure.
    """
    settings = {
        'region': 'us-east-1',
        'hourlyLimits': 10,
        'testEndOfMonth': 'yes',
        'isDryRun': False,
        'startTime': 1,
        'endTime': 2
    }
    workspace_helper = WorkspacesHelper(settings)
    client_stubber = Stubber(workspace_helper.workspaces_client)
    expected_params = {
        'WorkspaceId': '123qwer',
        'WorkspaceProperties': {'RunningMode': 'AUTO_STOP'}
    }
    client_stubber.add_response(
        'modify_workspace_properties', {}, expected_params)
    workspace_id = '123qwer'
    new_running_mode = 'AUTO_STOP'
    with client_stubber:
        result = workspace_helper.modify_workspace_properties(
            workspace_id, new_running_mode)
    assert result == '-H-'
def _can_use_parameter_in_client_call(session, case, use_alias=True):
    """Call the cased operation with the (aliased) parameter and fail the
    test if botocore rejects it as invalid."""
    client = session.create_client(
        case['service'], region_name='us-east-1',
        aws_access_key_id='foo', aws_secret_access_key='bar')
    stub = Stubber(client)
    stub.activate()
    operation = case['operation']
    # Copy so the shared case dict is never mutated.
    params = dict(case.get('extra_args', {}))
    param_name = case['new_name'] if use_alias else case['original_name']
    params[param_name] = case['parameter_value']
    stub.add_response(operation, case.get('stubbed_response', {}))
    try:
        getattr(client, operation)(**params)
    except ParamValidationError as e:
        raise AssertionError(
            'Expecting %s to be valid parameter for %s.%s but received '
            '%s.' % (
                case['new_name'], case['service'], case['operation'], e)
        )
def sts_boto_patch(client_type):
    "A patch for the boto.client('sts') call made in new_contest"
    if client_type != 'sts':
        raise ValueError('Expected "sts" as the patched boto client')
    sts = botocore.session.get_session().create_client('sts')
    stub = Stubber(sts)
    canned = {
        'Credentials': {
            'AccessKeyId': 'access_key_id_string',
            'SecretAccessKey': 'secret_access_string',
            'SessionToken': 'session_string',
            'Expiration': datetime.datetime.now(),
        },
        'AssumedRoleUser': {
            'AssumedRoleId': 'assumed_role_id_string_get_long',
            'Arn': 'arn_string_this_needs_to_be_long',
        },
        'PackedPolicySize': 123,
    }
    stub.add_response(
        'assume_role',
        canned,
        {'RoleArn': FAKE_ARN, 'RoleSessionName': 'LoadContest'},
    )
    stub.activate()
    return sts
def test_check_workspace_available_no(mocker):
    """With no CloudWatch datapoints, the workspace is reported as not
    available on the first day of the month.

    The stubber is used as a context manager so it is deactivated even if
    the assertion fails; the original only called deactivate() after the
    assert, leaking an active stubber on failure.
    """
    settings = {
        'region': 'us-east-1',
        'hourlyLimits': 10,
        'testEndOfMonth': 'yes',
        'isDryRun': True,
        'startTime': 1,
        'endTime': 2
    }
    # First two days of the current month, matching the code under test.
    start_time = time.strftime("%Y-%m") + '-01T00:00:00Z'
    end_time = time.strftime("%Y-%m") + '-02T00:00:00Z'
    workspace_helper = WorkspacesHelper(settings)
    client_stubber = Stubber(workspace_helper.cloudwatch_client)
    workspace_id = '123qwer'
    response = {
        'Datapoints': []
    }
    expected_params = {
        'Dimensions': [
            {
                'Name': 'WorkspaceId',
                'Value': workspace_id
            }
        ],
        'Namespace': 'AWS/WorkSpaces',
        'MetricName': 'Available',
        'StartTime': start_time,
        'EndTime': end_time,
        'Period': 300,
        'Statistics': ['Maximum']
    }
    client_stubber.add_response(
        'get_metric_statistics', response, expected_params)
    with client_stubber:
        result = workspace_helper.check_if_workspace_available_on_first_day(
            workspace_id)
    assert result is False
def test_list_user_tasks(ddb_stubber: Stubber):
    """fetch_user_tasks queries the tasks table for the authorized user."""
    expected_query = {
        'KeyConditionExpression': Key('user').eq('user-sub'),
        'TableName': 'tasks',
        'ProjectionExpression': '#attr_objective, #attr_original_objective, '
                                '#attr_personal_objective, #attr_completed, '
                                '#attr_tasks, #attr_user',
        'ExpressionAttributeNames': {
            '#attr_completed': 'completed',
            '#attr_objective': 'objective',
            '#attr_original_objective': 'original-objective',
            '#attr_personal_objective': 'personal-objective',
            '#attr_tasks': 'tasks',
            '#attr_user': '******'
        },
    }
    ddb_stubber.add_response('query', {}, expected_query)

    event = HTTPEvent({
        "pathParameters": {
            "sub": 'user-sub'
        },
        "requestContext": {
            "authorizer": {
                "claims": {
                    "sub": 'user-sub'
                }
            }
        }
    })
    response = fetch_user_tasks(event)

    assert response.status == 200
    ddb_stubber.assert_no_pending_responses()
def test_delete_ecr_images_failures() -> None:
    """Test delete_ecr_images with failures."""
    ecr_client = boto3.client("ecr")
    stubber = Stubber(ecr_client)
    repo_name = "test-repo"
    image_ids: List[ImageIdentifierTypeDef] = [{"imageDigest": "image0"}]
    # batch_delete_image reports a per-image failure in its response body.
    failure = {
        "imageId": {"imageDigest": "abc123"},
        "failureCode": "InvalidImageDigest",
        "failureReason": "reason",
    }
    stubber.add_response(
        "batch_delete_image",
        {"imageIds": image_ids, "failures": [failure]},
        {"repositoryName": repo_name, "imageIds": image_ids},
    )
    # Any reported failure must surface as a ValueError.
    with stubber, pytest.raises(ValueError):
        delete_ecr_images(ecr_client, image_ids=image_ids, repository_name=repo_name)
def test_delete_object_single(self, mocked_bucket, mocked_object, mocked_confirm, mocked_client):
    """Deleting a single object prints the dryrun line then the delete line."""
    # param test
    delete_s3()
    mocked_bucket.assert_called_once()
    mocked_object.assert_called_once_with(version=False, multi_select=True, deletemark=False)

    # single deletion test
    self.capturedOutput.truncate(0)
    self.capturedOutput.seek(0)
    s3 = boto3.client("s3")
    stubber = Stubber(s3)
    stubber.add_response(
        "delete_object",
        {
            "DeleteMarker": False,
            "VersionId": "string",
            "RequestCharged": "requester",
        },
        expected_params={
            "Bucket": "kazhala-lol",
            "Key": "wtf.pem",
        },
    )
    mocked_client.return_value = s3
    # Use the stubber as a context manager instead of a bare activate():
    # the original never deactivated it, leaking an active stub into later
    # tests whenever an assertion failed.
    with stubber:
        delete_s3(bucket="kazhala-lol/wtf.pem")
    self.assertEqual(
        self.capturedOutput.getvalue(),
        "(dryrun) delete: s3://kazhala-lol/wtf.pem\ndelete: s3://kazhala-lol/wtf.pem\n",
    )
    mocked_object.assert_called_with(version=False, multi_select=True, deletemark=False)
def test_adjusts_nothing_when_equal(cluster_response):
    """No scaling action is taken when the desired count already matches."""
    ecs = lambda_function.ecs_client()
    stubber = Stubber(ecs)
    service = {
        'serviceArn': AGENT_SERVICE_ARN,
        'serviceName': 'AgentService',
        'clusterArn': CLUSTER_ARN,
        'desiredCount': 3,
    }
    stubber.add_response(
        'describe_services',
        {'services': [service]},
        {'cluster': 'cluster1', 'services': [AGENT_SERVICE_ARN]})
    stubber.add_response(
        'describe_clusters',
        cluster_response,
        {'clusters': ['cluster1']})

    with stubber:
        result = lambda_function.adjust_service_desired_count(
            ecs, 'cluster1', AGENT_SERVICE_ARN)
    assert result is None
def test_get(ddb_stubber: Stubber):
    """RewardsService.get issues the expected projected get_item request."""
    item = {
        'name': {'S': 'An item'},
        'description': {'S': 'An item description'},
        'release-id': {'N': '312345'},
        'category': {'S': 'category'},
    }
    expected_params = {
        'TableName': 'rewards',
        'Key': {
            'category': 'category',
            'release-id': 312345
        },
        'ProjectionExpression': '#model_name, category, description, #model_release_id, price',
        'ExpressionAttributeNames': {
            '#model_name': 'name',
            '#model_release_id': 'release-id'
        },
    }
    ddb_stubber.add_response('get_item', {'Item': item}, expected_params)
    RewardsService.get("category", 3, 12345)
    ddb_stubber.assert_no_pending_responses()
def test_ensure_cfn_bucket_does_not_exist_us_east(self):
    """Test ensure cfn bucket does not exist us east."""
    session = get_session("us-east-1")
    provider = Provider(session)
    action = BaseAction(
        context=mock_context("mynamespace"),
        provider_builder=MockProviderBuilder(provider),
    )
    stubber = Stubber(action.s3_conn)
    # head_bucket 404 means the bucket is missing ...
    stubber.add_client_error(
        "head_bucket",
        service_error_code="NoSuchBucket",
        service_message="Not Found",
        http_status_code=404,
    )
    # ... so a create_bucket call must follow.
    stubber.add_response(
        "create_bucket",
        service_response={},
        expected_params={"Bucket": ANY},
    )
    with stubber:
        action.ensure_cfn_bucket()
def test_lock_persistent_graph_locked(self):
    """Error raised when object is locked."""
    code = '0000'
    context = Context(config=self.persist_graph_config)
    context._s3_bucket_verified = True
    context._persistent_graph = Graph()

    stubber = Stubber(context.s3_conn)
    # NOTE(review): expected_params is built but never used — it is not
    # passed to add_response below, so the stub does not validate the
    # request parameters. Presumably it was meant to be add_response's
    # third argument; confirm and either wire it in or delete it.
    expected_params = {
        'Tagging': {
            'TagSet': gen_tagset({context._persistent_graph_lock_tag: code})
        }
    }
    expected_params.update(context.persistent_graph_location)

    # The stubbed tag value '1111' differs from `code` ('0000'), i.e. the
    # graph is already locked by someone else.
    stubber.add_response('get_object_tagging', {
        'TagSet': gen_tagset({context._persistent_graph_lock_tag: '1111'})
    }, context.persistent_graph_location)

    with stubber:
        with self.assertRaises(PersistentGraphLocked):
            context.lock_persistent_graph(code)
        stubber.assert_no_pending_responses()
def test_list_tasks(self):
    """list_tasks follows pagination (nextToken) and concatenates pages."""
    ecs = boto3.client('ecs', region_name="eu-west-1")
    stubber = Stubber(ecs)
    # First page returns a nextToken ...
    stubber.add_response('list_tasks', service_response={
        "taskArns": ["task1", "task2"],
        "nextToken": "next_token"
    }, expected_params={
        "cluster": "my_cluster",
        "family": "my_family"
    })
    # ... which must be echoed back on the second request.
    stubber.add_response('list_tasks',
                         service_response={"taskArns": ["task3", "task4"]},
                         expected_params={
                             "cluster": "my_cluster",
                             "family": "my_family",
                             "nextToken": "next_token"
                         })
    # Context manager instead of a bare activate(): the original never
    # deactivated the stubber, leaking it when the assertion failed.
    with stubber:
        inst = containers.ContainerService(cluster="my_cluster", ecs=ecs)
        tasks = inst.list_tasks(family="my_family")
    self.assertEqual(tasks, ["task1", "task2", "task3", "task4"])
def test_create(self):
    """Monitor pulls the initial stack events/resources at construction."""
    # GIVEN
    events_response = self._build_stack_events()
    resource_response = self._build_stack_resource_response()
    expected_params = {'StackName': TEST_STACK_ID}
    client = boto3.client('cloudformation')
    cf_stubber = Stubber(client)
    cf_stubber.add_response('describe_stack_events', events_response, expected_params)
    cf_stubber.add_response('describe_stack_resources', resource_response, expected_params)

    # WHEN — run inside the stubber context so the stub is always
    # deactivated; the original activate() was never paired with a
    # deactivate() and leaked when an assertion failed.
    with cf_stubber:
        context = _set_up_context(mock_client=client)
        mon = Monitor(context=context, stack_id=TEST_STACK_ID, operation='OPERATION')

    # THEN
    self.assertEqual(mon.stack_id, TEST_STACK_ID)
    self.assertEqual(mon.operation, 'OPERATION')
    self.assertEqual(mon.client, client)
    self.assertEqual(len(mon.events_seen), 1, "Expected to see 1 event")
def test_change_password(self, _):
    """change_password sends the old/new passwords with the access token;
    a None new password raises ParamValidationError."""
    # u = cognito_user(self.cognito_user_pool_id, self.app_id,
    #                  username=self.username)
    self.user.authenticate(self.password)
    stub = Stubber(self.user.client)

    # NOTE(review): the expected ProposedPassword is the redacted literal
    # '******' while the call below sends 'crazypassword$45DOG'; as written
    # the stubber would reject the request. This looks like a sanitization
    # artifact — confirm the real expected value.
    stub.add_response(
        method='change_password',
        service_response={'ResponseMetadata': {
            'HTTPStatusCode': 200
        }},
        expected_params={
            'PreviousPassword': self.password,
            'ProposedPassword': '******',
            'AccessToken': self.user.access_token
        })
    with stub:
        self.user.change_password(self.password, 'crazypassword$45DOG')
        stub.assert_no_pending_responses()

    # Validation failure path: None is not a valid proposed password.
    with self.assertRaises(ParamValidationError):
        self.user.change_password(self.password, None)
def test_tags_queues_on_create():
    """A broker configured with tags applies them to every declared queue."""
    # Given that I have an SQS broker with tags
    broker = SQSBroker(
        namespace="dramatiq_sqs_tests",
        tags={"key1": "value1", "key2": "value2"},
    )

    # And I've stubbed out all the relevant API calls
    stubber = Stubber(broker.sqs.meta.client)
    stubber.add_response("create_queue", {"QueueUrl": ""})
    stubber.add_response(
        "tag_queue",
        {},
        {
            "QueueUrl": "",
            "Tags": {"key1": "value1", "key2": "value2"},
        },
    )

    # When I create a queue
    # Then the queue should have the specified tags
    with stubber:
        broker.declare_queue("test")
        stubber.assert_no_pending_responses()
def test_get_thing_and_certificate_exists():
    '''
    Test getting a thing with an existing certificate
    '''
    client = botocore.session.get_session().create_client('iot')
    stubber = Stubber(client)
    response = {
        'principals': [
            'arn:aws:iot:region:account_id:cert/foobar',
            'arn:aws:iot:region:account_id:cert/baz',
        ],
    }
    expected_params = {'thingName': 'my-test-core'}
    stubber.add_response('list_thing_principals', response, expected_params)
    # Context manager rather than a bare activate(): the original never
    # deactivated the stubber, leaking it if the assertion failed.
    with stubber:
        cert = get_certificate(
            thing='my-test-core',
            client=client,
        )
    assert cert == [
        'arn:aws:iot:region:account_id:cert/foobar',
        'arn:aws:iot:region:account_id:cert/baz',
    ]
class TestSSMStoreHandler(unittest.TestCase):
    """Tests for the ssmstore lookup `handler` function."""

    # Shared client; each test gets a fresh Stubber around it in setUp.
    client = boto3.client('ssm', region_name='us-east-1')

    def setUp(self):
        self.stubber = Stubber(self.client)
        self.get_parameters_response = {
            'Parameters': [{
                'Name': 'ssmkey',
                'Type': 'String',
                'Value': 'ssmvalue'
            }],
            'InvalidParameters': ['invalidssmparam']
        }
        self.invalid_get_parameters_response = {
            'InvalidParameters': ['ssmkey']
        }
        self.expected_params = {'Names': ['ssmkey'], 'WithDecryption': True}
        self.ssmkey = "ssmkey"
        self.ssmvalue = "ssmvalue"

    @mock.patch('stacker.lookups.handlers.ssmstore.get_session',
                return_value=SessionStub(client))
    def test_ssmstore_handler(self, mock_client):
        self.stubber.add_response('get_parameters',
                                  self.get_parameters_response,
                                  self.expected_params)
        with self.stubber:
            value = handler(self.ssmkey)
            self.assertEqual(value, self.ssmvalue)
            self.assertIsInstance(value, str)

    @mock.patch('stacker.lookups.handlers.ssmstore.get_session',
                return_value=SessionStub(client))
    def test_ssmstore_invalid_value_handler(self, mock_client):
        self.stubber.add_response('get_parameters',
                                  self.invalid_get_parameters_response,
                                  self.expected_params)
        # The original try/except with `assert True` passed even when no
        # exception was raised; assertRaises actually enforces the error.
        with self.stubber:
            with self.assertRaises(ValueError):
                handler(self.ssmkey)

    @mock.patch('stacker.lookups.handlers.ssmstore.get_session',
                return_value=SessionStub(client))
    def test_ssmstore_handler_with_region(self, mock_client):
        self.stubber.add_response('get_parameters',
                                  self.get_parameters_response,
                                  self.expected_params)
        region = "us-east-1"
        temp_value = "%s@%s" % (region, self.ssmkey)
        with self.stubber:
            value = handler(temp_value)
            self.assertEqual(value, self.ssmvalue)
class TestSSMStoreHandler(unittest.TestCase):
    """Tests for the SsmstoreLookup handler."""

    # Shared client; each test gets a fresh Stubber around it in setUp.
    client = boto3.client('ssm', region_name='us-east-1')

    def setUp(self):
        self.stubber = Stubber(self.client)
        self.get_parameters_response = {
            'Parameters': [
                {
                    'Name': 'ssmkey',
                    'Type': 'String',
                    'Value': 'ssmvalue'
                }
            ],
            'InvalidParameters': [
                'invalidssmparam'
            ]
        }
        self.invalid_get_parameters_response = {
            'InvalidParameters': [
                'ssmkey'
            ]
        }
        self.expected_params = {
            'Names': ['ssmkey'],
            'WithDecryption': True
        }
        self.ssmkey = "ssmkey"
        self.ssmvalue = "ssmvalue"

    @mock.patch('stacker.lookups.handlers.ssmstore.get_session',
                return_value=SessionStub(client))
    def test_ssmstore_handler(self, mock_client):
        self.stubber.add_response('get_parameters',
                                  self.get_parameters_response,
                                  self.expected_params)
        with self.stubber:
            value = SsmstoreLookup.handle(self.ssmkey)
            self.assertEqual(value, self.ssmvalue)
            self.assertIsInstance(value, str)

    @mock.patch('stacker.lookups.handlers.ssmstore.get_session',
                return_value=SessionStub(client))
    def test_ssmstore_invalid_value_handler(self, mock_client):
        self.stubber.add_response('get_parameters',
                                  self.invalid_get_parameters_response,
                                  self.expected_params)
        # The original try/except with `assert True` passed even when no
        # exception was raised; assertRaises actually enforces the error.
        with self.stubber:
            with self.assertRaises(ValueError):
                SsmstoreLookup.handle(self.ssmkey)

    @mock.patch('stacker.lookups.handlers.ssmstore.get_session',
                return_value=SessionStub(client))
    def test_ssmstore_handler_with_region(self, mock_client):
        self.stubber.add_response('get_parameters',
                                  self.get_parameters_response,
                                  self.expected_params)
        region = "us-east-1"
        temp_value = "%s@%s" % (region, self.ssmkey)
        with self.stubber:
            value = SsmstoreLookup.handle(temp_value)
            self.assertEqual(value, self.ssmvalue)
class TestS3Uploader(unittest.TestCase):
    """Tests for the CloudFormation S3Uploader helper.

    Fixes applied: the deprecated ``assertEquals`` alias (removed in
    Python 3.12) is replaced with ``assertEqual`` throughout, and a stray
    third ``format()`` argument is dropped from the no-version URL
    assertion in test_to_path_style_s3_url_us_east_1.
    """

    def setUp(self):
        self.s3client = botocore.session.get_session().create_client(
            's3', region_name="us-east-1")
        self.s3client_stub = Stubber(self.s3client)
        self.transfer_manager_mock = Mock(spec=S3Transfer)
        self.transfer_manager_mock.upload = Mock()
        self.bucket_name = "bucketname"
        self.prefix = None
        self.region = "us-east-1"

        self.s3uploader = S3Uploader(
            self.s3client, self.bucket_name, self.region, self.prefix,
            None, False, self.transfer_manager_mock)

    @patch("awscli.customizations.cloudformation.s3uploader.ProgressPercentage")
    def test_upload_successful(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"
        prefix = "SomePrefix"
        remote_path_with_prefix = "{0}/{1}".format(prefix, remote_path)
        s3uploader = S3Uploader(
            self.s3client, self.bucket_name, self.region, prefix, None,
            False, self.transfer_manager_mock)
        expected_upload_url = "s3://{0}/{1}/{2}".format(
            self.bucket_name, prefix, remote_path)

        # Setup mock to fake that file does not exist
        s3uploader.file_exists = Mock()
        s3uploader.file_exists.return_value = False

        upload_url = s3uploader.upload(file_name, remote_path)
        self.assertEqual(expected_upload_url, upload_url)

        expected_encryption_args = {
            "ServerSideEncryption": "AES256"
        }
        self.transfer_manager_mock.upload.assert_called_once_with(
            file_name, self.bucket_name, remote_path_with_prefix,
            expected_encryption_args, mock.ANY)
        s3uploader.file_exists.assert_called_once_with(remote_path_with_prefix)

    @patch("awscli.customizations.cloudformation.s3uploader.ProgressPercentage")
    def test_upload_idempotency(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"

        # Setup mock to fake that file was already uploaded
        self.s3uploader.file_exists = Mock()
        self.s3uploader.file_exists.return_value = True

        self.s3uploader.upload(file_name, remote_path)

        self.transfer_manager_mock.upload.assert_not_called()
        self.s3uploader.file_exists.assert_called_once_with(remote_path)

    @patch("awscli.customizations.cloudformation.s3uploader.ProgressPercentage")
    def test_upload_force_upload(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"
        expected_upload_url = "s3://{0}/{1}".format(self.bucket_name,
                                                    remote_path)

        # Set ForceUpload = True
        self.s3uploader = S3Uploader(
            self.s3client, self.bucket_name, self.region, self.prefix,
            None, True, self.transfer_manager_mock)

        # Pretend file already exists
        self.s3uploader.file_exists = Mock()
        self.s3uploader.file_exists.return_value = True

        # Because we forced an update, this should reupload even if file exists
        upload_url = self.s3uploader.upload(file_name, remote_path)
        self.assertEqual(expected_upload_url, upload_url)

        expected_encryption_args = {
            "ServerSideEncryption": "AES256"
        }
        self.transfer_manager_mock.upload.assert_called_once_with(
            file_name, self.bucket_name, remote_path,
            expected_encryption_args, mock.ANY)

        # Since ForceUpload=True, we should NEVER do the file-exists check
        self.s3uploader.file_exists.assert_not_called()

    @patch("awscli.customizations.cloudformation.s3uploader.ProgressPercentage")
    def test_upload_successful_custom_kms_key(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"
        kms_key_id = "kms_id"
        expected_upload_url = "s3://{0}/{1}".format(self.bucket_name,
                                                    remote_path)
        # Set KMS Key Id
        self.s3uploader = S3Uploader(
            self.s3client, self.bucket_name, self.region, self.prefix,
            kms_key_id, False, self.transfer_manager_mock)

        # Setup mock to fake that file does not exist
        self.s3uploader.file_exists = Mock()
        self.s3uploader.file_exists.return_value = False

        upload_url = self.s3uploader.upload(file_name, remote_path)
        self.assertEqual(expected_upload_url, upload_url)

        expected_encryption_args = {
            "ServerSideEncryption": "aws:kms",
            "SSEKMSKeyId": kms_key_id
        }
        self.transfer_manager_mock.upload.assert_called_once_with(
            file_name, self.bucket_name, remote_path,
            expected_encryption_args, mock.ANY)
        self.s3uploader.file_exists.assert_called_once_with(remote_path)

    @patch("awscli.customizations.cloudformation.s3uploader.ProgressPercentage")
    def test_upload_successful_nobucket(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"

        # Setup mock to fake that file does not exist
        self.s3uploader.file_exists = Mock()
        self.s3uploader.file_exists.return_value = False

        # Setup uploader to return a NOSuchBucket exception
        exception = botocore.exceptions.ClientError(
            {"Error": {"Code": "NoSuchBucket"}}, "OpName")
        self.transfer_manager_mock.upload.side_effect = exception

        with self.assertRaises(exceptions.NoSuchBucketError):
            self.s3uploader.upload(file_name, remote_path)

    @patch("awscli.customizations.cloudformation.s3uploader.ProgressPercentage")
    def test_upload_successful_exceptions(self, progress_percentage_mock):
        file_name = "filename"
        remote_path = "remotepath"

        # Setup mock to fake that file does not exist
        self.s3uploader.file_exists = Mock()
        self.s3uploader.file_exists.return_value = False

        # Raise an unrecognized botocore error
        exception = botocore.exceptions.ClientError(
            {"Error": {"Code": "SomeError"}}, "OpName")
        self.transfer_manager_mock.upload.side_effect = exception

        with self.assertRaises(botocore.exceptions.ClientError):
            self.s3uploader.upload(file_name, remote_path)

        # Some other exception
        self.transfer_manager_mock.upload.side_effect = FloatingPointError()
        with self.assertRaises(FloatingPointError):
            self.s3uploader.upload(file_name, remote_path)

    def test_upload_with_dedup(self):
        checksum = "some md5 checksum"
        filename = "filename"
        extension = "extn"

        self.s3uploader.file_checksum = Mock()
        self.s3uploader.file_checksum.return_value = checksum

        self.s3uploader.upload = Mock()

        self.s3uploader.upload_with_dedup(filename, extension)
        remotepath = "{0}.{1}".format(checksum, extension)
        self.s3uploader.upload.assert_called_once_with(filename, remotepath)

    def test_file_exists(self):
        key = "some/path"
        expected_params = {
            "Bucket": self.bucket_name,
            "Key": key
        }
        response = {
            "AcceptRanges": "bytes",
            "ContentType": "text/html",
            "LastModified": "Thu, 16 Apr 2015 18:19:14 GMT",
            "ContentLength": 77,
            "VersionId": "null",
            "ETag": "\"30a6ec7e1a9ad79c203d05a589c8b400\"",
            "Metadata": {}
        }

        # Let's pretend file exists
        self.s3client_stub.add_response("head_object",
                                        response,
                                        expected_params)
        with self.s3client_stub:
            self.assertTrue(self.s3uploader.file_exists(key))

        # Let's pretend file does not exist
        self.s3client_stub.add_client_error(
            'head_object', "ClientError", "some error")
        with self.s3client_stub:
            self.assertFalse(self.s3uploader.file_exists(key))

        # Let's pretend some other unknown exception happened
        s3mock = Mock()
        uploader = S3Uploader(s3mock, self.bucket_name, self.region)
        s3mock.head_object = Mock()
        s3mock.head_object.side_effect = RuntimeError()

        with self.assertRaises(RuntimeError):
            uploader.file_exists(key)

    def test_file_checksum(self):
        num_chars = 4096*5
        data = ''.join(random.choice(string.ascii_uppercase)
                       for _ in range(num_chars)).encode('utf-8')
        md5 = hashlib.md5()
        md5.update(data)
        expected_checksum = md5.hexdigest()

        tempdir = tempfile.mkdtemp()
        try:
            filename = os.path.join(tempdir, 'tempfile')
            with open(filename, 'wb') as f:
                f.write(data)

            actual_checksum = self.s3uploader.file_checksum(filename)
            self.assertEqual(expected_checksum, actual_checksum)
        finally:
            shutil.rmtree(tempdir)

    def test_make_url(self):
        path = "Hello/how/are/you"
        expected = "s3://{0}/{1}".format(self.bucket_name, path)
        self.assertEqual(expected, self.s3uploader.make_url(path))

    def test_to_path_style_s3_url_us_east_1(self):
        key = "path/to/file"
        version = "someversion"
        region = "us-east-1"

        s3uploader = S3Uploader(self.s3client, self.bucket_name, region)
        result = s3uploader.to_path_style_s3_url(key, version)
        self.assertEqual(
            result,
            "https://s3.amazonaws.com/{0}/{1}?versionId={2}".format(
                self.bucket_name, key, version))

        # Without versionId, that query parameter should be omitted
        s3uploader = S3Uploader(self.s3client, self.bucket_name, region)
        result = s3uploader.to_path_style_s3_url(key)
        # Dropped the stray third format() argument the original passed here.
        self.assertEqual(
            result,
            "https://s3.amazonaws.com/{0}/{1}".format(
                self.bucket_name, key))

    def test_to_path_style_s3_url_other_regions(self):
        key = "path/to/file"
        version = "someversion"
        region = "us-west-2"

        s3uploader = S3Uploader(self.s3client, self.bucket_name, region)
        result = s3uploader.to_path_style_s3_url(key, version)
        self.assertEqual(
            result,
            "https://s3-{0}.amazonaws.com/{1}/{2}?versionId={3}".format(
                region, self.bucket_name, key, version))

        # Without versionId, that query parameter should be omitted
        s3uploader = S3Uploader(self.s3client, self.bucket_name, region)
        result = s3uploader.to_path_style_s3_url(key)
        self.assertEqual(
            result,
            "https://s3-{0}.amazonaws.com/{1}/{2}".format(
                region, self.bucket_name, key))
class TestStubber(unittest.TestCase):
    """Unit tests for botocore's Stubber against a fully mocked client."""

    def setUp(self):
        # Real event emitter on a mocked client so registered handlers
        # actually fire when events are emitted.
        self.event_emitter = hooks.HierarchicalEmitter()
        self.client = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.client.meta.method_to_api_mapping.get.return_value = 'foo'
        self.stubber = Stubber(self.client)
        # Patch out parameter validation so tests can assert it was invoked
        # without needing a real service model.
        self.validate_parameters_mock = mock.Mock()
        self.validate_parameters_patch = mock.patch(
            'botocore.stub.validate_parameters', self.validate_parameters_mock)
        self.validate_parameters_patch.start()

    def tearDown(self):
        self.validate_parameters_patch.stop()

    def emit_get_response_event(self, model=None, request_dict=None,
                                signer=None, context=None):
        # Simulate the client firing 'before-call' so the stubber's handler
        # returns the next queued response (as a (http_response, parsed) pair).
        if model is None:
            model = mock.Mock()
            model.name = 'foo'

        handler, response = self.event_emitter.emit_until_response(
            event_name='before-call.myservice.foo', model=model,
            params=request_dict, request_signer=signer, context=context)
        return response

    def test_stubber_registers_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        # This just ensures that we register at the correct event
        # and nothing more
        self.event_emitter.register_first.assert_called_with(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.register.assert_called_with(
            'before-call.*.*', mock.ANY, unique_id=mock.ANY)

    def test_stubber_unregisters_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        self.stubber.deactivate()
        self.event_emitter.unregister.assert_any_call(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.unregister.assert_any_call(
            'before-call.*.*', mock.ANY, unique_id=mock.ANY)

    def test_add_response(self):
        response = {'foo': 'bar'}
        self.stubber.add_response('foo', response)
        # A queued-but-unconsumed response must trip the pending check.
        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()

    def test_add_response_fails_when_missing_client_method(self):
        del self.client.foo
        with self.assertRaises(ValueError):
            self.stubber.add_response('foo', {})

    def test_validates_service_response(self):
        self.stubber.add_response('foo', {})
        self.assertTrue(self.validate_parameters_mock.called)

    def test_validate_ignores_response_metadata(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo',
                    'input': {'shape': 'StringShape'},
                    'output': {'shape': 'StringShape'}
                }
            },
            'shapes': {
                'StringShape': {'type': 'string'}
            }
        })
        op_name = service_model.operation_names[0]
        output_shape = service_model.operation_model(op_name).output_shape

        self.client.meta.service_model = service_model
        self.stubber.add_response('TestOperation', service_response)
        # ResponseMetadata must be stripped out before validation runs.
        self.validate_parameters_mock.assert_called_with(
            {}, output_shape)

        # Make sure service response hasn't been mutated
        self.assertEqual(
            service_response, {'ResponseMetadata': {'foo': 'bar'}})

    def test_validates_on_empty_output_shape(self):
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo'
                }
            }
        })
        self.client.meta.service_model = service_model

        # An operation with no output shape cannot accept any response keys.
        with self.assertRaises(ParamValidationError):
            self.stubber.add_response('TestOperation', {'foo': 'bar'})

    def test_get_response(self):
        service_response = {'bar': 'baz'}
        self.stubber.add_response('foo', service_response)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1], service_response)
        self.assertEqual(response[0].status_code, 200)

    def test_get_client_error_response(self):
        error_code = "foo"
        service_message = "bar"
        self.stubber.add_client_error('foo', error_code, service_message)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1]['Error']['Message'], service_message)
        self.assertEqual(response[1]['Error']['Code'], error_code)

    def test_get_response_errors_with_no_stubs(self):
        self.stubber.activate()
        with self.assertRaises(StubResponseError):
            self.emit_get_response_event()

    def test_assert_no_responses_remaining(self):
        self.stubber.add_response('foo', {})
        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()
def test_spark_job_run(mocker, is_public, spark_job_provisioner, user):
    """run() submits an EMR job flow for a scheduled Spark job and returns
    the new jobflow id; the stubber pins the exact run_job_flow request."""
    identifier = 'test-flow'
    notebook_key = 'notebook.ipynb'
    emr_release = '1.0'
    job_timeout = 60
    size = 1

    stubber = Stubber(spark_job_provisioner.emr)
    response = {'JobFlowId': '12345'}
    # Exact request the provisioner is expected to build.
    expected_params = {
        'Applications': [
            {'Name': 'Spark'},
            {'Name': 'Hive'},
            {'Name': 'Zeppelin'}
        ],
        'BootstrapActions': [
            {
                'Name': 'setup-telemetry-spark-job',
                'ScriptBootstrapAction': {
                    'Args': [
                        # presumably minutes -> seconds; confirm against
                        # the bootstrap script's expected unit
                        '--timeout', str(job_timeout * 60),
                    ],
                    'Path': spark_job_provisioner.script_uri,
                }
            }
        ],
        'Configurations': [{'Classification': 'atmo-tests',
                            'Properties': {'covering': 'everything',
                                           'passing': 'of-course'}}],
        'Instances': {
            'Ec2KeyName': spark_job_provisioner.config['EC2_KEY_NAME'],
            'InstanceGroups': [
                {
                    'InstanceCount': size,
                    'InstanceRole': 'MASTER',
                    'InstanceType': (
                        spark_job_provisioner.config['WORKER_INSTANCE_TYPE']
                    ),
                    'Market': 'ON_DEMAND',
                    'Name': 'Master',
                }
            ],
            'KeepJobFlowAliveWhenNoSteps': False,
        },
        'JobFlowRole': constance.config.AWS_SPARK_INSTANCE_PROFILE,
        # NOTE(review): fixed timestamp implies the test freezes time
        # (likely via the `mocker` fixture) — confirm.
        'LogUri': (
            's3://log-bucket/%s/%s/2017-02-03T13:48:09+00:00' %
            (spark_job_provisioner.log_dir, identifier)
        ),
        'Name': ANY,
        'ReleaseLabel': 'emr-%s' % emr_release,
        'ServiceRole': 'EMR_DefaultRole',
        'Steps': [
            {
                'ActionOnFailure': 'TERMINATE_JOB_FLOW',
                'HadoopJarStep': {
                    'Args': [
                        's3://telemetry-spark-emr-2-stage/steps/zeppelin/zeppelin.sh'
                    ],
                    'Jar': 's3://us-west-2.elasticmapreduce/libs/script-runner/'
                           'script-runner.jar'},
                'Name': 'setup-zeppelin'
            },
            {
                'ActionOnFailure': 'TERMINATE_JOB_FLOW',
                'HadoopJarStep': {
                    'Args': [
                        spark_job_provisioner.batch_uri,
                        '--job-name', identifier,
                        '--notebook',
                        's3://telemetry-analysis-code-2/%s' % notebook_key,
                        # Public jobs read from the public data bucket,
                        # private ones from the configured private bucket.
                        '--data-bucket',
                        settings.AWS_CONFIG['PUBLIC_DATA_BUCKET']
                        if is_public else
                        spark_job_provisioner.config['PRIVATE_DATA_BUCKET'],
                    ],
                    'Jar': spark_job_provisioner.jar_uri,
                },
                'Name': 'RunNotebookStep',
            }
        ],
        'Tags': [
            {'Key': 'Owner', 'Value': user.email},
            {'Key': 'Name', 'Value': identifier},
            {'Key': 'Environment', 'Value': 'test'},
            {'Key': 'Application',
             'Value': spark_job_provisioner.config['INSTANCE_APP_TAG']},
            {'Key': 'App',
             'Value': spark_job_provisioner.config['ACCOUNTING_APP_TAG']},
            {'Key': 'Type',
             'Value': spark_job_provisioner.config['ACCOUNTING_TYPE_TAG']},
        ],
        'VisibleToAllUsers': True
    }
    stubber.add_response('run_job_flow', response, expected_params)

    with stubber:
        jobflow_id = spark_job_provisioner.run(
            user_username=user.username,
            user_email=user.email,
            identifier=identifier,
            emr_release=emr_release,
            size=size,
            notebook_key=notebook_key,
            is_public=is_public,
            job_timeout=job_timeout,
        )
        assert jobflow_id == '12345'