Example #1
def boto_volume_for_test(test, cluster_id):
    """
    Create an in-memory boto3 Volume, avoiding any AWS API calls.
    """
    # Create a session directly rather than allow lazy loading of a default
    # session.
    region_name = u"some-test-region-1"
    s = Boto3Session(
        botocore_session=botocore_get_session(),
        region_name=region_name,
    )
    ec2 = s.resource("ec2", region_name=region_name)
    stubber = Stubber(ec2.meta.client)
    # From this point, any attempt to interact with the AWS API should fail
    # with botocore.exceptions.StubResponseError.
    stubber.activate()
    volume_id = u"vol-{}".format(random_name(test))
    v = ec2.Volume(id=volume_id)
    tags = []
    if cluster_id is not None:
        tags.append(
            dict(
                Key=CLUSTER_ID_LABEL,
                Value=cluster_id,
            ),
        )
    # Pre-populate the metadata to prevent any attempt to load it via API
    # calls.
    v.meta.data = dict(
        Tags=tags
    )
    return v
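A brief usage sketch for the helper above, assuming it is called from the test module that defines random_name and CLUSTER_ID_LABEL; because meta.data is pre-populated, reading the tags never touches the AWS API:

# Hypothetical usage inside a test case; `test` is the running TestCase instance.
volume = boto_volume_for_test(test, cluster_id=u"cluster-1234")

# The tags come straight from the pre-populated metadata; no AWS call is made.
assert volume.meta.data["Tags"] == [{"Key": CLUSTER_ID_LABEL, "Value": u"cluster-1234"}]

# Any real API interaction would instead raise botocore.exceptions.StubResponseError.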
Example #2
def create_client_sts_stub(service, *args, **kwargs):
    client = _original_create_client(service, *args, **kwargs)
    stub = Stubber(client)
    response = self.create_assume_role_response(expected_creds)
    self.actual_client_region = client.meta.region_name
    stub.add_response('assume_role', response)
    stub.activate()
    return client
Example #3
def test_delete_tags(self):
    stubber = Stubber(self.instance_resource.meta.client)
    stubber.add_response('delete_tags', {})
    stubber.activate()
    response = self.instance_resource.delete_tags(Tags=[{'Key': 'foo'}])
    stubber.assert_no_pending_responses()
    self.assertEqual(response, {})
    stubber.deactivate()
Example #4
    def set_cloudformation_stubber_for_client(self, redshift_client):
        stubber = Stubber(redshift_client)
        with open(self.resource_path+'/DescribeSourceClusterResponse.json') as describe_source_response:
            describe_source_cluster_response = json.load(describe_source_response)
        expected_source_params = {'ClusterIdentifier': 'rscopyunloadtest3-redshiftclustersource-1so4t2ip0ei3a'}
        stubber.add_response('describe_clusters', describe_source_cluster_response, expected_source_params)

        with open(self.resource_path+'/DescribeTargetClusterResponse.json') as describe_target_response:
            describe_target_cluster_response = json.load(describe_target_response)
        expected_target_params = {'ClusterIdentifier': 'rscopyunloadtest3-redshiftclustertarget-oaw35zvu02h'}
        stubber.add_response('describe_clusters', describe_target_cluster_response, expected_target_params)
        stubber.activate()
Example #5
def test_multipart_download_with_multiple_parts_and_extra_args(self):
    client = Session().create_client('s3')
    stubber = Stubber(client)
    response_body = b'foobarbaz'
    response = {'Body': six.BytesIO(response_body)}
    expected_params = {
        'Range': mock.ANY, 'Bucket': mock.ANY, 'Key': mock.ANY,
        'RequestPayer': 'requester'}
    stubber.add_response('get_object', response, expected_params)
    stubber.activate()
    downloader = MultipartDownloader(
        client, TransferConfig(), InMemoryOSLayer({}), SequentialExecutor)
    downloader.download_file(
        'bucket', 'key', 'filename', len(response_body),
        {'RequestPayer': 'requester'})
    stubber.assert_no_pending_responses()
Example #6
    def create_session(self, profile=None):
        session = Session(profile=profile)

        # We have to set bogus credentials here or otherwise we'll trigger
        # an early credential chain resolution.
        sts = session.create_client(
            'sts',
            aws_access_key_id='spam',
            aws_secret_access_key='eggs',
        )
        stubber = Stubber(sts)
        stubber.activate()
        assume_role_provider = AssumeRoleProvider(
            load_config=lambda: session.full_config,
            client_creator=lambda *args, **kwargs: sts,
            cache={},
            profile_name=profile,
            credential_sourcer=CanonicalNameCredentialSourcer([
                self.env_provider, self.container_provider,
                self.metadata_provider
            ])
        )

        component_name = 'credential_provider'
        resolver = session.get_component(component_name)
        available_methods = [p.METHOD for p in resolver.providers]
        replacements = {
            'env': self.env_provider,
            'iam-role': self.metadata_provider,
            'container-role': self.container_provider,
            'assume-role': assume_role_provider
        }
        for name, provider in replacements.items():
            try:
                index = available_methods.index(name)
            except ValueError:
                # The provider isn't in the session
                continue

            resolver.providers[index] = provider

        session.register_component(
            'credential_provider', resolver
        )
        return session, stubber
Example #7
    def _boto3_stubber(service, mocked_requests):
        client = boto3.client(service, region)
        stubber = Stubber(client)
        # Save a ref to the stubber so that we can deactivate it at the end of the test.
        created_stubbers.append(stubber)

        # Attach mocked requests to the Stubber and activate it.
        if not isinstance(mocked_requests, list):
            mocked_requests = [mocked_requests]
        for mocked_request in mocked_requests:
            stubber.add_response(
                mocked_request.method, mocked_request.response, expected_params=mocked_request.expected_params
            )
        stubber.activate()

        # Add the stubbed client to the collection of mocked clients. This allows
        # mocking multiple clients. Mocking the same client twice replaces the previous one.
        mocked_clients[service] = client
        return client
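The _boto3_stubber factory above only assumes that each item in mocked_requests exposes method, response and expected_params attributes. A minimal sketch of such an object and of a call, using a namedtuple; the request names and values below are illustrative, not taken from the original test suite:

from collections import namedtuple

# Hypothetical container exposing the attributes _boto3_stubber reads.
MockedBoto3Request = namedtuple(
    "MockedBoto3Request", ["method", "response", "expected_params"]
)

mocked_request = MockedBoto3Request(
    method="describe_key_pairs",
    response={"KeyPairs": [{"KeyName": "test-key"}]},
    expected_params={"KeyNames": ["test-key"]},
)

# The returned client fails on any call that was not stubbed above.
ec2_client = _boto3_stubber("ec2", mocked_request)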
Example #8
class TestRDSPagination(BaseSessionTest):
    def setUp(self):
        super(TestRDSPagination, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            'rds', self.region)
        self.stubber = Stubber(self.client)

    def test_can_specify_zero_marker(self):
        service_response = {
            'LogFileData': 'foo',
            'Marker': '2',
            'AdditionalDataPending': True
        }
        expected_params = {
            'DBInstanceIdentifier': 'foo',
            'LogFileName': 'bar',
            'NumberOfLines': 2,
            'Marker': '0'
        }
        function_name = 'download_db_log_file_portion'

        # The stubber will assert that the function is called with the expected
        # parameters.
        self.stubber.add_response(
            function_name, service_response, expected_params)
        self.stubber.activate()

        try:
            paginator = self.client.get_paginator(function_name)
            result = paginator.paginate(
                DBInstanceIdentifier='foo',
                LogFileName='bar',
                NumberOfLines=2,
                PaginationConfig={
                    'StartingToken': '0',
                    'MaxItems': 3
                }).build_full_result()
            self.assertEqual(result['LogFileData'], 'foo')
            self.assertIn('NextToken', result)
        except StubAssertionError as e:
            self.fail(str(e))
Example #9
    def test_provide_copy_source_client(self):
        source_client = self.session.create_client(
            's3', 'eu-central-1', aws_access_key_id='foo',
            aws_secret_access_key='bar')
        source_stubber = Stubber(source_client)
        source_stubber.activate()
        self.addCleanup(source_stubber.deactivate)

        self.add_head_object_response(stubber=source_stubber)
        self.add_successful_copy_responses()

        call_kwargs = self.create_call_kwargs()
        call_kwargs['source_client'] = source_client
        future = self.manager.copy(**call_kwargs)
        future.result()

        # Make sure that all of the responses were properly
        # used for both clients.
        source_stubber.assert_no_pending_responses()
        self.stubber.assert_no_pending_responses()
Example #10
class TestRDS(unittest.TestCase):
    def setUp(self):
        self.session = botocore.session.get_session()
        self.client = self.session.create_client('rds', 'us-west-2')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def test_generate_db_auth_token(self):
        hostname = 'host.us-east-1.rds.amazonaws.com'
        port = 3306
        username = '******'
        auth_token = self.client.generate_db_auth_token(
            DBHostname=hostname, Port=port, DBUsername=username)

        endpoint_url = 'host.us-east-1.rds.amazonaws.com:3306'
        self.assertIn(endpoint_url, auth_token)
        self.assertIn('Action=connect', auth_token)

        # Asserts that there is no scheme in the url
        self.assertTrue(auth_token.startswith(hostname))
Example #11
class TestMturk(BaseSessionTest):
    def setUp(self):
        super(TestMturk, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            'mturk', self.region)
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    def test_list_hits_aliased(self):
        self.stubber.add_response('list_hits_for_qualification_type', {})
        self.stubber.add_response('list_hits_for_qualification_type', {})

        params = {'QualificationTypeId': 'foo'}

        self.client.list_hi_ts_for_qualification_type(**params)
        self.client.list_hits_for_qualification_type(**params)

        self.stubber.assert_no_pending_responses()
Example #12
class TestS3ObjectSummary(unittest.TestCase):
    def setUp(self):
        self.session = boto3.session.Session(
            aws_access_key_id='foo', aws_secret_access_key='bar',
            region_name='us-west-2')
        self.s3 = self.session.resource('s3')
        self.obj_summary = self.s3.ObjectSummary('my_bucket', 'my_key')
        self.obj_summary_size = 12
        self.stubber = Stubber(self.s3.meta.client)
        self.stubber.activate()
        self.stubber.add_response(
            method='head_object',
            service_response={
                'ContentLength': self.obj_summary_size, 'ETag': 'my-etag',
                'ContentType': 'binary'
            },
            expected_params={
                'Bucket': 'my_bucket',
                'Key': 'my_key'
            }
        )

    def tearDown(self):
        self.stubber.deactivate()

    def test_has_load(self):
        self.assertTrue(hasattr(self.obj_summary, 'load'),
                        'load() was not injected onto ObjectSummary resource.')

    def test_autoloads_correctly(self):
        # HeadObject returns ContentLength; this should get mapped to Size,
        # since the resource uses the shape returned by ListObjects.
        self.assertEqual(self.obj_summary.size, self.obj_summary_size)

    def test_cannot_access_other_non_related_parameters(self):
        # Even though a HeadObject was used to load this, it should
        # only expose the attributes from its shape defined in ListObjects.
        self.assertFalse(hasattr(self.obj_summary, 'content_length'))
Example #13
class TestSagemaker(BaseSessionTest):
    def setUp(self):
        super(TestSagemaker, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            'sagemaker', self.region)
        self.stubber = Stubber(self.client)
        self.stubber.activate()
        self.hook_calls = []

    def _hook(self, **kwargs):
        self.hook_calls.append(kwargs['event_name'])

    def tearDown(self):
        self.stubber.deactivate()

    def test_event_with_old_prefix(self):
        self.client.meta.events.register(
            'provide-client-params.sagemaker.ListEndpoints',
            self._hook
        )
        self.stubber.add_response('list_endpoints', {'Endpoints': []})
        self.client.list_endpoints()
        self.assertEqual(self.hook_calls, [
            'provide-client-params.sagemaker.ListEndpoints'
        ])

    def test_event_with_new_prefix(self):
        self.client.meta.events.register(
            'provide-client-params.api.sagemaker.ListEndpoints',
            self._hook
        )
        self.stubber.add_response('list_endpoints', {'Endpoints': []})
        self.client.list_endpoints()
        self.assertEqual(self.hook_calls, [
            'provide-client-params.sagemaker.ListEndpoints'
        ])
Example #14
def _can_use_parameter_in_client_call(session, case, use_alias=True):
    client = session.create_client(
        case['service'], region_name='us-east-1',
        aws_access_key_id='foo', aws_secret_access_key='bar')

    stubber = Stubber(client)
    stubber.activate()
    operation = case['operation']
    params = case.get('extra_args', {})
    params = params.copy()
    param_name = case['original_name']
    if use_alias:
        param_name = case['new_name']
    params[param_name] = case['parameter_value']
    stubbed_response = case.get('stubbed_response', {})
    stubber.add_response(operation, stubbed_response)
    try:
        getattr(client, operation)(**params)
    except ParamValidationError as e:
        raise AssertionError(
            'Expecting %s to be valid parameter for %s.%s but received '
            '%s.' % (
                case['new_name'], case['service'], case['operation'], e)
        )
Example #15
class StubbedClientTest(unittest.TestCase):
    def setUp(self):
        self.session = botocore.session.get_session()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            's3', self.region, aws_access_key_id='foo',
            aws_secret_access_key='bar')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    def reset_stubber_with_new_client(self, override_client_kwargs):
        client_kwargs = {
            'service_name': 's3',
            'region_name': self.region,
            'aws_access_key_id': 'foo',
            'aws_secret_access_key': 'bar'
        }
        client_kwargs.update(override_client_kwargs)
        self.client = self.session.create_client(**client_kwargs)
        self.stubber = Stubber(self.client)
        self.stubber.activate()
Example #16
class TestSTSPresignedUrl(BaseSessionTest):
    def setUp(self):
        super(TestSTSPresignedUrl, self).setUp()
        self.client = self.session.create_client('sts', 'us-west-2')
        # Makes sure that no requests will go through
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def test_presigned_url_contains_no_content_type(self):
        timestamp = datetime(2017, 3, 22, 0, 0)
        with mock.patch('botocore.auth.datetime') as _datetime:
            _datetime.datetime.utcnow.return_value = timestamp
            url = self.client.generate_presigned_url('get_caller_identity', {})

        # There should be no 'content-type' in x-amz-signedheaders
        expected_url = (
            'https://sts.amazonaws.com/?Action=GetCallerIdentity&'
            'Version=2011-06-15&X-Amz-Algorithm=AWS4-HMAC-SHA256&'
            'X-Amz-Credential=access_key%2F20170322%2Fus-east-1%2Fsts%2F'
            'aws4_request&X-Amz-Date=20170322T000000Z&X-Amz-Expires=3600&'
            'X-Amz-SignedHeaders=host&X-Amz-Signature=767845d2ee858069a598d5f'
            '8b497b75c7d57356885b1b3dba46dbbc0fc62bf5a'
        )
        assert_url_equal(url, expected_url)
Example #17
class TestADICommandS3(TransactionTestCase):
    fixtures = ('base/addon_3615', 'base/featured', 'base/appversion.json')
    date = '2014-07-10'
    stats_source = 's3'

    def add_response(self, stat):
        stat_path = os.path.join(hive_folder, 'src', '%s.hive' % stat)
        data = get_stats_data(stat_path)
        response = {
            'Body': data,
        }
        expected_params = {
            'Bucket': 'test-bucket',
            'Key': os.path.join('amo_stats', stat, self.date, '000000_0'),
            'Range': ANY
        }
        self.stubber.add_response('get_object', response, expected_params)

    def setUp(self):
        self.client = boto3.client('s3')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_update_counts_from_s3(self, mock_boto3):
        stats = ['app', 'locale', 'os', 'status', 'version']

        for x in range(2):
            for stat in stats:
                self.add_response('update_counts_by_%s' % stat)

        mock_boto3.client.return_value = self.client
        management.call_command('update_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)

        assert UpdateCount.objects.all().count() == 1
        update_count = UpdateCount.objects.last()
        # should be identical to `statuses.userEnabled`
        assert update_count.count == 4
        assert update_count.date == date(2014, 7, 10)
        assert update_count.versions == {u'3.8': 2, u'3.7': 3}
        assert update_count.statuses == {u'userDisabled': 1, u'userEnabled': 4}
        application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        assert update_count.applications[application] == {u'3.6': 18}
        assert update_count.oses == {u'WINNT': 5}
        assert update_count.locales == {u'en-us': 1, u'en-US': 4}

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_download_counts_from_s3(self, mock_boto3):
        for x in range(2):
            self.add_response('download_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('download_counts_from_file',
                                date=self.date,
                                stats_source=self.stats_source)
        assert DownloadCount.objects.all().count() == 2
        download_count = DownloadCount.objects.get(addon_id=3615)
        assert download_count.count == 3
        assert download_count.date == date(2014, 7, 10)
        assert download_count.sources == {u'search': 2, u'cb-dl-bob': 1}
Example #18
def stubber(client):
    stubber = Stubber(client)
    stubber.activate()
    yield stubber
    stubber.deactivate()
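The generator above follows the pytest fixture pattern: the stubber is handed to the test after activation and deactivated during teardown. A minimal sketch of how it could be wired up and used, assuming pytest; the region, credentials and response below are illustrative:

import boto3
import pytest
from botocore.stub import Stubber


@pytest.fixture
def s3_stubber():
    client = boto3.client(
        "s3", region_name="us-east-1",
        aws_access_key_id="foo", aws_secret_access_key="bar",
    )
    stubber = Stubber(client)
    stubber.activate()
    yield stubber
    stubber.deactivate()


def test_list_buckets(s3_stubber):
    # Queue one canned response; the stubbed client never talks to AWS.
    s3_stubber.add_response("list_buckets", {"Buckets": [], "Owner": {"ID": "abc"}})
    response = s3_stubber.client.list_buckets()
    assert response["Buckets"] == []
    s3_stubber.assert_no_pending_responses()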
Example #19
class TestAccessCounts(TestCase):
    """Tests S3 Select"""
    def setUp(self):
        self.env_patcher = patch.dict(
            os.environ, {
                'AWS_ACCESS_KEY_ID': 'test_key',
                'AWS_SECRET_ACCESS_KEY': 'test_secret',
                'AWS_DEFAULT_REGION': 'ng-north-1',
                'ATHENA_DATABASE': 'athena-db',
                'CLOUDTRAIL_BUCKET': 'cloudtrail-bucket',
                'QUERY_RESULT_BUCKET': 'results-bucket',
                'ACCESS_COUNTS_OUTPUT_DIR': 'AccessCounts',
            })
        self.env_patcher.start()

        import index

        self.s3_stubber = Stubber(index.s3)
        self.s3_stubber.activate()

        self.athena_stubber = Stubber(index.athena)
        self.athena_stubber.activate()

    def tearDown(self):
        self.athena_stubber.deactivate()
        self.s3_stubber.deactivate()
        self.env_patcher.stop()

    def _start_query(self, query, execution_id):
        self.athena_stubber.add_response(
            method='start_query_execution',
            expected_params={
                'QueryExecutionContext': {
                    'Database': 'athena-db'
                },
                'QueryString': query,
                'ResultConfiguration': {
                    'OutputLocation': 's3://results-bucket/AthenaQueryResults/'
                }
            },
            service_response={'QueryExecutionId': execution_id},
        )

    def _end_query(self, execution_id=None):
        self.athena_stubber.add_response(
            method='get_query_execution',
            expected_params={'QueryExecutionId': execution_id}
            if execution_id is not None else None,
            service_response={
                'QueryExecution': {
                    'Status': {
                        'State': 'SUCCEEDED'
                    }
                }
            })

    def _run_queries(self, queries):
        for idx, query in enumerate(queries):
            self._start_query(query, str(idx))

        for _ in queries:
            self._end_query()

    def test_access_counts(self):
        import index

        now = datetime.fromtimestamp(1234567890, timezone.utc)
        end_ts = now - timedelta(minutes=15)
        start_ts = now - timedelta(days=1)

        self.s3_stubber.add_response(
            method='get_object',
            expected_params={
                'Bucket': 'results-bucket',
                'Key': 'ObjectAccessLog.last_updated_ts.txt',
            },
            service_response={
                'Body': BytesIO(str(start_ts.timestamp()).encode()),
            })

        self.s3_stubber.add_response(method='list_objects_v2',
                                     expected_params={
                                         'Bucket': 'results-bucket',
                                         'Prefix': 'AthenaQueryResults',
                                         'MaxKeys': 1000,
                                     },
                                     service_response={})

        self.s3_stubber.add_response(method='list_objects_v2',
                                     expected_params={
                                         'Bucket': 'cloudtrail-bucket',
                                         'Prefix': 'AWSLogs/',
                                         'Delimiter': '/',
                                     },
                                     service_response={
                                         'CommonPrefixes': [{
                                             'Prefix':
                                             'AWSLogs/123456/'
                                         }]
                                     })

        self.s3_stubber.add_response(
            method='list_objects_v2',
            expected_params={
                'Bucket': 'cloudtrail-bucket',
                'Prefix': 'AWSLogs/123456/CloudTrail/',
                'Delimiter': '/',
            },
            service_response={
                'CommonPrefixes': [{
                    'Prefix':
                    'AWSLogs/123456/CloudTrail/ng-north-1/'
                }]
            })

        self._run_queries([
            index.DROP_CLOUDTRAIL, index.DROP_OBJECT_ACCESS_LOG,
            index.DROP_PACKAGE_HASHES
        ])

        self._run_queries([
            index.CREATE_CLOUDTRAIL, index.CREATE_OBJECT_ACCESS_LOG,
            index.CREATE_PACKAGE_HASHES
        ])

        self._run_queries([
            index.REPAIR_OBJECT_ACCESS_LOG,
            index.ADD_CLOUDTRAIL_PARTITION.format(account='123456',
                                                  region='ng-north-1',
                                                  year=2009,
                                                  month=2,
                                                  day=12),
            index.ADD_CLOUDTRAIL_PARTITION.format(account='123456',
                                                  region='ng-north-1',
                                                  year=2009,
                                                  month=2,
                                                  day=13),
        ])

        self.s3_stubber.add_response(method='delete_object',
                                     expected_params={
                                         'Bucket':
                                         'results-bucket',
                                         'Key':
                                         'ObjectAccessLog.last_updated_ts.txt',
                                     },
                                     service_response={})

        self._run_queries([
            index.INSERT_INTO_OBJECT_ACCESS_LOG.format(
                start_ts=start_ts.timestamp(), end_ts=end_ts.timestamp())
        ])

        self.s3_stubber.add_response(method='put_object',
                                     expected_params={
                                         'Bucket': 'results-bucket',
                                         'Key':
                                         'ObjectAccessLog.last_updated_ts.txt',
                                         'ContentType': 'text/plain',
                                         'Body': str(end_ts.timestamp()),
                                     },
                                     service_response={})

        self._run_queries([
            index.OBJECT_ACCESS_COUNTS, index.PACKAGE_ACCESS_COUNTS,
            index.PACKAGE_VERSION_ACCESS_COUNTS, index.BUCKET_ACCESS_COUNTS,
            index.EXTS_ACCESS_COUNTS
        ])

        for idx, name in enumerate(
            ['Objects', 'Packages', 'PackageVersions', 'Bucket', 'Exts']):
            self.s3_stubber.add_response(
                method='head_object',
                expected_params={
                    'Bucket': 'results-bucket',
                    'Key': f'AthenaQueryResults/{idx}.csv',
                },
                service_response={'ContentLength': 123})
            self.s3_stubber.add_response(
                method='copy_object',
                expected_params={
                    'CopySource': {
                        'Bucket': 'results-bucket',
                        'Key': f'AthenaQueryResults/{idx}.csv',
                    },
                    'Bucket': 'results-bucket',
                    'Key': f'AccessCounts/{name}.csv',
                },
                service_response={})

        with patch('index.now', return_value=now), \
             patch('time.sleep', return_value=None):
            index.handler(None, None)
Example #20
class TestAutoscalingPagination(BaseSessionTest):
    def setUp(self):
        super(TestAutoscalingPagination, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            'autoscaling', self.region, aws_secret_access_key='foo',
            aws_access_key_id='bar', aws_session_token='baz'
        )
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def _setup_scaling_pagination(
        self, page_size=200, max_items=100, total_items=600
    ):
        """
        Add to the stubber to test paginating describe_scaling_activities.

        WARNING: This only handles cases where max_items cleanly divides
        page_size.
        """
        requests_per_page = page_size / max_items
        if requests_per_page != ceil(requests_per_page):
            raise NotImplementedError(
                "This only handles setup where max_items is less than "
                "page_size and where max_items evenly divides page_size."
            )
        requests_per_page = int(requests_per_page)
        num_pages = int(ceil(total_items / page_size))

        previous_next_token = None
        for i in range(num_pages):
            page = self.create_describe_scaling_response(page_size=page_size)

            # Don't create a next_token for the final page
            if i + 1 == num_pages:
                next_token = None
            else:
                next_token = random_chars(10)

            expected_args = {}
            if previous_next_token:
                expected_args['StartingToken'] = previous_next_token

            # The same page may be accessed multiple times because we are
            # truncating it at max_items
            for _ in range(requests_per_page - 1):
                # The page is copied because the paginator will modify the
                # response object, causing issues when using the stubber.
                self.stubber.add_response(
                    'describe_scaling_activities', page.copy()
                )

            if next_token is not None:
                page['NextToken'] = next_token

            # Copying the page here isn't necessary because it is about to
            # be blown away anyway.
            self.stubber.add_response(
                'describe_scaling_activities', page
            )

            previous_next_token = next_token

    def create_describe_scaling_response(self, page_size=200):
        """Create a valid describe_scaling_activities response."""
        page = []
        date = datetime.now()
        for _ in range(page_size):
            page.append({
                'AutoScalingGroupName': 'test',
                'ActivityId': random_chars(10),
                'Cause': 'test',
                'StartTime': date,
                'StatusCode': '200',
            })
        return {'Activities': page}

    def test_repeated_build_full_results(self):
        # This ensures that we can cleanly paginate using build_full_results.
        max_items = 100
        total_items = 600
        self._setup_scaling_pagination(
            max_items=max_items,
            total_items=total_items,
            page_size=200
        )
        paginator = self.client.get_paginator('describe_scaling_activities')
        conf = {'MaxItems': max_items}

        pagination_tokens = []

        result = paginator.paginate(PaginationConfig=conf).build_full_result()
        all_results = result['Activities']
        while 'NextToken' in result:
            starting_token = result['NextToken']
            # We should never get a duplicate pagination token.
            self.assertNotIn(starting_token, pagination_tokens)
            pagination_tokens.append(starting_token)

            conf['StartingToken'] = starting_token
            pages = paginator.paginate(PaginationConfig=conf)
            result = pages.build_full_result()
            all_results.extend(result['Activities'])

        self.assertEqual(len(all_results), total_items)
Example #21
def stub(client):
    # `response` and `expected` are defined elsewhere in the enclosing test module.
    stubber = Stubber(client)
    stubber.add_response('change_resource_record_sets', response, expected)
    stubber.activate()
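The snippet above leaves response and expected to the surrounding test module. A minimal sketch of plausible values, reusing the change_resource_record_sets shapes that appear in Example #28 below; the client, region and credentials are illustrative:

import boto3
from botocore.stub import ANY, Stubber

client = boto3.client(
    "route53", region_name="us-east-1",
    aws_access_key_id="foo", aws_secret_access_key="bar",
)

# Hypothetical stub data matching the change_resource_record_sets shapes.
response = {
    'ChangeInfo': {
        'Id': 'id',
        'Status': 'PENDING',
        'SubmittedAt': '2017-01-29T01:02:03Z',
    }
}
expected = {
    'HostedZoneId': ANY,
    'ChangeBatch': ANY,
}

stub(client)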
Example #22
class TestAutoscalingPagination(BaseSessionTest):
    def setUp(self):
        super(TestAutoscalingPagination, self).setUp()
        self.region = 'us-west-2'
        self.client = self.session.create_client(
            'autoscaling', self.region, aws_secret_access_key='foo',
            aws_access_key_id='bar', aws_session_token='baz'
        )
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def _setup_scaling_pagination(self, page_size=200, max_items=100,
                                 total_items=600):
        """
        Add to the stubber to test paginating describe_scaling_activities.

        WARNING: This only handles cases where max_items cleanly divides
        page_size.
        """
        requests_per_page = page_size / max_items
        if requests_per_page != ceil(requests_per_page):
            raise NotImplementedError(
                "This only handles setup where max_items is less than "
                "page_size and where max_items evenly divides page_size."
            )
        requests_per_page = int(requests_per_page)
        num_pages = int(ceil(total_items / page_size))

        previous_next_token = None
        for i in range(num_pages):
            page = self.create_describe_scaling_response(page_size=page_size)

            # Don't create a next_token for the final page
            if i + 1 == num_pages:
                next_token = None
            else:
                next_token = random_chars(10)

            expected_args = {}
            if previous_next_token:
                expected_args['StartingToken'] = previous_next_token

            # The same page may be accessed multiple times because we are
            # truncating it at max_items
            for _ in range(requests_per_page - 1):
                # The page is copied because the paginator will modify the
                # response object, causing issues when using the stubber.
                self.stubber.add_response(
                    'describe_scaling_activities', page.copy()
                )

            if next_token is not None:
                page['NextToken'] = next_token

            # Copying the page here isn't necessary because it is about to
            # be blown away anyway.
            self.stubber.add_response(
                'describe_scaling_activities', page
            )

            previous_next_token = next_token

    def create_describe_scaling_response(self, page_size=200):
        """Create a valid describe_scaling_activities response."""
        page = []
        date = datetime.now()
        for _ in range(page_size):
            page.append({
                'AutoScalingGroupName': 'test',
                'ActivityId': random_chars(10),
                'Cause': 'test',
                'StartTime': date,
                'StatusCode': '200',
            })
        return {'Activities': page}

    def test_repeated_build_full_results(self):
        # This ensures that we can cleanly paginate using build_full_results.
        max_items = 100
        total_items = 600
        self._setup_scaling_pagination(
            max_items=max_items,
            total_items=total_items,
            page_size=200
        )
        paginator = self.client.get_paginator('describe_scaling_activities')
        conf = {'MaxItems': max_items}

        pagination_tokens = []

        result = paginator.paginate(PaginationConfig=conf).build_full_result()
        all_results = result['Activities']
        while 'NextToken' in result:
            starting_token = result['NextToken']
            # We should never get a duplicate pagination token.
            self.assertNotIn(starting_token, pagination_tokens)
            pagination_tokens.append(starting_token)

            conf['StartingToken'] = starting_token
            pages = paginator.paginate(PaginationConfig=conf)
            result = pages.build_full_result()
            all_results.extend(result['Activities'])

        self.assertEqual(len(all_results), total_items)
Example #23
def kinesisanalyticsv2_stubber():
    kinesisanalyticsv2_stubber = Stubber(kinesisanalyticsv2)
    kinesisanalyticsv2_stubber.activate()
    yield kinesisanalyticsv2_stubber
    kinesisanalyticsv2_stubber.deactivate()
Example #24
def ram_stubber():
    ram_stubber = Stubber(ram)
    ram_stubber.activate()
    yield ram_stubber
    ram_stubber.deactivate()
Example #25
    def test_anon_access(self):
        """
        Test anonymous call w/ ALLOW_ANONYMOUS_ACCESS
        """
        bucket = "bucket"
        key = ".quilt/packages/manifest_hash"
        params = dict(
            bucket=bucket,
            manifest=key,
        )

        expected_args = {
            'Bucket': bucket,
            'Key': key,
            'Expression':
            "SELECT SUBSTRING(s.logical_key, 1) AS logical_key FROM s3object s",
            'ExpressionType': 'SQL',
            'InputSerialization': {
                'CompressionType': 'NONE',
                'JSON': {
                    'Type': 'LINES'
                }
            },
            'OutputSerialization': {
                'JSON': {
                    'RecordDelimiter': '\n'
                }
            },
        }

        env_patcher = patch.dict(
            os.environ, {
                'AWS_ACCESS_KEY_ID': 'test_key',
                'AWS_SECRET_ACCESS_KEY': 'test_secret',
                'ALLOW_ANONYMOUS_ACCESS': '1'
            })
        env_patcher.start()

        mock_s3 = boto3.client('s3')
        response = {
            'ETag': '12345',
            'VersionId': '1.0',
            'ContentLength': 123,
        }
        expected_params = {
            'Bucket': bucket,
            'Key': key,
        }
        s3_stubber = Stubber(mock_s3)
        s3_stubber.activate()
        s3_stubber.add_response('head_object', response, expected_params)
        with patch.object(mock_s3,
                          'select_object_content',
                          side_effect=[
                              self.s3response, self.s3response_meta
                          ]) as client_patch, patch('boto3.Session.client',
                                                    return_value=mock_s3):
            response = lambda_handler(self._make_event(params), None)
            print(response)
            assert response['statusCode'] == 200
            folder = json.loads(read_body(response))['contents']
            print(folder)
            assert len(folder['prefixes']) == 1
            assert len(folder['objects']) == 1
            assert folder['objects'][0]['logical_key'] == 'foo.csv'
            assert folder['prefixes'][0]['logical_key'] == 'bar/'
        s3_stubber.deactivate()
        env_patcher.stop()
Example #26
def test_event_good(mocker):
    #--------------------------
    # Test data
    #
    event = utils.load_test_data(test_data + 'cis26.json', my_region)

    sns_message = {
        'Note':
        '"Enable Access Logging on CloudTrail logs bucket" remediation was successfully invoked via AWS Systems Manager',
        'State': 'RESOLVED',
        'Account': '111111111111',
        'Remediation': 'Enable Access Logging on CloudTrail logs bucket',
        'AffectedObject':
        'CloudTrail: cloudtrail-awslogs-111111111111-cv5ddz5l-isengard-do-not-delete',
        'metrics_data': mocker.ANY
    }

    post_metrics_expected_parms = {
        'Solution': 'SO0111',
        'UUID': '12345678-1234-1234-1234-123412341234',
        'TimeStamp': mocker.ANY,
        'Data': {
            'generator_id':
            'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/2.6',
            'type':
            '2.6 Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket',
            'productArn': mocker.ANY,
            'finding_triggered_by': 'Security Hub Findings - Custom Action',
            'region': mocker.ANY,
            'status': 'RESOLVED'
        },
        'Version': 'v1.2.0TEST'
    }

    ssmc = boto3.client('ssm', region_name=my_region)
    ssmc_s = Stubber(ssmc)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.activate()
    mocker.patch('lib.metrics.Metrics.connect_to_ssm', return_value=ssmc)
    post_metrics = mocker.patch('lib.metrics.Metrics.post_metrics_to_api',
                                return_value=None)

    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)
    mocker.patch('lib.awsapi_helpers.AWSClient.connect', return_value=None)

    awsc = [boto3.client('ssm'), boto3.client('s3')]

    def mock_select(thing1, thing2):
        if thing2 == 'ssm':
            return awsc[0]
        elif thing2 == 's3':
            return awsc[1]
        else:
            return

    # Mock the boto client and replace the BotoSession client with our stub
    awsc_s = Stubber(awsc[0])
    awsc_s.add_response('start_automation_execution', {})
    awsc_s.activate()

    awss3_s = Stubber(awsc[1])
    awss3_s.add_response('create_bucket', {})
    awss3_s.add_response('put_bucket_encryption', {})
    awss3_s.activate()

    mocker.patch('lib.awsapi_helpers.BotoSession.client', new=mock_select)

    sns = mocker.patch('lib.awsapi_helpers.AWSClient.postit',
                       return_value=None)

    # Mock Notifier
    init = mocker.patch('lib.sechub_findings.Finding.flag')
    resolve = mocker.patch('lib.sechub_findings.Finding.resolve')

    # Prevent flushing to logs
    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)

    #--------------------------
    # Run the lambda
    #
    cis26.lambda_handler(event, None)

    init.assert_called_once_with(
        'INITIAL: "Enable Access Logging on CloudTrail logs bucket" remediation started'
    )
    resolve.assert_called_once_with(
        'RESOLVED: "Enable Access Logging on CloudTrail logs bucket" remediation was successfully invoked via AWS Systems Manager'
    )
    sns.assert_called_with('SO0111-SHARR_Topic', sns_message, my_region)
    post_metrics.assert_called_with(post_metrics_expected_parms)
Example #27
def qldb_stubber():
    qldb_stubber = Stubber(qldb)
    qldb_stubber.activate()
    yield qldb_stubber
    qldb_stubber.deactivate()
Example #28
    def _get_test_plan(self, max_changes):

        provider = Route53Provider('test', 'abc', '123', max_changes)

        # Use the stubber
        stubber = Stubber(provider._conn)
        stubber.activate()

        got = Zone('unit.tests.', [])

        list_hosted_zones_resp = {
            'HostedZones': [],
            'Marker': 'm',
            'IsTruncated': False,
            'MaxItems': '100',
        }
        stubber.add_response('list_hosted_zones', list_hosted_zones_resp, {})

        create_hosted_zone_resp = {
            'HostedZone': {
                'Name': 'unit.tests.',
                'Id': 'z42',
                'CallerReference': 'abc',
            },
            'ChangeInfo': {
                'Id': 'a12',
                'Status': 'PENDING',
                'SubmittedAt': '2017-01-29T01:02:03Z',
                'Comment': 'hrm',
            },
            'DelegationSet': {
                'Id': 'b23',
                'CallerReference': 'blip',
                'NameServers': [
                    'n12.unit.tests.',
                ],
            },
            'Location': 'us-east-1',
        }
        stubber.add_response('create_hosted_zone', create_hosted_zone_resp, {
            'Name': got.name,
            'CallerReference': ANY,
        })

        stubber.add_response(
            'list_health_checks', {
                'HealthChecks': self.health_checks,
                'IsTruncated': False,
                'MaxItems': '100',
                'Marker': '',
            })

        stubber.add_response('delete_health_check', {}, {
            'HealthCheckId': ANY,
        })
        stubber.add_response('delete_health_check', {}, {
            'HealthCheckId': ANY,
        })
        stubber.add_response('delete_health_check', {}, {
            'HealthCheckId': ANY,
        })

        stubber.add_response(
            'change_resource_record_sets', {
                'ChangeInfo': {
                    'Id': 'id',
                    'Status': 'PENDING',
                    'SubmittedAt': '2017-01-29T01:02:03Z',
                }
            }, {
                'HostedZoneId': 'z42',
                'ChangeBatch': ANY
            })

        plan = provider.plan(self.expected)

        return provider, plan
Example #29
class TestADICommandS3(TransactionTestCase):
    fixtures = ('base/addon_3615', 'base/featured', 'addons/persona',
                'base/appversion.json')
    date = '2014-07-10'
    stats_source = 's3'

    def add_response(self, stat):
        stat_path = os.path.join(hive_folder, 'src', '%s.hive' % stat)
        data = get_stats_data(stat_path)
        response = {
            'Body': data,
        }
        expected_params = {'Bucket': 'test-bucket',
                           'Key': os.path.join('amo_stats', stat,
                                               self.date, '000000_0'),
                           'Range': ANY}
        self.stubber.add_response('get_object', response, expected_params)

    def setUp(self):
        self.client = boto3.client('s3')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch(
        'olympia.stats.management.commands.update_counts_from_file.'
        'save_stats_to_file')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_update_counts_from_s3(self, mock_boto3, mock_save_stats_to_file):
        stats = ['app', 'locale', 'os', 'status', 'version']

        for x in range(2):
            for stat in stats:
                self.add_response('update_counts_by_%s' % stat)

        mock_boto3.client.return_value = self.client
        management.call_command('update_counts_from_file',
                                date=self.date, stats_source=self.stats_source)

        assert UpdateCount.objects.all().count() == 1
        update_count = UpdateCount.objects.last()
        # should be identical to `statuses.userEnabled`
        assert update_count.count == 4
        assert update_count.date == date(2014, 7, 10)
        assert update_count.versions == {u'3.8': 2, u'3.7': 3}
        assert update_count.statuses == {u'userDisabled': 1, u'userEnabled': 4}
        application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        assert update_count.applications[application] == {u'3.6': 18}
        assert update_count.oses == {u'WINNT': 5}
        assert update_count.locales == {u'en-us': 1, u'en-US': 4}

        # save_stats_to_file is called with a non-saved model.
        assert isinstance(mock_save_stats_to_file.call_args[0][0], UpdateCount)

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch(
        'olympia.stats.management.commands.download_counts_from_file.'
        'save_stats_to_file')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_download_counts_from_s3(self, mock_boto3,
                                     mock_save_stats_to_file):
        for x in range(2):
            self.add_response('download_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('download_counts_from_file',
                                date=self.date, stats_source=self.stats_source)
        assert DownloadCount.objects.all().count() == 2
        download_count = DownloadCount.objects.get(addon_id=3615)
        assert download_count.count == 3
        assert download_count.date == date(2014, 7, 10)
        assert download_count.sources == {u'search': 2, u'cb-dl-bob': 1}

        # save_stats_to_file is called with a non-saved model.
        assert isinstance(
            mock_save_stats_to_file.call_args[0][0], DownloadCount)

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch(
        'olympia.stats.management.commands.theme_update_counts_from_file.'
        'save_stats_to_file')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_theme_update_counts_from_s3(self, mock_boto3,
                                         mock_save_stats_to_file):
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client
        management.call_command('theme_update_counts_from_file',
                                date=self.date, stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 1
        # Persona 813 has addon id 15663: we need the count to be the sum of
        # the "old" request on the persona_id 813 (only the one with the source
        # "gp") and the "new" request on the addon_id 15663.
        tuc2 = ThemeUpdateCount.objects.get(addon_id=15663)
        assert tuc2.count == 15

        assert mock_save_stats_to_file.call_count == 1

        # save_stats_to_file is called with a non-saved model.
        assert isinstance(
            mock_save_stats_to_file.call_args[0][0], ThemeUpdateCount)
Example #30
class TestADICommandS3(TransactionTestCase):
    fixtures = ('base/addon_3615', 'base/featured', 'addons/persona',
                'base/appversion.json')
    date = '2014-07-10'
    stats_source = 's3'

    def add_response(self, stat):
        stat_path = os.path.join(hive_folder, 'src', '%s.hive' % stat)
        data = get_stats_data(stat_path)
        response = {
            'Body': data,
        }
        expected_params = {'Bucket': 'test-bucket',
                           'Key': os.path.join('amo_stats', stat,
                                               self.date, '000000_0'),
                           'Range': ANY}
        self.stubber.add_response('get_object', response, expected_params)

    def setUp(self):
        self.client = boto3.client('s3')
        self.stubber = Stubber(self.client)
        self.stubber.activate()

    def tearDown(self):
        self.stubber.deactivate()

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_update_counts_from_s3(self, mock_boto3):
        stats = ['app', 'locale', 'os', 'status', 'version']

        for x in range(2):
            for stat in stats:
                self.add_response('update_counts_by_%s' % stat)

        mock_boto3.client.return_value = self.client
        management.call_command('update_counts_from_file',
                                date=self.date, stats_source=self.stats_source)

        assert UpdateCount.objects.all().count() == 1
        update_count = UpdateCount.objects.last()
        # should be identical to `statuses.userEnabled`
        assert update_count.count == 4
        assert update_count.date == date(2014, 7, 10)
        assert update_count.versions == {u'3.8': 2, u'3.7': 3}
        assert update_count.statuses == {u'userDisabled': 1, u'userEnabled': 4}
        application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        assert update_count.applications[application] == {u'3.6': 18}
        assert update_count.oses == {u'WINNT': 5}
        assert update_count.locales == {u'en-us': 1, u'en-US': 4}

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_download_counts_from_s3(self, mock_boto3):
        for x in range(2):
            self.add_response('download_counts')

        mock_boto3.client.return_value = self.client

        management.call_command('download_counts_from_file',
                                date=self.date, stats_source=self.stats_source)
        assert DownloadCount.objects.all().count() == 2
        download_count = DownloadCount.objects.get(addon_id=3615)
        assert download_count.count == 3
        assert download_count.date == date(2014, 7, 10)
        assert download_count.sources == {u'search': 2, u'cb-dl-bob': 1}

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_theme_update_counts_from_s3(self, mock_boto3):
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client
        management.call_command('theme_update_counts_from_file',
                                date=self.date, stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 1
        # Persona 813 has addon id 15663: we need the count to be the sum of
        # the "old" request on the persona_id 813 (only the one with the source
        # "gp") and the "new" request on the addon_id 15663.
        tuc2 = ThemeUpdateCount.objects.get(addon_id=15663)
        assert tuc2.count == 15

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_lwt_stats_go_to_migrated_static_theme(self, mock_boto3):
        lwt = Addon.objects.get(id=15663)
        lwt.delete()
        static_theme = addon_factory(type=amo.ADDON_STATICTHEME)
        MigratedLWT.objects.create(
            lightweight_theme=lwt, static_theme=static_theme)
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client
        management.call_command('theme_update_counts_from_file',
                                date=self.date, stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 0
        assert UpdateCount.objects.all().count() == 1
        assert UpdateCount.objects.get(addon_id=static_theme.id).count == 15

    @override_settings(AWS_STATS_S3_BUCKET='test-bucket')
    @mock.patch('olympia.stats.management.commands.boto3')
    def test_lwt_stats_go_to_migrated_with_stats_already(self, mock_boto3):
        lwt = Addon.objects.get(id=15663)
        lwt.delete()
        static_theme = addon_factory(type=amo.ADDON_STATICTHEME)
        MigratedLWT.objects.create(
            lightweight_theme=lwt, static_theme=static_theme)
        UpdateCount.objects.create(
            addon=static_theme, count=123, date=date(2014, 7, 10))
        for x in range(2):
            self.add_response('theme_update_counts')

        mock_boto3.client.return_value = self.client
        management.call_command('theme_update_counts_from_file',
                                date=self.date, stats_source=self.stats_source)
        assert ThemeUpdateCount.objects.all().count() == 0
        assert UpdateCount.objects.all().count() == 1
        assert UpdateCount.objects.get(addon_id=static_theme.id).count == 138
Example #31
class TestStubber(unittest.TestCase):
    def setUp(self):
        session = botocore.session.get_session()
        config = botocore.config.Config(signature_version=botocore.UNSIGNED)
        self.client = session.create_client('s3', config=config)

        self.stubber = Stubber(self.client)

    def test_stubber_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)
        self.stubber.activate()
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_activated_stubber_errors_with_no_registered_stubs(self):
        self.stubber.activate()
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

    def test_stubber_errors_when_stubs_are_used_up(self):
        self.stubber.add_response('list_objects', {})
        self.stubber.activate()
        self.client.list_objects(Bucket='foo')

        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

    def test_client_error_response(self):
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)
        self.stubber.activate()

        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_success(self):
        service_response = {}
        expected_params = {'Bucket': 'foo'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This should be called successfully with no errors being thrown
        # for mismatching expected params.
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This call should raise an error for mismatching expected params.
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_mixed_with_errors_responses(self):
        # Add an error response
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)

        # Add a response with incorrect expected params
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()

        # The first call should throw an error, as expected.
        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

        # The second call should throw an error for unexpected parameters
        with self.assertRaisesRegexp(StubResponseError, 'Expected parameters'):
            self.client.list_objects(Bucket='foo')

    def test_can_continue_to_call_after_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for unexpected parameters
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

        # The stubber should still have the responses queued up
        # even though the original parameters did not match the expected ones.
        self.client.list_objects(Bucket='bar')
        self.stubber.assert_no_pending_responses()

    def test_still_relies_on_param_validation_with_expected_params(self):
        service_response = {}
        expected_params = {'Buck': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(ParamValidationError):
            self.client.list_objects(Buck='bar')
def test_event_good(mocker):
    # Read test data
    event = utils.load_test_data(test_data + 'cis_1-3-iamuser1.json', my_region)

    sns_message = {
        'Note': 'Access key over 90 days old found: AKIAGHJGJFGHJFGETHFG',
        'State': 'INFO',
        'Account': '111111111111',
        'Remediation': 'Deactivate unused keys over 90 days old',
        'AffectedObject': 'Access Key: AKIAGHJGJFGHJFGETHFG',
        'metrics_data': {'status': 'INFO'},
    }

    iam_keys = {
        "AccessKeyMetadata": [
            {
                "UserName": "******",
                "AccessKeyId": "AKIAADFHWEREFGFHSDDF",
                "Status": "Active",
                "CreateDate": "2015-05-22T14:43:16+00:00"
            },
            {
                "UserName": "******",
                "AccessKeyId": "AKIAGHJGJFGHJFGETHFG",
                "Status": "Active",
                "CreateDate": "2020-05-15T15:20:04+00:00"
            }
        ]
    }

    # Mock the constructor. We don't need the session created
    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)
    mocker.patch('lib.awsapi_helpers.AWSClient.connect', return_value=None)

    # sess = BotoSession()
    iamc = boto3.client('iam', region_name=my_region)
    iamr = boto3.resource('iam', region_name=my_region)

    iamc_s = Stubber(iamc)
    iamr_s = Stubber(iamr.meta.client)
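    # A boto3 resource exposes its low-level client at resource.meta.client,
    # so it can be stubbed the same way as a plain client.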

    iamc_s.add_response(
        'list_access_keys',
        iam_keys
    )
    iamr_s.add_response(
        'update_access_key',
        {}
    )
    iam_keys['AccessKeyMetadata'][0]['Status'] = 'Inactive'
    iamc_s.add_response(
        'list_access_keys',
        iam_keys
    )

    iamr_s.add_response(
        'update_access_key',
        {}
    )

    iam_keys['AccessKeyMetadata'][0]['Status'] = 'Inactive'
    iamc_s.add_response(
        'list_access_keys',
        iam_keys
    )

    iamc_s.activate()
    iamr_s.activate()

    # Replace BotoSession client/resource with our stub
    mocker.patch('lib.awsapi_helpers.BotoSession.client', return_value=iamc)
    mocker.patch('lib.awsapi_helpers.BotoSession.resource', return_value=iamr)
    sns = mocker.patch('lib.awsapi_helpers.AWSClient.postit', return_value=None)

    # Mock Notifier
    resolve = mocker.patch('lib.sechub_findings.Finding.resolve')
    flag = mocker.patch('lib.sechub_findings.Finding.flag')

    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)

    cis1314.lambda_handler(event, None)
    flag.assert_called_once_with(
        'INITIAL: "Deactivate unused keys over 90 days old" remediation started'
    )
    resolve.assert_called_once_with(
        'RESOLVED: Remediation completed successfully, create new access keys using IAM console.'
    )
    sns.assert_called_with('SO0111-SHARR_Topic', sns_message, my_region)
Example No. 33
    def test_s3_delete(self):
        client = get_s3_client()
        stubber = Stubber(client)
        stubber.activate()

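        # Responses are handed back in the order they were added: put_object
        # for upload_to_s3, then list_objects and delete_object for
        # delete_from_s3.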
        stubber.add_response('put_object', self._base_response,
                             dict(Bucket=ANY, Key=self._path, Body='test'))

        list_objects_response = {
            'IsTruncated': False,
            'Name': 'test-bucket',
            'MaxKeys': 1000,
            'Prefix': '',
            'Contents': [{
                u'LastModified': datetime.datetime(2016, 9, 23, 11, 17, 14),
                u'ETag': '"20d2cb13afb394301bbea0bcff19e12b"',
                u'StorageClass': 'STANDARD',
                u'Key': self._path,
                u'Owner': {
                    u'DisplayName': 'test',
                    u'ID': '31d89f79718dbd4435290740e6fa5e41cffafa7d9a3c323c85b525342e6341ae'
                },
                u'Size': 77824
            }],
            'EncodingType': 'url',
            'ResponseMetadata': {
                'RequestId': 'abc123',
                'HTTPStatusCode': 200,
                'HostId': 'abc123'
            },
            'Marker': ''
        }
        stubber.add_response('list_objects', list_objects_response, {
            'Bucket': ANY,
            'Prefix': self._uuid
        })
        stubber.add_response('delete_object', self._base_response, {
            'Bucket': ANY,
            'Key': self._path
        })

        with patch('eventkit_cloud.utils.s3.open',
                   mock_open(read_data='test'),
                   create=True) as mock_open_obj:
            upload_to_s3(self._uuid,
                         self._filename,
                         self._filename,
                         client=client)

        delete_from_s3(self._uuid, client=client)
Example No. 34
                'DisplayName': 'string',
                'ID': 'string'
            }
        },
    ],
    'Name': 'string',
    'EncodingType': 'url',
    'KeyCount': 123,
    'ContinuationToken': 'string'
}
s3_stubber.add_response('list_objects_v2', list_objects_response)
s3_stubber.activate()

mock_sm_client = boto3.client('secretsmanager')
sm_stubber = Stubber(mock_sm_client)
mock_secret_value_response = {
    'ARN': 'arn:aws:secretsmanager:eu-west-7:123456789012:secret:tutorials/MyFirstSecret-jiObOV',
    'Name': 'string',
    'VersionId': 'EXAMPLE1-90ab-cdef-fedc-ba987EXAMPLE',
    'SecretBinary': b'{"azkaban_username": "******", "azkaban_password": "******"}',
    'CreatedDate': datetime(2015, 1, 1)
}
sm_stubber.add_response('get_secret_value', mock_secret_value_response)
sm_stubber.add_response('get_secret_value', mock_secret_value_response)
sm_stubber.activate()
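A minimal sketch of how code under test might read the stubbed secret, assuming the snippet's mock_sm_client is the client it uses; the SecretId value is illustrative (taken from the ARN in the canned response) and the secret values are masked in the source:

import json

# With sm_stubber active, this returns the canned response instead of calling AWS.
secret = mock_sm_client.get_secret_value(SecretId='tutorials/MyFirstSecret')
credentials = json.loads(secret['SecretBinary'])
# The snippet masks the values; real code would read
# credentials['azkaban_username'] and credentials['azkaban_password'].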
Example No. 35
def sns_stubber():
    sns_stubber = Stubber(sns)
    sns_stubber.activate()
    yield sns_stubber
    sns_stubber.deactivate()
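A minimal sketch of a test consuming the fixture above, assuming sns_stubber is registered as a pytest fixture and sns is the module-level boto3 SNS client it wraps; the topic ARN is illustrative:

def test_publish_uses_the_stub(sns_stubber):
    # Queue the canned publish response and the parameters we expect.
    sns_stubber.add_response(
        'publish',
        {'MessageId': 'stubbed-message-id'},
        {'TopicArn': 'arn:aws:sns:us-east-1:111111111111:example-topic',
         'Message': 'hello'},
    )
    response = sns.publish(
        TopicArn='arn:aws:sns:us-east-1:111111111111:example-topic',
        Message='hello',
    )
    assert response['MessageId'] == 'stubbed-message-id'
    sns_stubber.assert_no_pending_responses()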
Example No. 36
class StaxAuthTests(unittest.TestCase):
    """
    Inherited class to run all unit tests for this module
    """
    def setUp(self):
        self.cognito_client = botocore.session.get_session().create_client(
            "cognito-identity",
            region_name="ap-southeast-2",
            config=BotoConfig(signature_version=UNSIGNED),
        )
        self.cognito_stub = Stubber(self.cognito_client)

        self.aws_srp_client = botocore.session.get_session().create_client(
            "cognito-idp",
            region_name="ap-southeast-2",
            config=BotoConfig(signature_version=UNSIGNED),
        )
        self.aws_srp_stubber = Stubber(self.aws_srp_client)

    def tearDown(self):
        self.cognito_stub.deactivate()
        self.aws_srp_stubber.deactivate()

    def testStaxAuthInit(self):
        """
        Test to initialise StaxAuth
        """
        sa = StaxAuth("ApiAuth")
        self.assertEqual(sa.aws_region, "ap-southeast-2")

    def testToken(self):
        """
        Test valid JWT is returned
        """
        sa = StaxAuth("ApiAuth")
        self.stub_aws_srp(sa, "valid_username")
        token = sa.id_token_from_cognito(
            username="******",
            password="******",
            srp_client=self.aws_srp_client,
        )
        self.assertEqual(token, "valid_token")

    def testTokenClient(self):
        """
        Test the AWSSRP client is invoked and throws an error
        """
        sa = StaxAuth("ApiAuth")
        with self.assertRaises(InvalidCredentialsException):
            sa.id_token_from_cognito(username="******", password="******")

    def testCredentialErrors(self):
        """
        Test that boto errors are caught and converted to InvalidCredentialExceptions
        """
        sa = StaxAuth("ApiAuth")
        # Test with invalid username password
        self.stub_aws_srp(sa, "bad_password", "NotAuthorizedException")
        user_not_found_success = False
        try:
            sa.id_token_from_cognito(
                username="******",
                password="******",
                srp_client=self.aws_srp_client,
            )
        except InvalidCredentialsException as e:
            self.assertIn("Please check your Secret Key is correct", str(e))
            user_not_found_success = True
        self.assertTrue(user_not_found_success)

        # Test with no access
        self.stub_aws_srp(sa, "no_access", "UserNotFoundException")
        no_access_success = False
        try:
            sa.id_token_from_cognito(username="******",
                                     password="******",
                                     srp_client=self.aws_srp_client)
        except InvalidCredentialsException as e:
            self.assertIn(
                "Please check your Access Key, that you have created your Api Token and that you are using the right STAX REGION",
                str(e),
            )
            no_access_success = True
        self.assertTrue(no_access_success)

        # Test Unknown Error
        self.stub_aws_srp(sa, "Unknown", "UnitTesting")
        with self.assertRaises(InvalidCredentialsException):
            sa.id_token_from_cognito(username="******",
                                     password="******",
                                     srp_client=self.aws_srp_client)

    def testCreds(self):
        """
        Test valid credentials are returned
        """
        sa = StaxAuth("ApiAuth")
        token = jwt.encode({"sub": "unittest"}, "secret", algorithm="HS256")
        jwt_token = jwt.decode(token, verify=False)
        self.stub_cognito_creds(sa, jwt_token.get("sub"))
        creds = sa.sts_from_cognito_identity_pool(jwt_token.get("sub"),
                                                  self.cognito_client)
        self.assertIn("Credentials", creds)
        self.assertTrue(creds.get("IdentityId").startswith("ap-southeast-2"))

    def testCredsClient(self):
        """
        Test the cognito client is invoked and throws an error
        """
        sa = StaxAuth("ApiAuth")

        # Test Invalid Credentials
        token = jwt.encode({"sub": "unittest"}, "secret", algorithm="HS256")
        jwt_token = jwt.decode(token, verify=False)
        with self.assertRaises(InvalidCredentialsException):
            sa.sts_from_cognito_identity_pool(jwt_token.get("sub"))

        # Test "Couldn't verify signed token" retry
        expected_parameters = {
            "IdentityPoolId": sa.identity_pool,
            "Logins": {
                f"cognito-idp.{sa.aws_region}.amazonaws.com/{sa.user_pool}":
                "unittest"
            }
        }
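        # Queue one error per allowed retry so the retry loop is exhausted and
        # the call surfaces InvalidCredentialsException.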
        for i in range(sa.max_retries):
            self.cognito_stub.add_client_error(
                "get_id",
                service_error_code="NotAuthorizedException",
                service_message="Invalid login token. Couldn't verify signed token.",
                expected_params=expected_parameters,
            )
        self.cognito_stub.activate()

        with self.assertRaises(InvalidCredentialsException) as e:
            sa.sts_from_cognito_identity_pool(
                jwt_token.get("sub"), cognito_client=self.cognito_client)

        self.assertEqual(
            str(e.exception),
            "InvalidCredentialsException: Retries Exceeded: Unexpected Client Error"
        )
        self.assertEqual(len(self.cognito_stub._queue), 0)

    def testAuthErrors(self):
        """
        Test that errors are thrown when keys are invalid
        """
        sa = StaxAuth("ApiAuth")
        # Test with no username
        with self.assertRaises(InvalidCredentialsException):
            sa.requests_auth(username=None, password="******")

        # Test with no password
        with self.assertRaises(InvalidCredentialsException):
            sa.requests_auth(username="******", password=None)

    def stub_aws_srp(self, sa, username, error_code=None):
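        # Stub the two-step Cognito SRP flow: initiate_auth returns a
        # PASSWORD_VERIFIER challenge and respond_to_auth_challenge returns the
        # IdToken; when error_code is given, initiate_auth fails with that error.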
        expected_parameters = {
            "AuthFlow": "USER_SRP_AUTH",
            "AuthParameters": {
                "SRP_A": ANY,
                "USERNAME": username
            },
            "ClientId": sa.client_id,
        }
        if error_code:
            self.aws_srp_stubber.add_client_error(
                "initiate_auth",
                service_error_code=error_code,
                expected_params=expected_parameters,
            )
        else:
            self.aws_srp_stubber.add_response(
                "initiate_auth",
                {
                    "ChallengeParameters": {
                        "USER_ID_FOR_SRP": "user",
                        "SALT": "4",
                        "SRP_B": "5",
                        "SECRET_BLOCK": "secblock",
                    },
                    "ChallengeName": "PASSWORD_VERIFIER",
                },
                expected_parameters,
            )
            self.aws_srp_stubber.add_response(
                "respond_to_auth_challenge",
                {
                    "AuthenticationResult": {
                        "IdToken": "valid_token"
                    },
                },
                {
                    "ClientId": sa.client_id,
                    "ChallengeName": ANY,
                    "ChallengeResponses": ANY,
                },
            )
        self.aws_srp_stubber.activate()

    def stub_cognito_creds(self, sa, token: str):

        id_response = {"IdentityId": "ap-southeast-2"}
        id_params = {
            "IdentityPoolId": sa.identity_pool,
            "Logins": {
                f"cognito-idp.{sa.aws_region}.amazonaws.com/{sa.user_pool}":
                token
            },
        }
        self.cognito_stub.add_response("get_id", id_response, id_params)

        id_creds_response = {
            "IdentityId": id_response["IdentityId"],
            "Credentials": {
                "AccessKeyId": "ASIAX000000000000000",
                "SecretKey": "0000000000000000000000000000000000000000",
                "SessionToken": "a-totally-valid-JWT",
                "Expiration": datetime(2020, 1, 14, 11, 52, 26),
            },
        }
        id_creds_params = {
            "IdentityId": id_response["IdentityId"],
            "Logins": {
                f"cognito-idp.{sa.aws_region}.amazonaws.com/{sa.user_pool}":
                token
            },
        }
        self.cognito_stub.add_response("get_credentials_for_identity",
                                       id_creds_response, id_creds_params)

        self.cognito_stub.activate()

    @responses.activate
    def testSigV4Headers(self):
        """
        Test sigv4 signed auth headers
        """
        # Get signed auth headers
        sa = StaxAuth("ApiAuth")
        id_creds = {
            "Credentials": {
                "AccessKeyId": "ASIAX000000000000000",
                "SecretKey": "0000000000000000000000000000000000000000",
                "SessionToken": "a-totally-valid-JWT",
                "Expiration": datetime(2020, 1, 14, 11, 52, 26),
            }
        }
        auth = sa.sigv4_signed_auth_headers(id_creds)

        # Mock request
        response_dict = {"Status": "OK"}
        responses.add(
            responses.GET,
            f"{Config.api_base_url()}/auth",
            json=response_dict,
            status=200,
        )
        response = requests.get(f"{Config.api_base_url()}/auth", auth=auth)
        self.assertEqual(response.json(), response_dict)
        self.assertIn("Authorization", response.request.headers)

    def testApiTokenAuthNotExpired(self):
        """
        Test credentials have not expired
        """
        StaxConfig = Config
        StaxConfig.expiration = datetime.now(timezone.utc) + timedelta(hours=8)
        self.assertIsNotNone(StaxConfig.expiration)

        ApiTokenAuth.requests_auth("username", "password")
        self.assertIsNotNone(StaxConfig.auth)

    def testApiTokenAuth(self):
        """
        Test generating new credentials
        """
        sa = StaxAuth("ApiAuth")
        StaxConfig = Config
        StaxConfig.expiration = None
        token = jwt.encode({"sub": "valid_token"}, "secret", algorithm="HS256")
        jwt_token = jwt.decode(token, verify=False)
        self.stub_cognito_creds(sa, jwt_token.get("sub"))
        self.stub_aws_srp(sa, "username")

        ApiTokenAuth.requests_auth(
            "username",
            "password",
            srp_client=self.aws_srp_client,
            cognito_client=self.cognito_client,
        )
        self.assertIsNotNone(StaxConfig.auth)

    @patch("test_auth.StaxAuth.requests_auth")
    def testApiTokenAuthExpiring(self, requests_auth_mock):
        """
        Test credentials close to expiration get refreshed
        """
        sa = StaxAuth("ApiAuth")
        StaxConfig = Config
        ## expiration 20 minutes in the future, no need to refresh
        StaxConfig.expiration = datetime.now(
            timezone.utc) + timedelta(minutes=20)

        ApiTokenAuth.requests_auth(
            "username",
            "password",
            srp_client=self.aws_srp_client,
            cognito_client=self.cognito_client,
        )
        requests_auth_mock.assert_not_called()

        requests_auth_mock.reset_mock()
        ## expiration 5 seconds from now; refresh so the token does not go stale
        StaxConfig.expiration = datetime.now(
            timezone.utc) + timedelta(seconds=5)

        ApiTokenAuth.requests_auth(
            "username",
            "password",
            srp_client=self.aws_srp_client,
            cognito_client=self.cognito_client,
        )
        requests_auth_mock.assert_called_once()

        requests_auth_mock.reset_mock()
        ## expiration 2 minutes from now; raise the refresh threshold to 10 minutes
        ## so the library refreshes the token before it goes stale
        environ["TOKEN_EXPIRY_THRESHOLD_IN_MINS"] = "10"
        StaxConfig.expiration = datetime.now(
            timezone.utc) + timedelta(minutes=2)

        ApiTokenAuth.requests_auth(
            "username",
            "password",
            srp_client=self.aws_srp_client,
            cognito_client=self.cognito_client,
        )
        requests_auth_mock.assert_called_once()

    def testRootAuthNotExpired(self):
        """
        Test credentials have not expired
        """
        StaxConfig = Config
        StaxConfig.expiration = datetime.now(timezone.utc) + timedelta(hours=8)
        self.assertIsNotNone(StaxConfig.expiration)

        RootAuth.requests_auth("username", "password")
        self.assertIsNotNone(StaxConfig.auth)

    def testRootAuth(self):
        """
        Test generating new credentials
        """
        sa = StaxAuth("JumaAuth")
        StaxConfig = Config
        StaxConfig.expiration = None
        token = jwt.encode({"sub": "valid_token"}, "secret", algorithm="HS256")
        jwt_token = jwt.decode(token, verify=False)
        self.stub_cognito_creds(sa, jwt_token.get("sub"))
        self.stub_aws_srp(sa, "username")

        RootAuth.requests_auth(
            "username",
            "password",
            srp_client=self.aws_srp_client,
            cognito_client=self.cognito_client,
        )
        self.assertIsNotNone(StaxConfig.auth)

    def testApiAuth(self):
        """
        Test auth through the Api class
        """
        sa = StaxAuth("ApiAuth")
        StaxConfig = Config
        StaxConfig.expiration = None
        StaxConfig.access_key = "username"
        StaxConfig.secret_key = "password"

        token = jwt.encode({"sub": "valid_token"}, "secret", algorithm="HS256")
        jwt_token = jwt.decode(token, verify=False)

        self.stub_cognito_creds(sa, jwt_token.get("sub"))
        self.stub_aws_srp(sa, "username")

        Api._requests_auth = None
        Api._auth(
            srp_client=self.aws_srp_client,
            cognito_client=self.cognito_client,
        )
        self.assertIsNotNone(Api._requests_auth)
class TestConfigServiceMetricsCollector(unittest.TestCase):
    def setUp(self):
        self.gdClient = boto3.client("config")
        self.gdStubber = Stubber(self.gdClient)
        self.gdStubber.activate()

        self.botoSessionMock = MagicMock()
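        # Any client created through the mocked Session is the stubbed Config
        # client above, so the collector never talks to AWS.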
        self.botoSessionMock.client.return_value = self.gdClient

    def testCollectShouldReturnCurrentFindingsMetricFromSingleRegionWithSingleDetectorOnSuccess(
            self):
        # Mock Config Service
        self.gdStubber.add_response("get_discovered_resource_counts", {
            "totalDiscoveredResources": 0,
            "resourceCounts": []
        }, {})

        self.gdStubber.add_response(
            "get_compliance_summary_by_resource_type", {
                "ComplianceSummariesByResourceType": [{
                    "ComplianceSummary": {
                        "CompliantResourceCount": {
                            "CappedCount": 2,
                            "CapExceeded": False
                        },
                        "NonCompliantResourceCount": {
                            "CappedCount": 11,
                            "CapExceeded": False
                        },
                        "ComplianceSummaryTimestamp": 1565880614.85
                    }
                }]
            }, {})

        # Collect metrics
        with patch("boto3.session.Session", return_value=self.botoSessionMock):
            collector = ConfigServiceMetricsCollector(regions=["eu-west-1"])
            metrics = collector.collect()

        currentResourcesMetric = metrics[0]
        self.assertEqual(currentResourcesMetric.name,
                         "aws_config_current_resources")
        self.assertEqual(currentResourcesMetric.type, "gauge")
        self.assertEqual(len(currentResourcesMetric.samples), 3)

        self.assertEqual(currentResourcesMetric.samples[0].value, 1)
        self.assertEqual(currentResourcesMetric.samples[0].labels, {
            "region": "eu-west-1",
            "severity": "low"
        })
        self.assertEqual(currentResourcesMetric.samples[1].value, 2)
        self.assertEqual(currentResourcesMetric.samples[1].labels, {
            "region": "eu-west-1",
            "severity": "medium"
        })
        self.assertEqual(currentResourcesMetric.samples[2].value, 3)
        self.assertEqual(currentResourcesMetric.samples[2].labels, {
            "region": "eu-west-1",
            "severity": "high"
        })

        scrapeErrorsMetric = metrics[1]
        self.assertEqual(scrapeErrorsMetric.name,
                         "aws_configservice_scrape_errors")
        self.assertEqual(scrapeErrorsMetric.type, "counter")
        self.assertEqual(len(scrapeErrorsMetric.samples), 1)

        self.assertEqual(scrapeErrorsMetric.samples[0].value, 0)
        self.assertEqual(scrapeErrorsMetric.samples[0].labels,
                         {"region": "eu-west-1"})

        self.gdStubber.assert_no_pending_responses()
Example No. 38
class TestStubber(unittest.TestCase):
    def setUp(self):
        session = botocore.session.get_session()
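        # Path-style addressing keeps the bucket in the URL path; the presigned
        # URL tests below rely on this (https://s3.amazonaws.com/<bucket>/<key>).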
        config = botocore.config.Config(signature_version=botocore.UNSIGNED,
                                        s3={'addressing_style': 'path'})
        self.client = session.create_client('s3',
                                            region_name='us-east-1',
                                            config=config)
        self.stubber = Stubber(self.client)

    def test_stubber_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)
        self.stubber.activate()
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_context_manager_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)

        with self.stubber:
            response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_activated_stubber_errors_with_no_registered_stubs(self):
        self.stubber.activate()
        # Params one per line for readability.
        with self.assertRaisesRegexp(UnStubbedResponseError,
                                     "Unexpected API Call"):
            self.client.list_objects(Bucket='asdfasdfasdfasdf',
                                     Delimiter='asdfasdfasdfasdf',
                                     Prefix='asdfasdfasdfasdf',
                                     EncodingType='url')

    def test_stubber_errors_when_stubs_are_used_up(self):
        self.stubber.add_response('list_objects', {})
        self.stubber.activate()
        self.client.list_objects(Bucket='foo')

        with self.assertRaises(UnStubbedResponseError):
            self.client.list_objects(Bucket='foo')

    def test_client_error_response(self):
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error('list_objects', error_code,
                                      error_message)
        self.stubber.activate()

        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

    def test_can_add_expected_params_to_client_error(self):
        self.stubber.add_client_error('list_objects',
                                      'Error',
                                      'error',
                                      expected_params={'Bucket': 'foo'})
        self.stubber.activate()
        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

    def test_can_expected_param_fails_in_client_error(self):
        self.stubber.add_client_error('list_objects',
                                      'Error',
                                      'error',
                                      expected_params={'Bucket': 'foo'})
        self.stubber.activate()
        # We expect an AssertionError instead of a ClientError
        # because we're calling the operation with the wrong
        # param value.
        with self.assertRaises(AssertionError):
            self.client.list_objects(Bucket='wrong-argument-value')

    def test_expected_params_success(self):
        service_response = {}
        expected_params = {'Bucket': 'foo'}
        self.stubber.add_response('list_objects', service_response,
                                  expected_params)
        self.stubber.activate()
        # This should be called successfully with no errors being thrown
        # for mismatching expected params.
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response('list_objects', service_response,
                                  expected_params)
        self.stubber.activate()
        # This call should raise an error for mismatching expected params.
        with self.assertRaisesRegexp(StubResponseError,
                                     "{'Bucket': 'bar'},\n"):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_mixed_with_errors_responses(self):
        # Add an error response
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error('list_objects', error_code,
                                      error_message)

        # Add a response with incorrect expected params
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response('list_objects', service_response,
                                  expected_params)

        self.stubber.activate()

        # The first call should throw an error as expected.
        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

        # The second call should throw an error for unexpected parameters
        with self.assertRaisesRegexp(StubResponseError, 'Expected parameters'):
            self.client.list_objects(Bucket='foo')

    def test_can_continue_to_call_after_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}

        self.stubber.add_response('list_objects', service_response,
                                  expected_params)

        self.stubber.activate()
        # Throw an error for unexpected parameters
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

        # The stubber should still have the responses queued up
        # even though the original parameters did not match the expected ones.
        self.client.list_objects(Bucket='bar')
        self.stubber.assert_no_pending_responses()

    def test_still_relies_on_param_validation_with_expected_params(self):
        service_response = {}
        expected_params = {'Buck': 'bar'}

        self.stubber.add_response('list_objects', service_response,
                                  expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(ParamValidationError):
            self.client.list_objects(Buck='bar')

    def test_any_ignores_param_for_validation(self):
        service_response = {}
        expected_params = {'Bucket': stub.ANY}

        self.stubber.add_response('list_objects', service_response,
                                  expected_params)
        self.stubber.add_response('list_objects', service_response,
                                  expected_params)

        try:
            with self.stubber:
                self.client.list_objects(Bucket='foo')
                self.client.list_objects(Bucket='bar')
        except StubAssertionError:
            self.fail("stub.ANY failed to ignore parameter for validation.")

    def test_mixed_any_and_concrete_params(self):
        service_response = {}
        expected_params = {'Bucket': stub.ANY, 'Key': 'foo.txt'}

        self.stubber.add_response('head_object', service_response,
                                  expected_params)
        self.stubber.add_response('head_object', service_response,
                                  expected_params)

        try:
            with self.stubber:
                self.client.head_object(Bucket='foo', Key='foo.txt')
                self.client.head_object(Bucket='bar', Key='foo.txt')
        except StubAssertionError:
            self.fail("stub.ANY failed to ignore parameter for validation.")

    def test_nested_any_param(self):
        service_response = {}
        expected_params = {
            'Bucket': 'foo',
            'Key': 'bar.txt',
            'Metadata': {
                'MyMeta': stub.ANY,
            }
        }

        self.stubber.add_response('put_object', service_response,
                                  expected_params)
        self.stubber.add_response('put_object', service_response,
                                  expected_params)

        try:
            with self.stubber:
                self.client.put_object(Bucket='foo',
                                       Key='bar.txt',
                                       Metadata={
                                           'MyMeta': 'Foo',
                                       })
                self.client.put_object(Bucket='foo',
                                       Key='bar.txt',
                                       Metadata={
                                           'MyMeta': 'Bar',
                                       })
        except StubAssertionError:
            self.fail(
                "stub.ANY failed to ignore nested parameter for validation.")

    def test_ANY_repr(self):
        self.assertEqual(repr(stub.ANY), '<ANY>')

    def test_none_param(self):
        service_response = {}
        expected_params = {'Buck': None}

        self.stubber.add_response('list_objects', service_response,
                                  expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(StubAssertionError):
            self.client.list_objects(Buck='bar')

    def test_many_expected_params(self):
        service_response = {}
        expected_params = {
            'Bucket': 'mybucket',
            'Prefix': 'myprefix',
            'Delimiter': '/',
            'EncodingType': 'url'
        }
        self.stubber.add_response('list_objects', service_response,
                                  expected_params)
        try:
            with self.stubber:
                self.client.list_objects(**expected_params)
        except StubAssertionError:
            self.fail(
                "Stubber inappropriately raised error for same parameters.")

    def test_no_stub_for_presign_url(self):
        try:
            with self.stubber:
                url = self.client.generate_presigned_url(
                    ClientMethod='get_object',
                    Params={
                        'Bucket': 'mybucket',
                        'Key': 'mykey'
                    })
                self.assertEqual(url,
                                 'https://s3.amazonaws.com/mybucket/mykey')
        except StubResponseError:
            self.fail(
                'Stubbed responses should not be required for generating '
                'presigned requests')

    def test_can_stub_with_presign_url_mixed_in(self):
        desired_response = {}
        expected_params = {
            'Bucket': 'mybucket',
            'Prefix': 'myprefix',
        }
        self.stubber.add_response('list_objects', desired_response,
                                  expected_params)
        with self.stubber:
            url = self.client.generate_presigned_url(
                ClientMethod='get_object',
                Params={'Bucket': 'myotherbucket', 'Key': 'myotherkey'})
            self.assertEqual(
                url, 'https://s3.amazonaws.com/myotherbucket/myotherkey')
            actual_response = self.client.list_objects(**expected_params)
            self.assertEqual(desired_response, actual_response)
        self.stubber.assert_no_pending_responses()

    def test_parse_get_bucket_location(self):
        error_code = "NoSuchBucket"
        error_message = "The specified bucket does not exist"
        self.stubber.add_client_error('get_bucket_location', error_code,
                                      error_message)
        self.stubber.activate()

        with self.assertRaises(ClientError):
            self.client.get_bucket_location(Bucket='foo')

    def test_parse_get_bucket_location_returns_response(self):
        service_response = {"LocationConstraint": "us-west-2"}
        self.stubber.add_response('get_bucket_location', service_response)
        self.stubber.activate()
        response = self.client.get_bucket_location(Bucket='foo')
        self.assertEqual(response, service_response)
Example No. 39
class TestStubber(unittest.TestCase):
    def setUp(self):
        session = botocore.session.get_session()
        config = botocore.config.Config(signature_version=botocore.UNSIGNED)
        self.client = session.create_client('s3', config=config)

        self.stubber = Stubber(self.client)

    def test_stubber_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)
        self.stubber.activate()
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_context_manager_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)

        with self.stubber:
            response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_activated_stubber_errors_with_no_registered_stubs(self):
        self.stubber.activate()
        # Params one per line for readability.
        with self.assertRaisesRegexp(StubResponseError,
                                     "'Bucket': 'asdfasdfasdfasdf',\n"):
            self.client.list_objects(
                Bucket='asdfasdfasdfasdf',
                Delimiter='asdfasdfasdfasdf',
                Prefix='asdfasdfasdfasdf',
                EncodingType='url')

    def test_stubber_errors_when_stubs_are_used_up(self):
        self.stubber.add_response('list_objects', {})
        self.stubber.activate()
        self.client.list_objects(Bucket='foo')

        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

    def test_client_error_response(self):
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)
        self.stubber.activate()

        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_success(self):
        service_response = {}
        expected_params = {'Bucket': 'foo'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This should be called successfully with no errors being thrown
        # for mismatching expected params.
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This call should raise an error for mismatching expected params.
        with self.assertRaisesRegexp(StubResponseError,
                                     "{'Bucket': 'bar'},\n"):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_mixed_with_errors_responses(self):
        # Add an error response
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)

        # Add a response with incorrect expected params
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()

        # The first call should throw an error as expected.
        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

        # The second call should throw an error for unexpected parameters
        with self.assertRaisesRegexp(StubResponseError, 'Expected parameters'):
            self.client.list_objects(Bucket='foo')

    def test_can_continue_to_call_after_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for unexpected parameters
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

        # The stubber should still have the responses queued up
        # even though the original parameters did not match the expected ones.
        self.client.list_objects(Bucket='bar')
        self.stubber.assert_no_pending_responses()

    def test_still_relies_on_param_validation_with_expected_params(self):
        service_response = {}
        expected_params = {'Buck': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(ParamValidationError):
            self.client.list_objects(Buck='bar')

    def test_any_ignores_param_for_validation(self):
        service_response = {}
        expected_params = {'Bucket': stub.ANY}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        try:
            with self.stubber:
                self.client.list_objects(Bucket='foo')
                self.client.list_objects(Bucket='bar')
        except StubAssertionError:
            self.fail("stub.ANY failed to ignore parameter for validation.")

    def test_mixed_any_and_concrete_params(self):
        service_response = {}
        expected_params = {'Bucket': stub.ANY, 'Key': 'foo.txt'}

        self.stubber.add_response(
            'head_object', service_response, expected_params)
        self.stubber.add_response(
            'head_object', service_response, expected_params)

        try:
            with self.stubber:
                self.client.head_object(Bucket='foo', Key='foo.txt')
                self.client.head_object(Bucket='bar', Key='foo.txt')
        except StubAssertionError:
            self.fail("stub.ANY failed to ignore parameter for validation.")

    def test_none_param(self):
        service_response = {}
        expected_params = {'Buck': None}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(StubAssertionError):
            self.client.list_objects(Buck='bar')

    def test_many_expected_params(self):
        service_response = {}
        expected_params = {
            'Bucket': 'mybucket',
            'Prefix': 'myprefix',
            'Delimiter': '/',
            'EncodingType': 'url'
        }
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        try:
            with self.stubber:
                self.client.list_objects(**expected_params)
        except StubAssertionError:
            self.fail(
                "Stubber inappropriately raised error for same parameters.")
Example No. 40
def test_event_good(mocker):
    #--------------------------
    # Test data
    #
    event = utils.load_test_data(test_data + 'cis43.json', my_region)

    sns_message = {
        'Note': '"Remove all rules from the default security group" remediation was successful',
        'State': 'RESOLVED',
        'Account': '111111111111',
        'Remediation': 'Remove all rules from the default security group',
        'AffectedObject': 'Security Group: sg-02cfbecbc814a3c24',
        'metrics_data': {'status': 'RESOLVED'}
    }

    desc_sg = {
        "SecurityGroups": [{
            "Description":
            "Default SG",
            "GroupName":
            "SC-111111111111-pp-gz465ubujkfrs-SandboxSecurityGroup-175ZDF23V5MGX",
            "IpPermissions": [{
                "FromPort": 80,
                "IpProtocol": "tcp",
                "IpRanges": [{
                    "CidrIp": "0.0.0.0/0"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "ToPort": 80,
                "UserIdGroupPairs": []
            }, {
                "FromPort": 9000,
                "IpProtocol": "tcp",
                "IpRanges": [{
                    "CidrIp": "72.21.198.65/32"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "ToPort": 9000,
                "UserIdGroupPairs": []
            }, {
                "FromPort": 22,
                "IpProtocol": "tcp",
                "IpRanges": [{
                    "CidrIp": "0.0.0.0/0"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "ToPort": 22,
                "UserIdGroupPairs": []
            }],
            "OwnerId":
            "123412341234",
            "GroupId":
            "sg-006bf520b9581b2d9",
            "IpPermissionsEgress": [{
                "IpProtocol": "-1",
                "IpRanges": [{
                    "CidrIp": "0.0.0.0/0"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "UserIdGroupPairs": []
            }],
            "Tags": [],
            "VpcId":
            "vpc-11111113"
        }]
    }

    #--------------------------
    # Mock/stub
    #
    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)
    mocker.patch('lib.awsapi_helpers.AWSClient.connect', return_value=None)

    # Mock the boto client and replace the BotoSession client with our stub
    awsc = boto3.resource('ec2')
    awsc_s = Stubber(awsc.meta.client)
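    # Responses are consumed in order: the remediation describes the default
    # security group, then revokes its ingress and egress rules.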
    awsc_s.add_response('describe_security_groups', desc_sg)
    awsc_s.add_response('revoke_security_group_ingress', {})
    awsc_s.add_response('revoke_security_group_egress', {})
    awsc_s.activate()
    mocker.patch('lib.awsapi_helpers.BotoSession.resource', return_value=awsc)

    sns = mocker.patch('lib.awsapi_helpers.AWSClient.postit',
                       return_value=None)

    # Mock Notifier
    init = mocker.patch('lib.sechub_findings.Finding.flag')
    resolve = mocker.patch('lib.sechub_findings.Finding.resolve')

    # Prevent flushing to logs
    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)

    #--------------------------
    # Run the lambda
    #
    cis43.lambda_handler(event, None)
    init.assert_called_once_with(
        'INITIAL: "Remove all rules from the default security group" remediation started'
    )
    resolve.assert_called_once_with(
        'RESOLVED: "Remove all rules from the default security group" remediation was successful'
    )
    sns.assert_called_with('SO0111-SHARR_Topic', sns_message, my_region)
Example No. 41
class TestStubber(unittest.TestCase):
    def setUp(self):
        session = botocore.session.get_session()
        config = botocore.config.Config(
            signature_version=botocore.UNSIGNED,
            s3={'addressing_style': 'path'}
        )
        self.client = session.create_client(
            's3', region_name='us-east-1', config=config)
        self.stubber = Stubber(self.client)

    def test_stubber_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)
        self.stubber.activate()
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_context_manager_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)

        with self.stubber:
            response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_activated_stubber_errors_with_no_registered_stubs(self):
        self.stubber.activate()
        # Params one per line for readability.
        with self.assertRaisesRegexp(UnStubbedResponseError,
                                     "Unexpected API Call"):
            self.client.list_objects(
                Bucket='asdfasdfasdfasdf',
                Delimiter='asdfasdfasdfasdf',
                Prefix='asdfasdfasdfasdf',
                EncodingType='url')

    def test_stubber_errors_when_stubs_are_used_up(self):
        self.stubber.add_response('list_objects', {})
        self.stubber.activate()
        self.client.list_objects(Bucket='foo')

        with self.assertRaises(UnStubbedResponseError):
            self.client.list_objects(Bucket='foo')

    def test_client_error_response(self):
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)
        self.stubber.activate()

        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

    def test_can_add_expected_params_to_client_error(self):
        self.stubber.add_client_error(
            'list_objects', 'Error', 'error',
            expected_params={'Bucket': 'foo'}
        )
        self.stubber.activate()
        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

    def test_can_expected_param_fails_in_client_error(self):
        self.stubber.add_client_error(
            'list_objects', 'Error', 'error',
            expected_params={'Bucket': 'foo'}
        )
        self.stubber.activate()
        # We expect an AssertionError instead of a ClientError
        # because we're calling the operation with the wrong
        # param value.
        with self.assertRaises(AssertionError):
            self.client.list_objects(Bucket='wrong-argument-value')

    def test_expected_params_success(self):
        service_response = {}
        expected_params = {'Bucket': 'foo'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This should be called successfully with no errors being thrown
        # for mismatching expected params.
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This call should raise an error for mismatching expected params.
        with self.assertRaisesRegexp(StubResponseError,
                                     "{'Bucket': 'bar'},\n"):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_mixed_with_errors_responses(self):
        # Add an error response
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)

        # Add a response with incorrect expected params
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()

        # The first call should throw an error as expected.
        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

        # The second call should throw an error for unexpected parameters
        with self.assertRaisesRegexp(StubResponseError, 'Expected parameters'):
            self.client.list_objects(Bucket='foo')

    def test_can_continue_to_call_after_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for unexpected parameters
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

        # The stubber should still have the responses queued up
        # even though the original parameters did not match the expected ones.
        self.client.list_objects(Bucket='bar')
        self.stubber.assert_no_pending_responses()

    def test_still_relies_on_param_validation_with_expected_params(self):
        service_response = {}
        expected_params = {'Buck': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(ParamValidationError):
            self.client.list_objects(Buck='bar')

    def test_any_ignores_param_for_validation(self):
        service_response = {}
        expected_params = {'Bucket': stub.ANY}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        try:
            with self.stubber:
                self.client.list_objects(Bucket='foo')
                self.client.list_objects(Bucket='bar')
        except StubAssertionError:
            self.fail("stub.ANY failed to ignore parameter for validation.")

    def test_mixed_any_and_concrete_params(self):
        service_response = {}
        expected_params = {'Bucket': stub.ANY, 'Key': 'foo.txt'}

        self.stubber.add_response(
            'head_object', service_response, expected_params)
        self.stubber.add_response(
            'head_object', service_response, expected_params)

        try:
            with self.stubber:
                self.client.head_object(Bucket='foo', Key='foo.txt')
                self.client.head_object(Bucket='bar', Key='foo.txt')
        except StubAssertionError:
            self.fail("stub.ANY failed to ignore parameter for validation.")

    def test_nested_any_param(self):
        service_response = {}
        expected_params = {
            'Bucket': 'foo',
            'Key': 'bar.txt',
            'Metadata': {
                'MyMeta': stub.ANY,
            }
        }

        self.stubber.add_response(
            'put_object', service_response, expected_params)
        self.stubber.add_response(
            'put_object', service_response, expected_params)

        try:
            with self.stubber:
                self.client.put_object(
                    Bucket='foo',
                    Key='bar.txt',
                    Metadata={
                        'MyMeta': 'Foo',
                    }
                )
                self.client.put_object(
                    Bucket='foo',
                    Key='bar.txt',
                    Metadata={
                        'MyMeta': 'Bar',
                    }
                )
        except StubAssertionError:
            self.fail(
                "stub.ANY failed to ignore nested parameter for validation.")

    def test_ANY_repr(self):
        self.assertEqual(repr(stub.ANY), '<ANY>')

    def test_none_param(self):
        service_response = {}
        expected_params = {'Buck': None}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(StubAssertionError):
            self.client.list_objects(Buck='bar')

    def test_many_expected_params(self):
        service_response = {}
        expected_params = {
            'Bucket': 'mybucket',
            'Prefix': 'myprefix',
            'Delimiter': '/',
            'EncodingType': 'url'
        }
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        try:
            with self.stubber:
                self.client.list_objects(**expected_params)
        except StubAssertionError:
            self.fail(
                "Stubber inappropriately raised error for same parameters.")

    def test_no_stub_for_presign_url(self):
        try:
            with self.stubber:
                url = self.client.generate_presigned_url(
                    ClientMethod='get_object',
                    Params={
                        'Bucket': 'mybucket',
                        'Key': 'mykey'
                    }
                )
                self.assertEqual(
                    url, 'https://s3.amazonaws.com/mybucket/mykey')
        except StubResponseError:
            self.fail(
                'Stubbed responses should not be required for generating '
                'presigned requests'
            )

    def test_can_stub_with_presign_url_mixed_in(self):
        desired_response = {}
        expected_params = {
            'Bucket': 'mybucket',
            'Prefix': 'myprefix',
        }
        self.stubber.add_response(
            'list_objects', desired_response, expected_params)
        with self.stubber:
            url = self.client.generate_presigned_url(
                ClientMethod='get_object',
                Params={
                    'Bucket': 'myotherbucket',
                    'Key': 'myotherkey'
                }
            )
            self.assertEqual(
                    url, 'https://s3.amazonaws.com/myotherbucket/myotherkey')
            actual_response = self.client.list_objects(**expected_params)
            self.assertEqual(desired_response, actual_response)
        self.stubber.assert_no_pending_responses()
def ddb_stubber():
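    # Pytest-style fixture: stub the DynamoDB client behind ItemsModel's
    # table, hand the stubber to the test so it can queue responses, then
    # restore normal client behaviour once the test finishes.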
    ddb_stubber = Stubber(ItemsModel.get_table().meta.client)
    ddb_stubber.activate()
    yield ddb_stubber
    ddb_stubber.deactivate()
def test_event_good(mocker):
    #--------------------------
    # Test data
    #
    event = utils.load_test_data(test_data + 'cis43.json', my_region)

    sns_message = {
        'Note':
        '"Remove all rules from the default security group" remediation was successful',
        'State': 'RESOLVED',
        'Account': '111111111111',
        'Remediation': 'Remove all rules from the default security group',
        'AffectedObject': 'Security Group: sg-02cfbecbc814a3c24',
        'metrics_data': mocker.ANY
    }

    desc_sg = {
        "SecurityGroups": [{
            "Description":
            "Default SG",
            "GroupName":
            "SC-111111111111-pp-gz465ubujkfrs-SandboxSecurityGroup-175ZDF23V5MGX",
            "IpPermissions": [{
                "FromPort": 80,
                "IpProtocol": "tcp",
                "IpRanges": [{
                    "CidrIp": "0.0.0.0/0"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "ToPort": 80,
                "UserIdGroupPairs": []
            }, {
                "FromPort": 9000,
                "IpProtocol": "tcp",
                "IpRanges": [{
                    "CidrIp": "72.21.198.65/32"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "ToPort": 9000,
                "UserIdGroupPairs": []
            }, {
                "FromPort": 22,
                "IpProtocol": "tcp",
                "IpRanges": [{
                    "CidrIp": "0.0.0.0/0"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "ToPort": 22,
                "UserIdGroupPairs": []
            }],
            "OwnerId":
            "123412341234",
            "GroupId":
            "sg-006bf520b9581b2d9",
            "IpPermissionsEgress": [{
                "IpProtocol": "-1",
                "IpRanges": [{
                    "CidrIp": "0.0.0.0/0"
                }],
                "Ipv6Ranges": [],
                "PrefixListIds": [],
                "UserIdGroupPairs": []
            }],
            "Tags": [],
            "VpcId":
            "vpc-11111113"
        }]
    }

    post_metrics_expected_parms = {
        'Solution': 'SO0111',
        'UUID': '12345678-1234-1234-1234-123412341234',
        'TimeStamp': mocker.ANY,
        'Data': {
            'generator_id':
            'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/4.3',
            'type':
            '4.3 Ensure the default security group of every VPC restricts all traffic',
            'productArn': mocker.ANY,
            'finding_triggered_by': 'Security Hub Findings - Custom Action',
            'region': mocker.ANY,
            'status': 'RESOLVED'
        },
        'Version': 'v1.2.0TEST'
    }

    ssmc = boto3.client('ssm', region_name=my_region)
    ssmc_s = Stubber(ssmc)
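    # The metrics code reads the SSM parameters more than once, so queue one
    # stubbed get_parameter response for each expected call, in order.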
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.activate()
    mocker.patch('lib.metrics.Metrics.connect_to_ssm', return_value=ssmc)
    post_metrics = mocker.patch('lib.metrics.Metrics.post_metrics_to_api',
                                return_value=None)

    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)
    mocker.patch('lib.awsapi_helpers.AWSClient.connect', return_value=None)

    # Mock the boto client and replace the BotoSession client with our stub
    awsc = boto3.resource('ec2')
    awsc_s = Stubber(awsc.meta.client)
    awsc_s.add_response('describe_security_groups', desc_sg)
    awsc_s.add_response('revoke_security_group_ingress', {})
    awsc_s.add_response('revoke_security_group_egress', {})
    awsc_s.activate()
    mocker.patch('lib.awsapi_helpers.BotoSession.resource', return_value=awsc)

    sns = mocker.patch('lib.awsapi_helpers.AWSClient.postit',
                       return_value=None)

    # Mock Notifier
    init = mocker.patch('lib.sechub_findings.Finding.flag')
    resolve = mocker.patch('lib.sechub_findings.Finding.resolve')

    # Prevent flushing to logs
    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)

    #--------------------------
    # Run the lambda
    #
    cis43.lambda_handler(event, None)
    init.assert_called_once_with(
        'INITIAL: "Remove all rules from the default security group" remediation started'
    )
    resolve.assert_called_once_with(
        'RESOLVED: "Remove all rules from the default security group" remediation was successful'
    )
    sns.assert_called_with('SO0111-SHARR_Topic', sns_message, my_region)
    post_metrics.assert_called_with(post_metrics_expected_parms)
Ejemplo n.º 44
0
class PackagePromoteTestBase(unittest.TestCase):
    credentials = {
        'aws_access_key_id': 'test_aws_access_key_id',
        'aws_secret_access_key': 'test_aws_secret_access_key',
        'aws_session_token': 'test_aws_session_token',
    }
    handler = staticmethod(t4_lambda_pkgpush.promote_package)
    parent_bucket = 'parent-bucket'
    src_registry = f's3://{parent_bucket}'
    parent_pkg_name = 'parent/pkg-name'
    parent_commit_message = 'parent commit message'
    dst_bucket = 'dest-bucket'
    dst_registry = f's3://{dst_bucket}'
    dst_pkg_name = 'dest/pkg-name'
    dst_pkg_loc_params = {
        'registry': dst_registry,
        'name': dst_pkg_name,
    }
    mock_timestamp = 1600298935.9767091
    mock_timestamp_pointer_name = '1600298935'
    file_size = 1
    files_number = 2

    @classmethod
    def get_file_data(cls, pk: PhysicalKey):
        return hashlib.sha256(str(pk).encode()).digest()[:1] * cls.file_size

    @classmethod
    def get_file_hash(cls, pk: PhysicalKey):
        return hashlib.sha256(cls.get_file_data(pk)).hexdigest()

    @classmethod
    def get_file_meta(cls, pk: PhysicalKey):
        return {f'meta-{pk}': f'value-{pk}'}

    @classmethod
    def get_pkg_entry(cls, path):
        pk = PhysicalKey.from_url(
            f's3://{cls.parent_bucket}/{path}?versionId=obj{path}Version')
        return PackageEntry(
            pk,
            cls.file_size,
            {
                'type': 'SHA256',
                'value': cls.get_file_hash(pk)
            },
            cls.get_file_meta(pk),
        )

    @classmethod
    def prepare_prefix_pkg_entries(cls, prefix, files_range, lk_prefix=''):
        return {
            lk_prefix + str(x): cls.get_pkg_entry(f'{prefix}{x}')
            for x in files_range
        }

    @classmethod
    def get_pkg_entries(cls):
        return cls.prepare_prefix_pkg_entries('test/pkg/',
                                              range(cls.files_number))

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        pkg = Package()
        pkg._set_commit_message(cls.parent_commit_message)
        pkg._workflow = {
            'config':
            f's3://{cls.parent_bucket}/.quilt/workflows/config.yml?versionId=configVersion',
            'id': 'gamma',
            'schemas': {
                'top-secret':
                f's3://{cls.parent_bucket}/top-secret.schema.json?versionId=schemaVersion'
            },
        }
        pkg.set_meta({'meta': 'old meta'})
        cls.entries = cls.get_pkg_entries()
        for lk, entry in cls.entries.items():
            pkg.set(lk, entry)
        manifest_buf = io.BytesIO()
        pkg._dump(manifest_buf)
        cls.parent_manifest = manifest_buf.getvalue()
        cls.parent_top_hash = pkg.top_hash
        cls.src_params = {
            'parent': {
                'registry': cls.src_registry,
                'name': cls.parent_pkg_name,
                'top_hash': cls.parent_top_hash,
            },
        }

    @staticmethod
    def make_lambda_s3_stubber():
        return Stubber(t4_lambda_pkgpush.s3)

    def setUp(self):
        super().setUp()
        self.headers = {
            'content-type': 'application/json',
        }
        self.s3_stubber = Stubber(boto3.client('s3'))
        self.s3_stubber.activate()
        self.addCleanup(self.s3_stubber.deactivate)
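        # All S3 traffic in these tests goes through this single stubbed
        # client: quilt3's data-transfer client and the user session below
        # are both patched to return it.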

        s3_client_patcher = mock.patch(
            'quilt3.data_transfer.S3ClientProvider.find_correct_client',
            lambda *args, **kwargs: self.s3_stubber.client,
        )
        s3_client_patcher.start()
        self.addCleanup(s3_client_patcher.stop)

        user_session_mock = mock.NonCallableMagicMock(
            spec_set=boto3.session.Session)
        user_session_mock.client.return_value = self.s3_stubber.client
        get_user_boto_session_patcher = mock.patch(
            't4_lambda_pkgpush.get_user_boto_session',
            return_value=user_session_mock,
        )
        self.get_user_boto_session_mock = get_user_boto_session_patcher.start()
        self.addCleanup(get_user_boto_session_patcher.stop)

        def calculate_pkg_hashes_side_effect(s3_client, pkg):
            for lk, entry in pkg.walk():
                if entry.hash is None:
                    entry.hash = {
                        'type': 'SHA256',
                        'value': self.get_file_hash(entry.physical_key),
                    }

        calculate_pkg_hashes_patcher = mock.patch.object(
            t4_lambda_pkgpush,
            'calculate_pkg_hashes',
            side_effect=calculate_pkg_hashes_side_effect,
        )
        calculate_pkg_hashes_patcher.start()
        self.addCleanup(calculate_pkg_hashes_patcher.stop)

    @contextlib.contextmanager
    def mock_successors(self, successors):
        workflow_config_mock = mock.MagicMock()
        workflow_config_mock.config = {
            'successors': successors,
        }
        src_registry = get_package_registry(self.src_registry)

        def side_effect(registry_url):
            if registry_url == self.src_registry:
                return src_registry
            return mock.DEFAULT

        with mock.patch.object(src_registry, 'get_workflow_config', return_value=workflow_config_mock), \
             mock.patch('t4_lambda_pkgpush.get_package_registry', side_effect=side_effect, wraps=get_package_registry):
            yield

    def make_request_wrapper(self, params, *, credentials, **kwargs):
        return {
            "params": params,
            "credentials": credentials,
        }

    def make_request_base(self, data, *, credentials, **kwargs):
        result = self.handler(
            self.make_request_wrapper(data, credentials=credentials, **kwargs),
            None,
        )

        # Check that result can be serialized to JSON.
        json.dumps(result)

        return result

    @mock.patch('time.time', mock.MagicMock(return_value=mock_timestamp))
    def make_request(self, params, **kwargs):
        self.get_user_boto_session_mock.reset_mock()
        with mock.patch('quilt3.telemetry.reset_session_id') as reset_session_id_mock, \
             calculate_sha256_patcher(return_value=[]) as calculate_sha256_mock:
            response = self.make_request_base(params,
                                              credentials=self.credentials,
                                              **kwargs)

        self.get_user_boto_session_mock.assert_called_once_with(
            **self.credentials, )
        reset_session_id_mock.assert_called_once_with()

        if calculate_sha256_mock.called:
            calculate_sha256_mock.assert_called_once_with([], [])

        return response

    def setup_s3_load_pkg_source(self):
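        # Queue the head_object/get_object pair the promote handler uses to
        # read the parent package manifest from the source registry.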
        self.s3_stubber.add_response(
            'head_object',
            service_response={
                'VersionId': 'parentManifestVersion',
                'ContentLength': len(self.parent_manifest),
            },
            expected_params={
                'Bucket': self.parent_bucket,
                'Key': f'.quilt/packages/{self.parent_top_hash}',
            },
        )
        self.s3_stubber.add_response(
            'get_object',
            service_response={
                'VersionId': 'manifestVersion',
                'ContentLength': len(self.parent_manifest),
                'Body': io.BytesIO(self.parent_manifest),
            },
            expected_params={
                'Bucket': self.parent_bucket,
                'Key': f'.quilt/packages/{self.parent_top_hash}',
                'VersionId': 'parentManifestVersion',
            },
        )

    def setup_s3(self, expected_pkg, *, copy_data):
        manifest = io.BytesIO()
        expected_pkg.dump(manifest)
        top_hash = expected_pkg.top_hash

        self.setup_s3_load_pkg_source()

        if copy_data:
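            # One copy_object per package entry, queued in walk order so the
            # expected params line up with the promoted destination keys.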
            for src, (lk, dst) in zip(self.entries.values(),
                                      expected_pkg.walk()):
                self.s3_stubber.add_response(
                    method='copy_object',
                    service_response={
                        'VersionId': 'dst_' + src.physical_key.version_id,
                    },
                    expected_params={
                        'CopySource': {
                            'Bucket': src.physical_key.bucket,
                            'Key': src.physical_key.path,
                            'VersionId': src.physical_key.version_id,
                        },
                        'Bucket': self.dst_bucket,
                        'Key': f'{self.dst_pkg_name}/{lk}',
                    })

        # Push new manifest.
        self.s3_stubber.add_response(
            'put_object',
            service_response={},
            expected_params={
                'Bucket': self.dst_bucket,
                'Key': f'.quilt/packages/{top_hash}',
                'Body': manifest.getvalue(),
            },
        )
        self.s3_stubber.add_response(
            'put_object',
            service_response={},
            expected_params={
                'Body':
                top_hash.encode(),
                'Bucket':
                self.dst_bucket,
                'Key':
                f'.quilt/named_packages/{self.dst_pkg_name}/{str(int(self.mock_timestamp))}'
            })
        self.s3_stubber.add_response(
            'put_object',
            service_response={
                'ResponseMetadata': {
                    'RequestId': 'foo'
                },
            },
            expected_params={
                'Body': top_hash.encode(),
                'Bucket': self.dst_bucket,
                'Key': f'.quilt/named_packages/{self.dst_pkg_name}/latest'
            })

    def prepare_pkg(self, *, copy_data):
        expected_pkg = Package()
        pkg_entries = self.entries.items()
        if copy_data:
            pkg_entries = [(
                lk,
                e.with_physical_key(
                    PhysicalKey(self.dst_bucket, f'{self.dst_pkg_name}/{lk}',
                                'dst_' + e.physical_key.version_id)),
            ) for lk, e in pkg_entries]
        for lk, entry in pkg_entries:
            expected_pkg.set(lk, entry)
        expected_pkg._set_commit_message(None)
        return expected_pkg
def test_not_remediated(mocker):
    #--------------------------
    # Test data
    #
    event = utils.load_test_data(test_data + 'cis29.json', my_region)
    post_metrics_expected_parms = {
        'Solution': 'SO0111',
        'UUID': '12345678-1234-1234-1234-123412341234',
        'TimeStamp': mocker.ANY,
        'Data': {
            'generator_id':
            'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/2.9',
            'type': '2.9 Ensure VPC flow logging is enabled in all VPCs',
            'productArn': mocker.ANY,
            'finding_triggered_by': 'Security Hub Findings - Custom Action',
            'region': mocker.ANY,
            'status': 'FAILED'
        },
        'Version': 'v1.2.0TEST'
    }
    os.environ['AWS_SESSION_TOKEN'] = 'FAKETOKEN'
    os.environ['FLOW_LOG_ROLE_ARN'] = 'FAKELOGROLEARN'

    ssmc = boto3.client('ssm', region_name=my_region)
    ssmc_s = Stubber(ssmc)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)

    ssmc_s.activate()
    mocker.patch('lib.metrics.Metrics.connect_to_ssm', return_value=ssmc)
    post_metrics = mocker.patch('lib.metrics.Metrics.post_metrics_to_api',
                                return_value=None)
    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)
    mocker.patch('lib.awsapi_helpers.AWSClient.connect', return_value=None)

    awsc = [boto3.client('logs'), boto3.client('ec2')]

    def mock_select(session, service):
        # Stands in for BotoSession.client: return the stubbed logs client
        # or the stubbed EC2 client depending on the requested service.
        if service == 'logs':
            return awsc[0]
        else:
            return awsc[1]

    awsc_s = Stubber(awsc[0])
    awsc_s.add_response('create_log_group', {})
    awsc_s.activate()

    aws2c_s = Stubber(awsc[1])
    aws2c_s.add_response('create_flow_logs', {})
    aws2c_s.add_response('describe_flow_logs', {'FlowLogs': []})
    aws2c_s.activate()

    # redirect to mock_select above to return the proper stub
    mocker.patch('lib.awsapi_helpers.BotoSession.client', new=mock_select)

    # Mock notifications
    init = mocker.patch('lib.sechub_findings.Finding.flag')
    resolve = mocker.patch('lib.sechub_findings.Finding.resolve')
    update = mocker.patch('lib.sechub_findings.Finding.update_text')

    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)

    #--------------------------
    # Run the lambda
    #
    cis29.lambda_handler(event, None)

    init.assert_called_once_with(
        'INITIAL: "Enable VPC flow logging in all VPCs" remediation started')
    update.assert_called_once_with(
        'FAILED: "Enable VPC flow logging in all VPCs" remediation failed. Please remediate manually',
        status='FAILED')
    resolve.assert_not_called()
    post_metrics.assert_called_with(post_metrics_expected_parms)
class TestBatchWatcherDaemon(UploadTestCaseUsingMockAWS):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def setUp(self):
        super().setUp()
        self.environment = {
            'API_KEY': 'test'
        }
        self.environmentor = EnvironmentSetup(self.environment)
        self.environmentor.enter()

        self.batch_watcher = BatchWatcher()
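        # Wrap the watcher's batch, EC2 and Lambda clients in stubbers;
        # each test queues responses on, and activates, only the stubs it needs.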
        self.mock_batch_client = Stubber(self.batch_watcher.batch_client)
        self.mock_ec2_client = Stubber(self.batch_watcher.ec2_client)
        self.mock_lambda_client = Stubber(self.batch_watcher.lambda_client)

    def tearDown(self):
        self.environmentor.exit()
        super().tearDown()

    @patch('upload.lambdas.batch_watcher.batch_watcher.UploadDB.run_query')
    def test_find_incomplete_batch_jobs(self, mock_run_query):
        mock_run_query.return_value = QueryResult()
        csum_jobs, val_jobs = self.batch_watcher.find_incomplete_batch_jobs()
        self.assertEqual(csum_jobs, [{"id": "123", "job_id": "123", "file_id": "test/test"}])
        self.assertEqual(val_jobs, [{"id": "123", "job_id": "123", "file_id": "test/test"}])

    def test_find_and_kill_deployment_batch_instances(self):
        describe_params = {
            "Filters": [
                {"Name": 'key-name', "Values": [f"hca-upload-{self.deployment_stage}"]},
                {"Name": 'instance-state-name', "Values": ["running"]}
            ]
        }
        describe_output = {
            "Reservations": [{
                "Instances": [
                    {
                        "InstanceId": "instance_one"
                    },
                    {
                        "InstanceId": "instance_two"
                    }
                ]
            }]
        }
        instance_ids = ["instance_one", "instance_two"]
        terminate_params = {
            "InstanceIds": instance_ids
        }
        self.mock_ec2_client.add_response("describe_instances", describe_output, describe_params)
        self.mock_ec2_client.add_response("terminate_instances", {}, terminate_params)
        self.mock_ec2_client.activate()
        killed_instance_ids = self.batch_watcher.find_and_kill_deployment_batch_instances()
        self.assertEqual(killed_instance_ids, instance_ids)

    @patch('upload.lambdas.batch_watcher.batch_watcher.UploadDB.run_query_with_params')
    @patch('upload.lambdas.batch_watcher.batch_watcher.BatchWatcher.schedule_validation_job')
    def test_schedule_job_with_validation(self, mock_schedule_validation_job, mock_run_query):
        row = {
            "id": "123",
            "file_id": "test_area/test_id",
            "job_id": "124",
            "docker_image": "test_docker_image",
            "original_validation_id": "567"
        }
        self.batch_watcher.schedule_job(row, "validation")
        mock_schedule_validation_job.assert_called_with("test_area", "test_id", "test_docker_image", "567")

        row = {
            "id": "123",
            "file_id": "test_area/test_id",
            "job_id": "124",
            "docker_image": "test_docker_image",
            "original_validation_id": None
        }
        self.batch_watcher.schedule_job(row, "validation")
        mock_schedule_validation_job.assert_called_with("test_area", "test_id", "test_docker_image", "123")

    @patch('upload.lambdas.batch_watcher.batch_watcher.UploadDB.run_query_with_params')
    @patch('upload.lambdas.batch_watcher.batch_watcher.BatchWatcher.invoke_checksum_lambda')
    def test_schedule_job_with_checksum(self, mock_invoke_csum_lambda, mock_run_query):
        row = {
            "id": "123",
            "file_id": "test_area/test_id",
            "job_id": "124"
        }
        self.batch_watcher.schedule_job(row, "checksum")
        mock_invoke_csum_lambda.assert_called_with("test_area/test_id")

    def test_invoke_checksum_lambda(self):
        payload = {
            'Records': [{
                'eventName': 'ObjectCreated:Put',
                "s3": {
                    "bucket": {
                        "name": f"org-humancellatlas-upload-{self.deployment_stage}"
                    },
                    "object": {
                        "key": "test_area/test_file_id"
                    }
                }
            }]
        }
        lambda_params = {
            "FunctionName": f"dcp-upload-csum-{self.deployment_stage}",
            "InvocationType": "Event",
            "Payload": json.dumps(payload).encode()
        }
        self.mock_lambda_client.add_response('invoke', {}, lambda_params)
        self.mock_lambda_client.activate()
        self.batch_watcher.invoke_checksum_lambda("test_area/test_file_id")
        self.mock_lambda_client.deactivate()

    def test_should_instances_be_killed_true(self):
        test_one_rows = [
            {
                "id": "123",
                "file_id": "test/test",
                "job_id": "124"
            },
            {
                "id": "234",
                "file_id": "test/test",
                "job_id": "235"
            }
        ]
        output_one = {
            "jobs": [{
                "status": "FAILED",
                "jobName": "test",
                "jobId": "test",
                "jobQueue": "test",
                "startedAt": 1234,
                "jobDefinition": "test"
            }]
        }
        output_two = {
            "jobs": [{
                "status": "SUCCEEDED",
                "jobName": "test",
                "jobId": "test",
                "jobQueue": "test",
                "startedAt": 1234,
                "jobDefinition": "test"
            }]
        }
        self.mock_batch_client.add_response('describe_jobs', output_one, {"jobs": ["124"]})
        self.mock_batch_client.add_response('describe_jobs', output_two, {"jobs": ["235"]})
        self.mock_batch_client.activate()

        kill_instances = self.batch_watcher.should_instances_be_killed(test_one_rows)
        self.assertEqual(kill_instances, True)
        self.mock_batch_client.deactivate()

    def test_should_instances_be_killed_false(self):
        test_rows = [
            {
                "id": "345",
                "file_id": "test/test",
                "job_id": "346"
            },
            {
                "id": "456",
                "file_id": "test/test",
                "job_id": "457"
            }
        ]
        output_two = {
            "jobs": [{
                "status": "SUCCEEDED",
                "jobName": "test",
                "jobId": "test",
                "jobQueue": "test",
                "startedAt": 1234,
                "jobDefinition": "test"
            }]
        }
        self.mock_batch_client.add_response('describe_jobs', output_two, {"jobs": ["346"]})
        self.mock_batch_client.add_response('describe_jobs', output_two, {"jobs": ["457"]})
        self.mock_batch_client.activate()
        kill_instances = self.batch_watcher.should_instances_be_killed(test_rows)
        self.assertEqual(kill_instances, False)
        self.mock_batch_client.deactivate()

    def test_get_job_status(self):
        output_two = {
            "jobs": [{
                "status": "SUCCEEDED",
                "jobName": "test",
                "jobId": "test",
                "jobQueue": "test",
                "startedAt": 1234,
                "jobDefinition": "test"
            }]
        }
        self.mock_batch_client.add_response('describe_jobs', output_two, {"jobs": ["346"]})
        self.mock_batch_client.activate()
        status = self.batch_watcher._get_job_status("346")
        self.assertEqual(status, "SUCCEEDED")
        self.mock_batch_client.deactivate()
def test_event_good(mocker):
    #--------------------------
    # Test data
    #
    event = utils.load_test_data(test_data + 'cis29.json', my_region)

    sns_message = {
        'Note':
        '"Enable VPC flow logging in all VPCs" remediation was successful',
        'State': 'RESOLVED',
        'Account': '111111111111',
        'Remediation': 'Enable VPC flow logging in all VPCs',
        'AffectedObject': 'VPC Flow Logs for VPC: vpc-d1a07fba',
        'metrics_data': mocker.ANY
    }
    os.environ['AWS_SESSION_TOKEN'] = 'FAKETOKEN'
    os.environ['FLOW_LOG_ROLE_ARN'] = 'FAKELOGROLEARN'

    post_metrics_expected_parms = {
        'Solution': 'SO0111',
        'UUID': '12345678-1234-1234-1234-123412341234',
        'TimeStamp': mocker.ANY,
        'Data': {
            'generator_id':
            'arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/2.9',
            'type': '2.9 Ensure VPC flow logging is enabled in all VPCs',
            'productArn': mocker.ANY,
            'finding_triggered_by': 'Security Hub Findings - Custom Action',
            'region': mocker.ANY,
            'status': 'RESOLVED'
        },
        'Version': 'v1.2.0TEST'
    }

    ssmc = boto3.client('ssm', region_name=my_region)
    ssmc_s = Stubber(ssmc)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_uuid)
    ssmc_s.add_response('get_parameter', mock_ssm_get_parameter_version)
    ssmc_s.activate()
    mocker.patch('lib.metrics.Metrics.connect_to_ssm', return_value=ssmc)
    post_metrics = mocker.patch('lib.metrics.Metrics.post_metrics_to_api',
                                return_value=None)

    # Mock the constructor. We don't need the session created
    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)
    mocker.patch('lib.awsapi_helpers.AWSClient.connect', return_value=None)

    awsc = [boto3.client('logs'), boto3.client('ec2')]

    def mock_select(session, service):
        # Stands in for BotoSession.client: return the stubbed logs client
        # or the stubbed EC2 client depending on the requested service.
        if service == 'logs':
            return awsc[0]
        else:
            return awsc[1]

    # Mock the boto clients and replace the BotoSession client with our stub
    awsc_s = Stubber(awsc[0])
    awsc_s.add_response('create_log_group', {})
    awsc_s.activate()

    aws2c_s = Stubber(awsc[1])
    aws2c_s.add_response('create_flow_logs', {})
    aws2c_s.add_response('describe_flow_logs',
                         {'FlowLogs': [{
                             'FlowLogStatus': 'ACTIVE'
                         }]})
    aws2c_s.activate()

    sns = mocker.patch('lib.awsapi_helpers.AWSClient.postit',
                       return_value=None)

    # redirect to mock_select above to return the proper stub
    mocker.patch('lib.awsapi_helpers.BotoSession.client', new=mock_select)

    # Mock notifications
    init = mocker.patch('lib.sechub_findings.Finding.flag')
    resolve = mocker.patch('lib.sechub_findings.Finding.resolve')

    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)

    #--------------------------
    # Run the lambda
    #
    cis29.lambda_handler(event, None)
    init.assert_called_once_with(
        'INITIAL: "Enable VPC flow logging in all VPCs" remediation started')
    resolve.assert_called_once_with(
        'RESOLVED: "Enable VPC flow logging in all VPCs" remediation was successful'
    )
    sns.assert_called_with('SO0111-SHARR_Topic', sns_message, my_region)
    post_metrics.assert_called_with(post_metrics_expected_parms)
Ejemplo n.º 48
0
class TestStubber(unittest.TestCase):
    def setUp(self):
        session = botocore.session.get_session()
        config = botocore.client.Config(signature_version=botocore.UNSIGNED)
        self.client = session.create_client('s3', config=config)

        self.stubber = Stubber(self.client)

    def test_stubber_returns_response(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        self.stubber.add_response('list_objects', service_response)
        self.stubber.activate()
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_activated_stubber_errors_with_no_registered_stubs(self):
        self.stubber.activate()
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

    def test_stubber_errors_when_stubs_are_used_up(self):
        self.stubber.add_response('list_objects', {})
        self.stubber.activate()
        self.client.list_objects(Bucket='foo')

        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

    def test_client_error_response(self):
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)
        self.stubber.activate()

        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_success(self):
        service_response = {}
        expected_params = {'Bucket': 'foo'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This should be called successfully with no errors being thrown
        # for mismatching expected params.
        response = self.client.list_objects(Bucket='foo')
        self.assertEqual(response, service_response)

    def test_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)
        self.stubber.activate()
        # This call should raise an error for mismatching expected params.
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

    def test_expected_params_mixed_with_errors_responses(self):
        # Add an error response
        error_code = "AccessDenied"
        error_message = "Access Denied"
        self.stubber.add_client_error(
            'list_objects', error_code, error_message)

        # Add a response with incorrect expected params
        service_response = {}
        expected_params = {'Bucket': 'bar'}
        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()

        # The first call should throw an error as expected.
        with self.assertRaises(ClientError):
            self.client.list_objects(Bucket='foo')

        # The second call should throw an error for unexpected parameters
        with self.assertRaisesRegexp(StubResponseError, 'Expected parameters'):
            self.client.list_objects(Bucket='foo')

    def test_can_continue_to_call_after_expected_params_fail(self):
        service_response = {}
        expected_params = {'Bucket': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for unexpected parameters
        with self.assertRaises(StubResponseError):
            self.client.list_objects(Bucket='foo')

        # The stubber should still have the responses queued up
        # even though the original parameters did not match the expected ones.
        self.client.list_objects(Bucket='bar')
        self.stubber.assert_no_pending_responses()

    def test_still_relies_on_param_validation_with_expected_params(self):
        service_response = {}
        expected_params = {'Buck': 'bar'}

        self.stubber.add_response(
            'list_objects', service_response, expected_params)

        self.stubber.activate()
        # Throw an error for invalid parameters
        with self.assertRaises(ParamValidationError):
            self.client.list_objects(Buck='bar')
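A minimal, self-contained sketch of the same pattern with the stubber used as a context manager, so deactivation happens automatically on exit; the bucket name and the empty listing below are placeholders, not values taken from the tests above.

import botocore.session
from botocore.stub import Stubber

session = botocore.session.get_session()
client = session.create_client(
    's3', region_name='us-east-1',
    aws_access_key_id='foo', aws_secret_access_key='bar')
stubber = Stubber(client)
stubber.add_response(
    'list_objects', {'Contents': []}, {'Bucket': 'example-bucket'})
with stubber:
    # The stub is only active inside this block; expected params are still
    # checked against the actual call.
    response = client.list_objects(Bucket='example-bucket')
stubber.assert_no_pending_responses()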
Ejemplo n.º 49
0

@pytest.mark.parametrize("case", ALIAS_CASES)
def test_can_use_original_name(case):
    session = botocore.session.get_session()
    _can_use_parameter_in_client_call(session, case, False)


def _can_use_parameter_in_client_call(session, case, use_alias=True):
    client = session.create_client(case['service'],
                                   region_name='us-east-1',
                                   aws_access_key_id='foo',
                                   aws_secret_access_key='bar')

    stubber = Stubber(client)
    stubber.activate()
    operation = case['operation']
    params = case.get('extra_args', {})
    params = params.copy()
    param_name = case['original_name']
    if use_alias:
        param_name = case['new_name']
    params[param_name] = case['parameter_value']
    stubbed_response = case.get('stubbed_response', {})
    stubber.add_response(operation, stubbed_response)
    try:
        getattr(client, operation)(**params)
    except ParamValidationError as e:
        raise AssertionError(
            'Expecting %s to be valid parameter for %s.%s but received '
            '%s.' % (case['new_name'], case['service'], case['operation'], e))
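For reference, ALIAS_CASES is defined elsewhere in the original test module; each entry is a dict carrying the keys read by the helper above. A hypothetical entry might look like the following (the service, operation and values are illustrative assumptions only):

EXAMPLE_ALIAS_CASE = {
    'service': 'ec2',
    'operation': 'describe_flow_logs',
    'original_name': 'Filter',    # name accepted when use_alias is False
    'new_name': 'Filters',        # aliased name exercised by default
    'parameter_value': [{'Name': 'resource-id', 'Values': ['vpc-12345678']}],
    # 'extra_args' and 'stubbed_response' are optional and default to {}.
}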
Ejemplo n.º 50
0
class TestGuardDutyMetricsCollector(unittest.TestCase):
    def setUp(self):
        self.gdClient = boto3.client("guardduty")
        self.gdStubber = Stubber(self.gdClient)
        self.gdStubber.activate()

        self.botoSessionMock = MagicMock()
        self.botoSessionMock.client.return_value = self.gdClient
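        # The collector builds a regional session via boto3.session.Session;
        # the tests patch that constructor to return this mock, so every
        # client it hands out is the stubbed GuardDuty client above.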

    def testCollectShouldReturnCurrentFindingsMetricFromSingleRegionWithSingleDetectorOnSuccess(
            self):
        # Mock GuardDuty
        self.gdStubber.add_response("list_detectors",
                                    {"DetectorIds": ["eu-detector-1"]}, {})

        self.gdStubber.add_response(
            "get_findings_statistics", {
                "FindingStatistics": {
                    "CountBySeverity": {
                        "2.0": 1,
                        "4.0": 2,
                        "7.0": 3,
                    }
                }
            }, {
                "DetectorId": "eu-detector-1",
                "FindingCriteria": {
                    "Criterion": {
                        "service.archived": {
                            "Eq": ["false"]
                        }
                    }
                },
                "FindingStatisticTypes": ["COUNT_BY_SEVERITY"]
            })

        # Collect metrics
        with patch("boto3.session.Session", return_value=self.botoSessionMock):
            collector = GuardDutyMetricsCollector(regions=["eu-west-1"])
            metrics = collector.collect()

        findingsMetric = metrics[0]
        self.assertEqual(findingsMetric.name, "aws_guardduty_current_findings")
        self.assertEqual(findingsMetric.type, "gauge")
        self.assertEqual(len(findingsMetric.samples), 3)

        self.assertEqual(findingsMetric.samples[0].value, 1)
        self.assertEqual(findingsMetric.samples[0].labels, {
            "region": "eu-west-1",
            "severity": "low"
        })
        self.assertEqual(findingsMetric.samples[1].value, 2)
        self.assertEqual(findingsMetric.samples[1].labels, {
            "region": "eu-west-1",
            "severity": "medium"
        })
        self.assertEqual(findingsMetric.samples[2].value, 3)
        self.assertEqual(findingsMetric.samples[2].labels, {
            "region": "eu-west-1",
            "severity": "high"
        })

        scrapeErrorsMetric = metrics[1]
        self.assertEqual(scrapeErrorsMetric.name,
                         "aws_guardduty_scrape_errors")
        self.assertEqual(scrapeErrorsMetric.type, "counter")
        self.assertEqual(len(scrapeErrorsMetric.samples), 1)

        self.assertEqual(scrapeErrorsMetric.samples[0].value, 0)
        self.assertEqual(scrapeErrorsMetric.samples[0].labels,
                         {"region": "eu-west-1"})

        self.gdStubber.assert_no_pending_responses()

    def testCollectShouldReturnCurrentFindingsMetricFromSingleRegionWithMultipleDetectorsOnSuccess(
            self):
        # Mock GuardDuty
        self.gdStubber.add_response(
            "list_detectors",
            {"DetectorIds": ["eu-detector-1", "eu-detector-2"]}, {})

        self.gdStubber.add_response(
            "get_findings_statistics", {
                "FindingStatistics": {
                    "CountBySeverity": {
                        "2.0": 1,
                        "4.0": 2,
                        "7.0": 3,
                    }
                }
            }, {
                "DetectorId": "eu-detector-1",
                "FindingCriteria": {
                    "Criterion": {
                        "service.archived": {
                            "Eq": ["false"]
                        }
                    }
                },
                "FindingStatisticTypes": ["COUNT_BY_SEVERITY"]
            })

        self.gdStubber.add_response(
            "get_findings_statistics", {
                "FindingStatistics": {
                    "CountBySeverity": {
                        "2.5": 4,
                        "5.2": 5,
                        "8.1": 6,
                    }
                }
            }, {
                "DetectorId": "eu-detector-2",
                "FindingCriteria": {
                    "Criterion": {
                        "service.archived": {
                            "Eq": ["false"]
                        }
                    }
                },
                "FindingStatisticTypes": ["COUNT_BY_SEVERITY"]
            })

        # Collect metrics
        with patch("boto3.session.Session", return_value=self.botoSessionMock):
            collector = GuardDutyMetricsCollector(regions=["eu-west-1"])
            metrics = collector.collect()

        findingsMetric = metrics[0]
        self.assertEqual(findingsMetric.name, "aws_guardduty_current_findings")
        self.assertEqual(findingsMetric.type, "gauge")
        self.assertEqual(len(findingsMetric.samples), 3)

        self.assertEqual(findingsMetric.samples[0].value, 5)
        self.assertEqual(findingsMetric.samples[0].labels, {
            "region": "eu-west-1",
            "severity": "low"
        })
        self.assertEqual(findingsMetric.samples[1].value, 7)
        self.assertEqual(findingsMetric.samples[1].labels, {
            "region": "eu-west-1",
            "severity": "medium"
        })
        self.assertEqual(findingsMetric.samples[2].value, 9)
        self.assertEqual(findingsMetric.samples[2].labels, {
            "region": "eu-west-1",
            "severity": "high"
        })

        scrapeErrorsMetric = metrics[1]
        self.assertEqual(scrapeErrorsMetric.name,
                         "aws_guardduty_scrape_errors")
        self.assertEqual(scrapeErrorsMetric.type, "counter")
        self.assertEqual(len(scrapeErrorsMetric.samples), 1)

        self.assertEqual(scrapeErrorsMetric.samples[0].value, 0)
        self.assertEqual(scrapeErrorsMetric.samples[0].labels,
                         {"region": "eu-west-1"})

        self.gdStubber.assert_no_pending_responses()

    def testCollectShouldReturnCurrentFindingsMetricFromMultipleRegionsOnSuccess(
            self):
        # Mock GuardDuty
        self.gdStubber.add_response("list_detectors",
                                    {"DetectorIds": ["eu-detector-1"]}, {})

        self.gdStubber.add_response(
            "get_findings_statistics", {
                "FindingStatistics": {
                    "CountBySeverity": {
                        "2.0": 1,
                        "4.0": 2,
                        "7.0": 3,
                    }
                }
            }, {
                "DetectorId": "eu-detector-1",
                "FindingCriteria": {
                    "Criterion": {
                        "service.archived": {
                            "Eq": ["false"]
                        }
                    }
                },
                "FindingStatisticTypes": ["COUNT_BY_SEVERITY"]
            })

        self.gdStubber.add_response("list_detectors",
                                    {"DetectorIds": ["us-detector-1"]}, {})

        self.gdStubber.add_response(
            "get_findings_statistics", {
                "FindingStatistics": {
                    "CountBySeverity": {
                        "2.5": 4,
                        "5.2": 5,
                        "8.1": 6,
                    }
                }
            }, {
                "DetectorId": "us-detector-1",
                "FindingCriteria": {
                    "Criterion": {
                        "service.archived": {
                            "Eq": ["false"]
                        }
                    }
                },
                "FindingStatisticTypes": ["COUNT_BY_SEVERITY"]
            })

        # Collect metrics
        with patch("boto3.session.Session", return_value=self.botoSessionMock):
            collector = GuardDutyMetricsCollector(
                regions=["eu-west-1", "us-east-1"])
            metrics = collector.collect()

        findingsMetric = metrics[0]
        self.assertEqual(findingsMetric.name, "aws_guardduty_current_findings")
        self.assertEqual(findingsMetric.type, "gauge")
        self.assertEqual(len(findingsMetric.samples), 6)

        self.assertEqual(findingsMetric.samples[0].value, 1)
        self.assertEqual(findingsMetric.samples[0].labels, {
            "region": "eu-west-1",
            "severity": "low"
        })
        self.assertEqual(findingsMetric.samples[1].value, 2)
        self.assertEqual(findingsMetric.samples[1].labels, {
            "region": "eu-west-1",
            "severity": "medium"
        })
        self.assertEqual(findingsMetric.samples[2].value, 3)
        self.assertEqual(findingsMetric.samples[2].labels, {
            "region": "eu-west-1",
            "severity": "high"
        })
        self.assertEqual(findingsMetric.samples[3].value, 4)
        self.assertEqual(findingsMetric.samples[3].labels, {
            "region": "us-east-1",
            "severity": "low"
        })
        self.assertEqual(findingsMetric.samples[4].value, 5)
        self.assertEqual(findingsMetric.samples[4].labels, {
            "region": "us-east-1",
            "severity": "medium"
        })
        self.assertEqual(findingsMetric.samples[5].value, 6)
        self.assertEqual(findingsMetric.samples[5].labels, {
            "region": "us-east-1",
            "severity": "high"
        })

        scrapeErrorsMetric = metrics[1]
        self.assertEqual(scrapeErrorsMetric.name,
                         "aws_guardduty_scrape_errors")
        self.assertEqual(scrapeErrorsMetric.type, "counter")
        self.assertEqual(len(scrapeErrorsMetric.samples), 2)

        self.assertEqual(scrapeErrorsMetric.samples[0].value, 0)
        self.assertEqual(scrapeErrorsMetric.samples[0].labels,
                         {"region": "eu-west-1"})
        self.assertEqual(scrapeErrorsMetric.samples[1].value, 0)
        self.assertEqual(scrapeErrorsMetric.samples[1].labels,
                         {"region": "us-east-1"})

        self.gdStubber.assert_no_pending_responses()

    def testCollectShouldSkipCurrentFindingsMetricOnFailingFetchingStatisticsFromARegionOutOfMultipleRegions(
            self):
        # Mock GuardDuty
        self.gdStubber.add_response("list_detectors",
                                    {"DetectorIds": ["eu-detector-1"]}, {})

        self.gdStubber.add_client_error("get_findings_statistics")
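        # The failure above counts against eu-west-1's scrape-errors metric;
        # its findings are skipped while us-east-1 is still collected below.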

        self.gdStubber.add_response("list_detectors",
                                    {"DetectorIds": ["us-detector-1"]}, {})

        self.gdStubber.add_response(
            "get_findings_statistics", {
                "FindingStatistics": {
                    "CountBySeverity": {
                        "2.5": 4,
                        "5.2": 5,
                        "8.1": 6,
                    }
                }
            }, {
                "DetectorId": "us-detector-1",
                "FindingCriteria": {
                    "Criterion": {
                        "service.archived": {
                            "Eq": ["false"]
                        }
                    }
                },
                "FindingStatisticTypes": ["COUNT_BY_SEVERITY"]
            })

        # Collect metrics
        with patch("boto3.session.Session", return_value=self.botoSessionMock):
            collector = GuardDutyMetricsCollector(
                regions=["eu-west-1", "us-east-1"])
            metrics = collector.collect()

        findingsMetric = metrics[0]
        self.assertEqual(findingsMetric.name, "aws_guardduty_current_findings")
        self.assertEqual(findingsMetric.type, "gauge")
        self.assertEqual(len(findingsMetric.samples), 3)

        self.assertEqual(findingsMetric.samples[0].value, 4)
        self.assertEqual(findingsMetric.samples[0].labels, {
            "region": "us-east-1",
            "severity": "low"
        })
        self.assertEqual(findingsMetric.samples[1].value, 5)
        self.assertEqual(findingsMetric.samples[1].labels, {
            "region": "us-east-1",
            "severity": "medium"
        })
        self.assertEqual(findingsMetric.samples[2].value, 6)
        self.assertEqual(findingsMetric.samples[2].labels, {
            "region": "us-east-1",
            "severity": "high"
        })

        scrapeErrorsMetric = metrics[1]
        self.assertEqual(scrapeErrorsMetric.name,
                         "aws_guardduty_scrape_errors")
        self.assertEqual(scrapeErrorsMetric.type, "counter")
        self.assertEqual(len(scrapeErrorsMetric.samples), 2)

        self.assertEqual(scrapeErrorsMetric.samples[0].value, 1)
        self.assertEqual(scrapeErrorsMetric.samples[0].labels,
                         {"region": "eu-west-1"})
        self.assertEqual(scrapeErrorsMetric.samples[1].value, 0)
        self.assertEqual(scrapeErrorsMetric.samples[1].labels,
                         {"region": "us-east-1"})

        self.gdStubber.assert_no_pending_responses()

    def testCollectShouldNeverDecreaseScrapeErrorsOnSubsequentCalls(self):
        # Mock GuardDuty
        self.gdStubber.add_response("list_detectors",
                                    {"DetectorIds": ["us-detector-1"]}, {})

        self.gdStubber.add_client_error("get_findings_statistics")

        # Collect metrics
        with patch("boto3.session.Session", return_value=self.botoSessionMock):
            collector = GuardDutyMetricsCollector(regions=["us-east-1"])
            metrics = collector.collect()

        findingsMetric = metrics[0]
        self.assertEqual(findingsMetric.name, "aws_guardduty_current_findings")
        self.assertEqual(findingsMetric.type, "gauge")
        self.assertEqual(len(findingsMetric.samples), 0)

        scrapeErrorsMetric = metrics[1]
        self.assertEqual(scrapeErrorsMetric.name,
                         "aws_guardduty_scrape_errors")
        self.assertEqual(scrapeErrorsMetric.type, "counter")
        self.assertEqual(len(scrapeErrorsMetric.samples), 1)

        self.assertEqual(scrapeErrorsMetric.samples[0].value, 1)
        self.assertEqual(scrapeErrorsMetric.samples[0].labels,
                         {"region": "us-east-1"})

        with patch("boto3.session.Session", return_value=self.botoSessionMock):
            metrics = collector.collect()

        findingsMetric = metrics[0]
        self.assertEqual(findingsMetric.name, "aws_guardduty_current_findings")
        self.assertEqual(findingsMetric.type, "gauge")
        self.assertEqual(len(findingsMetric.samples), 0)

        scrapeErrorsMetric = metrics[1]
        self.assertEqual(scrapeErrorsMetric.name,
                         "aws_guardduty_scrape_errors")
        self.assertEqual(scrapeErrorsMetric.type, "counter")
        self.assertEqual(len(scrapeErrorsMetric.samples), 1)

        self.assertEqual(scrapeErrorsMetric.samples[0].value, 2)
        self.assertEqual(scrapeErrorsMetric.samples[0].labels,
                         {"region": "us-east-1"})

        self.gdStubber.assert_no_pending_responses()
def test_event_bad(mocker):
    # Read test data
    event = utils.load_test_data(test_data + 'cis_1-3.json', my_region)

    iam_keys = {
        "AccessKeyMetadata": [
            {
                "UserName": "******",
                "AccessKeyId": "AKIAADFHWEREFGFHSDDF",
                "Status": "Active",
                "CreateDate": "2015-05-22T14:43:16+00:00"
            },
            {
                "UserName": "******",
                "AccessKeyId": "AKIAGHJGJFGHJFGETHFG",
                "Status": "Active",
                "CreateDate": "2020-05-15T15:20:04+00:00"
            }
        ]
    }
    # Mock the constructor. We don't need the session created
    mocker.patch('lib.awsapi_helpers.BotoSession.__init__', return_value=None)

    # create client and resource directly through boto3
    iamc = boto3.client('iam', region_name=my_region)
    iamr = boto3.resource('iam', region_name=my_region)

    # stub the client
    iamc_s = Stubber(iamc)
    iamr_s = Stubber(iamr.meta.client)

    iamc_s.add_response(
        'list_access_keys',
        iam_keys
    )
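    # For the second stubbed response, report the older key as already inactive.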
    iam_keys['AccessKeyMetadata'][0]['Status'] = 'Inactive'
    iamc_s.add_response(
        'list_access_keys',
        iam_keys
    )

    iamc_s.activate()
    iamr_s.activate()

    # Mock the client and resource to return the clients we created
    mocker.patch('lib.awsapi_helpers.BotoSession.client', return_value=iamc)
    mocker.patch('lib.awsapi_helpers.BotoSession.resource', return_value=iamr)
    mocker.patch('lib.awsapi_helpers.AWSClient.postit', return_value=None)

    # Mock flush so we don't attempt to write logs during the test
    mocker.patch('lib.applogger.LogHandler.flush', return_value=None)

    # Mock Notifier
    init = mocker.patch('lib.sechub_findings.Finding.flag')
    update = mocker.patch('lib.sechub_findings.Finding.update_text')

    cis1314.lambda_handler(event, None)
    init.assert_called_with(
        'INITIAL: "Deactivate unused keys over 90 days old" remediation started'
    )
    update.assert_called_once_with(
        'FAILED: "Deactivate unused keys over 90 days old" remediation failed. Please remediate manually'
    )
Ejemplo n.º 52
0
class TestStubber(unittest.TestCase):
    def setUp(self):
        self.event_emitter = hooks.HierarchicalEmitter()
        self.client = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.client.meta.method_to_api_mapping.get.return_value = 'foo'
        self.stubber = Stubber(self.client)
        self.validate_parameters_mock = mock.Mock()
        self.validate_parameters_patch = mock.patch(
            'botocore.stub.validate_parameters', self.validate_parameters_mock)
        self.validate_parameters_patch.start()

    def tearDown(self):
        self.validate_parameters_patch.stop()

    def emit_get_response_event(self, model=None, request_dict=None,
                                signer=None, context=None):
        if model is None:
            model = mock.Mock()
            model.name = 'foo'

        handler, response = self.event_emitter.emit_until_response(
            event_name='before-call.myservice.foo', model=model,
            params=request_dict, request_signer=signer, context=context)

        return response

    def test_stubber_registers_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        # This just ensures that we register at the correct event
        # and nothing more
        self.event_emitter.register_first.assert_called_with(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.register.assert_called_with(
            'before-call.*.*', mock.ANY, unique_id=mock.ANY)

    def test_stubber_unregisters_events(self):
        self.event_emitter = mock.Mock()
        self.client.meta.events = self.event_emitter
        self.stubber.activate()
        self.stubber.deactivate()
        self.event_emitter.unregister.assert_any_call(
            'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
        self.event_emitter.unregister.assert_any_call(
            'before-call.*.*', mock.ANY, unique_id=mock.ANY)

    def test_add_response(self):
        response = {'foo': 'bar'}
        self.stubber.add_response('foo', response)

        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()

    def test_add_response_fails_when_missing_client_method(self):
        del self.client.foo
        with self.assertRaises(ValueError):
            self.stubber.add_response('foo', {})

    def test_validates_service_response(self):
        self.stubber.add_response('foo', {})
        self.assertTrue(self.validate_parameters_mock.called)

    def test_validate_ignores_response_metadata(self):
        service_response = {'ResponseMetadata': {'foo': 'bar'}}
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo',
                    'input': {'shape': 'StringShape'},
                    'output': {'shape': 'StringShape'}
                }
            },
            'shapes': {
                'StringShape': {'type': 'string'}
            }
        })
        op_name = service_model.operation_names[0]
        output_shape = service_model.operation_model(op_name).output_shape

        self.client.meta.service_model = service_model
        self.stubber.add_response('TestOperation', service_response)
        self.validate_parameters_mock.assert_called_with(
            {}, output_shape)

        # Make sure service response hasn't been mutated
        self.assertEqual(
            service_response, {'ResponseMetadata': {'foo': 'bar'}})

    def test_validates_on_empty_output_shape(self):
        service_model = ServiceModel({
            'documentation': '',
            'operations': {
                'foo': {
                    'name': 'foo'
                }
            }
        })
        self.client.meta.service_model = service_model

        with self.assertRaises(ParamValidationError):
            self.stubber.add_response('TestOperation', {'foo': 'bar'})

    def test_get_response(self):
        service_response = {'bar': 'baz'}
        self.stubber.add_response('foo', service_response)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1], service_response)
        self.assertEqual(response[0].status_code, 200)

    def test_get_client_error_response(self):
        error_code = "foo"
        service_message = "bar"
        self.stubber.add_client_error('foo', error_code, service_message)
        self.stubber.activate()
        response = self.emit_get_response_event()
        self.assertEqual(response[1]['Error']['Message'], service_message)
        self.assertEqual(response[1]['Error']['Code'], error_code)

    def test_get_response_errors_with_no_stubs(self):
        self.stubber.activate()
        with self.assertRaises(StubResponseError):
            self.emit_get_response_event()

    def test_assert_no_responses_remaining(self):
        self.stubber.add_response('foo', {})
        with self.assertRaises(AssertionError):
            self.stubber.assert_no_pending_responses()
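A minimal usage sketch (not part of the example above) of the same Stubber API
against a real botocore client; list_buckets is used only because it needs no
parameters, and the region name is illustrative:

from botocore.session import get_session
from botocore.stub import Stubber

client = get_session().create_client('s3', region_name='us-east-1')
stubber = Stubber(client)
# Queue the exact response list_buckets should return, then activate the stub.
stubber.add_response('list_buckets', {'Buckets': []})
with stubber:
    assert client.list_buckets()['Buckets'] == []
stubber.assert_no_pending_responses()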
Example No. 53
class TestIndex(TestCase):
    def setUp(self):
        self.requests_mock = responses.RequestsMock(
            assert_all_requests_are_fired=False)
        self.requests_mock.start()

        # Create a dummy S3 client that (hopefully) can't do anything.
        self.s3_client = boto3.client(
            's3', config=Config(signature_version=UNSIGNED))

        self.s3_client_patcher = patch(__name__ + '.index.make_s3_client',
                                       return_value=self.s3_client)
        self.s3_client_patcher.start()

        self.s3_stubber = Stubber(self.s3_client)
        self.s3_stubber.activate()

        self.env_patcher = patch.dict(
            os.environ, {
                'ES_HOST': 'example.com',
                'AWS_ACCESS_KEY_ID': 'test_key',
                'AWS_SECRET_ACCESS_KEY': 'test_secret',
                'AWS_DEFAULT_REGION': 'ng-north-1',
            })
        self.env_patcher.start()

    def tearDown(self):
        self.env_patcher.stop()

        self.s3_stubber.assert_no_pending_responses()
        self.s3_stubber.deactivate()
        self.s3_client_patcher.stop()

        self.requests_mock.stop()

    def _get_contents(self, name, ext):
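        # Thin wrapper around index.get_contents with a fixed bucket, etag and
        # size, so individual tests only need to vary the key and extension.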
        return index.get_contents(
            'test-bucket',
            name,
            ext,
            etag='etag',
            version_id=None,
            s3_client=self.s3_client,
            size=123,
        )

    def test_infer_extensions(self):
        """ensure we are guessing file types well"""
        # parquet
        assert index.infer_extensions("s3/some/file.c000", ".c000") == ".parquet", \
            "Expected .c000 to infer as .parquet"
        # parquet, nonzero part number
        assert index.infer_extensions("s3/some/file.c001", ".c001") == ".parquet", \
            "Expected .c001 to infer as .parquet"
        # -c0001 file
        assert index.infer_extensions("s3/some/file-c0001", "") == ".parquet", \
            "Expected -c0001 to infer as .parquet"
        # -c000121 file (should never happen)
        assert index.infer_extensions("s3/some/file-c000121", "") == "", \
            "Expected -c000121 not to infer as .parquet"
        # .txt file, should be unchanged
        assert index.infer_extensions("s3/some/file-c0000.txt", ".txt") == ".txt", \
            "Expected .txt to infer as .txt"

    def test_delete_event(self):
        """
        Check that the indexer doesn't blow up on delete events.
        """
        # don't mock head or get; they should never be called for deleted objects
        self._test_index_event("ObjectRemoved:Delete",
                               mock_head=False,
                               mock_object=False)

    def test_delete_marker_event(self):
        """
        Common event in versioned buckets; should no-op
        """
        # don't mock head or get; this event should never call them
        self._test_index_event(
            "ObjectRemoved:DeleteMarkerCreated",
            # we should never call Elastic in this case
            mock_elastic=False,
            mock_head=False,
            mock_object=False)

    def test_test_event(self):
        """
        Check that the indexer does not barf when it gets an S3 test notification.
        """
        event = {
            "Records": [{
                "body": json.dumps({
                    "Message": json.dumps({
                        "Service": "Amazon S3",
                        "Event": "s3:TestEvent",
                        "Time": "2014-10-13T15:57:02.089Z",
                        "Bucket": "test-bucket",
                        "RequestId": "5582815E1AEA5ADF",
                        "HostId": "8cLeGAmw098X5cv4Zkwcmo8vvZa3eH3eKxsPzbB9wrR+YstdA6Knx4Ip8EXAMPLE"
                    })
                })
            }]
        }

        index.handler(event, None)

    def test_index_file(self):
        """test indexing a single file"""
        self._test_index_event("ObjectCreated:Put")

    @patch(__name__ + '.index.get_contents')
    def test_index_exception(self, get_mock):
        """test indexing a single file that throws an exception"""
        class ContentException(Exception):
            pass

        get_mock.side_effect = ContentException("Unable to get contents")
        with pytest.raises(ContentException):
            # get_mock already mocks get_object, so don't mock it in _test_index_event
            self._test_index_event("ObjectCreated:Put", mock_object=False)

    def _test_index_event(self,
                          event_name,
                          mock_elastic=True,
                          mock_head=True,
                          mock_object=True):
        """
        Reusable helper function to test indexing a single text file.
        """
        assert event_name in RECORDS, f"unexpected event: {event_name}"
        records = {
            "Records": [{
                "body": json.dumps({
                    "Message": json.dumps({"Records": [RECORDS[event_name]]})
                })
            }]
        }

        now = index.now_like_boto3()

        metadata = {
            'helium': json.dumps({
                'comment': 'blah',
                'user_meta': {'foo': 'bar'},
                'x': 'y'
            })
        }

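        # head_object / get_object are stubbed only when the event under test
        # is expected to call them; unused stubs would trip the
        # assert_no_pending_responses() check in tearDown.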
        if mock_head:
            self.s3_stubber.add_response(method='head_object',
                                         service_response={
                                             'Metadata': metadata,
                                             'ContentLength': 100,
                                             'LastModified': now,
                                         },
                                         expected_params={
                                             'Bucket': 'test-bucket',
                                             'Key': 'hello world.txt',
                                             'IfMatch': '123456',
                                         })

        if mock_object:
            self.s3_stubber.add_response(
                method='get_object',
                service_response={
                    'Metadata': metadata,
                    'ContentLength': 100,
                    'LastModified': now,
                    'Body': BytesIO(b'Hello World!'),
                },
                expected_params={
                    'Bucket': 'test-bucket',
                    'Key': 'hello world.txt',
                    'IfMatch': '123456',
                    'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
                })

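        # Validate the bulk request the indexer sends to Elasticsearch and
        # return a minimal success response for each action.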
        def es_callback(request):
            response_key = 'delete' if event_name == index.OBJECT_DELETE else 'index'
            actions = [json.loads(line) for line in request.body.splitlines()]
            expected = [{
                response_key: {
                    '_index': 'test-bucket',
                    '_type': '_doc',
                    '_id': 'hello world.txt:None'
                }
            }, {
                'comment': 'blah',
                'content': '' if not mock_object else 'Hello World!',
                'etag': '123456',
                'event': event_name,
                'ext': '.txt',
                'key': 'hello world.txt',
                'last_modified': now.isoformat(),
                'meta_text': 'blah  {"x": "y"} {"foo": "bar"}',
                'size': 100,
                'target': '',
                'updated': ANY,
                'version_id': None
            }]

            if response_key == 'delete':
                # delete events do not include request body
                expected.pop()

            assert actions == expected, "Unexpected request to ElasticSearch"

            response = {'items': [{response_key: {'status': 200}}]}
            return (200, {}, json.dumps(response))

        if mock_elastic:
            self.requests_mock.add_callback(responses.POST,
                                            'https://example.com:443/_bulk',
                                            callback=es_callback,
                                            content_type='application/json')

        index.handler(records, MockContext())

    def test_unsupported_contents(self):
        assert self._get_contents('foo.exe', '.exe') == ""
        assert self._get_contents('foo.exe.gz', '.exe.gz') == ""

    def test_get_plain_text(self):
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(b'Hello World!\nThere is more to know.'),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.txt',
                'IfMatch': 'etag',
                'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
            })

        contents = index.get_plain_text('test-bucket',
                                        'foo.txt',
                                        compression=None,
                                        etag='etag',
                                        version_id=None,
                                        s3_client=self.s3_client,
                                        size=123)
        assert contents == "Hello World!\nThere is more to know."

    def test_text_contents(self):
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(b'Hello World!'),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.txt',
                'IfMatch': 'etag',
                'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
            })

        assert self._get_contents('foo.txt', '.txt') == "Hello World!"

    def test_gzipped_text_contents(self):
        self.s3_stubber.add_response(
            method='get_object',
            service_response={
                'Metadata': {},
                'ContentLength': 123,
                'Body': BytesIO(compress(b'Hello World!')),
            },
            expected_params={
                'Bucket': 'test-bucket',
                'Key': 'foo.txt.gz',
                'IfMatch': 'etag',
                'Range': f'bytes=0-{index.ELASTIC_LIMIT_BYTES}',
            })

        assert self._get_contents('foo.txt.gz', '.txt.gz') == "Hello World!"

    def test_notebook_contents(self):
        notebook = (BASE_DIR / 'normal.ipynb').read_bytes()

        self.s3_stubber.add_response(method='get_object',
                                     service_response={
                                         'Metadata': {},
                                         'ContentLength': 123,
                                         'Body': BytesIO(notebook),
                                     },
                                     expected_params={
                                         'Bucket': 'test-bucket',
                                         'Key': 'foo.ipynb',
                                         'IfMatch': 'etag',
                                     })

        assert "model.fit" in self._get_contents('foo.ipynb', '.ipynb')

    def test_gzipped_notebook_contents(self):
        notebook = compress((BASE_DIR / 'normal.ipynb').read_bytes())

        self.s3_stubber.add_response(method='get_object',
                                     service_response={
                                         'Metadata': {},
                                         'ContentLength': 123,
                                         'Body': BytesIO(notebook),
                                     },
                                     expected_params={
                                         'Bucket': 'test-bucket',
                                         'Key': 'foo.ipynb.gz',
                                         'IfMatch': 'etag',
                                     })

        assert "Model results visualization" in self._get_contents(
            'foo.ipynb.gz', '.ipynb.gz')

    def test_parquet_contents(self):
        parquet = (BASE_DIR /
                   'amazon-reviews-1000.snappy.parquet').read_bytes()
        self.s3_stubber.add_response(method='get_object',
                                     service_response={
                                         'Metadata': {},
                                         'ContentLength': 123,
                                         'Body': BytesIO(parquet),
                                     },
                                     expected_params={
                                         'Bucket': 'test-bucket',
                                         'Key': 'foo.parquet',
                                         'IfMatch': 'etag',
                                     })

        contents = self._get_contents('foo.parquet', '.parquet')
        size = len(contents.encode('utf-8', 'ignore'))
        assert size <= index.ELASTIC_LIMIT_BYTES
        # spot check for contents
        assert "This is not even worth the money." in contents
        assert "As for results; I felt relief almost immediately." in contents
        assert "R2LO11IPLTDQDX" in contents

    # see PRE conditions in conftest.py
    @pytest.mark.extended
    def test_parquet_extended(self):
        directory = (BASE_DIR / 'amazon-reviews-pds')
        files = directory.glob('**/*.parquet')
        for f in files:
            print(f"Testing {f}")
            parquet = f.read_bytes()

            self.s3_stubber.add_response(method='get_object',
                                         service_response={
                                             'Metadata': {},
                                             'ContentLength': 123,
                                             'Body': BytesIO(parquet),
                                         },
                                         expected_params={
                                             'Bucket': 'test-bucket',
                                             'Key': 'foo.parquet',
                                             'IfMatch': 'etag',
                                         })