def put_mock_creds(output_name, creds, bucket, region, alias):
    """Encrypt credentials with mock KMS and store the ciphertext in mock S3.

    Args:
        output_name (str): S3 object key to write the encrypted blob under
        creds (dict): credential payload to serialize and encrypt
        bucket (str): destination S3 bucket name
        region (str): AWS region for both KMS and S3 mocks
        alias (str): KMS key alias used for encryption
    """
    serialized = json.dumps(creds)
    encrypted_payload = encrypt_with_kms(serialized, region, alias)
    put_mock_s3_object(bucket, output_name, encrypted_payload, region)
def test_save_credentials_into_s3(self):
    """S3Driver - Save Credentials

    Round-trip check: save_credentials_into_s3() followed by
    load_credentials() should recover the original payload."""
    expected_creds = {'url': 'http://best.website.ever/test'}
    input_credentials = Credentials(expected_creds, is_encrypted=False, region=REGION)
    descriptor = 'test_descriptor'

    # Annoyingly, moto needs us to create the bucket first
    # We put a random unrelated object into the bucket and this will set up the bucket for us
    put_mock_s3_object(self._s3_driver.get_s3_secrets_bucket(), 'aaa', 'bbb', REGION)

    assert_true(self._s3_driver.save_credentials_into_s3(
        descriptor, input_credentials, KMS_ALIAS))

    credentials = self._s3_driver.load_credentials(descriptor)

    assert_is_not_none(credentials)
    assert_true(credentials.is_encrypted())
    assert_equal(json.loads(credentials.get_data_kms_decrypted()), expected_creds)
def _put_mock_tables(self):
    """Stage a JSON fixture object in mock S3 for every configured bucket/file.

    Each object's body maps '<bucket>_key' to '<filename-stem>_value'.
    """
    # FIX: .iteritems() is Python 2 only and raises AttributeError on
    # Python 3; .items() is correct on both and matches the Python 3
    # idioms used elsewhere in this file (e.g. str.encode())
    for bucket, files in self.buckets_info.items():
        for json_file in files:
            put_mock_s3_object(
                bucket,
                json_file,
                json.dumps({
                    '{}_key'.format(bucket): '{}_value'.format(
                        os.path.splitext(json_file)[0]
                    )
                })
            )
def _put_mock_tables(self):
    """Stage one plain-JSON and one zlib-compressed JSON object in mock S3."""
    plain_body = json.dumps({
        'key_1': 'foo_1',
        'key_2': 'foo_2',
    })
    put_mock_s3_object('bucket_name', 'foo.json', plain_body)

    compressed_body = zlib.compress(json.dumps({
        'key_1': 'compressed_bar_1',
        'key_2': 'compressed_bar_2',
    }).encode())
    put_mock_s3_object('bucket_name', 'bar.json', compressed_body)
def test_get_decompression_fallback(self, mock_logger):
    """LookupTables - Drivers - S3 Driver - Compressed Get - Compression Fallback"""
    # Store a plain (uncompressed) JSON body where the driver expects
    # compressed data; the driver should fall back to the raw payload
    uncompressed_body = json.dumps({
        'key_1': 'not_compressed_bar_1',
        'key_2': 'not_compressed_bar_2',
    })
    put_mock_s3_object('bucket_name', 'bar.json', uncompressed_body)

    self._bar_driver.initialize()

    assert_equal(self._bar_driver.get('key_1'), 'not_compressed_bar_1')
    mock_logger.assert_any_call(
        'LookupTable (%s): Data is not compressed; defaulting to original payload',
        's3:bucket_name/bar.json'
    )
def _put_mock_data(self):
    """Seed mock S3 objects and a mock DynamoDB table with fixture data."""
    # S3 mock data: one plain JSON object, one zlib-compressed JSON object
    put_mock_s3_object('bucket_name', 'foo.json', json.dumps({
        'key_1': 'foo_1',
        'key_2': 'foo_2',
    }))
    compressed_body = zlib.compress(json.dumps({
        'key_1': 'compressed_bar_1',
        'key_2': 'compressed_bar_2',
    }).encode())
    put_mock_s3_object('bucket_name', 'bar.json', compressed_body)

    # DynamoDB mock data: build a schema matching the configured table
    # (hash + range key), then insert a single seed item
    table_schema = {
        'AttributeDefinitions': [
            {'AttributeName': 'MyPartitionKey', 'AttributeType': 'S'},
            {'AttributeName': 'MySortKey', 'AttributeType': 'S'},
        ],
        'KeySchema': [
            {'AttributeName': 'MyPartitionKey', 'KeyType': 'HASH'},
            {'AttributeName': 'MySortKey', 'KeyType': 'RANGE'},
        ],
    }
    seed_rows = [
        {
            'MyPartitionKey': 'aaaa',
            'MySortKey': '1',
            'MyValueKey': 'Over 9000!',
        },
    ]
    put_mock_dynamod_data('table_name', table_schema, seed_rows)
def test_put_download():
    """AwsApiClient - AwsS3 - PutObject/Download - Upload then download object"""
    payload = 'zzzzz'
    bucket = 'bucket'
    key = 'key'

    # Annoyingly, moto needs us to create the bucket first
    # We put a random unrelated object into the bucket and this will set up the bucket for us
    put_mock_s3_object(bucket, 'aaa', 'bbb', REGION)

    AwsS3.put_object(payload, bucket=bucket, key=key, region=REGION)

    # Download into a temp file object and verify the round-tripped payload
    with tempfile.SpooledTemporaryFile(0, 'a+b') as file_handle:
        downloaded = AwsS3.download_fileobj(file_handle, bucket=bucket, key=key, region=REGION)
        assert_equal(downloaded, payload)
def test_get_creds_from_s3(self):
    """OutputDispatcher - Get Creds From S3"""
    test_data = 'credential test string'
    bucket_name = self._dispatcher.secrets_bucket
    key = self._dispatcher.output_cred_name(self._descriptor)
    local_cred_location = os.path.join(self._dispatcher._local_temp_dir(), key)

    # Stage the payload in the mock bucket, then let the dispatcher pull
    # it down to the local path
    put_mock_s3_object(bucket_name, key, test_data, REGION)
    self._dispatcher._get_creds_from_s3(local_cred_location, self._descriptor)

    # The downloaded file's first line should be exactly the payload
    with open(local_cred_location) as creds_file:
        assert_equal(creds_file.readline(), test_data)
def setup(self):
    """Build an OutputCredentialsProvider and ensure its secrets bucket exists."""
    self._provider = OutputCredentialsProvider(
        'service',
        config=CONFIG,
        defaults={'property2': 'abcdef'},
        region=REGION,
        prefix='test_asdf',
        aws_account_id='1234567890'
    )

    # Pre-create the bucket so we dont get a "Bucket does not exist" error
    s3_driver = S3Driver('test_asdf', 'service', REGION)
    put_mock_s3_object(
        s3_driver.get_s3_secrets_bucket(), 'laskdjfaouhvawe', 'lafhawef', REGION)
def test_load_credentials_plain_object(self):
    """S3Driver - Load String from S3

    Saves a simple unencrypted string into a mock S3 file and verifies the
    driver returns that payload verbatim."""
    test_data = 'encrypted credential test string'
    descriptor = 'test_descriptor'

    # Stick some fake data into the credentials bucket file.
    bucket_name = self._s3_driver.get_s3_secrets_bucket()
    key = self._s3_driver.get_s3_key(descriptor)
    put_mock_s3_object(bucket_name, key, test_data, REGION)

    credentials = self._s3_driver.load_credentials(descriptor)

    # (!) Notably, in this test the credential contents are not encrypted when setup. They
    # are supposed to be encrypted PRIOR to putting it in.
    assert_true(credentials.is_encrypted())
    assert_equal(credentials.data(), test_data)
def test_load_credentials(self):
    """S3Driver - With File Driver - Load Credentials - Pulls into LocalFileStore

    Uses the S3Driver's caching ability to warm a local file driver."""
    remove_temp_secrets()

    creds = {'my_secret': 'i ate two portions of biscuits and gravy'}
    input_credentials = Credentials(creds, is_encrypted=False, region=REGION)
    descriptor = 'test_descriptor'

    # Annoyingly, moto needs us to create the bucket first
    # We put a random unrelated object into the bucket and this will set up the bucket for us
    put_mock_s3_object(self._s3_driver.get_s3_secrets_bucket(), 'aaa', 'bbb', REGION)

    # The local driver should not have the credentials yet
    assert_false(self._fs_driver.has_credentials(descriptor))

    # Save through the S3 driver; nothing should be cached locally yet
    assert_true(self._s3_driver.save_credentials_into_s3(
        descriptor, input_credentials, KMS_ALIAS))
    assert_false(self._fs_driver.has_credentials(descriptor))

    # Loading through the S3 driver warms up the local driver's cache
    self._s3_driver.load_credentials(descriptor)
    assert_true(self._fs_driver.has_credentials(descriptor))

    # Now the credentials should round-trip from the local fs
    credentials = self._fs_driver.load_credentials(descriptor)
    assert_is_not_none(credentials)
    assert_true(credentials.is_encrypted())
    assert_equal(json.loads(credentials.get_data_kms_decrypted()), creds)

    remove_temp_secrets()
def test_download_s3_object_compressed(self, mock_logger):
    """LookupTables - Download S3 Object, Compressed File"""
    # FIX: zlib.compress() requires bytes on Python 3, but json.dumps()
    # returns str — encode first, matching the other compressed fixtures
    # in this file (which all call .encode() before zlib.compress())
    put_mock_s3_object(
        'bucket_name',
        'bar.json',
        zlib.compress(json.dumps({'compressed_key': 'compressed_val'}).encode())
    )

    expected_result = {
        'foo': {
            'bucket_name_key': 'foo_value'
        },
        'bar': {
            'compressed_key': 'compressed_val'
        }
    }

    LookupTables._download_s3_objects(self.buckets_info)

    assert_equal(LookupTables._tables, expected_result)
    mock_logger.assert_any_call('Data in \'%s\' is not compressed', 'foo.json')