def encrypt(plaintext_data, region, key_alias):
    """Encrypts the given plaintext data using AWS KMS

    See:
      https://docs.aws.amazon.com/kms/latest/APIReference/API_Encrypt.html

    Args:
        plaintext_data (str): The raw, unencrypted data to be encrypted
        region (str): AWS region
        key_alias (str): KMS Key Alias

    Returns:
        string: The encrypted ciphertext

    Raises:
        ClientError
    """
    try:
        kms = boto3.client('kms', config=default_config(region=region))
        result = kms.encrypt(
            KeyId='alias/{}'.format(key_alias),
            Plaintext=plaintext_data
        )
    except ClientError:
        LOGGER.error('An error occurred during KMS encryption')
        raise

    return result['CiphertextBlob']
def download_fileobj(file_handle, bucket, key, region):
    """Downloads the requested S3 object into the given file handle.

    The downloaded payload is also returned to the caller.

    Args:
        file_handle (File): A File-like object to save the downloaded contents
        region (str): AWS region
        bucket (str): AWS S3 bucket name
        key (str): AWS S3 key name

    Returns:
        str: The downloaded payload

    Raises:
        ClientError
    """
    try:
        s3 = boto3.client('s3', config=default_config(region=region))
        s3.download_fileobj(bucket, key, file_handle)
    except ClientError:
        LOGGER.error('An error occurred during S3 DownloadFileobj')
        raise

    # Rewind before reading so the full payload is returned
    file_handle.seek(0)
    return file_handle.read()
def put_parameter(name, value, region, kms_key_alias):
    """Puts a parameter into the SSM Parameter Store as a SecureString

    Args:
        name (str): The name of the parameter to save
        value (str or dict): The value of the parameter to save
        region (str): AWS region
        kms_key_alias (str): The kms key alias in use for secrets

    Returns:
        bool: True if successful else False
    """
    client = boto3.client('ssm', config=default_config(region=region))

    # dicts are serialized as JSON; everything else is stored as its str form
    parameter_value = json.dumps(value) if isinstance(value, dict) else str(value)

    try:
        client.put_parameter(
            Name=name,
            Description='StreamAlert Secret',
            Value=parameter_value,
            Type='SecureString',
            KeyId='alias/{}'.format(kms_key_alias),
            Overwrite=True,
            Tier='Standard'
        )
    except ClientError as err:
        # LOGGER.exception also records the traceback for debugging
        LOGGER.exception(
            'Error saving parameter %s to SSM Param Store\n%s', name, err)
        return False

    # Previously a `result` flag was initialized to False, re-assigned False in
    # the except branch, and True in an else branch; direct returns are clearer
    return True
def __init__(self, prefix, firehose_config=None, log_sources=None):
    """Initializes the Firehose client and enabled log sources.

    Args:
        prefix (str): Deployment prefix to prepend to delivery stream names
        firehose_config (dict): Optional firehose configuration; the
            'use_prefix' key (default True) controls whether the prefix is used
        log_sources (dict): Optional log sources to enable
    """
    # BUG FIX: firehose_config defaults to None, so calling .get() on it
    # unconditionally raised AttributeError when no config was supplied.
    # Guard on truthiness first, consistent with the sibling implementation
    # of this constructor elsewhere in the project.
    self._prefix = (
        prefix
        if firehose_config and firehose_config.get('use_prefix', True)
        else ''
    )
    self._client = boto3.client('firehose', config=boto_helpers.default_config())
    self.load_enabled_log_sources(firehose_config, log_sources, force_load=True)
def __init__(self, prefix, firehose_config=None, log_sources=None):
    """Initializes the Firehose client and enabled log sources.

    Args:
        prefix (str): Deployment prefix to prepend to delivery stream names
        firehose_config (dict): Optional firehose configuration; the
            'use_prefix' key (default True) controls whether the prefix is used
        log_sources (dict): Optional log sources to enable
    """
    if firehose_config and firehose_config.get('use_prefix', True):
        # This default value must be consistent with the classifier
        # Terraform config
        self._prefix = '{}_'.format(prefix)
    else:
        self._prefix = ''

    self._client = boto3.client('firehose', config=boto_helpers.default_config())
    self.load_enabled_log_sources(firehose_config, log_sources, force_load=True)
def initialize(self):
    """Sets up the DynamoDB table backing this LookupTable.

    Accessing the table ARN forces a call to DynamoDB, so an invalid or
    inaccessible table fails here rather than on first read.

    Raises:
        LookupTablesInitializationError: On any DynamoDB ClientError
    """
    LOGGER.info('LookupTable (%s): Running initialization routine', self.id)
    try:
        dynamodb = boto3.resource(
            'dynamodb',
            config=boto_helpers.default_config(timeout=10)
        )
        self._table = dynamodb.Table(self._dynamo_db_table)
        # This is only here to blow up on invalid tables
        _ = self._table.table_arn
    except ClientError as err:
        raise LookupTablesInitializationError(
            'LookupTable ({}): Encountered error while connecting with DynamoDB: \'{}\''.format(
                self.id, err.response['Error']['Message']
            )
        )
def __init__(self, database_name, results_bucket, results_prefix, region=None):
    """Initialize the Boto3 Athena Client, and S3 results bucket/key

    Args:
        database_name (str): Athena database name where tables will be queried
        results_bucket (str): S3 bucket in which to store Athena results
        results_prefix (str): S3 key prefix to prepend to results in the bucket
        region (str): AWS region for the Athena client; boto3's default
            region is used when omitted
    """
    self._client = boto3.client(
        'athena', config=boto_helpers.default_config(region=region)
    )
    self.database = database_name.strip()

    bucket = results_bucket.strip()
    if not bucket.startswith('s3://'):
        # Athena requires the 's3://' scheme on its result location
        bucket = 's3://{}'.format(bucket)

    # Produces s3://<results_bucket_name>/<results_prefix>
    self._s3_results_path_prefix = posixpath.join(bucket, results_prefix)
def head_bucket(bucket, region):
    """Determines if given bucket exists with correct permissions.

    See:
      https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketHEAD.html

    Args:
        bucket (str): AWS S3 bucket name
        region (str): AWS Region

    Returns:
        bool: True on success

    Raises:
        ClientError: Raised when the bucket does not exist or is denying
            permission to access.
    """
    try:
        client = boto3.client('s3', config=default_config(region=region))
        client.head_bucket(Bucket=bucket)
    except ClientError:
        LOGGER.error('An error occurred during S3 HeadBucket')
        raise

    # BUG FIX: the docstring promises a bool and the sibling helpers
    # (create_bucket, put_object) return True on success, but this function
    # previously fell off the end and implicitly returned None
    return True
def put_object(object_data, bucket, key, region):
    """Saves the given data into AWS S3

    Args:
        object_data (str): The raw object data to save
        region (str): AWS region
        bucket (str): AWS S3 bucket name
        key (str): AWS S3 key name

    Returns:
        bool: True on success

    Raises:
        ClientError
    """
    try:
        s3 = boto3.client('s3', config=default_config(region=region))
        s3.put_object(Body=object_data, Bucket=bucket, Key=key)
    except ClientError:
        LOGGER.error('An error occurred during S3 PutObject')
        raise

    return True
def decrypt(ciphertext, region):
    """Decrypts the given ciphertext using AWS KMS

    See:
      https://docs.aws.amazon.com/kms/latest/APIReference/API_Decrypt.html

    Args:
        ciphertext (str): The raw, encrypted data to be decrypted
        region (str): AWS region

    Returns:
        string: The decrypted plaintext

    Raises:
        ClientError
    """
    try:
        kms = boto3.client('kms', config=default_config(region=region))
        result = kms.decrypt(CiphertextBlob=ciphertext)
    except ClientError:
        LOGGER.error('An error occurred during KMS decryption')
        raise

    return result['Plaintext']
def get_parameter(parameter_name, region, with_decryption=True):
    """Gets the named parameter from the SSM Parameter Store

    Args:
        parameter_name (str): The name of the parameter to fetch
        region (str): AWS region
        with_decryption (bool): Should decryption be attempted via this call

    Returns:
        str: The parameter either encrypted or not

    Raises:
        ClientError
    """
    ssm = boto3.client('ssm', config=default_config(region=region))
    try:
        result = ssm.get_parameter(
            Name=parameter_name,
            WithDecryption=with_decryption
        )
    except ClientError:
        LOGGER.error('Error getting parameter %s', parameter_name)
        raise

    return result['Parameter']['Value']
def create_bucket(bucket, region):
    """Creates the given S3 bucket

    See:
      https://docs.aws.amazon.com/cli/latest/reference/s3api/create-bucket.html

    Args:
        bucket (str): The string name of the intended S3 bucket
        region (str): AWS Region

    Returns:
        bool: True on success

    Raises:
        ClientError
    """
    try:
        s3 = boto3.client('s3', config=default_config(region=region))
        s3.create_bucket(Bucket=bucket)
    except ClientError:
        LOGGER.error('An error occurred during S3 CreateBucket')
        raise

    return True
def __init__(self, configuration):
    """Initializes the S3 driver from its configuration dict.

    Example configuration:
        {
            "driver": "s3",
            "bucket": "airbnb.sample.lookuptable",
            "key": "resource_map.gz",
            "cache_refresh_minutes": 10,
            "compression": "gzip"
        }
    """
    super(S3Driver, self).__init__(configuration)

    self._s3_bucket = configuration['bucket']
    self._s3_key = configuration['key']
    self._compression = configuration.get('compression', False)
    self._cache_refresh_minutes = configuration.get(
        'cache_refresh_minutes',
        self._DEFAULT_CACHE_REFRESH_MINUTES
    )

    self._cache = DriverCache(maximum_key_count=0)
    # S3 cannot support a per-key TTL, so a separate DriverCache stores the
    # global cache invalidation timer
    self._cache_clock = DriverCache()
    self._dirty = False

    # Explicitly set timeout for S3 connection. The boto default timeout is
    # 60 seconds.
    self._s3_adapter = S3Adapter(
        self,
        boto3.resource('s3', config=boto_helpers.default_config(timeout=10)),
        self._s3_bucket,
        self._s3_key
    )