Example No. 1
from cloudstorage.drivers.amazon import S3Driver


# pytest-style fixture: yields a configured S3 driver and removes any containers
# created by the tests afterwards. AMAZON_KEY, AMAZON_SECRET, AMAZON_REGION and
# CONTAINER_PREFIX are assumed to be defined elsewhere (e.g. in the test settings).
def storage():
    driver = S3Driver(AMAZON_KEY, AMAZON_SECRET, AMAZON_REGION)

    yield driver

    for container in driver:  # cleanup
        if container.name.startswith(CONTAINER_PREFIX):
            for blob in container:
                blob.delete()

            container.delete()
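A minimal sketch of how a test could consume this fixture, assuming it is registered with @pytest.fixture and that CONTAINER_PREFIX is defined in the same test module; the container name is illustrative:

def test_driver_lists_created_container(storage):
    # 'storage' is the S3Driver yielded by the fixture above.
    container = storage.create_container(CONTAINER_PREFIX + 'example')
    assert container.name in [c.name for c in storage]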
Example No. 2
from cloudstorage.drivers.amazon import S3Driver

storage = S3Driver(key='AKIAI43MM4QUVYVDT5WA',
                   secret='llOZ980qQMzZnicwsInsAEhtasL1Vg1iI/OKG5BC')

container = storage.create_container('avatars')
print(container.cdn_url)
# 'https://avatars.s3.amazonaws.com/'

avatar_blob = container.upload_blob('/path/my-avatar.png')
print(avatar_blob.cdn_url)
# 'https://s3.amazonaws.com/avatars/my-avatar.png'

print(avatar_blob.generate_download_url(expires=3600))
# 'https://avatars.s3.amazonaws.com/my-avatar.png?'
# 'AWSAccessKeyId=<my-aws-access-key-id>'
# '&Signature=<generated-signature>'
# '&Expires=1491849102'

print(container.generate_upload_url('user-1-avatar.png', expires=3600))
# {
#     'url': 'https://avatars.s3.amazonaws.com/',
#     'fields': {
#         'key': 'user-1-avatar.png',
#         'AWSAccessKeyId': '<my-aws-access-key-id>',
#         'policy': '<generated-policy>',
#         'signature': '<generated-signature>'
#     }
# }
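The dictionary returned by generate_upload_url can be passed straight to an HTTP POST. A hedged sketch using the requests library; the local file path is hypothetical:

import requests

upload = container.generate_upload_url('user-1-avatar.png', expires=3600)

# POST the pre-signed form fields together with the file itself.
# S3 typically responds with status 204 on a successful form upload.
with open('/path/user-1-avatar.png', 'rb') as f:
    response = requests.post(upload['url'], data=upload['fields'], files={'file': f})

print(response.status_code)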
Example No. 3
    def __init_aws_conn(self):
        """Initialise and return the AWS S3 driver from cloudstorage."""

        # required params
        if self.__aws_access_key is None:
            # No aws_access_key provided, check the Spark config
            self.__aws_access_key = Utils.get_spark_config(
                'sagacious.io.aws.access_key')
            # if still not found, raise an exception; this parameter is required
            if self.__aws_access_key is None:
                raise Exception(
                    "Unable to initialise connection to AWS S3, required parameter 'aws_access_key'"
                )

        if self.__aws_secret_key is None:
            # No aws_secret_key provided, check the Spark config
            self.__aws_secret_key = Utils.get_spark_config(
                'sagacious.io.aws.secret_key')
            # if still not found, raise an exception; this parameter is required
            if self.__aws_secret_key is None:
                raise Exception(
                    "Unable to initialise connection to AWS S3, required parameter 'aws_secret_key'"
                )

        # optional params
        if self.__databricks_scope is None:
            self.__databricks_scope = Utils.get_spark_config(
                'sagacious.io.databricks_scope')

        if self.__aws_region is None:
            self.__aws_region = Utils.get_spark_config(
                'sagacious.io.aws.region')

        try:

            if self.__databricks_scope is not None:
                # fetch the actual credentials from the Databricks secret scope;
                # here __aws_access_key and __aws_secret_key hold the secret names
                dbutils = Utils.get_db_utils()
                aws_access_key = dbutils.secrets.get(
                    scope=self.__databricks_scope, key=self.__aws_access_key)
                aws_secret_key = dbutils.secrets.get(
                    scope=self.__databricks_scope, key=self.__aws_secret_key)
            else:
                # else creds supplied directly
                aws_access_key = self.__aws_access_key
                aws_secret_key = self.__aws_secret_key

            # init driver
            if self.__aws_region is None:
                s3_driver = S3Driver(key=aws_access_key, secret=aws_secret_key)
            else:
                # region is optional but may be required by AWS in some circumstances
                s3_driver = S3Driver(key=aws_access_key,
                                     secret=aws_secret_key,
                                     region=self.__aws_region)

        except Exception as e:
            raise Exception(
                'Unable to initialise connection to AWS S3 storage\n{0}'.
                format(e))

        return s3_driver
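The method repeats the same fall-back pattern for both credentials: use the explicit argument, fall back to the Spark config, and fail if neither is set. A hedged sketch of that pattern pulled out into a standalone helper; Utils.get_spark_config is assumed to behave as in the example and _resolve_required is a hypothetical name:

def _resolve_required(value, spark_key, param_name):
    # Fall back to the Spark config when no explicit value was supplied.
    if value is None:
        value = Utils.get_spark_config(spark_key)
    # Still missing: the parameter is required, so fail loudly.
    if value is None:
        raise Exception(
            "Unable to initialise connection to AWS S3, required parameter "
            "'{0}'".format(param_name))
    return value

# Hypothetical usage inside __init_aws_conn:
# self.__aws_access_key = _resolve_required(self.__aws_access_key,
#                                           'sagacious.io.aws.access_key',
#                                           'aws_access_key')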
Example No. 4
import boto3
import csv

# Import Driver for Amazon AWS
from cloudstorage.drivers.amazon import S3Driver

# Name of the bucket
bucket_name = ""

# Installation details: https://pypi.org/project/cloudstorage/
# Documentation for upload_blob (see the hedged sketch below): https://cloudstorage.readthedocs.io/en/latest/api/container.html#cloudstorage.base.Container.upload_blob

# Create the driver reference for Amazon S3
s3 = S3Driver(key='<my-aws-access-key-id>', secret='<my-aws-secret-access-key>')

# With create_container, we create a new container in Amazon S3 with the name
# given in bucket_name. If a container with that name already exists, the
# function returns the existing instance.
container = s3.create_container(bucket_name)
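
# Hedged illustration (not part of the original example): the upload_blob call from
# the documentation linked above could be used to push the CSV file itself to S3.
# The path 'data.csv' is hypothetical.
csv_blob = container.upload_blob('data.csv')
print(csv_blob.cdn_url)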

dyndb = boto3.resource('dynamodb', region_name='us-west-2')

# Path of the directory that contains our AWS credentials.
base_path = "<path_of_the_credentials>"

# Create a table in DynamoDB to hold the metadata of the CSV file
table = None
try:

	# If the table does not exist in DynamoDB yet, create it
	table = dyndb.create_table(TableName='DataTable',