def configure(cls, settings):
    kwargs = super(GCSStorage, cls).configure(settings)
    kwargs['expire_after'] = int(
        getdefaults(settings, 'storage.expire_after', 'aws.expire_after',
                    60 * 60 * 24))
    kwargs['bucket_prefix'] = getdefaults(settings, 'storage.prefix',
                                          'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(
        getdefaults(settings, 'storage.prepend_hash', 'aws.prepend_hash',
                    True))
    is_secure = getdefaults(settings, 'storage.is_secure', 'aws.is_secure',
                            True)
    calling_format = settings.get('storage.calling_format',
                                  'SubdomainCallingFormat')
    bucket_name = settings.get('storage.bucketname')
    kwargs['redirect_urls'] = asbool(
        settings.get('storage.redirect_urls', False))
    if calling_format not in SUPPORTED_CALLING_FORMATS:
        raise ValueError(
            "Only {0} are supported for calling_format".format(
                ', '.join(SUPPORTED_CALLING_FORMATS)))
    bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    bucket = boto.storage_uri(bucket, 'gs')
    bucket = bucket.get_bucket()
    kwargs['bucket'] = bucket
    return kwargs
def configure(cls, settings):
    kwargs = super(GCSStorage, cls).configure(settings)
    kwargs['expire_after'] = int(getdefaults(
        settings, 'storage.expire_after', 'aws.expire_after', 60 * 60 * 24))
    kwargs['bucket_prefix'] = getdefaults(
        settings, 'storage.prefix', 'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(getdefaults(
        settings, 'storage.prepend_hash', 'aws.prepend_hash', True))
    is_secure = getdefaults(settings, 'storage.is_secure', 'aws.is_secure',
                            True)
    calling_format = settings.get('storage.calling_format',
                                  'SubdomainCallingFormat')
    bucket_name = settings.get('storage.bucketname')
    kwargs['redirect_urls'] = asbool(settings.get('storage.redirect_urls',
                                                  False))
    if calling_format not in SUPPORTED_CALLING_FORMATS:
        raise ValueError("Only {0} are supported for calling_format"
                         .format(', '.join(SUPPORTED_CALLING_FORMATS)))
    bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    bucket = boto.storage_uri(bucket, 'gs')
    bucket = bucket.get_bucket()
    kwargs['bucket'] = bucket
    return kwargs
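# Both variants above rely on a `getdefaults` helper that is not shown in
# this section. Judging from every call site (a settings dict, one or more
# keys in priority order, then a fallback value), it presumably behaves like
# the minimal sketch below; this is an assumption, not the actual helper.
def getdefaults(settings, *args):
    """ Return the value of the first key present in settings, else the
    trailing default. """
    keys, default = args[:-1], args[-1]
    for key in keys:
        if key in settings:
            return settings[key]
    return default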
def configure(cls, settings):
    kwargs = super(CloudFrontS3Storage, cls).configure(settings)
    kwargs['cloud_front_domain'] = getdefaults(
        settings, 'storage.cloud_front_domain', 'aws.cloud_front_domain', '')
    kwargs['cloud_front_key_file'] = getdefaults(
        settings, 'storage.cloud_front_key_file',
        'aws.cloud_front_key_file', None)
    kwargs['cloud_front_key_string'] = getdefaults(
        settings, 'storage.cloud_front_key_string',
        'aws.cloud_front_key_string', None)
    kwargs['cloud_front_key_id'] = getdefaults(
        settings, 'storage.cloud_front_key_id', 'aws.cloud_front_key_id', '')
    return kwargs
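# A hypothetical settings fragment for the CloudFront variant above. All
# values are placeholders, and the usual S3 settings (storage.bucket plus
# credentials) are still required by the superclass configure().
settings = {
    'storage.cloud_front_domain': 'https://dexample123.cloudfront.net',
    'storage.cloud_front_key_file': '/etc/pypicloud/cf_signing_key.pem',
    'storage.cloud_front_key_id': 'APKEXAMPLEKEYID',
}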
def configure(cls, settings):
    kwargs = super(S3Storage, cls).configure(settings)
    kwargs['expire_after'] = int(getdefaults(
        settings, 'storage.expire_after', 'aws.expire_after', 60 * 60 * 24))
    kwargs['buffer_time'] = int(getdefaults(
        settings, 'storage.buffer_time', 'aws.buffer_time', 600))
    kwargs['bucket_prefix'] = getdefaults(
        settings, 'storage.prefix', 'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(getdefaults(
        settings, 'storage.prepend_hash', 'aws.prepend_hash', True))
    access_key = getdefaults(settings, 'storage.access_key',
                             'aws.access_key', None)
    secret_key = getdefaults(settings, 'storage.secret_key',
                             'aws.secret_key', None)
    s3conn = boto.connect_s3(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key)
    aws_bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if aws_bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    bucket = s3conn.lookup(aws_bucket, validate=False)
    if bucket is None:
        location = getdefaults(settings, 'storage.region', 'aws.region',
                               boto.s3.connection.Location.DEFAULT)
        bucket = s3conn.create_bucket(aws_bucket, location=location)
    kwargs['bucket'] = bucket
    return kwargs
def configure(cls, settings):
    kwargs = super(S3Storage, cls).configure(settings)
    kwargs['expire_after'] = int(
        getdefaults(settings, 'storage.expire_after', 'aws.expire_after',
                    60 * 60 * 24))
    kwargs['buffer_time'] = int(
        getdefaults(settings, 'storage.buffer_time', 'aws.buffer_time', 600))
    kwargs['bucket_prefix'] = getdefaults(settings, 'storage.prefix',
                                          'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(
        getdefaults(settings, 'storage.prepend_hash', 'aws.prepend_hash',
                    True))
    access_key = getdefaults(settings, 'storage.access_key',
                             'aws.access_key', None)
    secret_key = getdefaults(settings, 'storage.secret_key',
                             'aws.secret_key', None)
    s3conn = boto.connect_s3(aws_access_key_id=access_key,
                             aws_secret_access_key=secret_key)
    aws_bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if aws_bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    bucket = s3conn.lookup(aws_bucket, validate=False)
    if bucket is None:
        location = getdefaults(settings, 'storage.region', 'aws.region',
                               boto.s3.connection.Location.DEFAULT)
        bucket = s3conn.create_bucket(aws_bucket, location=location)
    kwargs['bucket'] = bucket
    return kwargs
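# A sketch of how the S3 variants above would be driven, with made-up bucket
# and credential values. Note the int()/asbool() coercions, which let values
# arrive as strings from an ini file.
settings = {
    'storage.bucket': 'my-pypi-packages',    # required, else ValueError
    'storage.access_key': 'AKIAEXAMPLEKEY',
    'storage.secret_key': 'EXAMPLESECRET',
    'storage.expire_after': '86400',         # coerced by int()
    'storage.prepend_hash': 'false',         # parsed by asbool()
}
kwargs = S3Storage.configure(settings)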
def configure(cls, settings):
    kwargs = super(S3Storage, cls).configure(settings)
    kwargs['expire_after'] = int(getdefaults(
        settings, 'storage.expire_after', 'aws.expire_after', 60 * 60 * 24))
    kwargs['bucket_prefix'] = getdefaults(
        settings, 'storage.prefix', 'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(getdefaults(
        settings, 'storage.prepend_hash', 'aws.prepend_hash', True))
    access_key = getdefaults(settings, 'storage.access_key',
                             'aws.access_key', None)
    secret_key = getdefaults(settings, 'storage.secret_key',
                             'aws.secret_key', None)
    # We used to always use boto.connect_s3 because it can look up buckets
    # in any region. New regions require AWS4-HMAC-SHA256, which boto can
    # only do with a region connection. So if the region is specified (and
    # it must be for new regions like eu-central-1), use a region
    # connection.
    location = settings.get('storage.region')
    if location is None:
        s3conn = boto.connect_s3(
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key)
    else:
        s3conn = boto.s3.connect_to_region(location,
                                           aws_access_key_id=access_key,
                                           aws_secret_access_key=secret_key)
    aws_bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if aws_bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    try:
        bucket = s3conn.get_bucket(aws_bucket)
    except boto.exception.S3ResponseError as e:
        if e.error_code != 'NoSuchBucket':
            if e.status == 301:
                LOG.warn("Bucket found in different region. Check that "
                         "the S3 bucket specified in 'storage.bucket' is "
                         "in 'storage.region'")
            raise
        location = getdefaults(settings, 'storage.region', 'aws.region',
                               boto.s3.connection.Location.DEFAULT)
        LOG.info("Creating S3 bucket %s in region %s", aws_bucket, location)
        bucket = s3conn.create_bucket(aws_bucket, location=location)
    kwargs['bucket'] = bucket
    kwargs['proxy_address'] = getdefaults(
        settings, 'storage.proxy_address', 'aws.proxy_address', None)
    return kwargs
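# As the comment in the variant above explains, only a region connection can
# sign with AWS4-HMAC-SHA256, so the connection type hinges on whether
# 'storage.region' is set. Example values here are hypothetical:
settings = {'storage.bucket': 'pkgs'}              # -> boto.connect_s3
settings = {'storage.bucket': 'pkgs',
            'storage.region': 'eu-central-1'}      # -> connect_to_region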
def includeme(config):
    """ Configure the app """
    settings = config.get_settings()
    resolver = DottedNameResolver(__name__)
    dotted_name = getdefaults(settings, "pypi.auth", "pypi.access_backend",
                              "config")
    if dotted_name == "config":
        dotted_name = ConfigAccessBackend
    elif dotted_name == "remote":
        dotted_name = RemoteAccessBackend
    elif dotted_name == "sql":
        dotted_name = SQLAccessBackend
    access_backend = resolver.maybe_resolve(dotted_name)
    kwargs = access_backend.configure(settings)
    config.add_request_method(partial(access_backend, **kwargs),
                              name="access", reify=True)
def configure(cls, settings):
    kwargs = super(S3Storage, cls).configure(settings)
    kwargs['expire_after'] = int(
        getdefaults(settings, 'storage.expire_after', 'aws.expire_after',
                    60 * 60 * 24))
    kwargs['bucket_prefix'] = getdefaults(settings, 'storage.prefix',
                                          'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(
        getdefaults(settings, 'storage.prepend_hash', 'aws.prepend_hash',
                    True))
    access_key = getdefaults(settings, 'storage.access_key',
                             'aws.access_key', None)
    secret_key = getdefaults(settings, 'storage.secret_key',
                             'aws.secret_key', None)
    # We used to always use boto.connect_s3 because it can look up buckets
    # in any region. New regions require AWS4-HMAC-SHA256, which boto can
    # only do with a region connection. So if the region is specified (and
    # it must be for new regions like eu-central-1), use a region
    # connection.
    location = settings.get('storage.region')
    if location is None:
        s3conn = boto.connect_s3(aws_access_key_id=access_key,
                                 aws_secret_access_key=secret_key)
    else:
        s3conn = boto.s3.connect_to_region(
            location,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key)
    aws_bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if aws_bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    try:
        bucket = s3conn.get_bucket(aws_bucket)
    except boto.exception.S3ResponseError as e:
        if e.error_code != 'NoSuchBucket':
            if e.status == 301:
                LOG.warn("Bucket found in different region. Check that "
                         "the S3 bucket specified in 'storage.bucket' is "
                         "in 'storage.region'")
            raise
        location = getdefaults(settings, 'storage.region', 'aws.region',
                               boto.s3.connection.Location.DEFAULT)
        LOG.info("Creating S3 bucket %s in region %s", aws_bucket, location)
        bucket = s3conn.create_bucket(aws_bucket, location=location)
    kwargs['bucket'] = bucket
    return kwargs
def includeme(config):
    """ Configure the app """
    settings = config.get_settings()
    resolver = DottedNameResolver(__name__)
    dotted_name = getdefaults(settings, 'pypi.auth', 'pypi.access_backend',
                              'config')
    if dotted_name == 'config':
        dotted_name = ConfigAccessBackend
    elif dotted_name == 'remote':
        dotted_name = RemoteAccessBackend
    elif dotted_name == 'sql':
        dotted_name = SQLAccessBackend
    access_backend = resolver.maybe_resolve(dotted_name)
    kwargs = access_backend.configure(settings)
    config.add_request_method(partial(access_backend, **kwargs),
                              name='access', reify=True)
def includeme(config):
    """ Configure the app """
    settings = config.get_settings()
    resolver = DottedNameResolver(__name__)
    dotted_name = getdefaults(settings, 'pypi.auth', 'pypi.access_backend',
                              'config')
    if dotted_name == 'config':
        dotted_name = ConfigAccessBackend
    elif dotted_name == 'remote':
        dotted_name = RemoteAccessBackend
    elif dotted_name == 'sql':
        dotted_name = SQLAccessBackend
    elif dotted_name == 'ldap':
        dotted_name = "pypicloud.access.ldap_.LDAPAccessBackend"
    access_backend = resolver.maybe_resolve(dotted_name)
    kwargs = access_backend.configure(settings)
    config.add_request_method(partial(access_backend, **kwargs),
                              name='access', reify=True)
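# In the includeme variants above, the shorthand names map to bundled
# backends, and any other value is treated as a dotted path that
# DottedNameResolver imports. Both forms, with a hypothetical custom backend:
settings = {'pypi.auth': 'ldap'}                        # bundled shorthand
settings = {'pypi.auth': 'my_plugin.auth.MyBackend'}    # custom dotted path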
def configure(cls, settings):
    kwargs = super(S3Storage, cls).configure(settings)
    kwargs['expire_after'] = int(getdefaults(
        settings, 'storage.expire_after', 'aws.expire_after', 60 * 60 * 24))
    kwargs['bucket_prefix'] = getdefaults(
        settings, 'storage.prefix', 'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(getdefaults(
        settings, 'storage.prepend_hash', 'aws.prepend_hash', True))
    access_key = getdefaults(settings, 'storage.access_key',
                             'aws.access_key', None)
    secret_key = getdefaults(settings, 'storage.secret_key',
                             'aws.secret_key', None)
    host = getdefaults(settings, 'storage.host', 'aws.host',
                       boto.s3.connection.NoHostProvided)
    is_secure = getdefaults(settings, 'storage.is_secure', 'aws.is_secure',
                            True)
    calling_format = settings.get('storage.calling_format',
                                  'SubdomainCallingFormat')
    kwargs['redirect_urls'] = asbool(settings.get('storage.redirect_urls',
                                                  False))
    if calling_format not in SUPPORTED_CALLING_FORMATS:
        raise ValueError("Only {0} are supported for calling_format"
                         .format(', '.join(SUPPORTED_CALLING_FORMATS)))
    # We used to always use boto.connect_s3 because it can look up buckets
    # in any region. New regions require AWS4-HMAC-SHA256, which boto can
    # only do with a region connection. So if the region is specified (and
    # it must be for new regions like eu-central-1), use a region
    # connection.
    location = settings.get('storage.region')
    if location is None:
        s3conn = boto.connect_s3(
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            host=host,
            is_secure=asbool(is_secure),
            calling_format=SUPPORTED_CALLING_FORMATS[calling_format]())
    else:
        s3conn = boto.s3.connect_to_region(location,
                                           aws_access_key_id=access_key,
                                           aws_secret_access_key=secret_key)
    aws_bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if aws_bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    try:
        bucket = s3conn.get_bucket(aws_bucket)
    except boto.exception.S3ResponseError as e:
        if e.error_code != 'NoSuchBucket':
            if e.status == 301:
                LOG.warn("Bucket found in different region. Check that "
                         "the S3 bucket specified in 'storage.bucket' is "
                         "in 'storage.region'")
            raise
        location = getdefaults(settings, 'storage.region', 'aws.region',
                               boto.s3.connection.Location.DEFAULT)
        LOG.info("Creating S3 bucket %s in region %s", aws_bucket, location)
        bucket = s3conn.create_bucket(aws_bucket, location=location)
    kwargs['bucket'] = bucket
    return kwargs
def configure(cls, settings):
    kwargs = super(S3Storage, cls).configure(settings)
    kwargs['expire_after'] = int(
        getdefaults(settings, 'storage.expire_after', 'aws.expire_after',
                    60 * 60 * 24))
    kwargs['bucket_prefix'] = getdefaults(settings, 'storage.prefix',
                                          'aws.prefix', '')
    kwargs['prepend_hash'] = asbool(
        getdefaults(settings, 'storage.prepend_hash', 'aws.prepend_hash',
                    True))
    access_key = getdefaults(settings, 'storage.access_key',
                             'aws.access_key', None)
    secret_key = getdefaults(settings, 'storage.secret_key',
                             'aws.secret_key', None)
    host = getdefaults(settings, 'storage.host', 'aws.host',
                       boto.s3.connection.NoHostProvided)
    is_secure = getdefaults(settings, 'storage.is_secure', 'aws.is_secure',
                            True)
    kwargs['use_sse'] = asbool(
        getdefaults(settings, 'storage.server_side_encryption',
                    'aws.server_side_encryption', False))
    calling_format = settings.get('storage.calling_format',
                                  'SubdomainCallingFormat')
    kwargs['redirect_urls'] = asbool(
        settings.get('storage.redirect_urls', False))
    if calling_format not in SUPPORTED_CALLING_FORMATS:
        raise ValueError(
            "Only {0} are supported for calling_format".format(
                ', '.join(SUPPORTED_CALLING_FORMATS)))
    # We used to always use boto.connect_s3 because it can look up buckets
    # in any region. New regions require AWS4-HMAC-SHA256, which boto can
    # only do with a region connection. So if the region is specified (and
    # it must be for new regions like eu-central-1), use a region
    # connection.
    location = settings.get('storage.region')
    if location is None:
        s3conn = boto.connect_s3(
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            host=host,
            is_secure=asbool(is_secure),
            calling_format=SUPPORTED_CALLING_FORMATS[calling_format]())
    else:
        s3conn = boto.s3.connect_to_region(
            location,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key)
    aws_bucket = getdefaults(settings, 'storage.bucket', 'aws.bucket', None)
    if aws_bucket is None:
        raise ValueError("You must specify the 'storage.bucket'")
    try:
        bucket = s3conn.get_bucket(aws_bucket)
    except boto.exception.S3ResponseError as e:
        if e.error_code != 'NoSuchBucket':
            if e.status == 301:
                LOG.warn("Bucket found in different region. Check that "
                         "the S3 bucket specified in 'storage.bucket' is "
                         "in 'storage.region'")
            raise
        location = getdefaults(settings, 'storage.region', 'aws.region',
                               boto.s3.connection.Location.DEFAULT)
        LOG.info("Creating S3 bucket %s in region %s", aws_bucket, location)
        bucket = s3conn.create_bucket(aws_bucket, location=location)
    kwargs['bucket'] = bucket
    return kwargs
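# SUPPORTED_CALLING_FORMATS is used but never defined in these snippets.
# Since configure() instantiates SUPPORTED_CALLING_FORMATS[calling_format]()
# and passes the result to boto, it is presumably a name-to-class mapping
# along these lines (an assumption, limited to the boto 2 classes named in
# the default):
from boto.s3.connection import OrdinaryCallingFormat, SubdomainCallingFormat

SUPPORTED_CALLING_FORMATS = {
    'OrdinaryCallingFormat': OrdinaryCallingFormat,
    'SubdomainCallingFormat': SubdomainCallingFormat,
}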