class CloudFrontStorage(S3Storage):
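    """Serves file URLs from a CloudFront distribution when AWS_CLOUDFRONT_DOMAIN
    is configured; otherwise behaves like the parent S3Storage."""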

    def __init__(self, *args, **kwargs):
        super(CloudFrontStorage, self).__init__(*args, **kwargs)

        if AWS_CLOUDFRONT_DOMAIN:
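            # Positional arguments mirror S3Storage.__init__(bucket, access_key,
            # secret_key, ...), so index 1 is the access key and index 2 the secret key.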
            try:
                access_key = args[1]
            except IndexError:
                access_key = kwargs.get('access_key', None)
            try:
                secret_key = args[2]
            except IndexError:
                secret_key = kwargs.get('secret_key', None)

            if not access_key and not secret_key:
                access_key, secret_key = self._get_access_keys()

            self.url_generator = QueryStringAuthGenerator(
                access_key, secret_key,
                is_secure=SECURE_URLS,
                server=AWS_CLOUDFRONT_DOMAIN,
                calling_format=CallingFormat.VANITY)
            self.url_generator.set_expires_in(QUERYSTRING_EXPIRE)


    def url(self, name):
        if not AWS_CLOUDFRONT_DOMAIN:
            return super(CloudFrontStorage, self).url(name)

        name = self._clean_name(name)
        if QUERYSTRING_ACTIVE:
            return self.url_generator.generate_url('GET', '', name)
        else:
            return self.url_generator.make_bare_url('', name)
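When AWS_CLOUDFRONT_DOMAIN is set, url() signs names against the CloudFront host; when it is empty, the call falls through to the parent S3Storage.url(). A minimal settings sketch for wiring this up, assuming the usual django-storages setting names behind the module-level constants (SECURE_URLS, QUERYSTRING_ACTIVE, QUERYSTRING_EXPIRE) and a hypothetical dotted path for the storage class:

# settings.py (illustrative sketch, not taken from the example above)
AWS_STORAGE_BUCKET_NAME = 'my-bucket'
AWS_ACCESS_KEY_ID = '...'                                # or rely on the _get_access_keys() fallback
AWS_SECRET_ACCESS_KEY = '...'
AWS_CLOUDFRONT_DOMAIN = 'd1234abcd.cloudfront.net'       # enables the CloudFront URL generator
AWS_S3_SECURE_URLS = True                                # assumed name behind SECURE_URLS
AWS_QUERYSTRING_ACTIVE = True                            # assumed name behind QUERYSTRING_ACTIVE
AWS_QUERYSTRING_EXPIRE = 3600                            # assumed name behind QUERYSTRING_EXPIRE
DEFAULT_FILE_STORAGE = 'myproject.storage.CloudFrontStorage'  # hypothetical module path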
Example #2
    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
            access_key=None, secret_key=None, acl=DEFAULT_ACL,
            calling_format=CALLING_FORMAT, encrypt=False,
            gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES):
        self.bucket = bucket
        self.acl = acl
        self.encrypt = encrypt
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types

        if encrypt:
            try:
                import ezPyCrypto
            except ImportError:
                raise ImproperlyConfigured("Could not load ezPyCrypto.\nSee "
                    "http://www.freenet.org.nz/ezPyCrypto/ to install it.")
            self.crypto_key = ezPyCrypto.key

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key, secret_key,
                            calling_format=calling_format)
        self.generator = QueryStringAuthGenerator(access_key, secret_key,
                            calling_format=calling_format,
                            is_secure=SECURE_URLS)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)

        self.headers = HEADERS
        self.entries = self.preload and self._preload_entries() or {}
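When encrypt=True, this backend encrypts uploads with ezPyCrypto; the key files are read from settings.CRYPTO_KEYS_PUBLIC and settings.CRYPTO_KEYS_PRIVATE in the _put_file() and _read() methods shown in Example #8 below. A hedged settings sketch; the paths are invented for illustration:

# settings.py (illustrative sketch)
CRYPTO_KEYS_PUBLIC = '/etc/myproject/keys/s3_public.key'    # imported with key.importKey() before upload
CRYPTO_KEYS_PRIVATE = '/etc/myproject/keys/s3_private.key'  # passed to crypto_key() to decrypt downloads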
Example #3
    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME, 
            access_key=None, secret_key=None, acl=DEFAULT_ACL, 
            calling_format=settings.AWS_CALLING_FORMAT):
        self.bucket = bucket
        self.acl = acl

        if not access_key and not secret_key:
             access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key, secret_key, 
                            calling_format=calling_format)
        self.generator = QueryStringAuthGenerator(access_key, secret_key, 
                            calling_format=calling_format, is_secure=False)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)
        
        self.headers = getattr(settings, HEADERS, {})
Example #4
    def __init__(self,
                 bucket=settings.AWS_STORAGE_BUCKET_NAME,
                 access_key=None,
                 secret_key=None,
                 acl=DEFAULT_ACL,
                 calling_format=CALLING_FORMAT,
                 encrypt=False,
                 gzip=IS_GZIPPED,
                 gzip_content_types=GZIP_CONTENT_TYPES,
                 preload_metadata=PRELOAD_METADATA):
        warnings.warn(
            "The s3 backend is deprecated and will be removed in version 1.2. "
            "Use the s3boto backend instead.", PendingDeprecationWarning)
        self.bucket = bucket
        self.acl = acl
        self.encrypt = encrypt
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.preload_metadata = preload_metadata

        if encrypt:
            try:
                import ezPyCrypto
            except ImportError:
                raise ImproperlyConfigured(
                    "Could not load ezPyCrypto.\nSee "
                    "http://www.freenet.org.nz/ezPyCrypto/ to install it.")
            self.crypto_key = ezPyCrypto.key

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key,
                                            secret_key,
                                            calling_format=calling_format)
        self.generator = QueryStringAuthGenerator(
            access_key,
            secret_key,
            calling_format=calling_format,
            is_secure=SECURE_URLS)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)

        self.headers = HEADERS
        self._entries = {}
	def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME, 
			access_key=None, secret_key=None, acl='public-read', 
			calling_format=settings.AWS_CALLING_FORMAT):
		self.bucket = bucket
		self.acl = acl

		if not access_key and not secret_key:
			 access_key, secret_key = self._get_access_keys()

		self.connection = AWSAuthConnection(access_key, secret_key, 
							calling_format=calling_format)
		self.generator = QueryStringAuthGenerator(access_key, secret_key, 
							calling_format=calling_format, is_secure=False)
		
		self.headers = getattr(settings, AWS_HEADERS, {})
Example #7
    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
            access_key=None, secret_key=None, acl=DEFAULT_ACL,
            calling_format=CALLING_FORMAT, encrypt=False,
            gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
            preload_metadata=PRELOAD_METADATA):
        warnings.warn(
            "The s3 backend is deprecated and will be removed in version 1.2. "
            "Use the s3boto backend instead.",
            PendingDeprecationWarning
        )
        self.bucket = bucket
        self.acl = acl
        self.encrypt = encrypt
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.preload_metadata = preload_metadata

        if encrypt:
            try:
                import ezPyCrypto
            except ImportError:
                raise ImproperlyConfigured("Could not load ezPyCrypto.\nSee "
                    "http://www.freenet.org.nz/ezPyCrypto/ to install it.")
            self.crypto_key = ezPyCrypto.key

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key, secret_key,
                            calling_format=calling_format,
                            is_secure=SECURE_URLS)
        self.generator = QueryStringAuthGenerator(access_key, secret_key,
                            calling_format=calling_format,
                            is_secure=SECURE_URLS)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)

        self.headers = HEADERS
        self._entries = {}
Example #8
class S3Storage(Storage):
    """Amazon Simple Storage Service"""
    preload = False

    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
            access_key=None, secret_key=None, acl=DEFAULT_ACL,
            calling_format=CALLING_FORMAT, encrypt=False,
            gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES):
        self.bucket = bucket
        self.acl = acl
        self.encrypt = encrypt
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types

        if encrypt:
            try:
                import ezPyCrypto
            except ImportError:
                raise ImproperlyConfigured("Could not load ezPyCrypto.\nSee "
                    "http://www.freenet.org.nz/ezPyCrypto/ to install it.")
            self.crypto_key = ezPyCrypto.key

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key, secret_key,
                            calling_format=calling_format)
        self.generator = QueryStringAuthGenerator(access_key, secret_key,
                            calling_format=calling_format,
                            is_secure=SECURE_URLS)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)

        self.headers = HEADERS
        self.entries = self.preload and self._preload_entries() or {}

    def _get_access_keys(self):
        access_key = ACCESS_KEY_NAME
        secret_key = SECRET_KEY_NAME
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _preload_entries(self):
        entries = self.connection.list_bucket(self.bucket).entries
        return dict((entry.key, entry) for entry in entries)

    def _get_connection(self):
        return AWSAuthConnection(*self._get_access_keys())

    def _clean_name(self, name):
        # Useful for windows' paths
        return os.path.join(BUCKET_PREFIX, os.path.normpath(name).replace('\\', '/'))

    def _compress_string(self, s):
        """Gzip a given string."""
        zbuf = StringIO()
        zfile = GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
        zfile.write(s)
        zfile.close()
        return zbuf.getvalue()

    def _put_file(self, name, content):
        if self.encrypt:

            # Create a key object
            key = self.crypto_key()

            # Read in a public key
            fd = open(settings.CRYPTO_KEYS_PUBLIC, "rb")
            public_key = fd.read()
            fd.close()

            # import this public key
            key.importKey(public_key)

            # Now encrypt some text against this public key
            content = key.encString(content)

        content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"

        if self.gzip and content_type in self.gzip_content_types:
            content = self._compress_string(content)
            self.headers.update({'Content-Encoding': 'gzip'})

        self.headers.update({
            'x-amz-acl': self.acl,
            'Content-Type': content_type,
            'Content-Length' : str(len(content)),
        })
        response = self.connection.put(self.bucket, name, content, self.headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)

    def _open(self, name, mode='rb'):
        name = self._clean_name(name)
        remote_file = S3StorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name, start_range=None, end_range=None):
        name = self._clean_name(name)
        if start_range is None:
            headers = {}
        else:
            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
        response = self.connection.get(self.bucket, name, headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)
        headers = response.http_response.msg

        if self.encrypt:
            # Read in a private key
            fd = open(settings.CRYPTO_KEYS_PRIVATE, "rb")
            private_key = fd.read()
            fd.close()

            # Create a key object, and auto-import private key
            key = self.crypto_key(private_key)

            # Decrypt this file
            response.object.data = key.decString(response.object.data)

        return response.object.data, headers.get('etag', None), headers.get('content-range', None)

    def _save(self, name, content):
        name = self._clean_name(name)
        content.open()
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._put_file(name, content_str)
        return name

    def delete(self, name):
        name = self._clean_name(name)
        response = self.connection.delete(self.bucket, name)
        if response.http_response.status != 204:
            raise IOError("S3StorageError: %s" % response.message)

    def exists(self, name):
        name = self._clean_name(name)
        if self.entries:
            return name in self.entries
        response = self.connection._make_request('HEAD', self.bucket, name)
        return response.status == 200

    def size(self, name):
        name = self._clean_name(name)
        if self.entries:
            entry = self.entries.get(name)
            if entry:
                return entry.size
            return 0
        response = self.connection._make_request('HEAD', self.bucket, name)
        content_length = response.getheader('Content-Length')
        return content_length and int(content_length) or 0

    def url(self, name):
        name = self._clean_name(name)
        if QUERYSTRING_ACTIVE:
            return self.generator.generate_url('GET', self.bucket, name)
        else:
            return self.generator.make_bare_url(self.bucket, name)

    def modified_time(self, name):
        try:
            from dateutil import parser, tz
        except ImportError:
            raise NotImplementedError()
        name = self._clean_name(name)
        if self.entries:
            last_modified = self.entries.get(name).last_modified
        else:
            response = self.connection._make_request('HEAD', self.bucket, name)
            last_modified = response.getheader('Last-Modified')
        # Parse the Last-Modified string into a datetime
        last_modified_date = parser.parse(last_modified)
        # If the date has no timezone, assume UTC
        if last_modified_date.tzinfo is None:
            last_modified_date = last_modified_date.replace(tzinfo=tz.tzutc())
        # Convert to local time and drop the timezone info
        return last_modified_date.astimezone(tz.tzlocal()).replace(tzinfo=None)
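The _compress_string() helper above gzips content in memory before upload, and _put_file() adds Content-Encoding: gzip so clients transparently decompress it. A small standalone round-trip check of the same approach, written against io.BytesIO so it runs on modern Python (the original targets the Python 2 StringIO):

import gzip
import io

def compress_string(s):
    # Same in-memory gzip approach as _compress_string() above.
    zbuf = io.BytesIO()
    zfile = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)
    zfile.write(s)
    zfile.close()
    return zbuf.getvalue()

def decompress_string(data):
    # Inverse operation, as a browser (or gzip.decompress) would perform it.
    return gzip.GzipFile(mode='rb', fileobj=io.BytesIO(data)).read()

css = b'body { color: #333; }'
assert decompress_string(compress_string(css)) == css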
Example #9
class S3Storage(Storage):
    """Amazon Simple Storage Service"""
    def __init__(self,
                 bucket=settings.AWS_STORAGE_BUCKET_NAME,
                 access_key=None,
                 secret_key=None,
                 acl=DEFAULT_ACL,
                 calling_format=settings.AWS_CALLING_FORMAT):
        self.bucket = bucket
        self.acl = acl

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key,
                                            secret_key,
                                            calling_format=calling_format)
        self.generator = QueryStringAuthGenerator(
            access_key,
            secret_key,
            calling_format=calling_format,
            is_secure=SECURE_URLS)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)

        self.headers = getattr(settings, HEADERS, {})

    def _get_access_keys(self):
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _get_connection(self):
        return AWSAuthConnection(*self._get_access_keys())

    def _clean_name(self, name):
        # Useful for windows' paths
        return os.path.normpath(name).replace('\\', '/')

    def _put_file(self, name, content):
        content_type = mimetypes.guess_type(
            name)[0] or "application/x-octet-stream"
        self.headers.update({
            'x-amz-acl': self.acl,
            'Content-Type': content_type,
            'Content-Length': len(content),
        })
        response = self.connection.put(self.bucket, name, content,
                                       self.headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)

    def _open(self, name, mode='rb'):
        name = self._clean_name(name)
        remote_file = S3StorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name, start_range=None, end_range=None):
        name = self._clean_name(name)
        if start_range is None:
            headers = {}
        else:
            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
        response = self.connection.get(self.bucket, name, headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)
        headers = response.http_response.msg
        return response.object.data, headers.get('etag', None), headers.get(
            'content-range', None)

    def _save(self, name, content):
        name = self._clean_name(name)
        content.open()
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._put_file(name, content_str)
        return name

    def delete(self, name):
        name = self._clean_name(name)
        response = self.connection.delete(self.bucket, name)
        if response.http_response.status != 204:
            raise IOError("S3StorageError: %s" % response.message)

    def exists(self, name):
        name = self._clean_name(name)
        response = self.connection._make_request('HEAD', self.bucket, name)
        return response.status == 200

    def size(self, name):
        name = self._clean_name(name)
        response = self.connection._make_request('HEAD', self.bucket, name)
        content_length = response.getheader('Content-Length')
        return content_length and int(content_length) or 0

    def url(self, name):
        name = self._clean_name(name)
        if QUERYSTRING_ACTIVE:
            return self.generator.generate_url('GET', self.bucket, name)
        else:
            return self.generator.make_bare_url(self.bucket, name)
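Illustrative use of this backend through Django's public Storage API, which routes into the _save(), exists(), size(), url() and delete() methods above. This is a sketch only: it assumes valid AWS settings, the file name and content are made up, and it is written for the Python 2 era this code targets (the chunk-joining in _save() expects str, not bytes):

from django.core.files.base import ContentFile

storage = S3Storage()
name = storage.save('notes/readme.txt', ContentFile('hello s3'))  # Storage.save() ends up in _put_file()
print(storage.exists(name))   # HEAD request against the bucket
print(storage.size(name))     # Content-Length from a HEAD request
print(storage.url(name))      # signed query-string URL or bare URL, per QUERYSTRING_ACTIVE
storage.delete(name)          # expects a 204 response from S3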
Example #10
class S3Storage(Storage):
    """Amazon Simple Storage Service"""

    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
            access_key=None, secret_key=None, acl=DEFAULT_ACL,
            calling_format=settings.AWS_CALLING_FORMAT):
        self.bucket = bucket
        self.acl = acl

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key, secret_key,
                            calling_format=calling_format)
        self.generator = QueryStringAuthGenerator(access_key, secret_key, 
                            calling_format=calling_format,
                            is_secure=SECURE_URLS)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)
        
        self.headers = getattr(settings, HEADERS, {})

    def _get_access_keys(self):
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _get_connection(self):
        return AWSAuthConnection(*self._get_access_keys())

    def _clean_name(self, name):
        # Useful for windows' paths
        return os.path.normpath(name).replace('\\', '/')

    def _put_file(self, name, content):
        content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"
        self.headers.update({'x-amz-acl': self.acl, 'Content-Type': content_type})
        response = self.connection.put(self.bucket, name, content, self.headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)

    def _open(self, name, mode='rb'):
        name = self._clean_name(name)
        remote_file = S3StorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name, start_range=None, end_range=None):
        name = self._clean_name(name)
        if start_range is None:
            headers = {}
        else:
            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
        response = self.connection.get(self.bucket, name, headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)
        headers = response.http_response.msg
        return response.object.data, headers.get('etag', None), headers.get('content-range', None)
        
    def _save(self, name, content):
        name = self._clean_name(name)
        content.open()
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._put_file(name, content_str)
        return name
    
    def delete(self, name):
        name = self._clean_name(name)
        response = self.connection.delete(self.bucket, name)
        if response.http_response.status != 204:
            raise IOError("S3StorageError: %s" % response.message)

    def exists(self, name):
        name = self._clean_name(name)
        response = self.connection._make_request('HEAD', self.bucket, name)
        return response.status == 200

    def size(self, name):
        name = self._clean_name(name)
        response = self.connection._make_request('HEAD', self.bucket, name)
        content_length = response.getheader('Content-Length')
        return content_length and int(content_length) or 0
    
    def url(self, name):
        name = self._clean_name(name)
        if QUERYSTRING_ACTIVE:
            return self.generator.generate_url('GET', self.bucket, name)
        else:
            return self.generator.make_bare_url(self.bucket, name)
class S3Storage(Storage):
	"""Amazon Simple Storage Service"""

	def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME, 
			access_key=None, secret_key=None, acl='public-read', 
			calling_format=settings.AWS_CALLING_FORMAT):
		self.bucket = bucket
		self.acl = acl

		if not access_key and not secret_key:
			 access_key, secret_key = self._get_access_keys()

		self.connection = AWSAuthConnection(access_key, secret_key, 
							calling_format=calling_format)
		self.generator = QueryStringAuthGenerator(access_key, secret_key, 
							calling_format=calling_format, is_secure=False)
		
		self.headers = getattr(settings, AWS_HEADERS, {})

	def _get_access_keys(self):
		access_key = getattr(settings, ACCESS_KEY_NAME, None)
		secret_key = getattr(settings, SECRET_KEY_NAME, None)
		if (access_key or secret_key) and (not access_key or not secret_key):
			access_key = os.environ.get(ACCESS_KEY_NAME)
			secret_key = os.environ.get(SECRET_KEY_NAME)

		if access_key and secret_key:
			# Both were provided, so use them
			return access_key, secret_key

		return None, None

	def _get_connection(self):
		return AWSAuthConnection(*self._get_access_keys())

	def _put_file(self, name, content, extra_headers=None):
		name = _normalize_file_name(name)
		content_type = guess_type(name)[0] or "application/x-octet-stream"		
		self.headers.update({'x-amz-acl': self.acl, 'Content-Type': content_type})
		if extra_headers is not None:
			self.headers.update(extra_headers)
		response = self.connection.put(self.bucket, name, content, self.headers)

	def _open(self, name, mode='rb'):
		name = _normalize_file_name(name)
		remote_file = S3StorageFile(name, self, mode=mode)
		return remote_file

	def _read(self, name, start_range=None, end_range=None):
		name = _normalize_file_name(name)
		if start_range is None:
			headers = {}
		else:
			headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
		response = self.connection.get(self.bucket, name, headers)
		headers = response.http_response.msg
		return response.object.data, headers.get('etag', None), headers.get('content-range', None)
		
	def _save(self, name, content, extra_headers=None):
		name = _normalize_file_name(name)
		content.open()
		if hasattr(content, 'chunks'):
			content_str = ''.join(chunk for chunk in content.chunks())
		else:
			content_str = content.read()
		self._put_file(name, content_str, extra_headers)
		cache.set(urlquote(name), time.localtime(), SKIP_CHECKING_IF_FILE_EXISTS_TIMEOUT)
		return name
	
	def delete(self, name):
		name = _normalize_file_name(name)
		self.connection.delete(self.bucket, name)
		cache.delete(urlquote(name))

	def exists(self, name):
		#logging.info("Checking if %s exists" % name)
		name = _normalize_file_name(name)
		if cache.get(urlquote(name)):
			return True
		else:
			response = self.connection._make_request('HEAD', self.bucket, name)
			if response.status != 200:
				return False
			last_modified = response.getheader('Last-Modified')
			last_modified = last_modified and int(mktime(parsedate(last_modified))) or int(0)
			cache.set(urlquote(name), last_modified, SKIP_CHECKING_IF_FILE_EXISTS_TIMEOUT)
			return True
		
	def getmtime(self, name):
		if cache.get(urlquote(name)):
			return cache.get(urlquote(name))
		response = self.connection._make_request('HEAD', self.bucket, name)
		if response.status != 200:
			return False
		last_modified = response.getheader('Last-Modified')
		last_modified = last_modified and int(mktime(parsedate(last_modified))) or int(0)
		cache.set(urlquote(name), last_modified, SKIP_CHECKING_IF_FILE_EXISTS_TIMEOUT)			
		return last_modified

	def size(self, name):
		name = _normalize_file_name(name)
		response = self.connection.get_info(self.bucket, name)
		content_length = response.getheader('Content-Length')
		return content_length and int(content_length) or 0
	
	def url(self, name):
		name = _normalize_file_name(name)
		return self.generator.make_bare_url(self.bucket, name)
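This last variant trades freshness for speed by memoizing HEAD results in Django's cache, keyed on the url-quoted name, for SKIP_CHECKING_IF_FILE_EXISTS_TIMEOUT seconds. The same pattern in isolation, as a sketch: the timeout value and the do_head callable are assumptions, and urlquote comes from django.utils.http in the Django versions this code targets:

from django.core.cache import cache
from django.utils.http import urlquote

SKIP_CHECKING_IF_FILE_EXISTS_TIMEOUT = 300  # assumed: five minutes between real HEAD requests

def cached_last_modified(name, do_head):
    """Return a cached Last-Modified timestamp, hitting S3 only on a cache miss."""
    key = urlquote(name)
    cached = cache.get(key)
    if cached:
        return cached
    last_modified = do_head(name)  # caller-supplied HEAD request returning an int timestamp
    cache.set(key, last_modified, SKIP_CHECKING_IF_FILE_EXISTS_TIMEOUT)
    return last_modified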