class OSSStorage(Storage):
    """Aliyun Open Storage Service"""

    def __init__(self, bucket=OSS_STORAGE_BUCKET_NAME, access_key=None,
                 secret_key=None, acl=DEFAULT_ACL,
                 # calling_format=CALLING_FORMAT,
                 encrypt=False,
                 # gzip=IS_GZIPPED,
                 # gzip_content_types=GZIP_CONTENT_TYPES,
                 # preload_metadata=PRELOAD_METADATA
                 ):
        self.bucket = bucket
        self.acl = acl
        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()
        self.connection = OssAPI(ACCESS_ADDRESS, access_key, secret_key)
        self.headers = HEADERS

    def _get_access_keys(self):
        access_key = ACCESS_KEY_NAME
        secret_key = SECRET_KEY_NAME
        if (access_key or secret_key) and (not access_key or not secret_key):
            # Only one of the two was configured; fall back to the environment.
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)
        if access_key and secret_key:
            return access_key, secret_key
        return None, None

    def _clean_name(self, name):
        # Normalize separators; useful for Windows paths.
        return os.path.join(BUCKET_PREFIX, os.path.normpath(name).replace('\\', '/'))

    def _put_file(self, name, content, content_type=None):
        if not content_type:
            content_type = mimetypes.guess_type(name)[0] or 'application/x-octet-stream'
        self.headers.update({
            'x-oss-acl': self.acl,
            'Content-Type': content_type,
            'Content-Length': str(len(content)),
        })
        fp = BytesIO(content)
        response = self.connection.put_object_from_fp(self.bucket, name, fp, content_type, self.headers)
        if response.status // 100 != 2:
            raise IOError('OSSStorageError: %s' % response.read())

    def _open(self, name, mode='rb'):
        name = self._clean_name(name)
        remote_file = OSSStorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name, start_range=None, end_range=None):
        name = self._clean_name(name)
        if start_range is None:
            headers = {}
        else:
            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
        response = self.connection.get_object(self.bucket, name, headers)
        if response.status // 100 != 2:
            raise IOError('OSSStorageError: %s, %s' % (response.status, response.read()))
        header_map = convert_header2map(response.getheaders())
        # content_len = safe_get_element('content-length', header_map)
        content_len = safe_get_element('content-range', header_map)
        etag = safe_get_element('etag', header_map).upper()
        return response.read(), etag, content_len

    def _save(self, name, content):
        name = self._clean_name(name)
        content.open()
        if hasattr(content, 'chunks'):
            content_str = b''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._put_file(name, content_str)
        return name

    def delete(self, name):
        name = self._clean_name(name)
        response = self.connection.delete_object(self.bucket, name)
        if response.status != 204:
            raise IOError('OSSStorageError: %s' % response.read())

    def exists(self, name):
        name = self._clean_name(name)
        response = self.connection.head_object(self.bucket, name)
        return response.status == 200

    def size(self, name):
        name = self._clean_name(name)
        response = self.connection.head_object(self.bucket, name)
        header_map = convert_header2map(response.getheaders())
        content_len = safe_get_element('content-length', header_map)
        return int(content_len) if content_len else 0

    def url(self, name):
        name = self._clean_name(name)
        return '%s%s' % (settings.MEDIA_URL, name)

    def modified_time(self, name):
        try:
            from dateutil import parser, tz
        except ImportError:
            raise NotImplementedError()
        name = self._clean_name(name)
        response = self.connection.head_object(self.bucket, name)
        last_modified = response.getheader('Last-Modified')
        # Convert the header string to a date.
        last_modified_date = parser.parse(last_modified)
        # If the date has no timezone, assume UTC.
        if last_modified_date.tzinfo is None:
            last_modified_date = last_modified_date.replace(tzinfo=tz.tzutc())
        # Convert the date to local time without a timezone.
        return last_modified_date.astimezone(tz.tzlocal()).replace(tzinfo=None)

    ## UNCOMMENT BELOW IF NECESSARY
    # def get_available_name(self, name):
    #     """ Overwrite existing file with the same name. """
    #     name = self._clean_name(name)
    #     return name

    def copy_to_file(self, name, target):
        name = self._clean_name(name)
        response = self.connection.get_object_to_file(self.bucket, name, target)
        if response.status // 100 != 2:
            raise IOError('OSSStorageError: %s' % response.read())

    def save_file(self, filename, name):
        name = self._clean_name(name)
        response = self.connection.put_object_from_file(self.bucket, name, filename=filename, headers=self.headers)
        if response.status // 100 != 2:
            raise IOError('OSSStorageError: %s' % response.read())
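
# Usage sketch (hypothetical, not part of the backend): with the module-level
# constants above configured, this class plugs into Django's storage API like
# any other Storage subclass. The dotted path and object name below are
# assumptions; adjust them to wherever this module lives in your project.
#
#     # settings.py
#     DEFAULT_FILE_STORAGE = 'myproject.storage.oss.OSSStorage'
#
#     # anywhere in application code
#     from django.core.files.base import ContentFile
#     from django.core.files.storage import default_storage
#
#     name = default_storage.save('uploads/hello.txt', ContentFile(b'hello oss'))
#     default_storage.exists(name)   # True
#     default_storage.url(name)      # settings.MEDIA_URL + cleaned name
#     default_storage.delete(name)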