Code Example #1
import sys

import oss2


# "globalEnv.logger" is assumed to be a logger configured by the surrounding project.
def get_archive_list(filelist: list, bucket: oss2.Bucket):
    '''
    Filter out the files stored in the Archive storage class.
    '''
    arg_dict = locals()
    globalEnv.logger.debug('code:{}.{} {}'.format(
        __name__,
        sys._getframe().f_code.co_name, str(arg_dict)))
    ans = []
    for filename in filelist:
        meta = bucket.head_object(filename)
        storage_class = meta.headers['x-oss-storage-class']
        if storage_class == oss2.BUCKET_STORAGE_CLASS_ARCHIVE:
            ans.append(filename)
    globalEnv.logger.debug('archive list:' + str(ans))
    return ans
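
A minimal usage sketch for the helper above. The credentials, endpoint, bucket name, and object keys are placeholders, and globalEnv.logger is assumed to be configured elsewhere in the project:

# Hypothetical setup -- replace the placeholder credentials and names.
auth = oss2.Auth('<access-key-id>', '<access-key-secret>')
bucket = oss2.Bucket(auth, 'https://oss-cn-hangzhou.aliyuncs.com', '<bucket-name>')

# Keep only the keys whose storage class is Archive, e.g. before
# issuing restore requests for them.
archived = get_archive_list(['backup/2020-01.tar.gz', 'logs/app.log'], bucket)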
Code Example #2
File: backends.py  Project: zs1621/django-oss-storage
class OssStorage(Storage):
    """
    Aliyun OSS Storage
    """
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None,
                 expire_time=None):
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret else _get_config(
            'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))
        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')
        self.expire_time = expire_time if expire_time else int(
            _get_config('OSS_EXPIRE_TIME', default=60 * 60 * 24 * 30))

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

        # try to get bucket acl to check bucket exist or not
        try:
            self.bucket_acl = self.bucket.get_bucket_acl().acl
        except oss2.exceptions.NoSuchBucket:
            raise SuspiciousOperation("Bucket '%s' does not exist." %
                                      self.bucket_name)

    def _get_key_name(self, name):
        """
        Get the object key name in OSS, e.g.,
        location: /media/
        input   : test.txt
        output  : media/test.txt
        """
        # urljoin won't work if name is absolute path
        name = name.lstrip('/')

        base_path = force_text(self.location)
        final_path = urljoin(base_path + "/", name)
        name = os.path.normpath(final_path.lstrip('/'))

        # Add / to the end of path since os.path.normpath will remove it
        if final_path.endswith('/') and not name.endswith('/'):
            name += '/'

        if six.PY2:
            name = name.encode('utf-8')
        # Store filenames with forward slashes, even on Windows.
        return name.replace('\\', '/')

    def _open(self, name, mode='rb'):
        logger().debug("name: %s, mode: %s", name, mode)
        if mode != "rb":
            raise ValueError("OSS files can only be opened in read-only mode")

        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        try:
            # Load the key into a temporary file
            tmpf = SpooledTemporaryFile(max_size=10 * 1024 * 1024)  # 10MB
            obj = self.bucket.get_object(target_name)
            logger().info("content length: %d, requestid: %s",
                          obj.content_length, obj.request_id)
            if obj.content_length is None:
                shutil.copyfileobj(obj, tmpf)
            else:
                oss2.utils.copyfileobj_and_verify(obj,
                                                  tmpf,
                                                  obj.content_length,
                                                  request_id=obj.request_id)
            tmpf.seek(0)
            return OssFile(tmpf, target_name, self)
        except oss2.exceptions.NoSuchKey:
            raise OssError("%s does not exist" % name)
        except Exception:
            raise OssError("Failed to open %s" % name)

    def _save(self, name, content):
        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        logger().debug("content: %s", content)
        self.bucket.put_object(target_name, content)
        return os.path.normpath(name)

    def create_dir(self, dirname):
        target_name = self._get_key_name(dirname)
        if not target_name.endswith('/'):
            target_name += '/'

        self.bucket.put_object(target_name, '')

    def exists(self, name):
        target_name = self._get_key_name(name)
        logger().debug("name: %s, target name: %s", name, target_name)
        if name.endswith("/"):
            # This looks like a directory, but OSS has no concept of directories
            # need to check whether the key starts with this prefix
            result = self.bucket.list_objects(prefix=target_name,
                                              delimiter='',
                                              marker='',
                                              max_keys=1)
            if len(result.object_list) == 0:
                logger().debug("object list: %s", result.object_list)
            else:
                logger().debug("object list: %s", result.object_list[0].key)
            return bool(result.object_list)

        exist = self.bucket.object_exists(target_name)
        logger().debug("'%s' exist: %s", target_name, exist)
        if not exist:
            # It's not a file, but it might be a directory. Check again that it's not a directory.
            name2 = name + "/"
            logger().debug("to check %s", name2)
            return self.exists(name2)

        return exist

    def get_file_meta(self, name):
        name = self._get_key_name(name)
        return self.bucket.get_object_meta(name)

    def size(self, name):
        file_meta = self.get_file_meta(name)
        return file_meta.content_length

    def modified_time(self, name):
        file_meta = self.get_file_meta(name)
        return datetime.fromtimestamp(file_meta.last_modified)

    created_time = accessed_time = modified_time

    def get_modified_time(self, name):
        file_meta = self.get_file_meta(name)

        if settings.USE_TZ:
            return datetime.utcfromtimestamp(
                file_meta.last_modified).replace(tzinfo=utc)
        else:
            return datetime.fromtimestamp(file_meta.last_modified)

    get_created_time = get_accessed_time = get_modified_time

    def content_type(self, name):
        name = self._get_key_name(name)
        file_info = self.bucket.head_object(name)
        return file_info.content_type

    def listdir(self, name):
        if name == ".":
            name = ""
        name = self._get_key_name(name)
        if not name.endswith('/'):
            name += "/"
        logger().debug("name: %s", name)

        files = []
        dirs = []

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                dirs.append(obj.key)
            else:
                files.append(obj.key)

        logger().debug("dirs: %s", list(dirs))
        logger().debug("files: %s", files)
        return dirs, files

    def url(self, name):
        key = self._get_key_name(name)
        url = self.bucket.sign_url('GET', key, expires=self.expire_time)
        if self.bucket_acl != BUCKET_ACL_PRIVATE:
            idx = url.find('?')
            if idx > 0:
                url = url[:idx].replace('%2F', '/')
        return url

    def delete(self, name):
        name = self._get_key_name(name)
        logger().debug("delete name: %s", name)
        self.bucket.delete_object(name)

    def delete_with_slash(self, dirname):
        name = self._get_key_name(dirname)
        if not name.endswith('/'):
            name += '/'
        logger().debug("delete name: %s", name)
        self.bucket.delete_object(name)
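
The backend reads its configuration through _get_config, so the relevant settings are OSS_ACCESS_KEY_ID, OSS_ACCESS_KEY_SECRET, OSS_ENDPOINT, OSS_BUCKET_NAME, and optionally OSS_EXPIRE_TIME. A hedged settings sketch with placeholder values; the DEFAULT_FILE_STORAGE path is hypothetical and depends on where this module lives in your project (the class also expects a location attribute, typically supplied by a subclass):

# settings.py (sketch) -- all values are placeholders
OSS_ACCESS_KEY_ID = 'your-access-key-id'
OSS_ACCESS_KEY_SECRET = 'your-access-key-secret'
OSS_ENDPOINT = 'oss-cn-hangzhou.aliyuncs.com'
OSS_BUCKET_NAME = 'your-bucket'
OSS_EXPIRE_TIME = 60 * 60 * 24 * 30  # signed-URL lifetime in seconds (optional)

# Hypothetical import path; point it at wherever the storage class is defined.
DEFAULT_FILE_STORAGE = 'myproject.storage.OssStorage'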
Code Example #3
class AliyunBaseStorage(Storage):
    """
    Aliyun OSS2 Storage
    """
    location = ""
    acl = ""
    bucket_name = ""

    def __init__(self):
        self.access_key_id = self._get_config('ACCESS_KEY_ID')
        self.access_key_secret = self._get_config('ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(self._get_config('END_POINT').strip())

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)

        # if self.bucket_name not in self._list_bucket(self.service):
        # # create bucket if not exists
        # self.bucket = self._create_bucket(self.auth)
        # else:
        # # change bucket acl if not consists
        # self.bucket = self._check_bucket_acl(self._get_bucket(self.auth))
        # make sure the bucket must be there
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

    def _get_config(self, name):
        """
        Get configuration variable from environment variable
        or django setting.py
        """
        config = os.environ.get(name, getattr(settings, name, None))
        if config is not None:
            if isinstance(config, six.string_types):
                return config.strip()
            else:
                return config
        else:
            raise ImproperlyConfigured(
                "Can't find config for '%s' either in environment"
                "variable or in setting.py" % name)

    def _clean_name(self, name):
        """
        Cleans the name so that Windows style paths work
        """
        # Normalize Windows style paths
        clean_name = posixpath.normpath(name).replace('\\', '/')

        # os.path.normpath() can strip trailing slashes so we implement
        # a workaround here.
        if name.endswith('/') and not clean_name.endswith('/'):
            # Add a trailing slash as it was stripped.
            return clean_name + '/'
        else:
            return clean_name

    def _normalize_name(self, name):
        """
        Normalizes the name so that paths like /path/to/ignored/../foo.txt
        work. We check to make sure that the path pointed to is not outside
        the directory specified by the LOCATION setting.
        """

        base_path = force_text(self.location)
        base_path = base_path.rstrip('/')

        final_path = urljoin(base_path.rstrip('/') + "/", name)

        base_path_len = len(base_path)
        if (not final_path.startswith(base_path) or
                    final_path[base_path_len:base_path_len + 1] not in ('', '/')):
            raise SuspiciousOperation("Attempted access to '%s' denied." %
                                      name)
        return final_path.lstrip('/')

    def _get_target_name(self, name):
        name = self._normalize_name(self._clean_name(name))

        if self.acl == 'private':
            name = name.split('/')[-1]
            name = 'encoded/{0}/{1}/{2}_en'.format(name[0], name[1], name)
        elif self.acl == 'public-read':
            if len(name) < 32:
                name = uuid.uuid4().hex
            name = '{0}/{1}/{2}.jpg'.format(name[0], name[1], name)
        else:
            pass
        if six.PY2:
            name = name.encode('utf-8')
        return name

    def _open(self, name, mode='wrb'):
        name = self._get_target_name(name)
        return AliyunFile(name, self, mode)

    def _save(self, name, content):
        name = self._get_target_name(name)
        content.open()
        content_str = b''.join(chunk for chunk in content.chunks())
        self.bucket.put_object(name, content_str)
        content.close()

        return self._clean_name(name)


    def get_file_header(self, name):
        name = self._get_target_name(name)
        return self.bucket.head_object(name)

    def exists(self, name):
        return self.bucket.object_exists(name)

    def size(self, name):
        file_info = self.get_file_header(name)
        return file_info.content_length

    def modified_time(self, name):
        file_info = self.get_file_header(name)
        return datetime.datetime.fromtimestamp(file_info.last_modified)

    def listdir(self, name):
        name = self._normalize_name(self._clean_name(name))
        if name and name.endswith('/'):
            name = name[:-1]

        files = []
        dirs = set()

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                dirs.add(obj.key)
            else:
                files.append(obj.key)

        return list(dirs), files

    def url(self, name):
        name = self._normalize_name(self._clean_name(name))
        name = filepath_to_uri(name)
        return self.bucket._make_url(self.bucket_name, name)

    def read(self, name):
        pass

    def delete(self, name):
        pass
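
AliyunBaseStorage leaves location, acl, and bucket_name empty and expects a concrete backend to fill them in. A minimal, hypothetical subclass (the class name and values below are illustrative, not part of the original project):

class AliyunMediaStorage(AliyunBaseStorage):
    # Key prefix inside the bucket; feeds _normalize_name().
    location = 'media'
    # Selects the key-naming branch in _get_target_name(): 'private' or 'public-read'.
    acl = 'public-read'
    # The bucket is assumed to already exist (see the comment in __init__).
    bucket_name = 'my-public-bucket'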
Code Example #4
class OssStorage(Storage):
    """
    Aliyun OSS Storage
    """
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None):
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret \
            else _get_config(
                'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))
        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

        # try to get bucket acl to check bucket exist or not
        # try:
        #     self.bucket.get_bucket_acl().acl
        # except oss2.exceptions.NoSuchBucket:
        #     raise SuspiciousOperation(
        #         "Bucket '%s' does not exist." % self.bucket_name)

    def _get_key_name(self, name):
        """
        Get the object key name in OSS, e.g.,
        location: /media/
        input   : test.txt
        output  : media/test.txt
        """
        base_path = force_str(self.location)
        final_path = urljoin(base_path + "/", name)
        name = os.path.normpath(final_path.lstrip('/'))

        if six.PY2:
            name = name.encode('utf-8')
        return name

    def _open(self, name, mode='rb'):
        logger().debug("name: %s, mode: %s", name, mode)
        if mode != "rb":
            raise ValueError("OSS files can only be opened in read-only mode")

        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        try:
            # Load the key into a temporary file
            tmpf = SpooledTemporaryFile(max_size=10 * 1024 * 1024)  # 10MB
            obj = self.bucket.get_object(target_name)
            logger().info("content length: %d, requestid: %s",
                          obj.content_length, obj.request_id)
            if obj.content_length is None:
                shutil.copyfileobj(obj, tmpf)
            else:
                oss2.utils.copyfileobj_and_verify(obj,
                                                  tmpf,
                                                  obj.content_length,
                                                  request_id=obj.request_id)
            tmpf.seek(0)
            return OssFile(tmpf, target_name, self)
        except oss2.exceptions.NoSuchKey:
            raise OssError("%s does not exist" % name)
        except Exception:
            raise OssError("Failed to open %s" % name)

    def _save(self, name, content):
        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        logger().debug("content: %s", content)
        self.bucket.put_object(target_name, content)
        return os.path.normpath(name)

    def create_dir(self, dirname):
        target_name = self._get_key_name(dirname)
        if not target_name.endswith('/'):
            target_name += '/'

        self.bucket.put_object(target_name, '')

    def exists(self, name):
        target_name = self._get_key_name(name)
        logger().debug("name: %s, target name: %s", name, target_name)
        if name.endswith("/"):
            result = self.bucket.list_objects(prefix=target_name,
                                              delimiter='',
                                              marker='',
                                              max_keys=1)
            if len(result.object_list) == 0:
                logger().debug("object list: %s", result.object_list)
            else:
                logger().debug("object list: %s", result.object_list[0].key)
            return bool(result.object_list)

        exist = self.bucket.object_exists(target_name)
        logger().debug("'%s' exist: %s", target_name, exist)
        if not exist:
            name2 = name + "/"
            logger().debug("to check %s", name2)
            return self.exists(name2)

        return exist

    def get_file_meta(self, name):
        name = self._get_key_name(name)
        return self.bucket.get_object_meta(name)

    def size(self, name):
        file_meta = self.get_file_meta(name)
        return file_meta.content_length

    def modified_time(self, name):
        file_meta = self.get_file_meta(name)
        return datetime.fromtimestamp(file_meta.last_modified)

    created_time = accessed_time = modified_time

    def get_modified_time(self, name):
        file_meta = self.get_file_meta(name)

        if settings.USE_TZ:
            return datetime.utcfromtimestamp(
                file_meta.last_modified).replace(tzinfo=utc)
        else:
            return datetime.fromtimestamp(file_meta.last_modified)

    get_created_time = get_accessed_time = get_modified_time

    def content_type(self, name):
        name = self._get_key_name(name)
        file_info = self.bucket.head_object(name)
        return file_info.content_type

    def listdir(self, name):
        if name == ".":
            name = ""
        name = self._get_key_name(name)
        if not name.endswith('/'):
            name += "/"
        logger().debug("name: %s", name)

        files = []
        dirs = []

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                dirs.append(obj.key)
            else:
                files.append(obj.key)

        logger().debug("dirs: %s", list(dirs))
        logger().debug("files: %s", files)
        return dirs, files

    def url(self, name, expire=24 * 60 * 60):
        key = self._get_key_name(name)
        return self.bucket.sign_url('GET', key, expire)

    def delete(self, name):
        name = self._get_key_name(name)
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)

    def delete_with_slash(self, dirname):
        name = self._get_key_name(dirname)
        if not name.endswith('/'):
            name += '/'
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)
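
Unlike example #2, this variant stores no expire_time; the signed-URL lifetime is passed per call to url(). A small usage sketch, assuming the OSS_* settings are in place (the object key is a placeholder):

storage = OssStorage()                                    # picks up OSS_* settings
default_url = storage.url('reports/2021.pdf')             # default 24-hour expiry
short_url = storage.url('reports/2021.pdf', expire=600)   # 10-minute expiry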
Code Example #5
class OssStorage(Storage):
    """
    Aliyun OSS Storage
    """
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None):
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret else _get_config(
            'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))

        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')

        sts_token = getattr(settings, 'ALIYUN_STS_TOKEN', None)
        # If an STS token is configured, authenticate with StsAuth instead of Auth.
        if sts_token:
            self.auth = StsAuth(self.access_key_id, self.access_key_secret,
                                sts_token)
        else:
            self.auth = Auth(self.access_key_id, self.access_key_secret)

        self.service = Service(self.auth, self.end_point)

        use_oss_internal = getattr(settings, 'OSS_USE_INTERNAL', None)
        # On a machine inside Aliyun's internal network, default to the internal
        # end_point, otherwise use the public one; the internal end_point is
        # faster and incurs no traffic charges.
        if use_oss_internal:
            self.end_point_internal = _normalize_endpoint(
                end_point if end_point else _get_config('OSS_ENDPOINT_INTERNAL'))
            self.bucket = Bucket(self.auth, self.end_point_internal,
                                 self.bucket_name)
        else:
            self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)
        self.bucket_public = Bucket(self.auth, self.end_point,
                                    self.bucket_name)

        # try to get bucket acl to check bucket exist or not
        try:
            self.bucket.get_bucket_acl().acl
        except oss2.exceptions.NoSuchBucket:
            raise SuspiciousOperation("Bucket '%s' does not exist." %
                                      self.bucket_name)

    def _get_key_name(self, name):
        """
        Get the object key name in OSS, e.g.,
        location: /media/
        input   : test.txt
        output  : media/test.txt
        """
        base_path = force_text(self.location)
        final_path = urljoin(base_path + "/", name)
        name = os.path.normpath(final_path.lstrip('/'))
        name = name.replace('\\', '/')
        if six.PY2:
            name = name.encode('utf-8')
        return name

    def _open(self, name, mode='rb'):
        logger().debug("name: %s, mode: %s", name, mode)
        if mode != "rb":
            raise ValueError("OSS files can only be opened in read-only mode")

        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        try:
            # Load the key into a temporary file
            tmpf = NamedTemporaryFile()  # buffer the object in a temp file on disk
            obj = self.bucket.get_object(target_name)
            logger().info("content length: %d, requestid: %s",
                          obj.content_length, obj.request_id)
            if obj.content_length is None:
                shutil.copyfileobj(obj, tmpf)
            else:
                oss2.utils.copyfileobj_and_verify(obj,
                                                  tmpf,
                                                  obj.content_length,
                                                  request_id=obj.request_id)
            tmpf.seek(0)
            return OssFile(tmpf, target_name, self)
        except oss2.exceptions.NoSuchKey:
            raise OssError("%s does not exist" % name)
        except Exception:
            raise OssError("Failed to open %s" % name)

    def _save(self, name, content):
        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        logger().debug("content: %s", content)
        self.bucket.put_object(target_name, content)
        return os.path.normpath(name)

    def create_dir(self, dirname):
        target_name = self._get_key_name(dirname)
        if not target_name.endswith('/'):
            target_name += '/'

        self.bucket.put_object(target_name, '')

    def exists(self, name):
        target_name = self._get_key_name(name)
        logger().debug("name: %s, target name: %s", name, target_name)
        if name.endswith("/"):
            # This looks like a directory, but OSS has no concept of directories
            # need to check whether the key starts with this prefix
            result = self.bucket.list_objects(prefix=target_name,
                                              delimiter='',
                                              marker='',
                                              max_keys=1)
            if len(result.object_list) == 0:
                logger().debug("object list: %s", result.object_list)
            else:
                logger().debug("object list: %s", result.object_list[0].key)
            return bool(result.object_list)

        exist = self.bucket.object_exists(target_name)
        logger().debug("'%s' exist: %s", target_name, exist)
        if not exist:
            # It's not a file, but it might be a directory. Check again that it's not a directory.
            name2 = name + "/"
            logger().debug("to check %s", name2)
            return self.exists(name2)

        return exist

    def get_file_meta(self, name):
        name = self._get_key_name(name)
        return self.bucket.get_object_meta(name)

    def size(self, name):
        file_meta = self.get_file_meta(name)
        return file_meta.content_length

    def modified_time(self, name):
        file_meta = self.get_file_meta(name)
        return datetime.fromtimestamp(file_meta.last_modified)

    created_time = accessed_time = modified_time

    def get_modified_time(self, name):
        file_meta = self.get_file_meta(name)

        if settings.USE_TZ:
            return datetime.utcfromtimestamp(
                file_meta.last_modified).replace(tzinfo=utc)
        else:
            return datetime.fromtimestamp(file_meta.last_modified)

    get_created_time = get_accessed_time = get_modified_time

    def content_type(self, name):
        name = self._get_key_name(name)
        file_info = self.bucket.head_object(name)
        return file_info.content_type

    def listdir(self, name):
        if name == ".":
            name = ""
        name = self._get_key_name(name)
        if not name.endswith('/'):
            name += "/"
        logger().debug("name: %s", name)

        files = []
        dirs = []

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                dirs.append(obj.key)
            else:
                files.append(obj.key)

        logger().debug("dirs: %s", list(dirs))
        logger().debug("files: %s", files)
        return dirs, files

    def url(self, name, expire=60 * 60):
        key = self._get_key_name(name)
        custom_domain = getattr(settings, 'OSS_CUSTOM_DOMAIN', None)
        # return self.bucket.sign_url('GET', key, expire)
        # The signed URL is usually handed to a browser, so use the public end_point.
        url = self.bucket_public.sign_url('GET', key, expire)
        url = url.replace('%2F', '/')
        if custom_domain:
            url_list = list(urlsplit(url))
            custom_domain_list = list(urlsplit(custom_domain))
            url_list[0] = custom_domain_list[0]
            url_list[1] = custom_domain_list[1]
            url = urlunsplit(url_list)
        return url

    def delete(self, name):
        name = self._get_key_name(name)
        logger().debug("delete name: %s", name)
        self.bucket.delete_object(name)

    def delete_with_slash(self, dirname):
        name = self._get_key_name(dirname)
        if not name.endswith('/'):
            name += '/'
        logger().debug("delete name: %s", name)
        self.bucket.delete_object(name)
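
On top of the usual OSS_* settings, this variant reads three optional knobs: ALIYUN_STS_TOKEN switches authentication to StsAuth, OSS_USE_INTERNAL together with OSS_ENDPOINT_INTERNAL routes uploads and downloads through Aliyun's internal network, and OSS_CUSTOM_DOMAIN rewrites the scheme and host of signed URLs. A hedged settings sketch with placeholder values (the base OSS_* settings from example #2 are still required):

# settings.py (sketch) -- all values are placeholders
ALIYUN_STS_TOKEN = 'temporary-sts-token'       # use StsAuth instead of Auth
OSS_USE_INTERNAL = True                        # talk to OSS over the internal network
OSS_ENDPOINT_INTERNAL = 'oss-cn-hangzhou-internal.aliyuncs.com'
OSS_CUSTOM_DOMAIN = 'https://cdn.example.com'  # replaces scheme and host in signed URLs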