Example #1
0
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None,
                 expire_time=None):
        """Resolve OSS connection settings and verify the bucket exists.

        Any argument left as None falls back to its _get_config() value; the
        signed-URL lifetime defaults to 30 days.
        """
        # Explicit arguments win; otherwise pull each value from configuration.
        self.access_key_id = access_key_id or _get_config('OSS_ACCESS_KEY_ID')
        self.access_key_secret = (access_key_secret
                                  or _get_config('OSS_ACCESS_KEY_SECRET'))
        self.end_point = _normalize_endpoint(
            end_point or _get_config('OSS_ENDPOINT'))
        self.bucket_name = bucket_name or _get_config('OSS_BUCKET_NAME')
        self.expire_time = expire_time or int(
            _get_config('OSS_EXPIRE_TIME', default=60 * 60 * 24 * 30))

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

        # Fetching the bucket ACL doubles as an existence check.
        try:
            self.bucket_acl = self.bucket.get_bucket_acl().acl
        except oss2.exceptions.NoSuchBucket:
            raise SuspiciousOperation("Bucket '%s' does not exist." %
                                      self.bucket_name)
class ObjectStorageService:
    """Thin wrapper around an OSS bucket that stores JSON documents."""

    def __init__(self, access_key_id: str, access_key_secret: str, endpoint: str, bucket_name: str):
        self.access_key_id = access_key_id
        self.access_key_secret = access_key_secret
        self.auth = Auth(access_key_id, access_key_secret)
        self.bucket = Bucket(self.auth, endpoint, bucket_name)

    @staticmethod
    def gen_key(directory: str, id_: str) -> str:
        """Build the object key ``<directory>/<id_>.json`` for a document."""
        return f"{directory}/{id_}.json"

    def put_object(self, key: str, data: Dict):
        """Serialize *data* to JSON and upload it under *key*.

        Returns the SDK's put result.  (Bug fix: the original annotated this
        as ``-> None`` even though it returned the result.)
        """
        return self.bucket.put_object(key, serialize_to_json(data))

    def get_object(self, key: str) -> Optional[Dict]:
        """Download and deserialize the JSON object at *key*; None if missing."""
        try:
            result = self.bucket.get_object(key)
            return json.load(result)
        except NoSuchKey as e:
            logger.error(f"NoSuchKey, detail: {e.details}")
            return None

    def delete_object(self, key: str) -> int:
        """Delete *key*; return 1 on success, 0 if the key did not exist."""
        try:
            self.bucket.delete_object(key)
            return 1
        except NoSuchKey as e:
            logger.error(f"NoSuchKey, detail: {e.details}")
            return 0

    def delete_multiple_objects(self, prefix: str) -> None:
        """Delete every object whose key starts with *prefix*."""
        for obj in ObjectIterator(self.bucket, prefix=prefix):
            self.delete_object(obj.key)
class OSSUploader(object):
    """Uploads release APK files to an Aliyun OSS bucket.

    Credentials come from OSS_ACCESS_KEY_ID / OSS_ACCESS_KEY_SECRET; endpoint
    and bucket name fall back to the Shanghai region / 'seafile-downloads'.
    """

    def __init__(self):
        key_id = os.environ['OSS_ACCESS_KEY_ID']
        key_secret = os.environ['OSS_ACCESS_KEY_SECRET']

        self.endpoint = os.environ.get('OSS_ENDPOINT',
                                       'https://oss-cn-shanghai.aliyuncs.com')
        self.auth = Auth(key_id, key_secret)
        self.bucket_name = os.environ.get('OSS_BUCKET', 'seafile-downloads')
        self.bucket = Bucket(self.auth, self.endpoint, self.bucket_name)

    def upload_file(self, tag):
        """Download the APK for *tag* from GitHub and push it to the bucket."""
        _, _, _, apk_download_url = get_github_version_info(tag)
        apk_file = download_apk_file(apk_download_url)

        name = os.path.basename(apk_file)
        logger.info('uploading file %s to bucket %s', name, self.bucket_name)
        self._remove_existing_file(name)
        with open(apk_file, 'rb') as fh:
            self.bucket.put_object(name, fh)

    def _remove_existing_file(self, fn):
        """Delete *fn* from the bucket if a previous upload left it there."""
        if not self.bucket.object_exists(fn):
            return
        logger.warning('file %s already exists in bucket %s, deleting it',
                       fn, self.bucket_name)
        self.bucket.delete_object(fn)
        logger.warning('previous version of file %s deleted', fn)
Example #4
0
 def _set_bucket(self, bucketName=None, endPoint=None):
     """(Re)create ``self.bucket``, optionally overriding name / endpoint.

     Arguments left as None keep the values already stored on the instance.
     """
     if bucketName:
         self.bucketName = bucketName
     if endPoint:
         self.endPoint = endPoint
     # Lazily build the Auth object on first use.
     if not self.auth:
         self._auth()
     # NOTE(review): a new Bucket handle is constructed on every call, even
     # when nothing changed — presumably cheap; confirm against the SDK.
     self.bucket = Bucket(self.auth, bucket_name=self.bucketName, endpoint=self.endPoint)
Example #5
0
File: oss.py Project: hatlonely/ops
def put_object(bucket: oss2.Bucket, obj, filename, override=False):
    """Upload *filename* to *bucket* under key *obj*.

    If *obj* is falsy, the key defaults to the file's basename.  Unless
    *override* is set, an already-existing key is left untouched and
    ``{"status": 304}`` is returned; otherwise the SDK's HTTP status is
    returned.
    """
    # Bug fix: the original wrote ``if obj:``, which discarded a
    # caller-supplied key and ran the existence check against the
    # unresolved name.
    if not obj:
        obj = os.path.basename(filename)
    if not override:
        if bucket.object_exists(obj):
            return {"status": 304}
    res = bucket.put_object_from_file(obj, filename)
    return {"status": res.status}
Example #6
0
def upimg_delete(sha, upload_path, filename, basedir, save_result):
    """Remove an uploaded image from the configured Aliyun OSS bucket."""
    auth = get_auth()
    bucket = Bucket(auth, g.cfg.aliyun_endpoint, g.cfg.aliyun_bucket)
    base = g.cfg.aliyun_basedir or ''
    if base.startswith("/"):
        base = base.lstrip('/')
    # Prefer the basedir recorded at upload time over the configured default.
    path = join(basedir or base, upload_path, filename)
    bucket.delete_object(path)
    def __init__(self):
        """Read OSS credentials and bucket settings from the environment.

        OSS_ACCESS_KEY_ID / OSS_ACCESS_KEY_SECRET are required (KeyError if
        absent); endpoint and bucket name fall back to the Shanghai region /
        'seafile-downloads'.
        """
        access_key_id = os.environ['OSS_ACCESS_KEY_ID']
        access_key_secret = os.environ['OSS_ACCESS_KEY_SECRET']

        self.endpoint = os.environ.get('OSS_ENDPOINT',
                                       'https://oss-cn-shanghai.aliyuncs.com')
        self.auth = Auth(access_key_id, access_key_secret)
        self.bucket_name = os.environ.get('OSS_BUCKET', 'seafile-downloads')
        self.bucket = Bucket(self.auth, self.endpoint, self.bucket_name)
Example #8
0
    def __init__(self):
        """Resolve OSS settings from configuration and connect to the bucket."""
        cfg = self._get_config
        self.access_key_id = cfg('OSS_ACCESS_KEY_ID')
        self.access_key_secret = cfg('OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(cfg('OSS_ENDPOINT'))
        self.bucket_name = cfg('OSS_BUCKET_NAME')

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

        # Asking for the ACL doubles as an existence check for the bucket.
        try:
            self.bucket.get_bucket_acl().acl
        except oss2.exceptions.NoSuchBucket:
            raise SuspiciousOperation("Bucket '%s' does not exist." %
                                      self.bucket_name)
Example #9
0
    def bucket(self):
        """Return the OSS bucket, connecting lazily on first access.

        The first call builds the Auth/Service/Bucket objects and verifies
        that the bucket exists; subsequent calls return the cached handle.
        """
        if self.connected:
            return self._bucket
        else:
            self.auth = Auth(self.access_key_id, self.access_key_secret)
            self.service = Service(self.auth, self.end_point)
            self._bucket = Bucket(self.auth, self.end_point, self.bucket_name)

            # try to get bucket acl to check bucket exist or not
            try:
                self._bucket.get_bucket_acl().acl
            except oss2.exceptions.NoSuchBucket:
                raise SuspiciousOperation("Bucket '%s' does not exist." %
                                          self.bucket_name)
            # Only mark connected after the existence check succeeded.
            self.connected = True
            return self._bucket
Example #10
0
class AliyunOssManager(object):
    """Lazily-authenticated helper for checking and signing OSS video objects."""

    def __init__(self, accessKeyId, accessKeySecret, bucketName=None, endPoint=None):
        self.id = accessKeyId
        self.secret = accessKeySecret
        self.bucketName = bucketName
        self.endPoint = endPoint
        self.bucket = None
        self.auth = None

    def _auth(self):
        """Create the Auth object the first time it is needed."""
        if not self.auth:
            self.auth = Auth(self.id, self.secret)

    def video_exists(self, videoPath, bucketName=None, endPoint=None):
        """Return True if *videoPath* exists in the (possibly overridden) bucket."""
        self._set_bucket(bucketName, endPoint)
        return self.bucket.object_exists(videoPath)

    def _set_bucket(self, bucketName=None, endPoint=None):
        """Rebuild ``self.bucket``, optionally overriding name / endpoint."""
        self.bucketName = bucketName or self.bucketName
        self.endPoint = endPoint or self.endPoint
        # _auth() is idempotent, so no extra guard is needed here.
        self._auth()
        self.bucket = Bucket(self.auth, bucket_name=self.bucketName, endpoint=self.endPoint)

    def get_video_url(self, videoPath, expires=3600, bucketName=None, endPoint=None):
        """Return a signed GET URL for *videoPath*, valid for *expires* seconds."""
        if not self.bucket:
            self._set_bucket(bucketName, endPoint)
        return self.bucket.sign_url(method='GET', key=videoPath, expires=expires)

    def upload_video(self, videoPath):
        """Not implemented yet."""
        pass
Example #11
0
File: oss.py Project: hatlonely/ops
def get_object(bucket: oss2.Bucket, obj, filename, override=False):
    """Download key *obj* from *bucket* into the local file *filename*.

    If *filename* is falsy it defaults to the key's basename.  Unless
    *override* is set, an existing local file is kept and ``{"status": 304}``
    is returned; otherwise the SDK's HTTP status is returned.
    """
    # Bug fix: the original wrote ``if filename:``, clobbering a
    # caller-supplied target path and leaving *filename* empty when none
    # was given.
    if not filename:
        filename = os.path.basename(obj)
    if not override:
        if os.path.exists(filename):
            return {"status": 304}
    res = bucket.get_object_to_file(obj, filename)
    return {"status": res.status}
Example #12
0
def upimg_save(**kwargs):
    """Upload an image stream to Aliyun OSS.

    Expects ``filename``, ``stream`` and optional ``upload_path`` in kwargs.
    Returns a dict with ``code`` 0 on success (plus ``etag``/``src``/
    ``basedir``) or ``code`` 1 with a ``msg`` describing the failure.
    """
    res = dict(code=1)
    try:
        filename = kwargs["filename"]
        stream = kwargs["stream"]
        upload_path = kwargs.get("upload_path") or ""
        if not filename or not stream:
            # Bug fix: the original ``return ValueError`` handed the exception
            # class back to the caller instead of triggering the error branch.
            raise ValueError
    except (KeyError, ValueError):
        res.update(msg="Parameter error")
    else:
        dn = g.cfg.aliyun_dn
        bucket = g.cfg.aliyun_bucket
        ak = g.cfg.aliyun_ak
        sk = g.cfg.aliyun_sk
        aliyun_basedir = g.cfg.aliyun_basedir or ''
        if not dn or not bucket or not ak or not sk:
            res.update(msg="The aliyun parameter error")
            return res
        if isinstance(upload_path, string_types):
            # OSS keys must not start with '/'.
            if upload_path.startswith("/"):
                upload_path = upload_path.lstrip('/')
            if aliyun_basedir.startswith("/"):
                aliyun_basedir = aliyun_basedir.lstrip('/')
            filepath = join(aliyun_basedir, upload_path, filename)
            #: Upload via the official Aliyun OSS SDK.
            auth = get_auth()
            endpoint = g.cfg.aliyun_endpoint
            if not endpoint:
                # Discover the bucket's endpoint once and persist it.
                info = get_bucket_info(bucket, auth)
                if info and isinstance(info, dict) and "endpoint" in info:
                    endpoint = info['endpoint']
                    set_site_config(dict(aliyun_endpoint=endpoint))
            obj = Bucket(auth, endpoint, bucket)
            result = obj.put_object(filepath, stream)
            if result.status == 200:
                res.update(
                    code=0,
                    etag=result.etag,
                    src=slash_join(dn, filepath),
                    basedir=aliyun_basedir,
                )
        else:
            res.update(msg="The upload_path type error")
    return res
Example #13
0
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None):
        """Store OSS connection settings and build auth/service/bucket handles.

        Arguments left as None fall back to the corresponding _get_config()
        values (OSS_ACCESS_KEY_ID, OSS_ACCESS_KEY_SECRET, OSS_ENDPOINT,
        OSS_BUCKET_NAME).  No existence check is performed here.
        """
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret \
            else _get_config(
                'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))
        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)
Example #14
0
 def OssGetByteObject(self, bucket: oss2.Bucket, fileName):
     """Stream-download *fileName* from *bucket*.

     Returns the SDK's streaming response on success, or False on failure.
     """
     try:
         dataStream = bucket.get_object(fileName)
         # Do NOT close the stream here: closing it would hand the caller an
         # empty data stream.
         return dataStream
     except Exception as e:
         # Bug fix: was ``except BaseException``, which also swallowed
         # KeyboardInterrupt and SystemExit.
         print("oss get object error, file name: {}, msg: {}".format(
             fileName, e))
         return False
Example #15
0
 def get_bucket_obj(self, bucket_name):
     """Return a Bucket handle for *bucket_name* using this client's auth.

     Note: earlier revisions created the bucket / adjusted its ACL here;
     the current version only constructs the handle.
     """
     return Bucket(self.auth, self.end_point, bucket_name)
def get_archive_list(filelist: list, bucket: oss2.Bucket):
    """Return the subset of *filelist* stored in the Archive storage class."""
    arg_dict = locals()
    globalEnv.logger.debug('code:{}.{} {}'.format(
        __name__,
        sys._getframe().f_code.co_name, str(arg_dict)))
    # One head_object round-trip per file; keep only Archive-class objects.
    ans = [
        filename for filename in filelist
        if bucket.head_object(filename).headers[
            'x-oss-storage-class'] == oss2.BUCKET_STORAGE_CLASS_ARCHIVE
    ]
    globalEnv.logger.debug('archive list:' + str(ans))
    return ans
Example #17
0
    def __init__(self):
        """Read OSS credentials/endpoint from config and bind the bucket.

        NOTE(review): ``self.bucket_name`` is not assigned here — presumably
        it is a class attribute on the enclosing class; confirm.
        """
        self.access_key_id = self._get_config('ACCESS_KEY_ID')
        self.access_key_secret = self._get_config('ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(self._get_config('END_POINT').strip())

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)

        # if self.bucket_name not in self._list_bucket(self.service):
        # # create bucket if not exists
        # self.bucket = self._create_bucket(self.auth)
        # else:
        # # change bucket acl if not consists
        # self.bucket = self._check_bucket_acl(self._get_bucket(self.auth))
        # make sure the bucket must be there
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)
Example #18
0
    def __init__(self,
                 access_key_id: str,
                 access_key_secret: str,
                 hosts: Union[str, List[str]],
                 buckets: Union[str, List[str]]):
        """Open one OSS Bucket handle per (host, bucket) pair.

        A single bucket name and/or a single host string are broadcast into
        lists; otherwise *hosts* and *buckets* must have equal length.
        """
        from oss2 import Auth, Bucket, ObjectIterator
        super().__init__()
        self.ObjectIterator = ObjectIterator
        self.auth = Auth(access_key_id, access_key_secret)
        if isinstance(buckets, str):
            buckets = [buckets]
        if isinstance(hosts, str):
            # One host shared by every bucket.
            hosts = [hosts] * len(buckets)
        else:
            assert len(hosts) == len(buckets), 'number of hosts and number of buckets should be the same'
        bucket_map = {}
        for host, bucket_name in zip(hosts, buckets):
            bucket_map[bucket_name] = Bucket(self.auth, host, bucket_name)
        self.buckets = bucket_map
        self.oss_pattern = re.compile(r'oss://([^/]+)/(.+)')
Example #19
0
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None):
        """Resolve OSS settings, choosing STS auth and internal endpoint when configured.

        Arguments left as None fall back to _get_config() values.  Builds
        ``self.bucket`` (internal endpoint when OSS_USE_INTERNAL is set) and
        ``self.bucket_public`` (always the external endpoint), then verifies
        the bucket exists.
        """
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret else _get_config(
            'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))

        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')

        sts_token = getattr(settings, 'ALIYUN_STS_TOKEN', None)
        # If an STS token is configured, authenticate with StsAuth instead.
        if sts_token:
            self.auth = StsAuth(self.access_key_id, self.access_key_secret,
                                sts_token)
        else:
            self.auth = Auth(self.access_key_id, self.access_key_secret)

        self.service = Service(self.auth, self.end_point)

        use_oss_internal = getattr(settings, 'OSS_USE_INTERNAL', None)
        # On Aliyun intranet machines, prefer the internal endpoint: faster
        # and free of traffic charges; otherwise use the public endpoint.
        # NOTE(review): when an explicit ``end_point`` argument is given it is
        # reused here as the "internal" endpoint too — confirm that is intended.
        if use_oss_internal:
            self.end_point_internal = _normalize_endpoint(
                end_point if end_point else _get_config('OSS_ENDPOINT_INTERNAL'
                                                        ))
            self.bucket = Bucket(self.auth, self.end_point_internal,
                                 self.bucket_name)
        else:
            self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)
        self.bucket_public = Bucket(self.auth, self.end_point,
                                    self.bucket_name)

        # try to get bucket acl to check bucket exist or not
        try:
            self.bucket.get_bucket_acl().acl
        except oss2.exceptions.NoSuchBucket:
            raise SuspiciousOperation("Bucket '%s' does not exist." %
                                      self.bucket_name)
Example #20
0
def upload(filename, key):
    """Multipart-upload the local file *filename* to OSS under key *key*.

    The file is split into parts (preferred size 100 KB) and pushed through
    the oss2 multipart API.  Credentials and bucket settings are read from
    the environment: OSS_ACCESS_KEY_ID / OSS_ACCESS_KEY_SECRET (required),
    and optionally OSS_ENDPOINT / OSS_BUCKET_NAME.
    """
    total_size = os.path.getsize(filename)
    part_size = determine_part_size(total_size, preferred_size=100 * 1024)

    # Security fix: the original hard-coded a real AccessKey id/secret in
    # source control; credentials now come from the environment.  Those keys
    # should be considered leaked and revoked.
    end_point = os.environ.get('OSS_ENDPOINT', 'oss-cn-shanghai.aliyuncs.com')
    auth = Auth(os.environ['OSS_ACCESS_KEY_ID'],
                os.environ['OSS_ACCESS_KEY_SECRET'])
    bucket_name = os.environ.get('OSS_BUCKET_NAME', 'yunbeifeng')
    bucket = Bucket(auth, end_point, bucket_name=bucket_name)

    upload_id = bucket.init_multipart_upload(key).upload_id
    parts = []
    with open(filename, 'rb') as fileobj:
        part_number = 1
        offset = 0
        while offset < total_size:
            num_to_upload = min(part_size, total_size - offset)
            result = bucket.upload_part(
                key, upload_id, part_number,
                SizedFileAdapter(fileobj, num_to_upload))
            parts.append(PartInfo(part_number, result.etag))
            offset += num_to_upload
            part_number += 1

    bucket.complete_multipart_upload(key, upload_id, parts)
Example #21
0
class OssStorage(Storage):
    """
    Aliyun OSS Storage

    Django storage backend for an Aliyun OSS bucket.  Constructor arguments
    left as None fall back to _get_config() values; the bucket's existence
    is verified eagerly at construction time.
    """
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None,
                 expire_time=None):
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret else _get_config(
            'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))
        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')
        # Lifetime (seconds) of signed URLs; defaults to 30 days.
        self.expire_time = expire_time if expire_time else int(
            _get_config('OSS_EXPIRE_TIME', default=60 * 60 * 24 * 30))

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

        # try to get bucket acl to check bucket exist or not
        try:
            self.bucket_acl = self.bucket.get_bucket_acl().acl
        except oss2.exceptions.NoSuchBucket:
            raise SuspiciousOperation("Bucket '%s' does not exist." %
                                      self.bucket_name)

    def _get_key_name(self, name):
        """
        Get the object key name in OSS, e.g.,
        location: /media/
        input   : test.txt
        output  : media/test.txt
        """
        # urljoin won't work if name is absolute path
        name = name.lstrip('/')

        base_path = force_text(self.location)
        final_path = urljoin(base_path + "/", name)
        name = os.path.normpath(final_path.lstrip('/'))

        # Add / to the end of path since os.path.normpath will remove it
        if final_path.endswith('/') and not name.endswith('/'):
            name += '/'

        if six.PY2:
            name = name.encode('utf-8')
        # Store filenames with forward slashes, even on Windows.
        return name.replace('\\', '/')

    def _open(self, name, mode='rb'):
        """Download *name* into a spooled temp file wrapped in OssFile."""
        logger().debug("name: %s, mode: %s", name, mode)
        if mode != "rb":
            raise ValueError("OSS files can only be opened in read-only mode")

        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        try:
            # Load the key into a temporary file
            tmpf = SpooledTemporaryFile(max_size=10 * 1024 * 1024)  # 10MB
            obj = self.bucket.get_object(target_name)
            logger().info("content length: %d, requestid: %s",
                          obj.content_length, obj.request_id)
            if obj.content_length is None:
                shutil.copyfileobj(obj, tmpf)
            else:
                # Verifies the downloaded length against the reported one.
                oss2.utils.copyfileobj_and_verify(obj,
                                                  tmpf,
                                                  obj.content_length,
                                                  request_id=obj.request_id)
            tmpf.seek(0)
            return OssFile(tmpf, target_name, self)
        except oss2.exceptions.NoSuchKey:
            raise OssError("%s does not exist" % name)
        except Exception:
            # Bug fix: was a bare ``except:``, which also swallowed
            # SystemExit/KeyboardInterrupt.
            raise OssError("Failed to open %s" % name)

    def _save(self, name, content):
        """Upload *content* under *name*; return the normalized name."""
        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        logger().debug("content: %s", content)
        self.bucket.put_object(target_name, content)
        return os.path.normpath(name)

    def create_dir(self, dirname):
        """Create an empty "directory" marker object (key ending in '/')."""
        target_name = self._get_key_name(dirname)
        if not target_name.endswith('/'):
            target_name += '/'

        self.bucket.put_object(target_name, '')

    def exists(self, name):
        """Return True if *name* exists as an object or a directory prefix."""
        target_name = self._get_key_name(name)
        logger().debug("name: %s, target name: %s", name, target_name)
        if name.endswith("/"):
            # This looks like a directory, but OSS has no concept of directories
            # need to check whether the key starts with this prefix
            result = self.bucket.list_objects(prefix=target_name,
                                              delimiter='',
                                              marker='',
                                              max_keys=1)
            if len(result.object_list) == 0:
                logger().debug("object list: %s", result.object_list)
            else:
                logger().debug("object list: %s", result.object_list[0].key)
            return bool(result.object_list)

        exist = self.bucket.object_exists(target_name)
        logger().debug("'%s' exist: %s", target_name, exist)
        if not exist:
            # It's not a file, but it might be a directory. Check again that it's not a directory.
            name2 = name + "/"
            logger().debug("to check %s", name2)
            return self.exists(name2)

        return exist

    def get_file_meta(self, name):
        """Return OSS object metadata for *name*."""
        name = self._get_key_name(name)
        return self.bucket.get_object_meta(name)

    def size(self, name):
        """Return the object's size in bytes."""
        file_meta = self.get_file_meta(name)
        return file_meta.content_length

    def modified_time(self, name):
        """Return the last-modified time as a naive local datetime."""
        file_meta = self.get_file_meta(name)
        return datetime.fromtimestamp(file_meta.last_modified)

    # OSS does not track creation/access times separately from modification.
    created_time = accessed_time = modified_time

    def get_modified_time(self, name):
        """Return the last-modified time, timezone-aware when USE_TZ is set."""
        file_meta = self.get_file_meta(name)

        if settings.USE_TZ:
            return datetime.utcfromtimestamp(
                file_meta.last_modified).replace(tzinfo=utc)
        else:
            return datetime.fromtimestamp(file_meta.last_modified)

    get_created_time = get_accessed_time = get_modified_time

    def content_type(self, name):
        """Return the object's Content-Type as reported by OSS."""
        name = self._get_key_name(name)
        file_info = self.bucket.head_object(name)
        return file_info.content_type

    def listdir(self, name):
        """Return (dirs, files) directly under *name*, like os.listdir."""
        if name == ".":
            name = ""
        name = self._get_key_name(name)
        if not name.endswith('/'):
            name += "/"
        logger().debug("name: %s", name)

        files = []
        dirs = []

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                dirs.append(obj.key)
            else:
                files.append(obj.key)

        logger().debug("dirs: %s", list(dirs))
        logger().debug("files: %s", files)
        return dirs, files

    def url(self, name):
        """Return a URL for *name*: signed for private buckets, plain otherwise."""
        key = self._get_key_name(name)
        # Renamed local from ``str`` — the original shadowed the builtin.
        signed_url = self.bucket.sign_url('GET', key, expires=self.expire_time)
        if self.bucket_acl != BUCKET_ACL_PRIVATE:
            # Public bucket: drop the signature query string and unescape '/'.
            idx = signed_url.find('?')
            if idx > 0:
                signed_url = signed_url[:idx].replace('%2F', '/')
        return signed_url

    def delete(self, name):
        """Delete the object for *name*; return the SDK delete result."""
        name = self._get_key_name(name)
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)

    def delete_with_slash(self, dirname):
        """Delete the directory-marker object ('/'-suffixed key) for *dirname*."""
        name = self._get_key_name(dirname)
        if not name.endswith('/'):
            name += '/'
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)
Example #22
0
File: oss.py Project: hatlonely/ops
def put_symlink(bucket: oss2.Bucket, obj, symlink):
    """Create *symlink* pointing at key *obj* and report the HTTP status."""
    response = bucket.put_symlink(obj, symlink)
    return {"status": response.status}
Example #23
0
from uuid import uuid4
from datetime import datetime

from oss2 import Auth, Bucket
from tornado.options import options

# Module-level OSS client shared by all uploads; credentials, endpoint and
# bucket name come from tornado options, so this connects at import time.
__auth = Auth(options.oss_access_key_id, options.oss_access_key_secret)
__bucket = Bucket(__auth, 'http://{0}'.format(options.oss_endpoint),
                  options.oss_bucket_name)


def upload_oss(contents, name_or_extension):
    """Store *contents* in OSS and return its public URL.

    If *name_or_extension* contains a dot it is used verbatim as the object
    name; otherwise a name of the form ``<year>/<uuid>.<extension>`` is
    generated.  Up to three attempts are made to avoid name collisions.

    NOTE(review): with an explicit name, the candidate is identical on every
    retry, so a collision always exhausts the loop and raises — confirm that
    is intended.
    """
    for i in range(3):
        if name_or_extension.find('.') >= 0:
            name = name_or_extension
        else:
            name = '{0}/{1}.{2}'.format(datetime.now().year,
                                        str(uuid4()).replace('-', ''),
                                        name_or_extension.lower())
        if __bucket.object_exists(name):
            continue
        __bucket.put_object(name, contents)
        # Image objects are served from the dedicated image endpoint.
        is_image = name_or_extension in options.upload_image_accept_formats
        return 'http://{0}.{1}/{2}'.format(
            options.oss_bucket_name,
            options.oss_img_endpoint if is_image else options.oss_endpoint,
            name)
    raise Exception('Failed to upload to OSS due to duplicate object name.')
Example #24
0
class OssStorage(Storage):
    """
    Aliyun OSS Storage

    Django storage backend for an Aliyun OSS bucket.  Constructor arguments
    left as None fall back to _get_config() values; no bucket existence
    check is performed at construction time.
    """
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None):
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret \
            else _get_config(
                'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))
        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

        # try to get bucket acl to check bucket exist or not
        # try:
        #     self.bucket.get_bucket_acl().acl
        # except oss2.exceptions.NoSuchBucket:
        #     raise SuspiciousOperation(
        #         "Bucket '%s' does not exist." % self.bucket_name)

    def _get_key_name(self, name):
        """
        Get the object key name in OSS, e.g.,
        location: /media/
        input   : test.txt
        output  : media/test.txt
        """
        # urljoin won't work if name is an absolute path
        name = name.lstrip('/')

        base_path = force_str(self.location)
        final_path = urljoin(base_path + "/", name)
        name = os.path.normpath(final_path.lstrip('/'))

        # Bug fix: os.path.normpath strips a trailing '/', which broke the
        # directory keys used by exists()/create_dir(); re-append it.
        if final_path.endswith('/') and not name.endswith('/'):
            name += '/'

        if six.PY2:
            name = name.encode('utf-8')
        # Store keys with forward slashes, even on Windows.
        return name.replace('\\', '/')

    def _open(self, name, mode='rb'):
        """Download *name* into a spooled temp file wrapped in OssFile."""
        logger().debug("name: %s, mode: %s", name, mode)
        if mode != "rb":
            raise ValueError("OSS files can only be opened in read-only mode")

        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        try:
            # Load the key into a temporary file
            tmpf = SpooledTemporaryFile(max_size=10 * 1024 * 1024)  # 10MB
            obj = self.bucket.get_object(target_name)
            logger().info("content length: %d, requestid: %s",
                          obj.content_length, obj.request_id)
            if obj.content_length is None:
                shutil.copyfileobj(obj, tmpf)
            else:
                # Verifies the downloaded length against the reported one.
                oss2.utils.copyfileobj_and_verify(obj,
                                                  tmpf,
                                                  obj.content_length,
                                                  request_id=obj.request_id)
            tmpf.seek(0)
            return OssFile(tmpf, target_name, self)
        except oss2.exceptions.NoSuchKey:
            raise OssError("%s does not exist" % name)
        except Exception:
            raise OssError("Failed to open %s" % name)

    def _save(self, name, content):
        """Upload *content* under *name*; return the normalized name."""
        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        logger().debug("content: %s", content)
        self.bucket.put_object(target_name, content)
        return os.path.normpath(name)

    def create_dir(self, dirname):
        """Create an empty "directory" marker object (key ending in '/')."""
        target_name = self._get_key_name(dirname)
        if not target_name.endswith('/'):
            target_name += '/'

        self.bucket.put_object(target_name, '')

    def exists(self, name):
        """Return True if *name* exists as an object or a directory prefix."""
        target_name = self._get_key_name(name)
        logger().debug("name: %s, target name: %s", name, target_name)
        if name.endswith("/"):
            # OSS has no real directories; probe for keys under the prefix.
            result = self.bucket.list_objects(prefix=target_name,
                                              delimiter='',
                                              marker='',
                                              max_keys=1)
            if len(result.object_list) == 0:
                logger().debug("object list: %s", result.object_list)
            else:
                logger().debug("object list: %s", result.object_list[0].key)
            return bool(result.object_list)

        exist = self.bucket.object_exists(target_name)
        logger().debug("'%s' exist: %s", target_name, exist)
        if not exist:
            # Not a file — it might still be a directory prefix.
            name2 = name + "/"
            logger().debug("to check %s", name2)
            return self.exists(name2)

        return exist

    def get_file_meta(self, name):
        """Return OSS object metadata for *name*."""
        name = self._get_key_name(name)
        return self.bucket.get_object_meta(name)

    def size(self, name):
        """Return the object's size in bytes."""
        file_meta = self.get_file_meta(name)
        return file_meta.content_length

    def modified_time(self, name):
        """Return the last-modified time as a naive local datetime."""
        file_meta = self.get_file_meta(name)
        return datetime.fromtimestamp(file_meta.last_modified)

    # OSS does not track creation/access times separately from modification.
    created_time = accessed_time = modified_time

    def get_modified_time(self, name):
        """Return the last-modified time, timezone-aware when USE_TZ is set."""
        file_meta = self.get_file_meta(name)

        if settings.USE_TZ:
            return datetime.utcfromtimestamp(
                file_meta.last_modified).replace(tzinfo=utc)
        else:
            return datetime.fromtimestamp(file_meta.last_modified)

    get_created_time = get_accessed_time = get_modified_time

    def content_type(self, name):
        """Return the object's Content-Type as reported by OSS."""
        name = self._get_key_name(name)
        file_info = self.bucket.head_object(name)
        return file_info.content_type

    def listdir(self, name):
        """Return (dirs, files) directly under *name*, like os.listdir."""
        if name == ".":
            name = ""
        name = self._get_key_name(name)
        if not name.endswith('/'):
            name += "/"
        logger().debug("name: %s", name)

        files = []
        dirs = []

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                dirs.append(obj.key)
            else:
                files.append(obj.key)

        logger().debug("dirs: %s", list(dirs))
        logger().debug("files: %s", files)
        return dirs, files

    def url(self, name, expire=24 * 60 * 60):
        """Return a signed GET URL for *name*, valid for *expire* seconds."""
        key = self._get_key_name(name)
        return self.bucket.sign_url('GET', key, expire)

    def delete(self, name):
        """Delete the object for *name*; return the SDK delete result."""
        name = self._get_key_name(name)
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)

    def delete_with_slash(self, dirname):
        """Delete the directory-marker object ('/'-suffixed key) for *dirname*."""
        name = self._get_key_name(dirname)
        if not name.endswith('/'):
            name += '/'
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)
Example #25
0
class AliyunBaseStorage(Storage):
    """
    Aliyun OSS2 Storage

    Base Django Storage backend over Aliyun OSS (oss2 SDK).  Subclasses
    are expected to provide ``location``, ``acl`` and ``bucket_name``.
    """
    # Key prefix (pseudo-directory) under which objects are stored.
    location = ""
    # Bucket ACL; _get_target_name() branches on 'private'/'public-read'.
    acl = ""
    # Name of the OSS bucket; assumed to already exist (see __init__).
    bucket_name = ""

    def __init__(self):
        # Credentials and endpoint come from environment variables or
        # Django settings via _get_config().
        self.access_key_id = self._get_config('ACCESS_KEY_ID')
        self.access_key_secret = self._get_config('ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(self._get_config('END_POINT').strip())

        self.auth = Auth(self.access_key_id, self.access_key_secret)
        self.service = Service(self.auth, self.end_point)

        # if self.bucket_name not in self._list_bucket(self.service):
        # # create bucket if not exists
        # self.bucket = self._create_bucket(self.auth)
        # else:
        # # change bucket acl if not consistent
        # self.bucket = self._check_bucket_acl(self._get_bucket(self.auth))
        # make sure the bucket must be there
        self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)

    def _get_config(self, name):
        """
        Get configuration variable from environment variable
        or django setting.py

        Environment takes precedence over settings; string values are
        stripped.  Raises ImproperlyConfigured when found in neither.
        """
        config = os.environ.get(name, getattr(settings, name, None))
        if config is not None:
            if isinstance(config, six.string_types):
                return config.strip()
            else:
                return config
        else:
            # NOTE(review): message renders as "environmentvariable" —
            # a space is missing between the adjacent string literals.
            raise ImproperlyConfigured(
                "Can't find config for '%s' either in environment"
                "variable or in setting.py" % name)

    def _clean_name(self, name):
        """
        Cleans the name so that Windows style paths work
        """
        # Normalize Windows style paths
        clean_name = posixpath.normpath(name).replace('\\', '/')

        # os.path.normpath() can strip trailing slashes so we implement
        # a workaround here.
        if name.endswith('/') and not clean_name.endswith('/'):
            # Add a trailing slash as it was stripped.
            return clean_name + '/'
        else:
            return clean_name

    def _normalize_name(self, name):
        """
        Normalizes the name so that paths like /path/to/ignored/../foo.txt
        work. We check to make sure that the path pointed to is not outside
        the directory specified by the LOCATION setting.
        """

        base_path = force_text(self.location)
        base_path = base_path.rstrip('/')

        final_path = urljoin(base_path.rstrip('/') + "/", name)

        base_path_len = len(base_path)
        # Reject names that escape the base path (e.g. via '..'); the
        # character right after the base must be '' or '/'.
        if (not final_path.startswith(base_path) or
                    final_path[base_path_len:base_path_len + 1] not in ('', '/')):
            raise SuspiciousOperation("Attempted access to '%s' denied." %
                                      name)
        return final_path.lstrip('/')

    def _get_target_name(self, name):
        """Map *name* to its final OSS key according to the bucket ACL."""
        name = self._normalize_name(self._clean_name(name))

        if self.acl == 'private':
            # Keep only the basename and shard it two levels deep by its
            # first two characters: encoded/<c0>/<c1>/<basename>_en
            name = name.split('/')[-1]
            name = 'encoded/{0}/{1}/{2}_en'.format(name[0],name[1],name)
        elif self.acl == 'public-read':
            if len(name) < 32:
                # NOTE(review): short names are replaced wholesale by a
                # random hex id, discarding the original name — confirm
                # this is intentional.
                name = uuid.uuid4().hex
            # Shard by first two characters; extension is forced to .jpg.
            name = '{0}/{1}/{2}.jpg'.format(name[0],name[1],name)
        else:
            pass
        if six.PY2:
            # oss2 expects byte-string keys on Python 2.
            name = name.encode('utf-8')
        return name

    def _open(self, name, mode='wrb'):
        """Return an AliyunFile wrapper over the (re-mapped) key.

        NOTE(review): the default mode 'wrb' is unusual; AliyunFile is
        responsible for interpreting it — confirm against its contract.
        """
        name = self._get_target_name(name)
        return AliyunFile(name, self, mode)

    def _save(self, name, content):
        """Upload Django *content* to OSS and return the cleaned key name."""
        name = self._get_target_name(name)
        content.open()
        # Materialises the whole upload in memory; fine for small files,
        # but large uploads pay the full buffer cost.
        content_str = b''.join(chunk for chunk in content.chunks())
        self.bucket.put_object(name, content_str)
        content.close()

        return self._clean_name(name)


    def get_file_header(self, name):
        """HEAD the object and return its metadata (oss2 head result)."""
        name = self._get_target_name(name)
        return self.bucket.head_object(name)

    def exists(self, name):
        # NOTE(review): unlike the other methods, *name* is used as the
        # raw key here (no _get_target_name mapping) — confirm intended.
        return self.bucket.object_exists(name)

    def size(self, name):
        """Object size in bytes."""
        file_info = self.get_file_header(name)
        return file_info.content_length

    def modified_time(self, name):
        """Naive local-time datetime of the last modification."""
        file_info = self.get_file_header(name)
        return datetime.datetime.fromtimestamp(file_info.last_modified)

    def listdir(self, name):
        """Return (dirs, files) under *name* using '/' as the delimiter."""
        name = self._normalize_name(self._clean_name(name))
        if name and name.endswith('/'):
            name = name[:-1]

        files = []
        dirs = set()

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                # Common prefixes act as directories.
                dirs.add(obj.key)
            else:
                files.append(obj.key)

        return list(dirs), files

    def url(self, name):
        """Unsigned public URL for *name*.

        NOTE(review): relies on the private oss2 API ``Bucket._make_url``;
        may break across oss2 versions.
        """
        name = self._normalize_name(self._clean_name(name))
        name = filepath_to_uri(name)
        return self.bucket._make_url(self.bucket_name, name)

    def read(self, name):
        # Intentionally unimplemented in this base class.
        pass

    def delete(self, name):
        # Intentionally unimplemented in this base class.
        pass
Example #26
0
class OssStorage(Storage):
    """
    Aliyun OSS Storage

    Django Storage backend over Aliyun OSS (oss2 SDK).  Credentials,
    endpoint and bucket name come from the constructor arguments or,
    when omitted, from the OSS_* settings/environment via ``_get_config``.
    """
    def __init__(self,
                 access_key_id=None,
                 access_key_secret=None,
                 end_point=None,
                 bucket_name=None):
        self.access_key_id = access_key_id if access_key_id else _get_config(
            'OSS_ACCESS_KEY_ID')
        self.access_key_secret = access_key_secret if access_key_secret else _get_config(
            'OSS_ACCESS_KEY_SECRET')
        self.end_point = _normalize_endpoint(
            end_point if end_point else _get_config('OSS_ENDPOINT'))

        self.bucket_name = bucket_name if bucket_name else _get_config(
            'OSS_BUCKET_NAME')

        sts_token = getattr(settings, 'ALIYUN_STS_TOKEN', None)
        # When an STS token is configured, temporary-credential auth
        # (StsAuth) must be used instead of the plain Auth.
        if sts_token:
            self.auth = StsAuth(self.access_key_id, self.access_key_secret,
                                sts_token)
        else:
            self.auth = Auth(self.access_key_id, self.access_key_secret)

        self.service = Service(self.auth, self.end_point)

        use_oss_internal = getattr(settings, 'OSS_USE_INTERNAL', None)
        # Hosts inside Aliyun's network can use the internal endpoint,
        # which is faster and incurs no traffic charges; everyone else
        # talks to the public endpoint.
        if use_oss_internal:
            self.end_point_internal = _normalize_endpoint(
                end_point if end_point else _get_config('OSS_ENDPOINT_INTERNAL'
                                                        ))
            self.bucket = Bucket(self.auth, self.end_point_internal,
                                 self.bucket_name)
        else:
            self.bucket = Bucket(self.auth, self.end_point, self.bucket_name)
        # Public-endpoint client, kept for signing browser-facing URLs.
        self.bucket_public = Bucket(self.auth, self.end_point,
                                    self.bucket_name)

        # Fetch the bucket ACL purely as an existence check.
        try:
            self.bucket.get_bucket_acl().acl
        except oss2.exceptions.NoSuchBucket as e:
            raise SuspiciousOperation("Bucket '%s' does not exist." %
                                      self.bucket_name) from e

    def _get_key_name(self, name):
        """
        Get the object key name in OSS, e.g.,
        location: /media/
        input   : test.txt
        output  : media/test.txt
        """
        base_path = force_text(self.location)
        final_path = urljoin(base_path + "/", name)
        name = os.path.normpath(final_path.lstrip('/'))
        # normpath() uses the OS separator; OSS keys always use '/'.
        name = name.replace('\\', '/')
        if six.PY2:
            name = name.encode('utf-8')
        return name

    def _open(self, name, mode='rb'):
        """Download *name* into a temp file and return it wrapped in OssFile.

        Only read-only binary mode ('rb') is supported.

        Raises:
            ValueError: for any mode other than 'rb'.
            OssError: if the key is missing or the download fails.
        """
        logger().debug("name: %s, mode: %s", name, mode)
        if mode != "rb":
            raise ValueError("OSS files can only be opened in read-only mode")

        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        try:
            # Stage the object in a temporary file so callers get a
            # seekable local stream.
            tmpf = NamedTemporaryFile()
            obj = self.bucket.get_object(target_name)
            logger().info("content length: %d, requestid: %s",
                          obj.content_length, obj.request_id)
            if obj.content_length is None:
                shutil.copyfileobj(obj, tmpf)
            else:
                # Verified copy: checks the received length against the
                # response metadata.
                oss2.utils.copyfileobj_and_verify(obj,
                                                  tmpf,
                                                  obj.content_length,
                                                  request_id=obj.request_id)
            tmpf.seek(0)
            return OssFile(tmpf, target_name, self)
        except oss2.exceptions.NoSuchKey as e:
            raise OssError("%s does not exist" % name) from e
        except Exception as e:
            # Previously a bare ``except:``, which also swallowed
            # SystemExit/KeyboardInterrupt and discarded the cause;
            # narrowed to Exception with the original chained.
            raise OssError("Failed to open %s" % name) from e

    def _save(self, name, content):
        """Upload *content* under the key derived from *name*."""
        target_name = self._get_key_name(name)
        logger().debug("target name: %s", target_name)
        logger().debug("content: %s", content)
        self.bucket.put_object(target_name, content)
        return os.path.normpath(name)

    def create_dir(self, dirname):
        """Create a zero-byte 'directory' marker object ending in '/'."""
        target_name = self._get_key_name(dirname)
        if not target_name.endswith('/'):
            target_name += '/'

        self.bucket.put_object(target_name, '')

    def exists(self, name):
        """Return True if *name* exists as an object or as a key prefix.

        OSS has no real directories, so a trailing-slash name (or a plain
        name that is not an object) is treated as a directory and checked
        by listing keys under the prefix.
        """
        target_name = self._get_key_name(name)
        logger().debug("name: %s, target name: %s", name, target_name)
        if name.endswith("/"):
            # This looks like a directory, but OSS has no concept of
            # directories: check whether any key starts with this prefix.
            result = self.bucket.list_objects(prefix=target_name,
                                              delimiter='',
                                              marker='',
                                              max_keys=1)
            if len(result.object_list) == 0:
                logger().debug("object list: %s", result.object_list)
            else:
                logger().debug("object list: %s", result.object_list[0].key)
            return bool(result.object_list)

        exist = self.bucket.object_exists(target_name)
        logger().debug("'%s' exist: %s", target_name, exist)
        if not exist:
            # Not a file; it may still be a directory prefix, so retry
            # with a trailing slash.
            name2 = name + "/"
            logger().debug("to check %s", name2)
            return self.exists(name2)

        return exist

    def get_file_meta(self, name):
        """Lightweight metadata (length, mtime, etag) for *name*."""
        name = self._get_key_name(name)
        return self.bucket.get_object_meta(name)

    def size(self, name):
        """Object size in bytes."""
        file_meta = self.get_file_meta(name)
        return file_meta.content_length

    def modified_time(self, name):
        """Naive local-time datetime of the last modification."""
        file_meta = self.get_file_meta(name)
        return datetime.fromtimestamp(file_meta.last_modified)

    created_time = accessed_time = modified_time

    def get_modified_time(self, name):
        """Last-modified time; timezone-aware (UTC) when USE_TZ is set."""
        file_meta = self.get_file_meta(name)

        if settings.USE_TZ:
            # datetime.utcfromtimestamp() is deprecated since Python 3.12;
            # building the aware datetime directly is equivalent.
            return datetime.fromtimestamp(file_meta.last_modified, tz=utc)
        else:
            return datetime.fromtimestamp(file_meta.last_modified)

    get_created_time = get_accessed_time = get_modified_time

    def content_type(self, name):
        """Content-Type header stored on the object."""
        name = self._get_key_name(name)
        file_info = self.bucket.head_object(name)
        return file_info.content_type

    def listdir(self, name):
        """Return (dirs, files) directly under *name* ('/' delimiter)."""
        if name == ".":
            name = ""
        name = self._get_key_name(name)
        if not name.endswith('/'):
            name += "/"
        logger().debug("name: %s", name)

        files = []
        dirs = []

        for obj in ObjectIterator(self.bucket, prefix=name, delimiter='/'):
            if obj.is_prefix():
                dirs.append(obj.key)
            else:
                files.append(obj.key)

        logger().debug("dirs: %s", list(dirs))
        logger().debug("files: %s", files)
        return dirs, files

    def url(self, name, expire=60 * 60):
        """Signed GET URL valid for *expire* seconds (default one hour).

        Signing always uses the public endpoint since these URLs are
        handed to browsers; when OSS_CUSTOM_DOMAIN is set, its scheme and
        host replace those of the signed URL.
        """
        key = self._get_key_name(name)
        custom_domain = getattr(settings, 'OSS_CUSTOM_DOMAIN', None)
        url = self.bucket_public.sign_url('GET', key, expire)
        url = url.replace('%2F', '/')
        if custom_domain:
            url_list = list(urlsplit(url))
            custom_domain_list = list(urlsplit(custom_domain))
            url_list[0] = custom_domain_list[0]
            url_list[1] = custom_domain_list[1]
            url = urlunsplit(url_list)
        return url

    def delete(self, name):
        """Delete the object for *name*; returns the oss2 result object."""
        name = self._get_key_name(name)
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)

    def delete_with_slash(self, dirname):
        """Delete the zero-byte directory-marker object for *dirname*."""
        name = self._get_key_name(dirname)
        if not name.endswith('/'):
            name += '/'
        logger().debug("delete name: %s", name)
        return self.bucket.delete_object(name)
Example #27
0
import os
import json
from oss2 import Auth, Bucket, ObjectIterator

# Deployment credentials and targets live in the package manifest.
with open('package.json', encoding='utf-8') as f:
    package = json.load(f)

Project = package['name']
Deploy = package['deploy']['prod']

auth = Auth(Deploy['accessKeyId'], Deploy['accessKeySecret'])
bucket = Bucket(auth, Deploy['endpoint'], Deploy['bucket'])

# Empty the bucket so the upload below is an exact mirror of the build.
for obj in ObjectIterator(bucket):
    bucket.delete_object(obj.key)
    print('Delete <' + obj.key + '> Success')

print('Clear OSS OK!')

# Upload every file under dist/<project>, keyed by its path relative to
# that directory (backslashes normalised for OSS keys).
for root, _, files in os.walk('dist/' + Project):
    for file in files:
        local = os.path.join(root, file).replace('\\', '/')
        key = local.replace('dist/' + Project + '/', '')
        bucket.put_object_from_file(key, local)
        print('Send <' + key + '> Success')

print('Sync OSS OK!')
Example #28
0
 def _init_bucket(self, bucket: dict):
     """Build a Bucket client from a config mapping.

     Expects the keys 'endpoint' and 'bucket'; a missing key passes
     None through to oss2 unchanged.
     """
     cfg_endpoint = bucket.get('endpoint')
     cfg_name = bucket.get('bucket')
     return Bucket(self.auth, endpoint=cfg_endpoint, bucket_name=cfg_name)
Example #29
0
 def _get_bucket(self, auth):
     """Bucket client for this storage; honours a CNAME endpoint if set."""
     if self.cname:
         return Bucket(auth, self.cname, self.bucket_name, is_cname=True)
     return Bucket(auth, self.end_point, self.bucket_name)
	def __init__(self, access_key_id: str, access_key_secret: str, endpoint: str, bucket_name: str):
		"""Keep the credentials and build the oss2 Auth/Bucket clients."""
		self.access_key_id = access_key_id
		self.access_key_secret = access_key_secret
		auth = Auth(access_key_id, access_key_secret)
		self.auth = auth
		self.bucket = Bucket(auth, endpoint, bucket_name)
 def _get_bucket(self, auth):
     """Return a Bucket client for the configured endpoint and bucket name."""
     return Bucket(auth, endpoint=self.end_point, bucket_name=self.bucket_name)
Example #32
0
 def get_bucket(self, bucket_name):
     """Return a Bucket client for *bucket_name* on this client's endpoint."""
     return Bucket(self.auth, self.endpoint, bucket_name)