Example #1
 def new_file(self, *args, **kwargs):
     """
     Create the file object to append to as data is coming in.
     """
     super().new_file(*args, **kwargs)
     self.file = FileWrapper(
         ObjectPart(pool_name=self.pool_name, part_key=self.part_key))
     self.file_md5_handler = FileMD5Handler()
Example #2
 def new_file(self, *args, **kwargs):
     """
     Create the file object to append to as data is coming in.
     """
     super().new_file(*args, **kwargs)
     ho = build_harbor_object(using=self.using,
                              pool_name=self.pool_name,
                              obj_id=self.obj_key)
     self.file = FileWrapper(ho)
     self.file_md5_handler = FileMD5Handler()
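Examples #1 and #2 override new_file() as defined by Django's FileUploadHandler protocol: Django calls new_file() once per incoming file, then receive_data_chunk() for every chunk, then file_complete(). For context, here is a minimal sketch of the surrounding handler under that assumption; only the new_file() body comes from the examples above, while the handler name and the other two method bodies are illustrative guesses, not the project's code.

from django.core.files.uploadhandler import FileUploadHandler

class HarborUploadHandler(FileUploadHandler):  # hypothetical name
    def new_file(self, *args, **kwargs):
        """Create the file object to append to as data is coming in."""
        super().new_file(*args, **kwargs)
        ho = build_harbor_object(using=self.using,
                                 pool_name=self.pool_name,
                                 obj_id=self.obj_key)
        self.file = FileWrapper(ho)
        self.file_md5_handler = FileMD5Handler()

    def receive_data_chunk(self, raw_data, start):
        # 'start' is the chunk's byte offset, which matches the
        # update(offset=..., data=...) signature used in Examples #3,
        # #4 and #7.
        self.file.write(raw_data)
        self.file_md5_handler.update(offset=start, data=raw_data)
        return None  # chunk consumed; later handlers do not see it

    def file_complete(self, file_size):
        return self.file  # hand the wrapped object back to Django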
Example #3
    def post_object_by_chunk(self,
                             ho,
                             endpoint_url: str,
                             bucket_name: str,
                             object_key: str,
                             bucket_token: str,
                             per_size: int = 32 * 1024**2):
        """
        Upload an object in chunks.

        :raises: AsyncError
        """
        backup_str = f"endpoint_url={endpoint_url}, bucket name={bucket_name}, token={bucket_name}"
        file = FileWrapper(ho=ho).open()
        while True:
            offset = file.offset
            reset = None
            if offset == 0:
                reset = True

            api = self._build_post_chunk_url(endpoint_url=endpoint_url,
                                             bucket_name=bucket_name,
                                             object_key=object_key,
                                             offset=offset,
                                             reset=reset)
            data = file.read(per_size)
            if not data:
                if offset >= file.size:
                    break
                raise AsyncError(
                    f'Failed async object({object_key}), to backup({backup_str}), post by chunk, read empty bytes from ceph, '
                    f'object sync may be incomplete',
                    code='FailedAsyncObject')

            md5_handler = FileMD5Handler()
            md5_handler.update(offset=0, data=data)
            hex_md5 = md5_handler.hex_md5
            headers = {
                'Authorization': f'BucketToken {bucket_token}',
                'Content-MD5': hex_md5
            }
            # First attempt; on a transient network error fall through
            # and retry once below.
            try:
                r = self._do_request(method='post',
                                     url=api,
                                     data=data,
                                     headers=headers)
                if r.status_code == 200:
                    continue
            except requests.exceptions.RequestException:
                pass

            # Second and final attempt.
            try:
                r = self._do_request(method='post',
                                     url=api,
                                     data=data,
                                     headers=headers)
            except requests.exceptions.RequestException as e:
                raise AsyncError(
                    f'Failed async object({object_key}), to backup({backup_str}), post by chunk, {str(e)}',
                    code='FailedAsyncObject')

            if r.status_code == 200:
                continue

            raise AsyncError(
                f'Failed async object({object_key}), to backup({backup_str}), post by chunk, {r.text}',
                code='FailedAsyncObject')

        file.close()
        return True
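The duplicated try/except in this example implements a retry: a chunk POST that raises a RequestException or returns a non-200 status gets exactly one second attempt before AsyncError is raised. Factored into a method on the same class, the pattern reads like the sketch below; _post_chunk_with_retry is a hypothetical name, not part of the project.

import requests

def _post_chunk_with_retry(self, api, data, headers):
    """Try the chunk POST twice and return the final response;
    a RequestException propagates only if the retry also fails
    at the network level."""
    try:
        r = self._do_request(method='post', url=api,
                             data=data, headers=headers)
        if r.status_code == 200:
            return r
    except requests.exceptions.RequestException:
        pass  # transient failure; retry once below
    # Second and final attempt; exceptions propagate to the caller.
    return self._do_request(method='post', url=api,
                            data=data, headers=headers)

With this helper, the loop body needs a single status check and a single AsyncError branch per chunk.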
Example #4
    def post_object_by_chunk(self,
                             obj,
                             ho,
                             backup: BackupBucket,
                             per_size: int = 32 * 1024**2):
        """
        Upload an object in chunks.

        :raises: AsyncError
        """
        async_time = timezone.now()
        file = FileWrapper(ho=ho).open()
        while True:
            offset = file.offset
            reset = None
            if offset == 0:
                reset = True

            api = self._build_post_chunk_url(backup=backup,
                                             object_key=obj.na,
                                             offset=offset,
                                             reset=reset)
            data = file.read(per_size)
            if not data:
                if offset >= file.size:
                    break
                raise AsyncError(
                    f'Failed async object({obj.na}), {backup}, post by chunk, read empty bytes from ceph, object sync may be incomplete',
                    code='FailedAsyncObject')

            md5_handler = FileMD5Handler()
            md5_handler.update(offset=0, data=data)
            hex_md5 = md5_handler.hex_md5
            headers = {
                'Authorization': f'BucketToken {backup.bucket_token}',
                'Content-MD5': hex_md5
            }
            # First attempt; on a transient network error fall through
            # and retry once below.
            try:
                r = self._do_request(method='post',
                                     url=api,
                                     data=data,
                                     headers=headers)
                if r.status_code == 200:
                    continue
            except requests.exceptions.RequestException:
                pass

            # Second and final attempt.
            try:
                r = self._do_request(method='post',
                                     url=api,
                                     data=data,
                                     headers=headers)
            except requests.exceptions.RequestException as e:
                raise AsyncError(
                    f'Failed async object({obj.na}), {backup}, post by chunk, {str(e)}',
                    code='FailedAsyncObject')

            if r.status_code == 200:
                continue

            raise AsyncError(
                f'Failed async object({obj.na}), {backup}, post by chunk, {r.text}',
                code='FailedAsyncObject')

        file.close()
        self._update_object_async_time(obj=obj,
                                       async_time=async_time,
                                       backup_num=backup.backup_num)
Example #5
 def new_file(self, *args, **kwargs):
     """
     Create the file object to append to as data is coming in.
     """
     super().new_file(*args, **kwargs)
     self.file_md5_handler = FileMD5Handler()
Example #6
 def new_file(self, *args, **kwargs):
     super().new_file(*args, **kwargs)
     if self.activated:
         self.file_md5_handler = FileMD5Handler()
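All seven examples construct FileMD5Handler with no arguments; Examples #3, #4 and #7 feed it through update(offset=..., data=...) and read the digest back from hex_md5. The class itself is not shown, so the following stand-in is an assumption consistent with that usage, not the project's implementation (the real class may, for example, buffer out-of-order chunks):

import hashlib

class FileMD5Handler:
    """Incremental MD5 over chunks that arrive in offset order."""

    def __init__(self):
        self._md5 = hashlib.md5()
        self.offset = 0  # number of bytes hashed so far

    def update(self, offset, data):
        # Assumes sequential writes; the offset is only a sanity check.
        if offset != self.offset:
            raise ValueError('non-sequential chunk offset')
        self._md5.update(data)
        self.offset += len(data)

    @property
    def hex_md5(self):
        return self._md5.hexdigest()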
Example #7
    def complete_iter(self, request, bucket, upload, obj, obj_rados, obj_etag,
                      complete_numbers, used_upload_parts,
                      unused_upload_parts):
        white_space_bytes = b' '
        xml_declaration_bytes = b'<?xml version="1.0" encoding="UTF-8"?>\n'
        start_time = time.time()
        yielded_doctype = False
        try:
            # Combine the rados data of all parts into the object's rados data
            md5_handler = FileMD5Handler()
            offset = 0
            parts_count = len(complete_numbers)

            part_rados = ObjectPart(part_key='')
            for num in complete_numbers:
                part = used_upload_parts[num]
                for r in self.save_part_to_object_iter(
                        obj=obj,
                        obj_rados=obj_rados,
                        part_rados=part_rados,
                        offset=offset,
                        part=part,
                        md5_handler=md5_handler,
                        obj_etag=obj_etag,
                        parts_count=parts_count):
                    if r is None:
                        if not yielded_doctype:
                            yielded_doctype = True
                            yield xml_declaration_bytes
                        else:
                            yield white_space_bytes
                    elif r is True:
                        break
                    elif isinstance(r, exceptions.S3Error):
                        raise r

                offset = offset + part.size

                # Periodically emit whitespace so the client connection
                # does not time out while parts are being combined.
                now_time = time.time()
                if now_time - start_time < 10:
                    continue
                start_time = now_time
                if not yielded_doctype:
                    yielded_doctype = True
                    yield xml_declaration_bytes
                else:
                    yield white_space_bytes

            # Update the object's metadata
            if not self.update_obj_metedata(obj=obj,
                                            size=offset,
                                            hex_md5=md5_handler.hex_md5,
                                            share_code=upload.obj_perms_code):
                raise exceptions.S3InternalError(
                    extend_msg='update object metadata error.')

            # Multipart upload is complete; clean up.
            # Delete the metadata and rados data of unused parts.
            for r in self.clear_parts_cache_iter(unused_upload_parts,
                                                 is_rm_metadata=True):
                if r is None:
                    if not yielded_doctype:
                        yielded_doctype = True
                        yield xml_declaration_bytes
                    else:
                        yield white_space_bytes

            # Delete the rados data of already-combined parts, keeping the part metadata.
            for r in self.clear_parts_cache_iter(used_upload_parts,
                                                 is_rm_metadata=False):
                if r is None:
                    if not yielded_doctype:
                        yielded_doctype = True
                        yield xml_declaration_bytes
                    else:
                        yield white_space_bytes

            # Delete the multipart upload task.
            if not upload.safe_delete():
                if not upload.safe_delete():  # retry the delete once
                    upload.set_completed()  # deletion failed; mark the upload completed instead

            location = request.build_absolute_uri()
            data = {
                'Location': location,
                'Bucket': bucket.name,
                'Key': obj.na,
                'ETag': obj_etag
            }
            content = renders.CommonXMLRenderer(
                root_tag_name='CompleteMultipartUploadResult',
                with_xml_declaration=not yielded_doctype).render(data)
            yield content.encode(encoding='utf-8')  # merge complete

        except exceptions.S3Error as e:
            upload.set_uploading()  # on error, set status back to uploading
            content = renders.CommonXMLRenderer(
                root_tag_name='Error',
                with_xml_declaration=not yielded_doctype).render(e.err_data())
            yield content.encode(encoding='utf-8')
        except Exception:
            upload.set_uploading()  # on error, set status back to uploading
            content = renders.CommonXMLRenderer(
                root_tag_name='Error',
                with_xml_declaration=not yielded_doctype).render(
                    exceptions.S3InternalError().err_data())
            yield content.encode(encoding='utf-8')
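Stripped of the S3 specifics, Example #7 is a streaming-response pattern: long-running work interleaved with keep-alive bytes. The XML declaration has to be the first thing on the wire (whitespace may not precede it), after which single spaces are insignificant to the client's XML parser, so the generator tracks whether the declaration has been sent and renders the final document without a second one. A generic sketch of that pattern, with all names illustrative:

import time

def body_with_keepalive(work_steps, render_result, interval=10):
    """Yield at most one keep-alive token per 'interval' seconds
    while 'work_steps' (an iterable of long-running steps) runs,
    then yield the rendered result document."""
    xml_declaration = b'<?xml version="1.0" encoding="UTF-8"?>\n'
    declaration_sent = False
    last_sent = time.time()
    for _ in work_steps:
        now = time.time()
        if now - last_sent < interval:
            continue
        last_sent = now
        if not declaration_sent:
            declaration_sent = True
            yield xml_declaration  # nothing may precede the declaration
        else:
            yield b' '  # insignificant whitespace before the root element
    # Render without a duplicate declaration if one was already sent.
    yield render_result(with_xml_declaration=not declaration_sent)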