def _upload_part_for_process(self, part, partEtag_infos, upload_infos, status):
    """Upload one part of a multipart upload (process-pool worker).

    part: dict with 'partNumber', 'offset' and 'length' describing the
        file slice to send.
    partEtag_infos: Manager-shared dict mapping partNumber -> etag of
        successfully uploaded parts.
    upload_infos: Manager-shared list of per-part success flags, indexed
        by partNumber - 1.
    status: Manager-shared Value; 0 = keep going, 1 = a fatal 4xx error
        occurred and remaining workers should skip their parts.
    """
    # Skip work entirely once another worker has flagged a fatal error.
    if status.value == 0:
        try:
            resp = self.obsClient.uploadPart(self.bucketName, self.objectKey, part['partNumber'],
                                             self._record['uploadId'], self.fileName, isFile=True,
                                             partSize=part['length'], offset=part['offset'])
        except IOError:
            raise IOError('can not attach file {0}'.format(self.fileName))
        if resp.status < 300:
            complete_part = CompletePart(toInt(part['partNumber']), resp.body.etag)
            partEtag_infos[toInt(part['partNumber'])] = resp.body.etag
            upload_infos[toInt(part['partNumber'])-1] = True
            if self.enableCheckPoint:
                # Persist progress so an interrupted upload can resume later.
                with self._lock:
                    record = self._get_record()
                    record['uploadParts'][part['partNumber']-1]['isCompleted'] = True
                    record['partEtags'].append(complete_part)
                    self._write_record(record)
        elif resp.status > 300 and resp.status < 500:
            # 4xx: client-side problem (permissions, quota, ...). Flip the
            # shared status flag so every worker stops picking up parts.
            self.obsClient.log_client.log(ERROR, 'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'.format(resp.errorCode, resp.errorMessage))
            self._exception.append('errorCode:{0}.errorMessage:{1}'.format(resp.errorCode, resp.errorMessage))
            self._lock.acquire()
            status.value = 1
            self._lock.release()
            upload_infos[toInt(part['partNumber'])-1] = False
        else:
            # 5xx or other: mark this part failed; a retry may succeed.
            upload_infos[toInt(part['partNumber'])-1] = False
def parseGetBucketLifecycleConfiguration(xml):
    """Parse a GetBucketLifecycle XML body into a LifecycleResponse."""
    root = ET.fromstring(xml)
    entries = []
    for rule_node in root.findall('./Rule'):
        id_node = rule_node.find('./ID')
        rule_id = id_node.text if id_node is not None else None
        prefix_node = rule_node.find('./Prefix')
        rule_prefix = prefix_node.text if prefix_node is not None else None
        status_node = rule_node.find('./Status')
        rule_status = status_node.text if status_node is not None else None

        expiration = None
        expira_node = rule_node.find('./Expiration')
        if expira_node is not None:
            date_node = expira_node.find('./Date')
            # Convert the server's UTC timestamp to local time.
            date = UTCToLocal(date_node.text) if date_node is not None else None
            days_node = expira_node.find('./Days')
            days = common_util.toInt(days_node.text) if days_node is not None else None
            expiration = Expiration(date=date, days=days)

        noncurrent_node = rule_node.find('./NoncurrentVersionExpiration')
        if noncurrent_node is not None:
            noncurrentVersionExpiration = NoncurrentVersionExpiration(
                noncurrentDays=common_util.toInt(noncurrent_node.find('./NoncurrentDays').text))
        else:
            noncurrentVersionExpiration = None

        # One Rule object per <Rule> element in the configuration.
        entries.append(Rule(id=rule_id, prefix=rule_prefix, status=rule_status,
                            expiration=expiration,
                            noncurrentVersionExpiration=noncurrentVersionExpiration))
    return LifecycleResponse(lifecycleConfig=Lifecycle(rule=entries))
def parseGetBucketWebsiteConfiguration(xml):
    """Parse a GetBucketWebsite XML body into a BucketWebsite object.

    Fix: child elements are now looked up relative to their parent
    element (e.g. Suffix inside IndexDocument) instead of searching the
    whole document with 'root.find(".//...")'.  The old document-wide
    search could pick up a same-named tag from an unrelated section
    (e.g. a RoutingRule's HostName when resolving RedirectAllRequestsTo).
    """
    root = ET.fromstring(xml)

    def _text(node, path):
        # Stringified text of a scoped child element, or None when absent.
        child = node.find(path)
        return common_util.toString(child.text) if child is not None else None

    redirectAll = None
    redirectAllRequestTo = root.find('./RedirectAllRequestsTo')
    if redirectAllRequestTo is not None:
        redirectAll = RedirectAllRequestTo(
            hostName=_text(redirectAllRequestTo, './HostName'),
            protocol=_text(redirectAllRequestTo, './Protocol'))

    index = None
    indexDocument = root.find('./IndexDocument')
    if indexDocument is not None:
        index = IndexDocument(suffix=_text(indexDocument, './Suffix'))

    error = None
    errorDocument = root.find('./ErrorDocument')
    if errorDocument is not None:
        error = ErrorDocument(key=_text(errorDocument, './Key'))

    routs = None
    routingRules = root.findall('./RoutingRules/RoutingRule')
    if routingRules is not None and len(routingRules) > 0:
        routs = []
        for rout in routingRules:
            http_error_node = rout.find('./Condition/HttpErrorCodeReturnedEquals')
            condition = Condition(
                keyPrefixEquals=_text(rout, './Condition/KeyPrefixEquals'),
                httpErrorCodeReturnedEquals=common_util.toInt(http_error_node.text)
                if http_error_node is not None else None)

            redirect_code_node = rout.find('./Redirect/HttpRedirectCode')
            redirect = Redirect(
                protocol=_text(rout, './Redirect/Protocol'),
                hostName=_text(rout, './Redirect/HostName'),
                replaceKeyPrefixWith=_text(rout, './Redirect/ReplaceKeyPrefixWith'),
                replaceKeyWith=_text(rout, './Redirect/ReplaceKeyWith'),
                httpRedirectCode=common_util.toInt(redirect_code_node.text)
                if redirect_code_node is not None else None)
            routs.append(RoutingRule(condition=condition, redirect=redirect))

    return BucketWebsite(redirectAllRequestTo=redirectAll, indexDocument=index,
                         errorDocument=error, routingRules=routs)
def parseListParts(xml):
    """Parse a ListParts XML body into a ListPartsResponse.

    Bug fix: MaxParts previously passed the whole Element object to
    common_util.toInt() instead of the element's text (so maxParts was
    never parsed correctly); it now reads '.text' like every other
    numeric field in this parser.
    """
    root = ET.fromstring(xml)
    bucketName = root.find('.//Bucket')
    bucketName = common_util.toString(bucketName.text) if bucketName is not None else None
    objectKey = root.find('.//Key')
    objectKey = common_util.toString(objectKey.text) if objectKey is not None else None
    uploadId = root.find('.//UploadId')
    uploadId = common_util.toString(uploadId.text) if uploadId is not None else None
    storageClass = root.find('.//StorageClass')
    storageClass = common_util.toString(storageClass.text) if storageClass is not None else None
    partNumbermarker = root.find('.//PartNumberMarker')
    partNumbermarker = common_util.toInt(partNumbermarker.text) if partNumbermarker is not None else None
    nextPartNumberMarker = root.find('.//NextPartNumberMarker')
    nextPartNumberMarker = common_util.toInt(nextPartNumberMarker.text) if nextPartNumberMarker is not None else None
    maxParts = root.find('.//MaxParts')
    # Fixed: was common_util.toInt(maxParts) (the Element, not its text).
    maxParts = common_util.toInt(maxParts.text) if maxParts is not None else None
    isTruncated = root.find('.//IsTruncated')
    isTruncated = common_util.toBool(isTruncated.text) if isTruncated is not None else None

    # Initiator of the multipart task.
    initiatorid = root.find('.//Initiator/ID')
    initiatorid = common_util.toString(initiatorid.text) if initiatorid is not None else None
    displayname = root.find('.//Initiator/DisplayName')
    displayname = common_util.toString(displayname.text) if displayname is not None else None
    initiator = Initiator(id=initiatorid, name=displayname)

    # Owner of the object being assembled.
    ownerid = root.find('.//Owner/ID')
    ownerid = common_util.toString(ownerid.text) if ownerid is not None else None
    ownername = root.find('.//Owner/DisplayName')
    ownername = common_util.toString(ownername.text) if ownername is not None else None
    owner = Owner(owner_id=ownerid, owner_name=ownername)

    part_list = root.findall('./Part')
    parts = []
    if part_list:
        for part in part_list:
            partnumber = part.find('./PartNumber')
            partnumber = common_util.toInt(partnumber.text) if partnumber is not None else None
            modifieddate = part.find('./LastModified')
            # Convert the server's UTC timestamp to local time.
            modifieddate = UTCToLocal(modifieddate.text) if modifieddate is not None else None
            etag = part.find('./ETag')
            etag = common_util.toString(etag.text) if etag is not None else None
            size = part.find('./Size')
            size = common_util.toLong(size.text) if size is not None else None
            __part = Part(partNumber=partnumber, lastModified=modifieddate, etag=etag, size=size)
            parts.append(__part)

    return ListPartsResponse(bucketName=bucketName, objectKey=objectKey, uploadId=uploadId,
                             initiator=initiator, owner=owner, storageClass=storageClass,
                             partNumberMarker=partNumbermarker,
                             nextPartNumberMarker=nextPartNumberMarker, maxParts=maxParts,
                             isTruncated=isTruncated, parts=parts)
def parseGetBucketCors(xml):
    """Parse a GetBucketCors XML body into a list of CorsRule objects."""
    root = ET.fromstring(xml)

    def _texts(rule_node, tag):
        # Collect the text of every direct child matching `tag`.
        values = []
        found = rule_node.findall('./' + tag)
        if found is not None:
            for item in found:
                values.append(common_util.toString(item.text))
        return values

    corsList = []
    rules = root.findall('./CORSRule')
    if rules is not None:
        for rule in rules:
            id_node = rule.find('./ID')
            rule_id = common_util.toString(id_node.text) if id_node is not None else None
            age_node = rule.find('./MaxAgeSeconds')
            max_age = common_util.toInt(age_node.text) if age_node is not None else None
            corsList.append(CorsRule(id=rule_id,
                                     allowedMethod=_texts(rule, 'AllowedMethod'),
                                     allowedOrigin=_texts(rule, 'AllowedOrigin'),
                                     allowedHeader=_texts(rule, 'AllowedHeader'),
                                     maxAgeSecond=max_age,
                                     exposeHeader=_texts(rule, 'ExposeHeader')))
    return corsList
def parseGetBucketStorageInfo(xml):
    """Parse a GetBucketStorageInfo XML body into a GetBucketStorageInfoResponse."""
    root = ET.fromstring(xml)
    size_node = root.find('.//Size')
    size_text = size_node.text if size_node is not None else None
    count_node = root.find('.//ObjectNumber')
    count_text = count_node.text if count_node is not None else None
    return GetBucketStorageInfoResponse(size=common_util.toLong(size_text),
                                        objectNumber=common_util.toInt(count_text))
def _upload_part(self, part):
    """Upload one part of a multipart upload (thread-pool worker).

    part: dict with 'partNumber', 'offset' and 'length' describing the
        file slice to send.  Appends True/False to self._uploadinfos and,
        on success, records the part's etag in self._record['partEtags']
        for the final completeMultipartUpload call.
    """
    # self._status is cleared by _change_status() after a fatal (4xx)
    # error in another worker; skip remaining parts in that case.
    if self._status:
        try:
            resp = self.obsClient.uploadPart(self.bucketName, self.objectKey,
                                             part['partNumber'],
                                             self._record['uploadId'],
                                             self.fileName, isFile=True,
                                             partSize=part['length'],
                                             offset=part['offset'])
        except IOError:
            raise IOError('can not attach file {0}. Please check'.format(
                self.fileName))
        if resp.status < 300:
            complete_part = CompletePart(toInt(part['partNumber']),
                                         resp.body.etag)
            # Shared record/list updates are serialized under the lock.
            with self._lock:
                self._record['uploadParts'][part['partNumber'] -
                                            1]['isCompleted'] = True
                self._record['partEtags'].append(complete_part)
                self._uploadinfos.append(True)
                if self.enableCheckPoint:
                    # Persist progress so an interrupted upload can resume.
                    self._write_record(self._record)
        elif resp.status > 300 and resp.status < 500:
            # 4xx: permanent client-side failure -- flag all workers to stop.
            self.obsClient.log_client.log(
                ERROR,
                'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                .format(resp.errorCode, resp.errorMessage))
            self._exception.append('errorCode:{0}.errorMessage:{1}'.format(
                resp.errorCode, resp.errorMessage))
            self._change_status()
            self._uploadinfos.append(False)
        else:
            # 5xx/other: mark this part failed; it may succeed on retry.
            self._uploadinfos.append(False)
def parseOptionsBucket(headers):
    """Build an OptionsResp from CORS preflight response headers."""
    option = OptionsResp()
    # Map response headers onto OptionsResp attributes.  The 'Contorl'
    # spelling is the SDK's historical attribute naming; callers rely on it.
    header_map = (
        ('accessContorlAllowOrigin', 'access-control-allow-origin'),
        ('accessContorlAllowHeaders', 'access-control-allow-headers'),
        ('accessContorlAllowMethods', 'access-control-allow-methods'),
        ('accessContorlExposeHeaders', 'access-control-expose-headers'),
    )
    for attr, header_key in header_map:
        setattr(option, attr, headers.get(header_key))
    option.accessContorlMaxAge = common_util.toInt(headers.get('access-control-max-age'))
    return option
def _download_part(self, part):
    """Download one ranged part into the temp file (thread-pool worker).

    part: dict with 'partNumber', 'offset' and 'length'.  Appends
    True/False to self._downinfos and marks the part completed in the
    checkpoint record on success.
    """
    get_object_request = GetObjectRequest(versionId=self.versionId)
    # NOTE(review): HTTP Range is inclusive, so 'offset-(offset+length)'
    # requests length+1 bytes -- confirm part sizing accounts for this.
    self.header.range = str(
        part['offset']) + '-' + str(part['offset'] + part['length'])
    try:
        resp = self.obsClient.getObject(
            bucketName=self.bucketName,
            objectKey=self.objectKey,
            getObjectRequest=get_object_request,
            headers=self.header)
        if resp.status < 300:
            respone = resp.body.response
            chunk_size = 65536
            # The whole write into the shared temp file is serialized.
            with self._lock:
                if respone is not None:
                    with open(_to_unicode(self._tmp_file), 'rb+') as fs:
                        fs.seek(part['offset'], 0)
                        position = toInt(part['offset'])
                        while True:
                            chunk = respone.read(chunk_size)
                            if not chunk:
                                break
                            fs.write(chunk)
                            # Position advances by whole chunk_size steps;
                            # only the final short chunk differs, after
                            # which the loop ends anyway.
                            position += chunk_size
                            fs.seek(position, 0)
                        fs.close()
                        respone.close()
                self._downinfos.append(True)
                self._record['downloadParts'][part['partNumber'] -
                                              1]['isCompleted'] = True
        elif resp.status > 300 and resp.status < 500:
            # 4xx: fatal -- flag the failure and stop the other workers.
            with self._lock:
                self._downinfos.append(False)
                self._change_status()
                self._exception.append(
                    'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                    .format(resp.errorCode, resp.errorMessage))
                self.obsClient.log_client.log(
                    ERROR,
                    'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                    .format(resp.errorCode, resp.errorMessage))
        else:
            # 5xx/other: mark this part failed; a retry may succeed.
            self._downinfos.append(False)
            self._exception.append(
                'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                .format(resp.errorCode, resp.errorMessage))
    except Exception as e:
        self.obsClient.log_client.log(
            ERROR, 'something wraong happened. Please check.')
        raise e
    finally:
        # Always refresh the temp file's mtime in the checkpoint record so
        # a later resume can detect external modification.
        if self.enableCheckPoint:
            with self._lock:
                self._record['tmpFileStatus'][1] = os.path.getmtime(
                    self._tmp_file)
                self._write_record(self._record)
def _download_part_process(self, part, download_infos, status):
    """Download one ranged part into the temp file (process-pool worker).

    part: dict with 'partNumber', 'offset' and 'length'.
    download_infos: Manager-shared list of per-part success flags,
        indexed by partNumber - 1.
    status: Manager-shared Value; 0 = keep going, 1 = a fatal 4xx error
        occurred and remaining workers should skip their parts.
    """
    if status.value == 0:
        get_object_request = GetObjectRequest(versionId=self._record['versionId'])
        # NOTE(review): HTTP Range is inclusive, so this requests
        # length+1 bytes -- confirm part sizing accounts for it.
        self.header.range = str(part['offset'])+'-'+str(part['offset']+part['length'])
        try:
            resp = self.obsClient.getObject(bucketName=self._record['bucketName'],
                                            objectKey=self._record['objectKey'],
                                            getObjectRequest=get_object_request,
                                            headers=self.header)
            if resp.status < 300:
                respone = resp.body.response
                chunk_size = 65536
                # The whole write into the shared temp file is serialized.
                with self._lock:
                    if respone is not None:
                        with open(_to_unicode(self._tmp_file), 'rb+') as f:
                            f.seek(part['offset'], 0)
                            position = toInt(part['offset'])
                            while True:
                                chunk = respone.read(chunk_size)
                                if not chunk:
                                    break
                                f.write(chunk)
                                # Position advances by whole chunk_size
                                # steps; only the final short chunk differs,
                                # after which the loop ends anyway.
                                position += chunk_size
                                f.seek(position, 0)
                            respone.close()
                            f.close()
                    download_infos[part['partNumber'] - 1] = True
                if self.enableCheckPoint:
                    # Persist progress so an interrupted download can resume.
                    with self._lock:
                        record = self._get_record()
                        record['tmpFileStatus'][1] = os.path.getmtime(self._tmp_file)
                        record['downloadParts'][part['partNumber']-1]['isCompleted'] = True
                        self._write_record(record)
            elif resp.status > 300 and resp.status < 500:
                # 4xx: fatal -- signal the other workers via the shared status.
                with self._lock:
                    download_infos[part['partNumber'] - 1] = False
                # NOTE(review): self._lock is re-acquired right after the
                # with-block above; if this were ever nested inside it, a
                # non-reentrant Manager lock would deadlock -- verify.
                self._lock.acquire()
                status.value = 1
                self._lock.release()
                self._exception.append('response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                                       .format(resp.errorCode, resp.errorMessage))
            else:
                # 5xx/other: mark this part failed; a retry may succeed.
                download_infos[part['partNumber'] - 1] = False
                self._exception.append('response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                                       .format(resp.errorCode, resp.errorMessage))
        except Exception as e:
            download_infos[part['partNumber'] - 1] = False
            self.obsClient.log_client.log(ERROR, 'something wraong happened. Please check.')
            # Even on failure, refresh the temp file's mtime so a resume can
            # detect external modification.
            if self.enableCheckPoint:
                with self._lock:
                    record = self._get_record()
                    record['tmpFileStatus'][1] = os.path.getmtime(self._tmp_file)
                    self._write_record(record)
            raise e
def parse_content(cls, conn, objectKey, downloadPath=None, chuckSize=65536, loadStreamInMemory=False):
    """Consume a getObject HTTP response from `conn` and build a GetResult.

    loadStreamInMemory: read the whole body into an in-memory ObjectStream.
    downloadPath: when set (and not loading in memory), stream the body to
        this path and return the path as the body.
    Otherwise the raw stream is handed back and the connection is
    intentionally left open for the caller to read and close.
    """
    if not conn:
        return cls.getNoneResult('connection is null')
    closeConn = True
    try:
        result = conn.getresponse()
        if not result:
            return cls.getNoneResult('response is null')
        if not result.status < 300:
            # Non-2xx: delegate to the XML error parser.
            return cls.__parse_xml(result)
        if loadStreamInMemory:
            LOG(DEBUG, 'loadStreamInMemory is True, read stream into memory')
            buffer = result.read()
            body = ObjectStream(buffer=buffer, size=len(buffer))
        elif downloadPath is None or common_util.toString(
                downloadPath).strip() == '':
            LOG(DEBUG, 'DownloadPath is null, return conn directly')
            # The caller consumes the stream; do not close the connection.
            closeConn = False
            body = ObjectStream(response=ResponseWrapper(conn, result))
        else:
            objectKey = common_util.safe_encode(objectKey)
            downloadPath = common_util.safe_encode(downloadPath)
            file_path = cls.get_data(result, objectKey, downloadPath,
                                     chuckSize)
            body = 'DownloadPath : %s' % str(file_path)
            LOG(DEBUG, body)
        status = common_util.toInt(result.status)
        reason = result.reason
        header = cls.__parse_headers(dict(result.getheaders()))
        return GetResult(status=status, reason=reason, header=header,
                         body=body)
    except RedirectException as re:
        # Redirects are handled by the caller; propagate untouched.
        raise re
    except Exception as e:
        LOG(ERROR, traceback.format_exc())
        return cls.getNoneResult(common_util.toString(e))
    finally:
        if closeConn:
            try:
                conn.close()
            except Exception as ex:
                LOG(ERROR, ex)
def parseListMultipartUploads(xml):
    """Parse a ListMultipartUploads XML body into a ListMultipartUploadsResponse."""
    root = ET.fromstring(xml)

    def _str(node, path):
        # Stringified text of a child element, or None when missing.
        found = node.find(path)
        return common_util.toString(found.text) if found is not None else None

    bucket = _str(root, './Bucket')
    keyMarker = _str(root, './KeyMarker')
    uploadIdMarker = _str(root, './UploadIdMarker')
    nextKeyMarker = _str(root, './NextKeyMarker')
    nextUploadIdMarker = _str(root, './NextUploadIdMarker')
    max_node = root.find('./MaxUploads')
    maxUploads = common_util.toInt(max_node.text) if max_node is not None else None
    truncated_node = root.find('./IsTruncated')
    isTruncated = common_util.toBool(truncated_node.text) if truncated_node is not None else None
    prefix = _str(root, './Prefix')
    delimiter = _str(root, './Delimiter')

    uploadlist = []
    for upload_node in root.findall('./Upload'):
        initiator = Initiator(id=_str(upload_node, './Initiator/ID'),
                              name=_str(upload_node, './Initiator/DisplayName'))
        owner = Owner(owner_id=_str(upload_node, './Owner/ID'),
                      owner_name=_str(upload_node, './Owner/DisplayName'))
        initiated_node = upload_node.find('./Initiated')
        # Convert the server's UTC timestamp to local time.
        initiated = UTCToLocal(initiated_node.text) if initiated_node is not None else None
        uploadlist.append(Upload(key=_str(upload_node, './Key'),
                                 uploadID=_str(upload_node, './UploadId'),
                                 initiator=initiator, owner=owner,
                                 storageClass=_str(upload_node, './StorageClass'),
                                 initiated=initiated))

    commonlist = []
    for comm in root.findall('./CommonPrefixes'):
        commonlist.append(CommonPrefix(prefix=_str(comm, './Prefix')))

    return ListMultipartUploadsResponse(bucket=bucket, keyMarker=keyMarker,
                                        uploadIdMarker=uploadIdMarker,
                                        nextKeyMarker=nextKeyMarker,
                                        nextUploadIdMarker=nextUploadIdMarker,
                                        maxUploads=maxUploads,
                                        isTruncated=isTruncated,
                                        prefix=prefix, delimiter=delimiter,
                                        upload=uploadlist,
                                        commonPrefixs=commonlist)
def parseListVersions(xml):
    """Parse a ListVersions XML body into an ObjectVersions result."""
    root = ET.fromstring(xml)

    def _text(node, path):
        # Raw text of a child element, or None when missing.
        found = node.find(path)
        return found.text if found is not None else None

    head = ObjectVersionHead(name=_text(root, './Name'),
                             prefix=_text(root, './Prefix'),
                             keyMarker=_text(root, './KeyMarker'),
                             versionIdMarker=_text(root, './VersionIdMarker'),
                             nextKeyMarker=_text(root, './NextKeyMarker'),
                             nextVersionIdMarker=_text(root, './NextVersionIdMarker'),
                             maxKeys=common_util.toInt(_text(root, './MaxKeys')),
                             isTruncated=common_util.toBool(_text(root, './IsTruncated')))

    def _owner_of(node):
        # Owner is optional; when present, ID/DisplayName are searched
        # anywhere below the node (same './/' lookup as the original code).
        if node.find('./Owner') is None:
            return None
        return Owner(owner_id=_text(node, './/ID'),
                     owner_name=_text(node, './/DisplayName'))

    version_list = []
    for version in root.findall('./Version'):
        version_list.append(ObjectVersion(
            key=_text(version, './Key'),
            versionId=_text(version, './VersionId'),
            isLatest=common_util.toBool(_text(version, './IsLatest')),
            lastModified=UTCToLocal(_text(version, './LastModified')),
            eTag=_text(version, './ETag'),
            size=common_util.toLong(_text(version, './Size')),
            owner=_owner_of(version),
            storageClass=_text(version, './StorageClass')))

    marker_list = []
    for marker in root.findall('./DeleteMarker'):
        marker_list.append(ObjectDeleteMarker(
            key=_text(marker, './Key'),
            versionId=_text(marker, './VersionId'),
            isLatest=common_util.toBool(_text(marker, './IsLatest')),
            lastModified=UTCToLocal(_text(marker, './LastModified')),
            owner=_owner_of(marker)))

    prefix_list = []
    for prefix_node in root.findall('./CommonPrefixes'):
        prefix_list.append(CommonPrefix(prefix=_text(prefix_node, './Prefix')))

    ret = ObjectVersions(head=head, markers=marker_list, commonPrefixs=prefix_list)
    ret.versions = version_list
    return ret
def parseGetBucketQuota(xml):
    """Parse a GetBucketQuota XML body into a GetBucketQuotaResponse."""
    root = ET.fromstring(xml)
    quota_node = root.find('.//StorageQuota')
    quota_text = quota_node.text if quota_node is not None else None
    return GetBucketQuotaResponse(quota=common_util.toInt(quota_text))
def _upload(self):
    """Drive the resumable multipart upload end to end.

    Prepares (or resumes from) the checkpoint record, uploads every part
    -- with a thread pool on Windows, a process pool elsewhere -- and
    finally completes the multipart task, aborting it on fatal errors.

    Bug fix: pool.apply_async() previously CALLED
    self._upload_part_for_process(part, ...) in the parent process and
    submitted its return value (None) as the task, so parts were
    uploaded serially in the parent instead of concurrently in the pool.
    The callable and its argument tuple are now passed separately.

    Returns the completeMultipartUpload response.
    Raises Exception when one or more parts failed.
    """
    if not self.enableCheckPoint:
        self._prepare()
    else:
        self._load()
    self.__upload_parts = self._get_upload_parts()
    if IS_WINDOWS:
        # Thread-pool path: shared state lives on self, guarded by self._lock.
        self._uploadinfos = []
        self._status = True
        thread_pools = _ThreadPool(functools.partial(self._produce, upload_parts=self.__upload_parts),
                                   [self._consume] * self.taskNum, self._lock)
        thread_pools.run()
        if not min(self._uploadinfos):
            # At least one part failed.
            if not self._status:
                # A 4xx response occurred: the task cannot succeed, abort it.
                self.obsClient.abortMultipartUpload(self.bucketName, self.objectKey, self._record['uploadId'])
                self.obsClient.log_client.log(ERROR, 'the code from server is 4**, please check space、persimission and so on.')
                self._delete_record()
            if len(self._exception) > 0:
                raise Exception(self._exception[0])
            raise Exception('some parts are failed when upload. Please try agagin')
        # Assemble the etags in part-number order for the complete call.
        part_Etags = []
        for part in sorted(self._record['partEtags'], key=lambda x: x['partNum']):
            part_Etags.append(CompletePart(partNum=part['partNum'], etag=part['etag']))
        self.obsClient.log_client.log(INFO, 'Completing to upload multiparts')
        resp = self.obsClient.completeMultipartUpload(self.bucketName, self.objectKey, self._record['uploadId'],
                                                      CompleteMultipartUploadRequest(part_Etags))
    else:
        # Process-pool path: progress is shared through Manager proxies.
        partEtag_info = {}
        for part_info in self._record['partEtags']:
            partEtag_info[part_info['partNum']] = part_info['etag']
        partEtag_infos = multiprocessing.Manager().dict(partEtag_info)
        upload_info = []
        for part_info in self._record['uploadParts']:
            upload_info.append(part_info['isCompleted'])
        upload_infos = multiprocessing.Manager().list(upload_info)
        status = multiprocessing.Manager().Value('i', 0)
        pool = Pool(processes=self.taskNum)
        for part in self.__upload_parts:
            # Fixed: pass the callable and its arguments separately instead
            # of calling the worker here and submitting its None result.
            pool.apply_async(self._upload_part_for_process,
                             (part, partEtag_infos, upload_infos, status))
        pool.close()
        pool.join()
        if not min(upload_infos):
            # At least one part failed.
            if status.value == 1:
                # A worker saw a 4xx response: abort the multipart task.
                self.obsClient.abortMultipartUpload(self.bucketName, self.objectKey, self._record['uploadId'])
                self.obsClient.log_client.log(ERROR, 'the code from server is 4**, please check')
                self._delete_record()
            if len(self._exception) > 0:
                raise Exception(self._exception[0])
            raise Exception('some parts are failed when upload. Please try agagin')
        # Assemble the etags in part-number order for the complete call.
        part_Etags = []
        partETags = sorted(partEtag_infos.items(), key=lambda d: d[0])
        for key, value in partETags:
            part_Etags.append(CompletePart(partNum=toInt(key), etag=value))
        self.obsClient.log_client.log(INFO, 'Completing to upload multiparts')
        resp = self.obsClient.completeMultipartUpload(self.bucketName, self.objectKey, self._record['uploadId'],
                                                      CompleteMultipartUploadRequest(part_Etags))
    if resp.status < 300:
        # Success: the checkpoint record is no longer needed.
        if self.enableCheckPoint:
            self._delete_record()
    else:
        if not self.enableCheckPoint:
            # No checkpoint to resume from, so abort the server-side task.
            self.obsClient.abortMultipartUpload(self.bucketName, self.objectKey, self._record['uploadId'])
            self.obsClient.log_client.log(ERROR, 'something is wrong when complete multipart.ErrorCode:{0}. ErrorMessage:{1}'.format(
                resp.errorCode, resp.errorMessage))
        else:
            # With checkpointing, only give up on a definitive 4xx error.
            if resp.status > 300 and resp.status < 500:
                self.obsClient.abortMultipartUpload(self.bucketName, self.objectKey, self._record['uploadId'])
                self.obsClient.log_client.log(ERROR, 'something is wrong when complete multipart.ErrorCode:{0}. ErrorMessage:{1}'.format(
                    resp.errorCode, resp.errorMessage))
                self._delete_record()
    return resp
def __parse_xml(cls, result, methodName=None, chuckSize=65536, readable=False):
    """Read the full response body and convert it into a GetResult.

    result: an http response object (status/reason/read/getheaders).
    methodName: when set, 'parse' + capitalized methodName is resolved on
        convert_util and used to turn a 2xx XML body into a typed result.
    chuckSize: read chunk size in bytes.
    readable: when True, a 307 redirect is NOT raised as RedirectException.
    """
    status = common_util.toInt(result.status)
    reason = result.reason
    code = None
    message = None
    body = None
    requestId = None
    hostId = None
    resource = None
    headers = dict(result.getheaders())
    xml = None
    # Accumulate the whole body; xml stays None for an empty body.
    while True:
        chunk = result.read(chuckSize)
        if not chunk:
            break
        xml = chunk if xml is None else xml + chunk
    if status == 307 and not readable and ('location' in headers or 'Location' in headers):
        # Server asked for a redirect: remember the target on the shared
        # context and signal the caller via RedirectException so the
        # request can be retried against the new location.
        location = headers.get('location')
        if location is None:
            location = headers.get('Location')
        LOG(WARNING, 'http code is %d, need to redirect to %s', status, location)
        cls.CONTEXT.location = location
        raise RedirectException(
            'http code is {0}, need to redirect to {1}'.format(
                status, location))
    else:
        header = cls.__parse_headers(headers)
        if status < 300:
            if methodName is not None:
                # e.g. 'listObjects' -> 'parseListObjects' on convert_util.
                methodName = 'parse' + methodName[:1].upper(
                ) + methodName[1:]
                parseMethod = getattr(convert_util, methodName)
                if parseMethod is not None:
                    if xml:
                        xml = xml if IS_PYTHON2 else xml.decode('UTF-8')
                        LOG(DEBUG, 'recv Msg:%s', xml)
                        try:
                            # Strip whatever PATTERN matches (XML prolog)
                            # before handing the document to the parser.
                            search = cls.PATTERN.search(xml)
                            xml = xml if search is None else xml.replace(
                                search.group(), '')
                            body = parseMethod(xml, dict(header))
                        except Exception as e:
                            LOG(ERROR, e)
                    else:
                        # Some operations carry their result in headers only.
                        body = parseMethod(dict(header))
            requestId = headers.get('x-amz-request-id')
        elif xml:
            # Error response: extract the standard S3-style error fields.
            xml = xml if IS_PYTHON2 else xml.decode('UTF-8')
            try:
                search = cls.PATTERN.search(xml)
                xml = xml if search is None else xml.replace(
                    search.group(), '')
                root = ET.fromstring(xml)
                code = root.find('./Code')
                code = code.text if code is not None else None
                message = root.find('./Message')
                message = message.text if message is not None else None
                requestId = root.find('./RequestId')
                requestId = requestId.text if requestId is not None else None
                hostId = root.find('./HostId')
                hostId = hostId.text if hostId is not None else None
                key = root.find('./Key')
                bucket = root.find('./BucketName')
                # The failing resource is the bucket name when present,
                # otherwise the object key.
                resource = bucket if bucket is not None else key
                resource = resource.text if resource is not None else None
            except Exception as ee:
                LOG(ERROR, common_util.toString(ee))
                LOG(ERROR, traceback.format_exc())
    LOG(
        DEBUG,
        'http response result:status:%d,reason:%s,code:%s,message:%s,headers:%s',
        status, reason, code, message, header)
    return GetResult(code=code, message=message, status=status,
                     reason=reason, body=body, requestId=requestId,
                     hostId=hostId, resource=resource, header=header)
def parse_content(cls, conn, objectKey, downloadPath=None, chuckSize=65536, loadStreamInMemory=False, connHolder=None):
    """Consume a getObject HTTP response from `conn` and build a GetResult.

    loadStreamInMemory: read the whole body into an in-memory ObjectStream.
    downloadPath: when set (and not loading in memory), stream the body to
        this path and return its URL in the ObjectStream body.
    connHolder: optional pooled-connection holder; its createTimeStamp is
        refreshed and it participates in cleanup via GetResult.doClose.
    Otherwise the raw stream is handed back and the connection is
    intentionally left open for the caller to read and close.
    """
    if not conn:
        return cls.getNoneResult('connection is null')
    closeConn = True
    result = None
    try:
        result = conn.getresponse()
        if not result:
            return cls.getNoneResult('response is null')
        if connHolder and hasattr(connHolder, 'createTimeStamp'):
            # Refresh the pooled connection's age marker.
            connHolder.createTimeStamp = time.time()
        if not common_util.toInt(result.status) < 300:
            # Non-2xx: delegate to the XML error parser.
            return cls.__parse_xml(result)
        if loadStreamInMemory:
            LOG(DEBUG, 'loadStreamInMemory is True, read stream into memory')
            buf = None
            # Accumulate the whole body in chunks.
            while True:
                chunk = result.read(chuckSize)
                if not chunk:
                    break
                if buf is None:
                    buf = chunk
                else:
                    buf += chunk
            body = ObjectStream(buffer=buf, size=common_util.toLong(len(buf)))
        elif downloadPath is None or common_util.toString(
                downloadPath).strip() == '':
            LOG(DEBUG, 'DownloadPath is null, return conn directly')
            # The caller consumes the stream; do not close the connection.
            closeConn = False
            body = ObjectStream(
                response=ResponseWrapper(conn, result, connHolder))
        else:
            objectKey = common_util.safe_encode(objectKey)
            downloadPath = common_util.safe_encode(downloadPath)
            file_path = cls.get_data(result, downloadPath, chuckSize)
            body = ObjectStream(url=common_util.toString(file_path))
            LOG(DEBUG, 'DownloadPath is ' + common_util.toString(file_path))
        status = common_util.toInt(result.status)
        reason = result.reason
        headers = dict(result.getheaders())
        header = cls.__parse_headers(headers)
        requestId = headers.get('x-amz-request-id')
        # Populate object metadata (etag, lastModified, ...) onto the body.
        convert_util.parseGetObject(dict(header), body)
        return GetResult(status=status, reason=reason, header=header,
                         body=body, requestId=requestId)
    except RedirectException as ex:
        # Redirects are handled by the caller; propagate untouched.
        raise ex
    except Exception as e:
        LOG(ERROR, traceback.format_exc())
        raise e
    finally:
        if closeConn:
            GetResult.doClose(result, conn, connHolder)
def parseListObjects(xml):
    """Parse a ListObjects XML body into a ListObjectsResponse."""
    # Root node of the response document.
    root = ET.fromstring(xml)
    name_node = root.find('Name')
    bucket_name = name_node.text if name_node is not None else None
    prefix = find_item(root, 'Prefix')
    marker = find_item(root, 'Marker')
    delimiter = find_item(root, 'Delimiter')
    max_keys = find_item(root, 'MaxKeys')
    is_truncated = find_item(root, 'IsTruncated')
    next_marker = find_item(root, 'NextMarker')

    # Per-object entries live in the <Contents> nodes.
    key_entries = []
    contents = root.findall('Contents')
    if contents is not None:
        for node in contents:
            owner = Owner(owner_id=find_item(node, './/ID'),
                          owner_name=find_item(node, './/DisplayName'))
            key_entries.append(Content(key=find_item(node, 'Key'),
                                       lastmodified=UTCToLocal(find_item(node, 'LastModified')),
                                       etag=find_item(node, 'ETag'),
                                       size=common_util.toLong(find_item(node, 'Size')),
                                       owner=owner,
                                       storageClass=find_item(node, 'StorageClass')))

    # Grouped key prefixes from the <CommonPrefixes> nodes.
    commonprefix_entries = []
    prefixes = root.findall('CommonPrefixes')
    if prefixes is not None:
        for p in prefixes:
            commonprefix_entries.append(CommonPrefix(prefix=find_item(p, 'Prefix')))

    return ListObjectsResponse(name=bucket_name, prefix=prefix, marker=marker,
                               delimiter=delimiter,
                               max_keys=common_util.toInt(max_keys),
                               is_truncated=common_util.toBool(is_truncated),
                               next_marker=next_marker, contents=key_entries,
                               commonprefixs=commonprefix_entries)