def downloadFile(self, bucketName, objectKey, downloadFile=None, partSize=5 * 1024 * 1024, taskNum=1,
                 enableCheckpoint=False, checkpointFile=None, header=None, versionId=None,
                 progressCallback=None, imageProcess=None, extensionHeaders=None):
    """Resume-download an encrypted object to a local file.

    Uses the object key as the local file name when *downloadFile* is not
    given, then delegates the transfer to a decrypting download operation.
    """
    effective_header = GetObjectHeader() if header is None else header
    target_file = objectKey if downloadFile is None else downloadFile
    operation = DecryptedDownloadOperation(
        self.cipher_generator,
        util.to_string(bucketName),
        util.to_string(objectKey),
        util.to_string(target_file),
        partSize,
        taskNum,
        enableCheckpoint,
        util.to_string(checkpointFile),
        effective_header,
        versionId,
        progressCallback,
        self,
        imageProcess,
        progress.NONE_NOTIFIER,
        extensionHeaders=extensionHeaders)
    return _resume_download_with_operation(operation)
def _resumer_download(bucketName, objectKey, downloadFile, partSize, taskNum, enableCheckPoint,
                      checkPointFile, header, versionId, progressCallback, obsClient,
                      imageProcess=None, notifier=progress.NONE_NOTIFIER):
    """Run a resumable download; zero-byte objects are materialised directly."""
    operation = downloadOperation(
        util.to_string(bucketName), util.to_string(objectKey), util.to_string(downloadFile),
        partSize, taskNum, enableCheckPoint, util.to_string(checkPointFile), header,
        versionId, progressCallback, obsClient, imageProcess, notifier)
    if operation.size != 0:
        return operation._download()
    # Zero-byte object: nothing to transfer. Drop any stale checkpoint state,
    # create an empty destination file and report 0/0/0 progress.
    operation._delete_record()
    operation._delete_tmp_file()
    open(operation.fileName, 'wb').close()
    callback = operation.progressCallback
    if callable(callback):  # callable(None) is False, so the None check is implied
        callback(0, 0, 0)
    return operation._metedata_resp
def _resumer_upload(bucketName, objectKey, uploadFile, partSize, taskNum, enableCheckPoint,
                    checkPointFile, checkSum, metadata, progressCallback, obsClient, headers,
                    extensionHeaders=None):
    """Thin wrapper: build an uploadOperation and run the resumable upload."""
    operation = uploadOperation(
        util.to_string(bucketName), util.to_string(objectKey), util.to_string(uploadFile),
        partSize, taskNum, enableCheckPoint, util.to_string(checkPointFile), checkSum,
        metadata, progressCallback, obsClient, headers, extensionHeaders=extensionHeaders)
    return operation._upload()
def uploadFile(self, bucketName, objectKey, uploadFile, partSize=9 * 1024 * 1024, taskNum=1,
               enableCheckpoint=False, checkpointFile=None, checkSum=False, metadata=None,
               progressCallback=None, headers=None, extensionHeaders=None, encoding_type=None):
    """Resume-upload a local file as an encrypted object."""
    self.log_client.log(INFO, 'enter resume upload file...')
    # Required arguments must be present before any work starts.
    for value, message in ((bucketName, 'bucketName is empty'),
                           (objectKey, 'objectKey is empty'),
                           (uploadFile, 'uploadFile is empty')):
        self._assert_not_null(value, message)
    operation = EncryptedUploadOperation(
        self.cipher_generator, util.to_string(bucketName), util.to_string(objectKey),
        util.to_string(uploadFile), partSize, taskNum, enableCheckpoint,
        util.to_string(checkpointFile), checkSum, metadata, progressCallback, self,
        headers, extensionHeaders=extensionHeaders, encoding_type=encoding_type)
    return operation._upload()
def _download_part(self, part):
    """Fetch one byte range of the object and persist it via _download_part_write.

    On success, marks the part complete in the checkpoint record (and flushes
    the record to disk when checkpointing is enabled). A 3xx/4xx response
    aborts the whole download. The underlying HTTP response is always closed.
    """
    get_object_request = GetObjectRequest(versionId=self.versionId, imageProcess=self.imageProcess)
    get_object_header = self._copy_get_object_header(self.header)
    get_object_header.range = util.to_string(part['offset']) + '-' + util.to_string(part['length'])
    if self._is_abort():
        return
    part_response = None
    try:
        resp = self.obsClient.getObject(bucketName=self.bucketName, objectKey=self.objectKey,
                                        getObjectRequest=get_object_request,
                                        headers=get_object_header, notifier=self.notifier,
                                        extensionHeaders=self.extensionHeaders)
        if resp.status < 300:
            part_response = resp.body.response
            self._download_part_write(part_response, part)
            self._record['downloadParts'][part['partNumber'] - 1]['isCompleted'] = True
            if self.enableCheckPoint:
                with self._lock:
                    self._write_record(self._record)
        else:
            # 3xx/4xx errors will not succeed on retry — abort the download.
            if 300 < resp.status < 500:
                self._do_abort('errorCode:{0}, errorMessage:{1}'.format(resp.errorCode, resp.errorMessage))
            self._exception += (
                'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}\n'.format(
                    resp.errorCode, resp.errorMessage))
            self.obsClient.log_client.log(
                ERROR, 'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'.format(
                    resp.errorCode, resp.errorMessage))
    except Exception as e:
        self.obsClient.log_client.log(DEBUG, 'download part %s error, %s' % (part['partNumber'], e))
        self.obsClient.log_client.log(ERROR, traceback.format_exc())
    finally:
        # BUG FIX: the original guarded the close on a `response` variable that
        # was never assigned, so the HTTP response was never closed (leak).
        if part_response is not None:
            part_response.close()
def _resumer_download(bucketName, objectKey, downloadFile, partSize, taskNum, enableCheckPoint,
                      checkPointFile, header, versionId, obsClient):
    """Run a resumable download; a zero-byte object just yields an empty file."""
    operation = downloadOperation(
        to_string(bucketName), to_string(objectKey), to_string(downloadFile), partSize,
        taskNum, enableCheckPoint, to_string(checkPointFile), header, versionId, obsClient)
    if operation.size != 0:
        return operation._download()
    # Nothing to transfer: just create/truncate the destination file.
    open(operation.fileName, 'wb').close()
    return operation._metedata_resp
def _resume_download(bucketName, objectKey, downloadFile, partSize, taskNum, enableCheckPoint,
                     checkPointFile, header, versionId, progressCallback, obsClient,
                     imageProcess=None, notifier=progress.NONE_NOTIFIER, extensionHeaders=None):
    """Build a downloadOperation and hand it to the shared resume-download driver."""
    operation = downloadOperation(
        util.to_string(bucketName), util.to_string(objectKey), util.to_string(downloadFile),
        partSize, taskNum, enableCheckPoint, util.to_string(checkPointFile), header, versionId,
        progressCallback, obsClient, imageProcess, notifier, extensionHeaders=extensionHeaders)
    return _resume_download_with_operation(operation)
def _download_part(self, part):
    """Fetch one byte range of the object and write it into the temp file at
    the part's offset.

    On success, marks the part complete in the checkpoint record (and flushes
    the record to disk when checkpointing is enabled). A 3xx/4xx response
    aborts the whole download. The underlying HTTP response is always closed.
    """
    get_object_request = GetObjectRequest(versionId=self.versionId, imageProcess=self.imageProcess)
    get_object_header = self._copy_get_object_header(self.header)
    get_object_header.range = util.to_string(part['offset']) + '-' + util.to_string(part['length'])
    if self._is_abort():
        return
    part_response = None
    try:
        resp = self.obsClient._getObjectWithNotifier(
            bucketName=self.bucketName, objectKey=self.objectKey,
            getObjectRequest=get_object_request, headers=get_object_header,
            notifier=self.notifier, extensionHeaders=self.extensionHeaders)
        if resp.status < 300:
            part_response = resp.body.response
            chunk_size = 65536
            if part_response is not None:
                # Stream the body straight into the correct slice of the temp file.
                with open(_to_unicode(self._tmp_file), 'rb+') as fs:
                    fs.seek(part['offset'], 0)
                    while True:
                        chunk = part_response.read(chunk_size)
                        if not chunk:
                            break
                        fs.write(chunk)
            self._record['downloadParts'][part['partNumber'] - 1]['isCompleted'] = True
            if self.enableCheckPoint:
                with self._lock:
                    self._write_record(self._record)
        else:
            # 3xx/4xx errors will not succeed on retry — abort the download.
            if 300 < resp.status < 500:
                self._do_abort(
                    'errorCode:{0}, errorMessage:{1}'.format(
                        resp.errorCode, resp.errorMessage))
            self._exception.append(
                'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                .format(resp.errorCode, resp.errorMessage))
            self.obsClient.log_client.log(
                ERROR,
                'response from server is something wrong. ErrorCode:{0}, ErrorMessage:{1}'
                .format(resp.errorCode, resp.errorMessage))
    except Exception as e:
        self.obsClient.log_client.log(
            DEBUG, 'download part %s error, %s' % (part['partNumber'], e))
        self.obsClient.log_client.log(ERROR, traceback.format_exc())
    finally:
        # BUG FIX: the original tested an always-None `response` variable, so
        # the HTTP response (bound to a misspelt local `respone`) was never
        # closed — a connection/socket leak on every part.
        if part_response is not None:
            part_response.close()
def __make_canonicalstring_str_list(self, keylist, method, interesting_headers):
    """Render the sorted header map into the canonical-string fragment list.

    Metadata headers are stringified and stripped; other SDK headers keep
    their value verbatim; everything else contributes the bare value.
    """
    meta_prefix = self.ha._get_meta_header_prefix()
    generic_prefix = self.ha._get_header_prefix()
    fragments = [method + '\n']
    for key in keylist:
        name = util.to_string(key)
        value = interesting_headers[name]
        if value is None:
            value = ''
        if name.startswith(meta_prefix):
            entry = name + ':' + util.to_string(value).strip()
        elif name.startswith(generic_prefix):
            entry = name + ':' + value
        else:
            entry = value
        fragments.append(entry)
        fragments.append('\n')
    return fragments
def UTCToLocalMid(strUTC):
    """Convert a UTC timestamp string ('%Y-%m-%dT%H:%M:%S.%fZ') to the local
    date at midnight, formatted as '%Y/%m/%d 00:00:00'.

    Returns None when *strUTC* is None.
    """
    if strUTC is None:
        return None
    import calendar  # stdlib; local import keeps the module surface unchanged
    date_format = '%Y-%m-%dT%H:%M:%S.%fZ'
    CST_FORMAT = '%Y/%m/%d 00:00:00'
    gmt_time = time.strptime(util.to_string(strUTC), date_format)
    # BUG FIX: the original used time.mktime(gmt_time) - time.timezone, which
    # applies only the standard (non-DST) offset; during daylight-saving time
    # the result is off by an hour and can shift the local date near midnight.
    # calendar.timegm interprets the struct_time as UTC unambiguously.
    cst_time = time.localtime(calendar.timegm(gmt_time))
    return time.strftime(CST_FORMAT, cst_time)
def __make_canonical_string(self, method, bucket_name, key, path_args, headers, expires=None):
    """Assemble the string-to-sign: HTTP verb, canonical headers, canonical
    resource path, then the signable sub-resource query parameters."""
    interesting_headers = self.__make_canonicalstring_interesting_headers(headers, expires)
    fragments = self.__make_canonicalstring_str_list(
        sorted(interesting_headers.keys()), method, interesting_headers)

    # Canonical resource path: '/<bucket>[/]<encoded-key>', or '/' if empty.
    resource = ''
    virtual_host = self.server if self.is_cname else bucket_name
    if virtual_host:
        resource = '/' + virtual_host
        if not self.path_style or self.is_cname:
            resource += '/'
    if key:
        if not resource.endswith('/'):
            resource += '/'
        resource += util.encode_object_key(key)
    fragments.append(resource if resource else '/')

    if path_args:
        header_prefix = self.ha._get_header_prefix()
        query = '?'
        for name, value in sorted(path_args.items(), key=lambda item: item[0]):
            lowered = name.lower()
            if lowered not in const.ALLOWED_RESOURCE_PARAMTER_NAMES and not lowered.startswith(header_prefix):
                continue  # this parameter does not take part in signing
            encoded = util.encode_item(name, '/')
            if value is None:
                query += encoded + '&'
            else:
                query += encoded + '=' + util.to_string(value) + '&'
        # Drop the trailing '&' (or the bare '?' when nothing matched).
        fragments.append(query[:-1])
    return ''.join(fragments)
def __make_canonicalstring(self, method, bucket_name, key, path_args, headers, expires=None):
    """Build the legacy string-to-sign in one pass: HTTP verb, the signable
    ('interesting') headers in sorted order, the canonical resource path, then
    the signable query parameters."""
    str_list = []
    str_list.append(method + '\n')
    # Collect only headers that participate in signing: the content headers
    # plus anything carrying the SDK header prefix; values are lower-keyed
    # and joined into a single string.
    interesting_headers = {}
    if isinstance(headers, dict):
        for hash_key in headers.keys():
            lk = hash_key.lower()
            if lk in const.CONTENT_LIST or lk.startswith(
                    self.ha._get_header_prefix()):
                s = headers.get(hash_key)
                interesting_headers[lk] = ''.join(s)
    keylist = interesting_headers.keys()
    # The SDK date header supersedes Date; an explicit expiry overrides both.
    if self.ha.date_header() in keylist:
        interesting_headers[const.DATE_HEADER.lower()] = ''
    if expires:
        interesting_headers[const.DATE_HEADER.lower()] = expires
    # Content-Type / Content-MD5 must always be present (possibly empty).
    if const.CONTENT_TYPE_HEADER.lower() not in keylist:
        interesting_headers[const.CONTENT_TYPE_HEADER.lower()] = ''
    if const.CONTENT_MD5_HEADER.lower() not in keylist:
        interesting_headers[const.CONTENT_MD5_HEADER.lower()] = ''
    keylist = sorted(interesting_headers.keys())
    for k in keylist:
        header_key = util.to_string(k)
        val = '' if interesting_headers[
            header_key] is None else interesting_headers[header_key]
        if header_key.startswith(self.ha._get_meta_header_prefix()):
            # Metadata headers are stringified and stripped of edge whitespace.
            str_list.append(header_key + ':' + util.to_string(val).strip())
        elif header_key.startswith(self.ha._get_header_prefix()):
            str_list.append(header_key + ':' + val)
        else:
            # Plain content headers contribute only the value line.
            str_list.append(val)
        str_list.append('\n')
    # Canonical resource path: '/<bucket>[/]<encoded-key>', or '/' if empty.
    URI = ''
    _bucket_name = self.server if self.is_cname else bucket_name
    if _bucket_name:
        URI += '/'
        URI += _bucket_name
        if not self.path_style or self.is_cname:
            URI += '/'
    if key:
        if not URI.endswith('/'):
            URI += '/'
        URI += util.encode_object_key(key)
    if URI:
        str_list.append(URI)
    else:
        str_list.append('/')
    if path_args:
        # e1 gathers valueless parameters, e2 gathers key=value pairs; the
        # final replace/slice collapses '?&'/'&&' and drops the trailing '&'.
        # NOTE(review): this places all valueless parameters before the valued
        # ones, whereas the helper-based variant interleaves them in sorted
        # order — confirm which ordering the server expects before unifying.
        e1 = '?'
        e2 = '&'
        cannoList = sorted(path_args.items(), key=lambda d: d[0])
        for path_key, path_value in cannoList:
            # Only whitelisted sub-resources and prefixed headers are signed.
            if path_key.lower(
            ) in const.ALLOWED_RESOURCE_PARAMTER_NAMES or path_key.lower(
            ).startswith(self.ha._get_header_prefix()):
                path_key = util.encode_item(path_key, '/')
                if path_value is None:
                    e1 += path_key + '&'
                    continue
                e2 += path_key + '=' + util.to_string(path_value) + '&'
        e = (e1 + e2).replace('&&', '&').replace('?&', '?')[:-1]
        str_list.append(e)
    return ''.join(str_list)
def _read(*args, **kwargs):
    """Proxy for the wrapped response's read(): feeds read progress to the
    notifier and checks the body is not truncated against Content-Length."""
    chunk = self.result.read(*args, **kwargs)
    if chunk:
        size = len(chunk)
        if size > 0:
            self.notifier.send(size)
        self.readedCount += size
        return chunk
    # End of stream: fail loudly when fewer bytes arrived than advertised.
    if self.contentLength is not None and self.contentLength != self.readedCount:
        raise Exception('premature end of Content-Length delimiter message body (expected:'
                        + util.to_string(self.contentLength) + '; received:'
                        + util.to_string(self.readedCount) + ')')
    return chunk
def _resumer_upload(bucketName, objectKey, uploadFile, partSize, taskNum, enableCheckPoint,
                    checkPointFile, checkSum, metadata, obsClient):
    """Legacy wrapper: construct an uploadOperation and run the resumable upload."""
    operation = uploadOperation(
        to_string(bucketName), to_string(objectKey), to_string(uploadFile), partSize,
        taskNum, enableCheckPoint, to_string(checkPointFile), checkSum, metadata, obsClient)
    return operation._upload()