def set_bos_config(self, bos_ak, bos_sk, bos_sts, bos_host="bj.bcebos.com"):
    """Install new BOS credentials and rebuild the client.

    Stores a fresh BceClientConfiguration on ``self.config`` and replaces
    ``self.bos_client`` with a client built from it.
    """
    credentials = BceCredentials(bos_ak, bos_sk)
    self.config = BceClientConfiguration(credentials=credentials,
                                         endpoint=bos_host,
                                         security_token=bos_sts)
    self.bos_client = BosClient(self.config)
def upload_file(key, file_name):
    """Upload local *file_name* to the 'mysqlbackup' bucket under *key*.

    Debug logging is appended to /tmp/bos.log before the upload runs.
    """
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s%(filename)s[line:%(lineno)d] %(levelname)s%(message)s',
        datefmt='%a,%d %b %Y %H:%M:%S',
        filename='/tmp/bos.log',
        filemode='a')
    __logger = logging.getLogger(__name__)

    bucket_name = 'mysqlbackup'
    # Build a client from the shared module configuration and push the object.
    bos_client = BosClient(bos_conf.config)
    # Bucket creation stays disabled — the bucket is assumed to exist.
    # if not bos_client.does_bucket_exist(bucket_name):
    #     bos_client.create_bucket(bucket_name)
    bos_client.put_object_from_file(bucket_name, key, file_name)
def __init__(self):
    """Load configuration from the environment and prepare the BOS client."""
    # Defaults; get_bos_config() may override them from env vars.
    self.max_contents_count = 1
    self.max_contents_time = 1
    self.get_bos_config()
    self.bos_client = BosClient(self.config)
    # Per-file bookkeeping plus the buffered-append state.
    self.file_length_map = {}
    self._file_contents_to_add = b''
    self._file_contents_count = 0
    self._start_append_time = time.time()
def __init__(self, location=settings.MEDIA_URL, base_url=settings.MEDIA_URL):
    """Initialise the storage backend and make sure the bucket exists."""
    super(BOSStorage, self).__init__(location, base_url)
    creds = BceCredentials(BCS_BUCKET['AK'], BCS_BUCKET['SK'])
    cfg = BceClientConfiguration(credentials=creds,
                                 endpoint=BCS_BUCKET['END_POINT'])
    self.bos_client = BosClient(cfg)
    # Create the backing bucket on first use.
    if not self.bos_client.does_bucket_exist(self.bucket_name):
        self.bos_client.create_bucket(self.bucket_name)
def get_bos_file_bytes_io(path):
    """Fetch the object at *path* from BOS and return it as a BytesIO.

    Uses the module-level credential/bucket globals; the returned stream
    is already positioned at offset 0.
    """
    logger = logging.getLogger("baidubce.http.bce_http_client")
    logger.setLevel(logging.DEBUG)
    logging.info('Getting file from Baidu BOS...')
    credentials = BceCredentials(access_key_id, secret_access_key)
    client = BosClient(BceClientConfiguration(credentials=credentials,
                                              endpoint=bos_host))
    raw = client.get_object_as_string(bucket_name=bucket_name, key=path)
    return io.BytesIO(raw)
class PythonBosClient(object):
    """Thin wrapper around BosClient for listing and deleting objects."""

    def __init__(self):
        # Build the underlying client from the shared module config.
        self._client = BosClient(bos_conf.config)

    def list_buckets(self):
        """Print the name of every bucket owned by this account."""
        response = self._client.list_buckets()
        for bucket in response.buckets:
            print(bucket.name)

    def list_objects(self, bucket_name, prefix=""):
        """Print the keys of up to 1000 objects under *prefix*."""
        response = self._client.list_objects(bucket_name, 1000, prefix)
        for obj in response.contents:
            print(obj.key)

    def del_objects(self, bucket_name, key_name):
        """Delete up to 1000 objects whose keys start with *key_name*.

        Bug fix: the original listed with an undefined name ``prefix``
        (NameError at runtime); *key_name* is the intended prefix.
        """
        response = self._client.list_objects(bucket_name, 1000, key_name)
        for obj in response.contents:
            self._client.delete_object(bucket_name, obj.key)
            print(obj.key + " Deleted")

    def dels_objects(self, bucket_name, prefix):
        """Delete up to 1000 objects under *prefix*, printing each key."""
        response = self._client.list_objects(bucket_name, 1000, prefix)
        for obj in response.contents:
            self._client.delete_object(bucket_name, obj.key)
            print(obj.key + " Deleted")
def up_image(self, file, key_name='1234.jpg'):
    """Upload *file* (raw payload) to BOS and return its public URL.

    Args:
        file: payload passed straight to ``put_object_from_string``.
        key_name: object key to store under; defaults to the previously
            hard-coded '1234.jpg' so existing callers are unaffected.

    Returns:
        The https URL of the uploaded object, or None when the response
        carries no metadata.

    Raises:
        Whatever the BOS client raised during the upload.
    """
    config = BceClientConfiguration(
        credentials=BceCredentials(self.access_key_id,
                                   self.secret_access_key),
        endpoint=self.bos_host)
    client = BosClient(config)
    # Kept as an attribute because callers may read it after the upload.
    self.key_name = key_name
    try:
        res = client.put_object_from_string(bucket=self.back_name,
                                            key=self.key_name,
                                            data=file)
    except Exception:
        raise  # bare raise preserves the original traceback (was `raise e`)
    result = res.__dict__
    if result['metadata']:
        url = 'https://' + self.back_name + '.bj.bcebos.com/' + self.key_name
        print('图片上传成功')
        return url
class PythonBosClient(object):
    """Convenience facade over BosClient: list buckets/objects, bulk delete."""

    def __init__(self):
        # Client is built once from the shared module configuration.
        self._client = BosClient(bos_conf.config)

    def list_buckets(self):
        """Print every bucket name owned by the account."""
        for bucket in self._client.list_buckets().buckets:
            print(bucket.name)

    def list_objects(self, bucket_name, prefix=""):
        """Print keys of up to 1000 objects under *prefix*."""
        listing = self._client.list_objects(bucket_name, 1000, prefix)
        for obj in listing.contents:
            print(obj.key)

    def del_objects(self, bucket_name, key_name):
        """Delete up to 1000 objects whose keys start with *key_name*.

        Bug fix: the original passed an undefined ``prefix`` to
        list_objects, which raised NameError; *key_name* is the prefix.
        """
        listing = self._client.list_objects(bucket_name, 1000, key_name)
        for obj in listing.contents:
            self._client.delete_object(bucket_name, obj.key)
            print(obj.key + " Deleted")

    def dels_objects(self, bucket_name, prefix):
        """Delete up to 1000 objects under *prefix*, printing each key."""
        listing = self._client.list_objects(bucket_name, 1000, prefix)
        for obj in listing.contents:
            self._client.delete_object(bucket_name, obj.key)
            print(obj.key + " Deleted")
def up_image(self, key_name, file):
    """Upload *file* under *key_name*; swallow errors and return None.

    On success a confirmation is printed.  The function always returns
    None (the original contract), so callers cannot distinguish success
    from failure via the return value.
    """
    config = BceClientConfiguration(
        credentials=BceCredentials(self.access_key_id,
                                   self.secret_access_key),
        endpoint=self.bos_host)
    client = BosClient(config)
    # Removed the no-op `key_name = key_name` self-assignment.
    try:
        client.put_object_from_string(bucket=self.back_name,
                                      key=key_name,
                                      data=file)
    except Exception:
        # Deliberate best-effort: the caller only ever sees None.
        return None
    print('put success!')
def file_upload_bos(config, local_filename):
    """Upload *local_filename* to the bucket described by *config*.

    Returns 0 on success and -1 on any failure; errors are printed (with
    a traceback) rather than raised so the caller keeps running.
    """
    try:
        bucket_name = str(config['bucket_name'])
        ak, sk, host = config['AK'], config['SK'], config['HOST']

        client = BosClient(BceClientConfiguration(
            credentials=BceCredentials(str(ak), str(sk)),
            endpoint=str(host)))

        # Create the target bucket on demand.
        if not client.does_bucket_exist(bucket_name):
            client.create_bucket(bucket_name)

        # The object key is simply the file's base name.
        key = str(local_filename.split('/')[-1])
        client.put_object_from_file(bucket_name, str(key), str(local_filename))
        print(local_filename + ' has been uploaded to bucket:' + bucket_name)
        return 0
    except Exception as e:
        print('file_upload_bos exception:' + str(e))
        traceback.print_exc()
        return -1
def __init__(self, bucket_name, file_name, download, withRoot, upload_class=1):
    """Prepare the uploader and immediately run the chosen upload mode.

    upload_class 1 -> multipart upload, 0 -> single-shot upload.
    """
    self.bos_client = BosClient(bos_sample_conf.config)
    self.bucket_name = bucket_name
    self.file_name = file_name
    # Force browsers to download the object under its original name.
    self.user_headers = {
        'Content-Disposition': 'attachement; filename=' + file_name
    }
    self.download = download
    self.withRoot = withRoot
    # Dispatch straight from the constructor (the two cases are exclusive).
    if upload_class == 1:
        self._part_upload()
    elif upload_class == 0:
        self._small_upload()
def before_request():
    """Initialise the global spiders, the DB connection and the BOS client."""
    global article_detail_spider
    global video_detail_spider
    article_detail_spider = ArticleDetailProcessor()
    video_detail_spider = VideoDetailProcessor()
    # Shared runtime state lives on the `gl` module.
    gl.sql_operator = SqlOperator()
    gl.sql_operator.connect()
    gl.bucket_name = "dota2bucket"
    gl.bos_client = BosClient(bos_conf.config)
def __init__(self, access_key_id, secret_access_key, bucket_name='', endpoint=''):
    """Create a BOS client bound to *bucket_name*.

    A 3-second connection timeout is applied to every request.
    """
    super(UBosClient, self).__init__()
    # Build the client configuration.
    cfg = BceClientConfiguration(
        credentials=BceCredentials(access_key_id, secret_access_key),
        endpoint=endpoint)
    cfg.connection_timeout_in_mills = 3000  # request timeout, milliseconds
    # New BosClient bound to the default bucket.
    self.client = BosClient(cfg)
    self.bucket = bucket_name
def _bce_init_connection(self):
    """Build a BOS client from the long-lived account credentials.

    Returns the client, or an empty list when connecting fails (the
    original failure sentinel, kept for caller compatibility).
    """
    try:
        credentials = BceCredentials(
            access_key_id=self.bce_access_key_id,
            secret_access_key=self.bce_secret_access_key)
        cfg = BceClientConfiguration(credentials=credentials,
                                     endpoint=self.bce_bos_host)
        return BosClient(cfg)
    except BceError as e:
        self.logger.error('使用BCE当前凭证,在连接时发生错误 {}'.format(e))
        return []
    except Exception as e:
        self.logger.exception('使用BCE当前凭证,在连接时发生异常错误 {}'.format(e))
        return []
class BosConfigClient(object):
    """Helper for creating 'directories' and uploading files to BOS.

    Paths are 'bos://bucket/key' strings, split by the module-level
    get_object_info helper.
    """

    def __init__(self, bos_ak, bos_sk, bos_sts, bos_host="bj.bcebos.com"):
        """Build a client from an AK/SK pair plus an STS session token."""
        self.config = BceClientConfiguration(credentials=BceCredentials(
            bos_ak, bos_sk), endpoint=bos_host, security_token=bos_sts)
        self.bos_client = BosClient(self.config)

    def exists(self, path):
        """Return True when *path* names an existing object."""
        bucket_name, object_key = get_object_info(path)
        try:
            self.bos_client.get_object_meta_data(bucket_name, object_key)
            return True
        except exception.BceError:
            return False

    def makedirs(self, path):
        """Create an empty 'directory' marker object for *path*.

        BOS has no real directories: a zero-byte appendable object whose
        key ends in '/' stands in for one.  No-op when it already exists.
        """
        if not path.endswith('/'):
            path += '/'
        if self.exists(path):
            return
        bucket_name, object_key = get_object_info(path)
        if not object_key.endswith('/'):
            object_key += '/'
        init_data = b''
        self.bos_client.append_object(bucket_name=bucket_name,
                                      key=object_key,
                                      data=init_data,
                                      content_md5=content_md5(init_data),
                                      content_length=len(init_data))

    @staticmethod
    def join(path, *paths):
        """Join path components and normalise separators to '/'.

        Bug fix: ``str.replace`` returns a new string; the original
        discarded its result, so Windows-style '\\' separators leaked
        through unchanged.
        """
        return os.path.join(path, *paths).replace('\\', '/')

    def upload_object_from_file(self, path, filename):
        """Upload local *filename* into the BOS 'directory' *path*."""
        if not self.exists(path):
            self.makedirs(path)
        bucket_name, object_key = get_object_info(path)
        object_key = self.join(object_key, filename)
        print('Uploading file `%s`' % filename)
        self.bos_client.put_object_from_file(bucket=bucket_name,
                                             key=object_key,
                                             file_name=filename)
def _getSTSToken():
    """
    Get the token to upload the file
    :return:
    """
    if not Define.hubToken:
        raise Error.ParamError("please provide a valid token")
    # Ask the backend for temporary STS credentials.
    sts = invokeBackend("circuit/genSTS", {"token": Define.hubToken})
    credentials = BceCredentials(str(sts['accessKeyId']),
                                 str(sts['secretAccessKey']))
    bosClient = BosClient(
        BceClientConfiguration(credentials=credentials,
                               endpoint='http://bd.bcebos.com',
                               security_token=str(sts['sessionToken'])))
    return [Define.hubToken, bosClient, sts['dest']]
def _bce_init_connection_sts(self):
    """Obtain a temporary STS session and build a BOS client with it.

    Returns the client, or an empty list on failure (original sentinel,
    kept for caller compatibility).
    """
    try:
        sts_cfg = BceClientConfiguration(credentials=BceCredentials(
            access_key_id=self.bce_access_key_id,
            secret_access_key=self.bce_secret_access_key),
            endpoint=self.bce_sts_host)
        sts_client = StsClient(sts_cfg)

        # One-hour token covering every BOS resource/permission in 'bj'.
        duration_seconds = 3600
        access_control_dict = {
            "accessControlList": [{
                "service": "bce:bos",
                "region": "bj",
                "effect": "Allow",
                "resource": ["*"],
                "permission": ["*"],
            }]
        }
        response = sts_client.get_session_token(
            acl=access_control_dict, duration_seconds=duration_seconds)

        bos_cfg = BceClientConfiguration(
            credentials=BceCredentials(str(response.access_key_id),
                                       str(response.secret_access_key)),
            endpoint=self.bce_bos_host,
            security_token=response.session_token)
        return BosClient(bos_cfg)
    except BceError as e:
        self.logger.error('使用BCE当前连接令牌,在连接时发生错误 {}'.format(e))
        return []
    except Exception as e:
        self.logger.exception('使用BCE当前连接令牌,在连接时发生异常错误 {}'.format(e))
        return []
class BOSStorage(BcsStorage):
    """FileStorage base class backed by BOS and the local Django storage.

    Overrides the storage location and base url, persisting data into a
    BOS bucket.
    """

    bucket_name = BCS_BUCKET['BUCKET_NAME']

    def __init__(self, location=settings.MEDIA_URL, base_url=settings.MEDIA_URL):
        super(BOSStorage, self).__init__(location, base_url)
        cfg = BceClientConfiguration(
            credentials=BceCredentials(BCS_BUCKET['AK'], BCS_BUCKET['SK']),
            endpoint=BCS_BUCKET['END_POINT'])
        self.bos_client = BosClient(cfg)
        # Make sure the backing bucket exists before first use.
        if not self.bos_client.does_bucket_exist(self.bucket_name):
            self.bos_client.create_bucket(self.bucket_name)

    def saveToBucket(self, name, content):
        """Store *content* under *name*, sending an MD5 for integrity.

        Accepts a Django admin file wrapper, a ContentFile, or raw bytes.
        """
        if hasattr(content, '_get_file'):
            data = content._get_file().read()  # admin entry
        elif isinstance(content, (ContentFile)):
            data = content.read()  # view entry (ContentFile)
        else:
            data = content
        digest = hashlib.md5()
        digest.update(data)
        md5value = base64.standard_b64encode(digest.digest())
        self.bos_client.put_object(self.bucket_name, name, data,
                                   len(data), md5value)

    def delete(self, name):
        """
        Delete a file from bos.
        """
        self.bos_client.delete_object(self.bucket_name, name)
class BosFileSystem(object):
    """Buffered, file-system-like wrapper over Baidu BOS.

    Paths are 'bos://bucket/key' strings (split by the module-level
    get_object_info helper).  Writes are buffered in memory and flushed
    via BOS append_object; on BOS server/http errors the client is
    renewed through the server's STS endpoint and retried once.
    """

    def __init__(self, write_flag=True):
        # Write-side state is only needed when the caller intends to write.
        if write_flag:
            self.max_contents_count = 1
            self.max_contents_time = 1
            self.get_bos_config()  # may override the two limits from env
            self.bos_client = BosClient(self.config)
            self.file_length_map = {}
            # Append buffer: raw bytes plus a piece count and the time of
            # the first buffered piece (consumed by ready_to_append()).
            self._file_contents_to_add = b''
            self._file_contents_count = 0
            self._start_append_time = time.time()

    def get_bos_config(self):
        """Build self.config from BOS_* environment variables.

        Raises KeyError when any of BOS_HOST/BOS_AK/BOS_SK is missing.
        """
        bos_host = os.getenv("BOS_HOST")
        if not bos_host:
            raise KeyError('${BOS_HOST} is not found.')
        access_key_id = os.getenv("BOS_AK")
        if not access_key_id:
            raise KeyError('${BOS_AK} is not found.')
        secret_access_key = os.getenv("BOS_SK")
        if not secret_access_key:
            raise KeyError('${BOS_SK} is not found.')
        # Optional flush tuning and STS session token.
        self.max_contents_count = int(os.getenv('BOS_CACHE_COUNT', 1))
        self.max_contents_time = int(os.getenv('BOS_CACHE_TIME', 1))
        bos_sts = os.getenv("BOS_STS")
        self.config = BceClientConfiguration(
            credentials=BceCredentials(access_key_id, secret_access_key),
            endpoint=bos_host,
            security_token=bos_sts)

    def set_bos_config(self, bos_ak, bos_sk, bos_sts,
                       bos_host="bj.bcebos.com"):
        """Install explicit credentials and rebuild the client."""
        self.config = BceClientConfiguration(
            credentials=BceCredentials(bos_ak, bos_sk),
            endpoint=bos_host,
            security_token=bos_sts)
        self.bos_client = BosClient(self.config)

    def renew_bos_client_from_server(self):
        """Fetch fresh STS credentials from the VisualDL server.

        Reads the server url from the local config file, POSTs to its
        /sts/ endpoint and, on success code '000000', installs the new
        ak/sk/token via set_bos_config.
        """
        import requests
        import json
        from visualdl.utils.dir import CONFIG_PATH
        with open(CONFIG_PATH, 'r') as fp:
            server_url = json.load(fp)['server_url']
        url = server_url + '/sts/'
        res = requests.post(url=url).json()
        err_code = res.get('code')
        msg = res.get('msg')
        if '000000' == err_code:
            sts_ak = msg.get('sts_ak')
            sts_sk = msg.get('sts_sk')
            sts_token = msg.get('token')
            self.set_bos_config(sts_ak, sts_sk, sts_token)
        else:
            print('Renew bos client error. \nError msg: {}'.format(msg))
        return

    def isfile(self, filename):
        # NOTE(review): calls a bare `exists`, not self.exists — this only
        # works if a module-level exists() is defined elsewhere in the
        # file; otherwise it raises NameError.  Verify against the module.
        return exists(filename)

    def read_file(self, filename, binary=True):
        # `binary` is accepted but unused; content is returned exactly as
        # get_object_as_string produced it.
        bucket_name, object_key = get_object_info(filename)
        result = self.bos_client.get_object_as_string(bucket_name,
                                                      object_key)
        return result

    def exists(self, path):
        """True when the object's metadata can be fetched."""
        bucket_name, object_key = get_object_info(path)
        try:
            self.bos_client.get_object_meta_data(bucket_name, object_key)
            return True
        except exception.BceError:
            return False

    def get_meta(self, bucket_name, object_key):
        # Raw metadata accessor; raises on missing objects.
        return self.bos_client.get_object_meta_data(bucket_name, object_key)

    def makedirs(self, path):
        """Create a zero-byte 'directory' marker object for *path*."""
        if not path.endswith('/'):
            path += '/'
        if self.exists(path):
            return
        bucket_name, object_key = get_object_info(path)
        if not object_key.endswith('/'):
            object_key += '/'
        init_data = b''
        self.bos_client.append_object(bucket_name=bucket_name,
                                      key=object_key,
                                      data=init_data,
                                      content_md5=content_md5(init_data),
                                      content_length=len(init_data))

    @staticmethod
    def join(path, *paths):
        # NOTE(review): str.replace returns a new string and the result is
        # discarded here, so backslashes are NOT actually converted — the
        # unmodified os.path.join result is returned.
        result = os.path.join(path, *paths)
        result.replace('\\', '/')
        return result

    def read(self, filename, binary_mode=False, size=0, continue_from=None):
        """Read from *filename* starting at continue_from['last_offset'].

        Returns (data, token) where token['last_offset'] is the offset to
        resume from on the next call.
        """
        bucket_name, object_key = get_object_info(filename)
        offset = 0
        if continue_from is not None:
            offset = continue_from.get("last_offset", 0)
        length = int(
            self.get_meta(bucket_name, object_key).metadata.content_length)
        if offset < length:
            data = self.bos_client.get_object_as_string(
                bucket_name=bucket_name,
                key=object_key,
                range=[offset, length - 1])
        else:
            data = b''
        continue_from_token = {"last_offset": length}
        return data, continue_from_token

    def ready_to_append(self):
        """True when the buffer is full enough or old enough to flush."""
        if self._file_contents_count >= self.max_contents_count or \
                time.time() - self._start_append_time > self.max_contents_time:
            return True
        else:
            return False

    def append(self, filename, file_content, binary_mode=False, force=False):
        """Buffer *file_content*; flush it to BOS when due (or *force*d).

        On BceServerError/BceHttpClientError the client is renewed once
        via the server's STS endpoint and the call retried.
        """
        self._file_contents_to_add += file_content
        self._file_contents_count += 1
        if not force and not self.ready_to_append():
            return
        # Flush the whole accumulated buffer, not just this piece.
        file_content = self._file_contents_to_add
        bucket_name, object_key = get_object_info(filename)
        if not self.exists(filename):
            # First write: create an empty appendable object.
            init_data = b''
            try:
                self.bos_client.append_object(
                    bucket_name=bucket_name,
                    key=object_key,
                    data=init_data,
                    content_md5=content_md5(init_data),
                    content_length=len(init_data))
            except (exception.BceServerError, exception.BceHttpClientError):
                self.renew_bos_client_from_server()
                self.bos_client.append_object(
                    bucket_name=bucket_name,
                    key=object_key,
                    data=init_data,
                    content_md5=content_md5(init_data),
                    content_length=len(init_data))
                # NOTE(review): returning here leaves the buffer intact so
                # the content is appended on a later flush — confirm the
                # original indentation placed this return in the except
                # branch, as reconstructed.
                return
        content_length = len(file_content)
        try:
            offset = self.get_meta(bucket_name,
                                   object_key).metadata.content_length
            self.bos_client.append_object(
                bucket_name=bucket_name,
                key=object_key,
                data=file_content,
                content_md5=content_md5(file_content),
                content_length=content_length,
                offset=offset)
        except (exception.BceServerError, exception.BceHttpClientError):
            self.renew_bos_client_from_server()
            offset = self.get_meta(bucket_name,
                                   object_key).metadata.content_length
            self.bos_client.append_object(
                bucket_name=bucket_name,
                key=object_key,
                data=file_content,
                content_md5=content_md5(file_content),
                content_length=content_length,
                offset=offset)
        # Buffer flushed; reset the bookkeeping.
        self._file_contents_to_add = b''
        self._file_contents_count = 0
        self._start_append_time = time.time()

    def write(self, filename, file_content, binary_mode=False):
        # Delegates to the buffered append path.
        self.append(filename, file_content, binary_mode=False)
        # bucket_name, object_key = BosFileSystem._get_object_info(filename)
        #
        # self.bos_client.append_object(bucket_name=bucket_name,
        #                               key=object_key,
        #                               data=file_content,
        #                               content_md5=content_md5(file_content),
        #                               content_length=len(file_content))

    def walk(self, dir):
        """Return an iterator of [dirpath, [], filenames] triples under *dir*."""
        class WalkGenerator():
            def __init__(self, bucket_name, contents):
                self.contents = None
                self.length = 0
                self.bucket = bucket_name
                self.handle_contents(contents)
                self.count = 0

            def handle_contents(self, contents):
                # Group object keys by their parent 'directory'; keys with
                # no '/' go under '.'.
                contents_map = {}
                for item in contents:
                    try:
                        rindex = item.rindex('/')
                        key = item[0:rindex]
                        value = item[rindex + 1:]
                    except ValueError:
                        key = '.'
                        value = item
                    if key in contents_map.keys():
                        contents_map[key].append(value)
                    else:
                        contents_map[key] = [value]
                temp_walk = []
                for key, value in contents_map.items():
                    temp_walk.append([
                        BosFileSystem.join('bos://' + self.bucket, key),
                        [],
                        value
                    ])
                self.length = len(temp_walk)
                self.contents = temp_walk

            def __iter__(self):
                return self

            def __next__(self):
                if self.count < self.length:
                    self.count += 1
                    return self.contents[self.count - 1]
                else:
                    raise StopIteration

        bucket_name, object_key = get_object_info(dir)
        if object_key in ['.', './']:
            prefix = None
        else:
            prefix = object_key if object_key.endswith(
                '/') else object_key + '/'
        response = self.bos_client.list_objects(bucket_name, prefix=prefix)
        contents = [content.key for content in response.contents]
        return WalkGenerator(bucket_name, contents)
#coding=utf-8
"""Upload one local file into the 'paddle-serving' bucket.

Usage: python <script> <remote-dir> <file-name>
"""
# Import the BosClient configuration
import bos_sample_conf
import sys
# BOS related modules
from baidubce import exception
from baidubce.services import bos
from baidubce.services.bos import canned_acl
from baidubce.services.bos.bos_client import BosClient

# Build the client from the sample configuration.
bos_client = BosClient(bos_sample_conf.config)

path, file_name = sys.argv[1], sys.argv[2]
remote_key = "{}/{}".format(path, file_name)
bos_client.put_object_from_file("paddle-serving", remote_key,
                                "{}".format(file_name))
print("upload {} to paddle/serving/{} success".format(file_name, path))
# SuperFile step 3: complete multi-upload bos_client.complete_multipart_upload(bucket_name, key, upload_id, part_list) if __name__ == "__main__": import logging logging.basicConfig(level=logging.DEBUG, filename='./bos_upload_log.log', filemode='w') __logger = logging.getLogger(__name__) if (len(sys.argv) >= 2) and (sys.argv[1] != ""): last_hour = sys.argv[1] else: last_hour = time.strftime("%Y%m%d%H", time.localtime(time.time() - 3600)) Last_hour = time.strftime("%Y-%m-%d_%H", time.localtime(time.time() - 3600)) last_day = last_hour[0:8] Last_day = Last_hour[0:10] bos_client = BosClient(bos_sample_conf.config) # chulihou active log upload active_bucket_name = 'hs-test/bes/active/' + Last_day + '/' + Last_hour + '/' active_key = 'active.' + Last_hour + '.log' active_file_name = '/home/work/hs/active_log/json_active.' + last_hour + '.log' active_raw_bucket = 'hs-test' upload_file(active_bucket_name, active_key, active_file_name, active_raw_bucket)
# Sample object names, randomised so repeated runs do not collide.
# (Call order of _random_string is preserved deliberately.)
source_key = 'sourcekey' + _random_string(6)
target_key = 'targetkey' + _random_string(6)
prefix = 'prefix' + _random_string(6)
bucket_name = 'samplebucket'
key = 'samplekey' + _random_string(6)
file_name = 'samplefile'
download = 'download'

######################################################################
# bucket operation samples
######################################################################

# Build a BOS client from explicit credentials.
config = BceClientConfiguration(credentials=BceCredentials(AK, SK),
                                endpoint=HOST)
bos_client = BosClient(config)

# Create the sample bucket when it is missing.
if not bos_client.does_bucket_exist(bucket_name):
    bos_client.create_bucket(bucket_name)

# Cleanup/recreate steps kept disabled, as in the original sample
# (a bucket must be emptied before it can be deleted):
#for obj in bos_client.list_all_objects(bucket_name):
#    bos_client.delete_object(bucket_name, obj.key)
#bos_client.delete_bucket(bucket_name)
#bos_client.create_bucket(bucket_name)

# list your buckets
bos_host = "bj.bcebos.com"

# Route the BCE http client's DEBUG output into a local file.
logger = logging.getLogger("baidubce.http.bce_http_client")
fh = logging.FileHandler("sample.log")
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger.setLevel(logging.DEBUG)
logger.addHandler(fh)

# BOS example: build a client and list every bucket.
bos_config = BceClientConfiguration(
    credentials=BceCredentials(access_key_id, secret_access_key),
    endpoint=bos_host)
bos_client = BosClient(bos_config)
response = bos_client.list_buckets()
for bucket in response.buckets:
    print(bucket.name)

# TSDB example
tsdb_host = "xxx.tsdb-xxx.tsdb.iot.gz.baidubce.com"
protocol = baidubce.protocol.HTTP
connection_timeout_in_mills = None  # connection timeout
send_buf_size = None  # send buffer size
recv_buf_size = None  # receive buffer size
retry_policy = None  # retry policy
# build the config object
from photo import globalConfig # 配置BOS变量及BOS Client _bucket = 'adventure030-image' _bos_host = "http://bj.baidubos.com/" _access_key = globalConfig.config['django-photosite']['baidu-access-key'] _secret_access_key = globalConfig.config['django-photosite'][ 'baidu-secret-key'] H.ensure(_access_key != '' and _secret_access_key != '', E.AssertFailed, '百度 Access Key 未提供.') _bos_config = BceClientConfiguration(credentials=BceCredentials( _access_key, _secret_access_key), endpoint=_bos_host) _bos_client = BosClient(_bos_config) _ensure_path = lambda path: H.ensure(path.startswith('/'), E.AssertFailed, '路径必须以"/"开头.') or True _ensure_folder_path = lambda folderPath: _ensure_path(folderPath) and H.ensure( folderPath.endswith('/'), E.AssertFailed, '文件夹路径必须以"/"结尾.') _ensure_file_path = lambda filePath: _ensure_path(filePath) and H.ensure( not filePath.endswith('/'), E.AssertFailed, '文件路径不能"/"结尾.') PREFIX = 'http://adventure030-image.bceimg.com' ensure_file_path = _ensure_file_path def _is_folder(path): _ensure_path(path)
class BosFileSystem(object):
    """File-system-style wrapper over Baidu BOS ('bos://bucket/key' paths).

    Writes are buffered in memory and flushed via BOS append_object once
    enough pieces (or enough time) have accumulated.
    """

    def __init__(self):
        # Defaults; get_bos_config() may override them from env vars.
        self.max_contents_count = 1
        self.max_contents_time = 1
        self.get_bos_config()
        self.bos_client = BosClient(self.config)
        self.file_length_map = {}
        # Append buffer plus bookkeeping consumed by ready_to_append().
        self._file_contents_to_add = b''
        self._file_contents_count = 0
        self._start_append_time = time.time()

    def get_bos_config(self):
        """Build self.config from BOS_* environment variables.

        Raises:
            KeyError: when BOS_HOST, BOS_AK or BOS_SK is unset.
        """
        bos_host = os.getenv("BOS_HOST")
        if not bos_host:
            raise KeyError('${BOS_HOST} is not found.')
        access_key_id = os.getenv("BOS_AK")
        if not access_key_id:
            raise KeyError('${BOS_AK} is not found.')
        secret_access_key = os.getenv("BOS_SK")
        if not secret_access_key:
            raise KeyError('${BOS_SK} is not found.')
        self.max_contents_count = int(os.getenv('BOS_CACHE_COUNT', 1))
        self.max_contents_time = int(os.getenv('BOS_CACHE_TIME', 1))
        bos_sts = os.getenv("BOS_STS")  # optional STS session token
        self.config = BceClientConfiguration(
            credentials=BceCredentials(access_key_id, secret_access_key),
            endpoint=bos_host,
            security_token=bos_sts)

    def isfile(self, filename):
        """True when *filename* names an existing object.

        Bug fix: the original called a bare ``exists`` (NameError unless a
        module-level helper happened to exist); route through self.exists.
        """
        return self.exists(filename)

    def read_file(self, filename, binary=True):
        # `binary` is accepted for interface symmetry but unused.
        bucket_name, object_key = BosFileSystem._get_object_info(filename)
        return self.bos_client.get_object_as_string(bucket_name, object_key)

    @staticmethod
    def _get_object_info(path):
        """Split 'bos://bucket/key' into (bucket, key)."""
        path = path[6:]  # drop the 'bos://' scheme prefix
        index = path.index('/')
        bucket_name = path[0:index]
        object_key = path[index + 1:]
        return bucket_name, object_key

    def exists(self, path):
        """True when the object's metadata can be fetched."""
        bucket_name, object_key = BosFileSystem._get_object_info(path)
        try:
            self.bos_client.get_object_meta_data(bucket_name, object_key)
            return True
        except exception.BceError:
            return False

    def get_meta(self, bucket_name, object_key):
        # Raw metadata accessor; raises on missing objects.
        return self.bos_client.get_object_meta_data(bucket_name, object_key)

    def makedirs(self, path):
        """Create a zero-byte 'directory' marker object for *path*."""
        if not path.endswith('/'):
            path += '/'
        if self.exists(path):
            return
        bucket_name, object_key = BosFileSystem._get_object_info(path)
        if not object_key.endswith('/'):
            object_key += '/'
        init_data = b''
        self.bos_client.append_object(bucket_name=bucket_name,
                                      key=object_key,
                                      data=init_data,
                                      content_md5=content_md5(init_data),
                                      content_length=len(init_data))

    @staticmethod
    def join(path, *paths):
        """Join components and normalise separators to '/'.

        Bug fix: ``str.replace`` returns a new string; the original
        discarded the result, so Windows '\\' separators leaked through.
        """
        return os.path.join(path, *paths).replace('\\', '/')

    def read(self, filename, binary_mode=False, size=0, continue_from=None):
        """Read bytes from continue_from['last_offset'] to the object end.

        Returns (data, token); token['last_offset'] is where to resume.
        """
        bucket_name, object_key = BosFileSystem._get_object_info(filename)
        offset = 0
        if continue_from is not None:
            offset = continue_from.get("last_offset", 0)
        length = int(
            self.get_meta(bucket_name, object_key).metadata.content_length)
        if offset < length:
            data = self.bos_client.get_object_as_string(
                bucket_name=bucket_name,
                key=object_key,
                range=[offset, length - 1])
        else:
            data = b''
        return data, {"last_offset": length}

    def ready_to_append(self):
        """True when the buffer is large enough or old enough to flush."""
        return (self._file_contents_count >= self.max_contents_count or
                time.time() - self._start_append_time > self.max_contents_time)

    def append(self, filename, file_content, binary_mode=False, force=False):
        """Buffer *file_content*; flush it to BOS when due (or *force*d)."""
        self._file_contents_to_add += file_content
        self._file_contents_count += 1
        if not force and not self.ready_to_append():
            return
        # Flush the whole accumulated buffer, not just this piece.
        file_content = self._file_contents_to_add
        bucket_name, object_key = BosFileSystem._get_object_info(filename)
        if not self.exists(filename):
            # First write: create an empty appendable object.
            init_data = b''
            self.bos_client.append_object(bucket_name=bucket_name,
                                          key=object_key,
                                          data=init_data,
                                          content_md5=content_md5(init_data),
                                          content_length=len(init_data))
        content_length = len(file_content)
        offset = self.get_meta(bucket_name,
                               object_key).metadata.content_length
        self.bos_client.append_object(bucket_name=bucket_name,
                                      key=object_key,
                                      data=file_content,
                                      content_md5=content_md5(file_content),
                                      content_length=content_length,
                                      offset=offset)
        # Buffer flushed; reset the bookkeeping.
        self._file_contents_to_add = b''
        self._file_contents_count = 0
        self._start_append_time = time.time()

    def write(self, filename, file_content, binary_mode=False):
        # Delegates to the buffered append path.
        self.append(filename, file_content, binary_mode=False)

    def walk(self, dir):
        """Return an iterator of [dirpath, [], filenames] triples under *dir*."""
        class WalkGenerator():
            def __init__(self, bucket_name, contents):
                self.contents = None
                self.length = 0
                self.bucket = bucket_name
                self.handle_contents(contents)
                self.count = 0

            def handle_contents(self, contents):
                # Group object keys by parent 'directory'; keys with no
                # '/' fall under '.'.
                contents_map = {}
                for item in contents:
                    try:
                        rindex = item.rindex('/')
                        key = item[0:rindex]
                        value = item[rindex + 1:]
                    except ValueError:
                        key = '.'
                        value = item
                    if key in contents_map.keys():
                        contents_map[key].append(value)
                    else:
                        contents_map[key] = [value]
                temp_walk = []
                for key, value in contents_map.items():
                    temp_walk.append(
                        [BosFileSystem.join('bos://' + self.bucket, key),
                         [],
                         value])
                self.length = len(temp_walk)
                self.contents = temp_walk

            def __iter__(self):
                return self

            def __next__(self):
                if self.count < self.length:
                    self.count += 1
                    return self.contents[self.count - 1]
                raise StopIteration

        bucket_name, object_key = BosFileSystem._get_object_info(dir)
        if object_key in ['.', './']:
            prefix = None
        else:
            prefix = object_key if object_key.endswith(
                '/') else object_key + '/'
        response = self.bos_client.list_objects(bucket_name, prefix=prefix)
        contents = [content.key for content in response.contents]
        return WalkGenerator(bucket_name, contents)
from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.services.bos.bos_client import BosClient

# NOTE(review): credentials are hard-coded in source control — they should
# be rotated and loaded from the environment or a secrets store instead.
_bos_host = "genious.cdn.bcebos.com"
_access_key_id = "9a137e5669e04e1ca994adbe435d644c"
_secret_access_key = "9e89feeb688345cda556992a1065a274"

logger = logging.getLogger('baidubce.http.bce_http_client')

config = BceClientConfiguration(
    credentials=BceCredentials(_access_key_id, _secret_access_key),
    endpoint=_bos_host)
bos_client = BosClient(config)

GENIOUS_BUCKET = 'genious'


def md5_obj(fp):
    """Return the base64-encoded MD5 digest of a file-like object.

    Reads *fp* in 8 KiB chunks so arbitrarily large files never have to
    fit in memory at once.
    """
    buf_size = 8192
    digest = hashlib.md5()
    while True:
        chunk = fp.read(buf_size)
        if not chunk:
            break
        digest.update(chunk)
    return base64.standard_b64encode(digest.digest())
import logging
logging.basicConfig(level=logging.DEBUG)
__logger = logging.getLogger(__name__)

# Randomised names so a re-run never clashes with a previous one.
bucket_name = 'samplebucket-' + _random_string(6)
key = 'samplekey' + _random_string(6)
file_name = 'samplefile'
download = 'download'

######################################################################
# bucket operation samples
######################################################################

# create a bos client
bos_client = BosClient(bos_sample_conf.config)

# create the bucket when it is missing
if not bos_client.does_bucket_exist(bucket_name):
    bos_client.create_bucket(bucket_name)

# A bucket must be emptied before it can be deleted.
for obj in bos_client.list_all_objects(bucket_name):
    bos_client.delete_object(bucket_name, obj.key)
bos_client.delete_bucket(bucket_name)

# create the bucket again
bos_client.create_bucket(bucket_name)

# list your buckets
def __init__(self):
    """Create the wrapped BOS client from the shared module config."""
    self._client = BosClient(bos_conf.config)
def main():
    """
    Main entry point: validate the CLI arguments, fetch temporary BOS
    credentials, multipart-upload the file and submit it for processing.
    :return:
    """
    # Expect exactly: script, file path, token.
    if len(sys.argv) != 3:
        print "参数个数不正确,请执行 sh test.sh 文件路径 Token"
        exit(0)
    path = sys.argv[1]
    token = sys.argv[2]
    if not os.path.exists(path):
        print "文件:" + path + " 不存在,请确认文件路径参数是否正确"
        exit(0)
    # Fetch temporary upload credentials (AK/SK/STS token) from the service.
    code, acl = bos_acl()
    if code != 200:
        print "服务异常,请稍候再试"
        exit(0)
    acl = json.loads(acl)
    if acl['errorCode'] != 0:
        print "服务异常,请稍候再试"
        exit(0)
    ak = unicode2str(acl['result']['accessKeyId'])
    sk = unicode2str(acl['result']['secretAccessKey'])
    bos_token = unicode2str(acl['result']['sessionToken'])
    config = BceClientConfiguration(
        credentials=BceCredentials(ak, sk),
        endpoint=unicode2str("http://bj.bcebos.com"),
        security_token=bos_token)
    bos_client = BosClient(config)
    bucket_name = unicode2str(acl['result']['bucketName'])
    object_key = unicode2str(acl['result']['fileKey'])
    # Validate the user token before starting the (expensive) upload.
    token_status_code, token_data = check_token(token)
    if token_status_code != 200:
        print "服务异常,请稍候再试"
        exit(0)
    token_data = json.loads(token_data)
    if token_data['errorCode'] != 0:
        print token_data['errorMsg']
        exit(0)
    print "开始提交"
    # Multipart upload: initiate, push parts, then complete with the
    # collected part numbers/etags.
    upload_id = bos_client.initiate_multipart_upload(bucket_name,
                                                     object_key).upload_id
    left_size = os.path.getsize(path)
    # left_size tracks how many bytes remain to upload.
    # offset is where the next part starts within the file.
    offset = 0
    part_number = 1
    part_list = []
    total_size = left_size
    while left_size > 0:
        # Part size is 10 MB.  (The original comment claimed 5 MB; the
        # code has always used 10 MB.)
        part_size = 10 * 1024 * 1024
        if left_size < part_size:
            part_size = left_size
        print total_size - left_size, "/", total_size
        response = bos_client.upload_part_from_file(bucket_name, object_key,
                                                    upload_id, part_number,
                                                    part_size, path, offset)
        left_size -= part_size
        offset += part_size
        part_list.append({
            "partNumber": part_number,
            "eTag": response.metadata.etag
        })
        part_number += 1
    print total_size, "/", total_size
    bos_client.complete_multipart_upload(bucket_name, object_key, upload_id,
                                         part_list)
    # Tell the backend about the uploaded object.
    file_name = os.path.basename(path)
    status_code, data = submit(object_key, token, file_name)
    if status_code != 200:
        # NOTE(review): unlike the earlier checks this path does not exit,
        # so json.loads(data) below may fail — verify this is intended.
        print "服务异常,请稍候再试"
    data = json.loads(data)
    if data['errorCode'] == 0:
        print data['result']
    else:
        print data['errorMsg']
import bos_conf
import conf
from baidubce import exception
from baidubce.services import bos
from baidubce.services.bos import canned_acl
from baidubce.services.bos.bos_client import BosClient

# One client, reused for both uploads below.
bos_client = BosClient(bos_conf.config)
bucket_name = conf.BUCKET_NAME

# Upload the report-abstract dump.
object_key = conf.OBJECT_REPORT_ABSTRACT
file_name = conf.FILE_PATH + conf.MONGODB_COLLECTION_REPORT_ABSTRACT
bos_client.put_object_from_file(bucket_name, object_key, file_name)
print("Upload %s successfully. " % file_name)

# Upload the report-file dump.
object_key = conf.OBJECT_REPORT_FILE
file_name = conf.FILE_PATH + conf.MONGODB_COLLECTION_REPORT_FILE
bos_client.put_object_from_file(bucket_name, object_key, file_name)
print("Upload %s successfully. " % file_name)
# Fixed bucket names for the copy samples plus randomised object names
# (call order of _random_string is preserved deliberately).
source_bucket = 'sourcebucket'
target_bucket = 'targetbucket'
source_key = 'sourcekey' + _random_string(6)
target_key = 'targetkey' + _random_string(6)
prefix = 'prefix' + _random_string(6)
bucket_name = 'samplebucket'
key = 'samplekey' + _random_string(6)
file_name = 'samplefile'
download = 'download'

######################################################################
# bucket operation samples
######################################################################

# create a bos client
bos_client = BosClient(bos_sample_conf.config)

# create the bucket when it is missing
if not bos_client.does_bucket_exist(bucket_name):
    bos_client.create_bucket(bucket_name)

# A bucket must be emptied before it can be deleted.
for obj in bos_client.list_all_objects(bucket_name):
    bos_client.delete_object(bucket_name, obj.key)
bos_client.delete_bucket(bucket_name)

# create the bucket again
bos_client.create_bucket(bucket_name)

# list your buckets
import logging
logging.basicConfig(level=logging.DEBUG)
__logger = logging.getLogger(__name__)

# Fixed bucket plus a randomised object key for this run.
bucket_name = 'samplebucket'
key = 'samplekey' + _random_string(6)
file_name = 'samplefile'
download = 'download'

######################################################################
# bucket operation samples
######################################################################

# create a bos client
bos_client = BosClient(bos_sample_conf.config)

# create the bucket when it is missing
if not bos_client.does_bucket_exist(bucket_name):
    bos_client.create_bucket(bucket_name)

# A bucket must be emptied before it can be deleted.
for obj in bos_client.list_all_objects(bucket_name):
    bos_client.delete_object(bucket_name, obj.key)
bos_client.delete_bucket(bucket_name)

# create the bucket again
bos_client.create_bucket(bucket_name)

# list your buckets
# NOTE(review): the line below is the tail of a BceClientConfiguration(
# credentials=BceCredentials(...) call whose opening lines are outside
# this view.
    access_key_id, secret_access_key), endpoint=bos_host)

# Disabled tuning options kept verbatim from the original sample.
'''
#设置请求超时时间
config.connection_timeout_in_mills = TIMEOUT

#设置接收缓冲区大小
config.recv_buf_size(BUF_SIZE)

#设置发送缓冲区大小
bos_sample_conf.config.send_buf_size(BUF_SIZE)

#设置连接重试策略
#三次指数退避重试
config.retry_policy = BackOffRetryPolicy()
#不重试
config.retry_policy = NoRetryPolicy()
'''

bos_client = BosClient(config)

# Create the bucket when it is missing.
if not bos_client.does_bucket_exist(bucket_name):
    bos_client.create_bucket(bucket_name)

# Private ACL: only the owner can read/write; everyone else has no access.
bos_client.set_bucket_canned_acl(bucket_name, canned_acl.PRIVATE)

# List the account's buckets, printing the owner first.
response = bos_client.list_buckets()
owner = response.owner
print 'user id:%s, user name:%s' % (owner.id, owner.display_name)
for bucket in response.buckets:
    print bucket.name

# Show which region the bucket belongs to.
print bos_client.get_bucket_location(bucket_name)