def __init__(self, access_key, secret_key, bucket_name, inputfile, sep,
             successfile, failurefile, thread_count=3):
    """Store credentials and batch-job settings and build the worker pool.

    Args:
        access_key: Qiniu access key.
        secret_key: Qiniu secret key.
        bucket_name: target bucket name.
        inputfile: path of the file listing the keys to process.
        sep: field separator used inside ``inputfile``.
        successfile: path used to record successful keys.
        failurefile: path used to record failed keys.
        thread_count: size of the worker pool (default 3).
    """
    self.access_key = access_key
    self.secret_key = secret_key
    # Bug fix: honour the caller-supplied thread_count instead of the
    # hard-coded pool size of 3 the original passed to SimpleThreadPool.
    self._inner_threadpool = SimpleThreadPool(thread_count)
    self.thread_count = thread_count
    self.inputfile = inputfile
    self.bucket_name = bucket_name
    self.successfile = successfile
    self.failurefile = failurefile
    self.sep = sep
def batch_delete(self):
    """Delete every key listed in ``self.inputfile`` via the thread pool.

    Individual task-submission failures are logged and skipped so one bad
    key does not abort the whole batch.

    Returns:
        The per-task results collected from the thread pool.
    """
    self._inner_threadpool = SimpleThreadPool(self.thread_count)
    key_list = self.read_inputfile(self.inputfile)
    for key in key_list:
        try:
            self._inner_threadpool.add_task(
                self.delete, self.bucket_name, key,
                self.successfile, self.failurefile)
        except Exception as e:
            # logger.warn is deprecated; logger.warning is the current API.
            logger.warning(to_unicode(e))
    self._inner_threadpool.wait_completion()
    result = self._inner_threadpool.get_result()
    # Bug fix: the original `return print(result)` always returned None.
    # Print for operator visibility, but hand the result to the caller.
    print(result)
    return result
def batch_upload(self):
    """Upload every file under ``self.file_dir`` via the thread pool.

    Individual task-submission failures are logged and skipped so one bad
    file does not abort the whole batch.

    Returns:
        The per-task results collected from the thread pool.
    """
    self._inner_threadpool = SimpleThreadPool(self.thread_count)
    local_filenamelist = self.get_dir_filename(self.file_dir)
    for local_filename in local_filenamelist:
        try:
            self._inner_threadpool.add_task(
                self._upload, local_filename,
                self.successfile, self.failurefile)
        except Exception as e:
            # logger.warn is deprecated; logger.warning is the current API.
            logger.warning(to_unicode(e))
    self._inner_threadpool.wait_completion()
    result = self._inner_threadpool.get_result()
    # Bug fix: the original `return print(result)` always returned None.
    print(result)
    return result
class Batch_delete(object):
    """Batch-delete object keys from a Qiniu bucket using a thread pool."""

    def __init__(self, access_key, secret_key, bucket_name, inputfile,
                 successfile, failurefile, thread_count=3):
        """Store credentials and job settings and build the worker pool.

        Args:
            access_key: Qiniu access key.
            secret_key: Qiniu secret key.
            bucket_name: target bucket name.
            inputfile: path of the file listing keys to delete, one per line.
            successfile: path used to record successful keys.
            failurefile: path used to record failed keys.
            thread_count: size of the worker pool (default 3).
        """
        self.access_key = access_key
        self.secret_key = secret_key
        # Bug fix: honour thread_count instead of a hard-coded pool of 3.
        self._inner_threadpool = SimpleThreadPool(thread_count)
        self.thread_count = thread_count
        self.inputfile = inputfile
        self.bucket_name = bucket_name
        self.successfile = successfile
        self.failurefile = failurefile

    def read_inputfile(self, inputfile):
        """Return a tuple of keys read from *inputfile*, one per line.

        If a line contains commas, only the first comma-separated field is
        taken as the key. Handles both LF and CRLF line endings.
        """
        res = []
        with open(inputfile, "r") as f:
            # Iterate the file directly instead of readlines(): same order,
            # no need to materialise the whole file in memory.
            for line in f:
                line = line.rstrip("\r\n")
                if "," in line:
                    line = line.split(",")[0]
                res.append(line)
        return tuple(res)

    def delete(self, bucket_name, key, successfile, failurefile):
        """Delete one key from *bucket_name*.

        Returns:
            (key, info, successfile, failurefile) on success; None when the
            SDK call raises (the error is logged and the worker backs off
            briefly).
        """
        try:
            q = Auth(self.access_key, self.secret_key)
            bucket = BucketManager(q)
            _, info = bucket.delete(bucket_name, key)
            return key, info, successfile, failurefile
        except Exception as e:
            logger.warning(to_unicode(e))
            time.sleep(0.1)

    def batch_delete(self):
        """Queue a delete task for every key in the input file.

        Returns:
            The per-task results collected from the thread pool.
        """
        self._inner_threadpool = SimpleThreadPool(self.thread_count)
        for key in self.read_inputfile(self.inputfile):
            try:
                self._inner_threadpool.add_task(
                    self.delete, self.bucket_name, key,
                    self.successfile, self.failurefile)
            except Exception as e:
                logger.warning(to_unicode(e))
        self._inner_threadpool.wait_completion()
        result = self._inner_threadpool.get_result()
        # Bug fix: `return print(result)` always returned None.
        print(result)
        return result
class Batch_chstatus(object):
    """Batch-change the status of object keys in a Qiniu bucket."""

    def __init__(self, access_key, secret_key, bucket_name, inputfile, sep,
                 successfile, failurefile, thread_count=3):
        """Store credentials and job settings and build the worker pool.

        Args:
            access_key: Qiniu access key.
            secret_key: Qiniu secret key.
            bucket_name: target bucket name.
            inputfile: path of the file listing "<key><sep><status>" lines.
            sep: field separator used inside ``inputfile``.
            successfile: path used to record successful keys.
            failurefile: path used to record failed keys.
            thread_count: size of the worker pool (default 3).
        """
        self.access_key = access_key
        self.secret_key = secret_key
        # Bug fix: honour thread_count instead of a hard-coded pool of 3.
        self._inner_threadpool = SimpleThreadPool(thread_count)
        self.thread_count = thread_count
        self.inputfile = inputfile
        self.bucket_name = bucket_name
        self.successfile = successfile
        self.failurefile = failurefile
        self.sep = sep

    def read_inputfile(self, inputfile):
        """Return a tuple of lines from *inputfile*, newline-stripped."""
        ret = []
        with open(inputfile, "r") as f:
            for line in f:
                ret.append(line.rstrip("\r\n"))
        return tuple(ret)

    def _chstatus(self, bucket_name, key, file_status, successfile, failurefile):
        """Change the status of one key.

        NOTE(review): the original comment here described storage types
        (0/1/2), which belongs to change_type, not change_status; per the
        Qiniu API change_status toggles enable(0)/disable(1) — confirm
        against the SDK version in use. ``file_status`` is passed through
        as read from the input file.
        """
        try:
            q = Auth(self.access_key, self.secret_key)
            bucket = BucketManager(q)
            _, info = bucket.change_status(bucket_name, key, file_status, cond=None)
            return key, info, successfile, failurefile
        except Exception as e:
            logger.warning(to_unicode(e))
            time.sleep(0.1)

    def batch_chstatus(self):
        """Queue a status-change task for every line of the input file.

        Returns:
            The per-task results collected from the thread pool.
        """
        self._inner_threadpool = SimpleThreadPool(self.thread_count)
        for line in self.read_inputfile(self.inputfile):
            fields = line.split(self.sep)
            # Robustness fix: the original indexed fields[1] unguarded and
            # crashed with IndexError on lines missing the separator.
            if len(fields) < 2:
                logger.warning(to_unicode("skipping malformed line: %s" % line))
                continue
            key, file_status = fields[0], fields[1]
            try:
                self._inner_threadpool.add_task(
                    self._chstatus, self.bucket_name, key, file_status,
                    self.successfile, self.failurefile)
            except Exception as e:
                logger.warning(to_unicode(e))
        self._inner_threadpool.wait_completion()
        result = self._inner_threadpool.get_result()
        # Bug fix: `return print(result)` always returned None.
        print(result)
        return result
class Batch_upload(object):
    """Batch-upload every file under a local directory to a Qiniu bucket."""

    def __init__(self, access_key, secret_key, file_dir, bucket_name,
                 successfile, failurefile, thread_count=3):
        """Store credentials and job settings and build the worker pool.

        Args:
            access_key: Qiniu access key.
            secret_key: Qiniu secret key.
            file_dir: local directory whose contents will be uploaded.
            bucket_name: target bucket name.
            successfile: path used to record successful uploads.
            failurefile: path used to record failed uploads.
            thread_count: size of the worker pool (default 3).
        """
        self.access_key = access_key
        self.secret_key = secret_key
        # Bug fix: honour thread_count instead of a hard-coded pool of 3.
        self._inner_threadpool = SimpleThreadPool(thread_count)
        self.thread_count = thread_count
        self.file_dir = file_dir
        self.bucket_name = bucket_name
        self.successfile = successfile
        self.failurefile = failurefile

    def get_dir_filename(self, dir):
        """Return the full paths of all regular files under *dir* (recursive)."""
        local_filenamelist = []
        for dirpath, dirnames, filenames in os.walk(dir, topdown=False):
            for name in filenames:
                local_filenamelist.append(os.path.join(dirpath, name))
        return local_filenamelist

    def _upload(self, localfile, successfile, failurefile):
        """Upload one local file; its key is the path relative to file_dir.

        Returns:
            (localfile, key, info, successfile, failurefile) when the SDK
            reports success; None otherwise.

        Raises:
            Re-raises any exception from the SDK after logging it.
        """
        try:
            q = Auth(self.access_key, self.secret_key)
            # Bug fix: the original `localfile.split(self.file_dir)[1][1:]`
            # silently mangled the key when file_dir ended with a path
            # separator or its string reappeared later in the path.
            key = os.path.relpath(localfile, self.file_dir)
            token = q.upload_token(self.bucket_name, key, 3600)
            ret, info = put_file(token, key, localfile)
            if ret:
                return localfile, key, info, successfile, failurefile
        except Exception as e:
            logger.warning(to_unicode(e))
            raise e

    def batch_upload(self):
        """Queue an upload task for every file under ``self.file_dir``.

        Returns:
            The per-task results collected from the thread pool.
        """
        self._inner_threadpool = SimpleThreadPool(self.thread_count)
        for local_filename in self.get_dir_filename(self.file_dir):
            try:
                self._inner_threadpool.add_task(
                    self._upload, local_filename,
                    self.successfile, self.failurefile)
            except Exception as e:
                logger.warning(to_unicode(e))
        self._inner_threadpool.wait_completion()
        result = self._inner_threadpool.get_result()
        # Bug fix: `return print(result)` always returned None.
        print(result)
        return result
class Batch_modtype(object):
    """Batch-change the storage type of object keys in a Qiniu bucket."""

    def __init__(self, access_key, secret_key, bucket_name, inputfile, sep,
                 successfile, failurefile, thread_count=3):
        """Store credentials and job settings and build the worker pool.

        Args:
            access_key: Qiniu access key.
            secret_key: Qiniu secret key.
            bucket_name: target bucket name.
            inputfile: path of the file listing "<key><sep><type>" lines.
            sep: field separator used inside ``inputfile``.
            successfile: path used to record successful keys.
            failurefile: path used to record failed keys.
            thread_count: size of the worker pool (default 3).
        """
        self.access_key = access_key
        self.secret_key = secret_key
        # Bug fix: honour thread_count instead of a hard-coded pool of 3.
        self._inner_threadpool = SimpleThreadPool(thread_count)
        self.thread_count = thread_count
        self.inputfile = inputfile
        self.bucket_name = bucket_name
        self.successfile = successfile
        self.failurefile = failurefile
        self.sep = sep

    def mod_type(self, bucket_name, key, storage_type, successfile, failurefile):
        """Change the storage type of one key.

        storage_type: 0 = standard, 1 = infrequent-access, 2 = archive
        (per the original Chinese comment). It is passed through as read
        from the input file.
        """
        try:
            q = Auth(self.access_key, self.secret_key)
            bucket = BucketManager(q)
            _, info = bucket.change_type(bucket_name, key, storage_type)
            return key, info, successfile, failurefile
        except Exception as e:
            logger.warning(to_unicode(e))
            time.sleep(0.1)

    def read_inputfile(self, inputfile):
        """Return a tuple of lines from *inputfile*, newline-stripped.

        Consistency fix: the original returned raw readlines() output with
        trailing newlines, which b_modtype then stripped ad hoc; stripping
        here matches the other Batch_* classes.
        """
        ret = []
        with open(inputfile, "r") as f:
            for line in f:
                ret.append(line.rstrip("\r\n"))
        return tuple(ret)

    def b_modtype(self):
        """Queue a storage-type change for every line of the input file.

        Returns:
            The per-task results collected from the thread pool.
        """
        self._inner_threadpool = SimpleThreadPool(self.thread_count)
        for line in self.read_inputfile(self.inputfile):
            fields = line.split(self.sep)
            # Robustness fix: the original indexed fields[1] unguarded and
            # crashed with IndexError on lines missing the separator.
            if len(fields) < 2:
                logger.warning(to_unicode("skipping malformed line: %s" % line))
                continue
            key, storage_type = fields[0], fields[1]
            try:
                self._inner_threadpool.add_task(
                    self.mod_type, self.bucket_name, key, storage_type,
                    self.successfile, self.failurefile)
            except Exception as e:
                logger.warning(to_unicode(e))
        self._inner_threadpool.wait_completion()
        result = self._inner_threadpool.get_result()
        # Bug fix: `return print(result)` always returned None.
        print(result)
        return result
def batch_chstatus(self):
    """Queue a status-change task for every line of the input file.

    Each line is split on ``self.sep`` into (key, status); malformed lines
    are logged and skipped instead of raising IndexError.

    Returns:
        The per-task results collected from the thread pool.
    """
    self._inner_threadpool = SimpleThreadPool(self.thread_count)
    for line in self.read_inputfile(self.inputfile):
        fields = line.split(self.sep)
        # Robustness fix: the original indexed fields[1] unguarded and
        # crashed with IndexError on lines missing the separator.
        if len(fields) < 2:
            logger.warning(to_unicode("skipping malformed line: %s" % line))
            continue
        key, file_status = fields[0], fields[1]
        try:
            self._inner_threadpool.add_task(
                self._chstatus, self.bucket_name, key, file_status,
                self.successfile, self.failurefile)
        except Exception as e:
            logger.warning(to_unicode(e))
    self._inner_threadpool.wait_completion()
    result = self._inner_threadpool.get_result()
    # Bug fix: the original `return print(result)` always returned None.
    print(result)
    return result
def b_modtype(self):
    """Queue a storage-type change for every line of the input file.

    Each line is split on ``self.sep`` into (key, storage_type); malformed
    lines are logged and skipped instead of raising IndexError.

    Returns:
        The per-task results collected from the thread pool.
    """
    self._inner_threadpool = SimpleThreadPool(self.thread_count)
    for line in self.read_inputfile(self.inputfile):
        # Tolerate lines that still carry their trailing newline.
        line = line.rstrip("\n")
        fields = line.split(self.sep)
        # Robustness fix: the original indexed fields[1] unguarded and
        # crashed with IndexError on lines missing the separator.
        if len(fields) < 2:
            logger.warning(to_unicode("skipping malformed line: %s" % line))
            continue
        key, storage_type = fields[0], fields[1]
        try:
            self._inner_threadpool.add_task(
                self.mod_type, self.bucket_name, key, storage_type,
                self.successfile, self.failurefile)
        except Exception as e:
            logger.warning(to_unicode(e))
    self._inner_threadpool.wait_completion()
    result = self._inner_threadpool.get_result()
    # Bug fix: the original `return print(result)` always returned None.
    print(result)
    return result