class SyncThread(threading.Thread):
    """Worker thread that drains the sync queue and uploads changes to OSS."""

    def __init__(self, oss, queue, *args, **kwargs):
        """Create the worker.

        oss   -- OSS API client used to perform uploads.
        queue -- shared queue of pending sync elements.
        Remaining args are forwarded to threading.Thread.
        """
        threading.Thread.__init__(self, *args, **kwargs)
        self.queue = queue
        self.oss = oss
        self._terminate = False  # cooperative stop flag, set by terminate()
        self.logger = logging.getLogger('app')
        dbpath = 'db/ossync.db'
        self.qm = queue_model.QueueModel(dbpath)

    def terminate(self):
        """Request a cooperative shutdown of the thread."""
        self._terminate = True

    def upload(self, bucket, oss_obj_name, filename):
        """Upload a local file or directory marker to OSS.

        Returns True on success, False on failure, or None when the
        local path no longer exists (nothing to do).
        """
        if not os.path.lexists(filename):
            return None
        success = False
        if os.path.isdir(filename):
            # A directory is represented in OSS as an empty object whose
            # key ends with '/'.
            oss_obj_name += '/'
            res = self.oss.put_object_with_data(bucket=bucket,
                                                object=oss_obj_name,
                                                input_content='')
            # '//' keeps integer semantics on both Python 2 and 3.
            if (res.status // 100) == 2:
                success = True
        else:
            file_size = os.path.getsize(filename)
            is_large_file = file_size > 2000000
            if is_large_file:
                res = self.oss.upload_large_file(bucket=bucket,
                                                 object=oss_obj_name,
                                                 filename=filename)
            else:
                res = self.oss.put_object_from_file(bucket=bucket,
                                                    object=oss_obj_name,
                                                    filename=filename)
            if (res.status // 100) == 2:
                if is_large_file:
                    # Multipart-upload ETags are not the plain MD5 of the
                    # file, so an MD5 comparison would always fail here;
                    # trust the 2xx status (matches the other upload
                    # variants in this file).
                    success = True
                else:
                    filehash = helper.calc_file_md5(filename)
                    header_map = convert_header2map(res.getheaders())
                    etag = safe_get_element("etag", header_map).upper().replace('"', '')
                    success = filehash.upper() == etag
        return success
def upload(self, bucket, oss_obj_name, filename):
    """Upload a local file or directory marker to OSS.

    Returns True on success, False on failure, or None when the local
    path no longer exists (nothing to do).
    """
    if not os.path.lexists(filename):
        return None
    success = False
    if os.path.isdir(filename):
        # A directory is represented in OSS as an empty object whose key
        # ends with '/'.
        oss_obj_name += '/'
        res = self.oss.put_object_with_data(bucket=bucket,
                                            object=oss_obj_name,
                                            input_content='')
        # '//' keeps integer semantics on both Python 2 and 3.
        if (res.status // 100) == 2:
            success = True
    else:
        file_size = os.path.getsize(filename)
        is_large_file = file_size > LARGE_FILE_SIZE
        if is_large_file:
            res = self.oss.upload_large_file(bucket=bucket,
                                             object=oss_obj_name,
                                             filename=filename)
        else:
            res = self.oss.put_object_from_file(bucket=bucket,
                                                object=oss_obj_name,
                                                filename=filename)
        if (res.status // 100) == 2:
            if is_large_file:
                # Multipart-upload ETags are not the plain MD5 of the
                # file, so no MD5 verification is possible; trust the
                # 2xx status.
                success = True
            else:
                # Only hash the file when the result is actually used.
                filehash = helper.calc_file_md5(filename)
                header_map = convert_header2map(res.getheaders())
                etag = safe_get_element("etag", header_map).upper().replace('"', '')
                success = filehash.upper() == etag
    return success
def queue_el(self, bucket, root, path):
    """Build a sync-queue element for *path* and enqueue it if new.

    The element string encodes bucket, sync root, the path relative to
    root, the action ('C' = create) and a hash identifying the change.
    The change is persisted to the queue database before being put on
    the in-memory queue.
    """
    relpath = os.path.relpath(path, root)  # path relative to the sync root
    filehash = ""
    if os.path.isfile(path):
        filehash = helper.calc_file_md5(path)
    hashcode = helper.calc_el_md5(root, relpath, bucket, filehash)
    el = bucket + '::' + root + '::' + relpath + '::C' + '::' + hashcode
    if not self.is_el_queued(hashcode):
        data = {"root": root, "relpath": relpath, "bucket": bucket,
                "action": 'C', "status": 0, "hashcode": hashcode,
                "retries": 0}
        try:
            self.qm.save(data)
            self.queue.put(el, block=True, timeout=1)
            msg = 'queue element:' + el
            self.logger.info(msg)
        except Full as e:
            # str(e) is portable; e.message is gone on Python 3.
            self.logger.error(str(e))
def upload(self, bucket, oss_obj_name, filename):
    """Push a local file or directory to OSS.

    Returns True when the upload succeeded, False when it failed, and
    None when the local path has disappeared in the meantime.
    """
    if not os.path.lexists(filename):
        return None

    if os.path.isdir(filename):
        # A directory becomes an empty object whose key ends in '/'.
        marker = oss_obj_name + '/'
        response = self.oss.put_object_with_data(bucket=bucket,
                                                 object=marker,
                                                 input_content='')
        return (response.status / 100) == 2

    size = os.path.getsize(filename)
    large = size > LARGE_FILE_SIZE
    if large:
        response = self.oss.upload_large_file(bucket=bucket,
                                              object=oss_obj_name,
                                              filename=filename)
    else:
        response = self.oss.put_object_from_file(bucket=bucket,
                                                 object=oss_obj_name,
                                                 filename=filename)

    local_md5 = helper.calc_file_md5(filename)
    headers = convert_header2map(response.getheaders())
    remote_etag = safe_get_element("etag", headers).upper().replace('"', '')

    if (response.status / 100) != 2:
        return False
    if large:
        # Multipart ETags cannot be compared against a plain MD5.
        return True
    return local_md5.upper() == remote_etag