def upload(self):
    """Upload the file part by part over the pre-signed part URLs.

    Iterates ``self.part_upload_url_list`` starting from ``self.part_number``
    (so a previously interrupted upload resumes where it stopped), PUTs each
    chunk, and checkpoints progress via ``common.save_task`` after every
    successful part.  Returns True on completion, False on a fatal server
    error.  On a transport error the URL list is refreshed and the whole
    method retries itself recursively.
    """
    with open(self.realpath, "rb") as f:
        task_log_id = common.log('正在上传【%s】0%%' % self.filename, self.id, 'info')
        # Wrap the file's read() so tqdm renders a progress bar; `initial`
        # credits the parts already uploaded in a previous run.
        with tqdm.wrapattr(f, "read",
                           desc='正在上传【%s】' % self.filename,
                           miniters=1,
                           initial=self.part_number * self.chunk_size,
                           total=self.filesize,
                           ascii=True
                           ) as fs:
            while self.part_number < len(self.part_upload_url_list):
                upload_url = self.part_upload_url_list[self.part_number]['upload_url']
                # Per-part byte budget; min() handles files smaller than one chunk.
                total_size = min(self.chunk_size, self.filesize)
                # Seek to this part's offset explicitly — after a retry or a
                # resumed run the file position is not where we need it.
                fs.seek(self.part_number * total_size)
                try:
                    # Stream the part in 16 KiB slices instead of loading it
                    # whole into memory.
                    res = requests.put(
                        url=upload_url,
                        data=common.read_in_chunks(fs, 16 * 1024, total_size),
                        verify=False,
                        timeout=None)
                except Exception as e:
                    # Transport failure: refresh the pre-signed URLs (they may
                    # have expired mid-transfer) and restart the upload loop.
                    self.print(e, 'error')
                    self.part_upload_url_list = self.get_upload_url()
                    return self.upload()
                if 400 <= res.status_code < 600:
                    # OSS returns error details as XML; inspect Message/Code.
                    common_get_xml_value = common.get_xml_tag_value(res.text, 'Message')
                    if common_get_xml_value == 'Request has expired.':
                        # Pre-signed URL expired — fetch fresh URLs and retry
                        # the SAME part (part_number unchanged).
                        self.part_upload_url_list = self.get_upload_url()
                        continue
                    common_get_xml_value = common.get_xml_tag_value(res.text, 'Code')
                    if common_get_xml_value == 'PartNotSequential':
                        # Server expects the previous part first — step back one.
                        self.part_number -= 1
                        continue
                    elif common_get_xml_value == 'PartAlreadyExist':
                        # This part was already accepted (e.g. duplicated
                        # retry) — safe to count it as done.
                        pass
                    else:
                        # Any other 4xx/5xx is fatal for this task.
                        self.print(res.text, 'error')
                        return False
                self.part_number += 1
                common.update_task_log(task_log_id, '正在上传【%s】%.2f%%' % (
                    self.filename, ((self.part_number * total_size) / self.filesize) * 100))
                # Checkpoint after each part so an interrupted run can resume.
                udata = {
                    "part_number": self.part_number,
                }
                common.save_task(self.id, udata)
    return True
def save_task(self, task):
    """Snapshot the persistable fields of *task* and hand them to the
    module-level ``save_task`` helper.

    Any field whose value is ``None`` is stored as an empty string so the
    persisted record never contains nulls.  Returns whatever the helper
    returns.
    """
    persisted_fields = (
        "filepath", "realpath", "filesize", "hash", "status",
        "create_time", "finish_time", "spend_time", "drive_id",
        "file_id", "upload_id", "part_number", "chunk_size",
    )
    snapshot = {}
    for field in persisted_fields:
        value = getattr(task, field)
        snapshot[field] = '' if value is None else value
    return save_task(task.id, snapshot)
def upload_file(self, task):
    """Drive one upload task end to end and return the AliyunDrive handle.

    Marks the task in-progress, resolves/creates the remote parent folder,
    attempts a resume when checkpoint data matches the configured drive,
    otherwise creates the remote file (which may rapid-upload via hash
    match) and falls through to the chunked upload.
    """
    save_task(task['id'], {'status': 2})
    drive = AliyunDrive(DATA['config']['DRIVE_ID'], DATA['config']['ROOT_PATH'], DATA['config']['CHUNK_SIZE'])
    # Load the queued task state (ids, part number, chunk size, ...).
    drive.load_task(task)
    if not os.path.exists(task['realpath']):
        # Local file vanished — mark failed and bail out.
        drive.status = -1
        return drive
    drive.load_file(task['filepath'], task['realpath'])
    # Create/resolve the remote directory; serialized so concurrent workers
    # don't race creating the same folder chain.
    with LOCK:
        parent_folder_id = drive.get_parent_folder_id(drive.filepath)
    # Resume a previously interrupted upload.
    if DATA['config']['RESUME'] and DATA['config']['DRIVE_ID'] == task['drive_id']:
        # BUGFIX: the original tested `not drive.file_id and not drive.upload_id`,
        # which only "resumed" when no upload session existed — and
        # get_upload_url() needs both ids.  Resume requires them to be present.
        if 0 not in [
            drive.drive_id,
            drive.part_number,
            drive.chunk_size,
        ] and drive.file_id and drive.upload_id:
            # Re-fetch fresh pre-signed part URLs for the existing session.
            drive.part_upload_url_list = drive.get_upload_url()
            return self.__upload(drive)
    # Create the remote file (may rapid-upload by content hash).
    create_post_json = drive.create(parent_folder_id)
    if 'rapid_upload' in create_post_json and create_post_json['rapid_upload']:
        drive.finish_time = get_timestamp()
        drive.spend_time = drive.finish_time - drive.start_time
        # BUGFIX: the literal had lost its {filename} placeholder, so the
        # supplied filename kwarg was never rendered.
        self.print(
            '【{filename}】秒传成功!消耗{s}秒'.format(filename=drive.filename,
                                                       s=drive.spend_time),
            'success', drive.id)
        drive.status = 1
        return drive
    # Normal chunked upload.
    return self.__upload(drive)
def upload(self):
    """Upload the file part by part, checkpointing into DATA['tasks'].

    Resumes from ``self.part_number``; each successful part updates the
    task record keyed by ``self.filepath_hash`` and persists it with
    ``common.save_task``.  Raises via ``res.raise_for_status()`` on an
    unrecoverable server error.
    """
    with open(self.realpath, "rb") as f:
        # Wrap read() for a tqdm progress bar; `initial` credits parts
        # already uploaded by a previous run.
        with tqdm.wrapattr(f, "read",
                           desc='正在上传【%s】' % self.filename,
                           miniters=1,
                           initial=self.part_number * self.chunk_size,
                           total=self.filesize,
                           ascii=True) as fs:
            while self.part_number < len(self.part_upload_url_list):
                upload_url = self.part_upload_url_list[
                    self.part_number]['upload_url']
                # Per-part byte budget; min() handles files smaller than a chunk.
                total_size = min(self.chunk_size, self.filesize)
                # Seek explicitly so a resumed/retried part starts at its offset.
                fs.seek(self.part_number * total_size)
                # Stream in 16 KiB slices rather than buffering the whole part.
                res = requests.put(url=upload_url,
                                   data=common.read_in_chunks(
                                       fs, 16 * 1024, total_size),
                                   verify=False, timeout=None)
                if 400 <= res.status_code < 600:
                    # OSS error bodies are XML; inspect Message/Code tags.
                    common_get_xml_value = common.get_xml_tag_value(
                        res.text, 'Message')
                    if common_get_xml_value == 'Request has expired.':
                        # Pre-signed URL expired — refresh and retry same part.
                        self.part_upload_url_list = self.get_upload_url()
                        continue
                    common_get_xml_value = common.get_xml_tag_value(
                        res.text, 'Code')
                    if common_get_xml_value == 'PartAlreadyExist':
                        # Part already accepted (duplicate retry) — count as done.
                        pass
                    else:
                        # Fatal: log the XML body and raise an HTTPError.
                        print_error(res.text)
                        res.raise_for_status()
                self.part_number += 1
                # Checkpoint the session after every part so an interrupted
                # run can resume with the same upload_id/file_id.
                DATA['tasks'][
                    self.filepath_hash]['part_number'] = self.part_number
                DATA['tasks'][
                    self.filepath_hash]['drive_id'] = self.drive_id
                DATA['tasks'][self.filepath_hash]['file_id'] = self.file_id
                DATA['tasks'][
                    self.filepath_hash]['upload_id'] = self.upload_id
                DATA['tasks'][
                    self.filepath_hash]['chunk_size'] = self.chunk_size
                common.save_task(DATA['tasks'])
def create(self, parent_file_id):
    """Register the file with AliyunDrive and fetch its part-upload URLs.

    Posts a createWithFolders request under *parent_file_id*.  Retries
    itself after a token refresh (``check_auth``), and when OVERWRITE is
    configured recycles an existing remote copy before re-creating.
    Stores ``part_upload_url_list``/``file_id``/``upload_id`` on self,
    persists the ids, and returns the raw response JSON (which may flag
    ``rapid_upload``).
    """
    payload = {
        "drive_id": self.drive_id,
        "part_info_list": self.part_info_list,
        "parent_file_id": parent_file_id,
        "name": self.filename,
        "type": "file",
        "check_name_mode": "auto_rename",
        "size": self.filesize,
        "content_hash": self.hash,
        "content_hash_name": 'sha1',
        'proof_code': self.proof_code,
        'proof_version': "v1",
    }
    if DATA['config']['OVERWRITE']:
        # Refuse duplicate names so the existing copy is reported back and
        # can be recycled below.
        payload['check_name_mode'] = 'refuse'
    response = requests.post(
        'https://api.aliyundrive.com/adrive/v2/file/createWithFolders',
        data=json.dumps(payload),
        headers=self.headers,
        verify=False,
    )
    body = response.json()
    if not self.check_auth(body):
        # Token was refreshed by check_auth — repeat the whole call.
        return self.create(parent_file_id)
    if DATA['config']['OVERWRITE'] and body.get('exist'):
        # A same-named file exists: recycle it, then create again.
        if self.recycle(body.get('file_id')):
            self.print('【%s】原有文件回收成功' % self.filename, 'info')
            self.print('【%s】重新上传新文件中' % self.filename, 'info')
            return self.create(parent_file_id)
    self.part_upload_url_list = body.get('part_info_list', [])
    self.file_id = body.get('file_id')
    self.upload_id = body.get('upload_id')
    common.save_task(self.id, {
        'drive_id': self.drive_id,
        'file_id': self.file_id,
        'upload_id': self.upload_id,
    })
    return body
filepath_hash = sha1(file.encode('utf-8')).hexdigest() if not filepath_hash in common.DATA['tasks']: common.DATA['tasks'][filepath_hash] = tmp if common.DATA['tasks'][filepath_hash]['upload_time'] > 0: print_warn(os.path.basename(file) + ' 已上传,无需重复上传') else: if common.DATA['tasks'][filepath_hash]['upload_time'] <= 0: # 提交线程 future = executor.submit(upload_file, FILE_PATH, file) future_list.append(future) for res in as_completed(future_list): if res.result(): common.DATA['tasks'][res.result()]['upload_time'] = time.time() common.save_task(common.DATA['tasks']) else: print_error(os.path.basename(file) + ' 上传失败') else: for file in file_list: tmp = { "filepath": file, "upload_time": 0, "drive_id": 0, "file_id": 0, "upload_id": 0, "part_number": 0, "chunk_size": 0, } filepath_hash = sha1(file.encode('utf-8')).hexdigest() if not filepath_hash in common.DATA['tasks']:
if filepath_hash not in DATA['tasks']: DATA['tasks'][filepath_hash] = task_template.copy() DATA['tasks'][filepath_hash]['filepath'] = file if DATA['tasks'][filepath_hash]['upload_time'] > 0: print_warn(os.path.basename(file) + ' 已上传,无需重复上传') else: if DATA['tasks'][filepath_hash]['upload_time'] <= 0: # 提交线程 future = executor.submit(upload_file, DATA['FILE_PATH'], file) future_list.append(future) for res in as_completed(future_list): if res.result(): DATA['tasks'][res.result()]['upload_time'] = time.time() save_task(DATA['tasks']) else: print_error(os.path.basename(file) + ' 上传失败') else: for file in file_list: filepath_hash = sha1(file.encode('utf-8')).hexdigest() if filepath_hash not in DATA['tasks']: DATA['tasks'][filepath_hash] = task_template.copy() DATA['tasks'][filepath_hash]['filepath'] = file if DATA['tasks'][filepath_hash]['upload_time'] > 0: print_warn(os.path.basename(file) + ' 已上传,无需重复上传') else: if DATA['tasks'][filepath_hash]['upload_time'] <= 0: if upload_file(DATA['FILE_PATH'], file): DATA['tasks'][filepath_hash]['upload_time'] = time.time() save_task(DATA['tasks'])