# Leftover multipart uploads were found on the destination. Warn the operator
# and let them choose: abort the pending uploads (losing the ability to resume
# them) or keep them and attempt to resume.
logger.warning(
    f'{len(multipart_uploaded_list)} Unfinished upload, clean them and restart?')
logger.warning('NOTICE: IF CLEAN, YOU CANNOT RESUME ANY UNFINISHED UPLOAD')

if DontAskMeToClean:
    # Non-interactive mode: never clean, always try to resume.
    keyboard_input = 'no'
else:
    keyboard_input = input(
        "CLEAN unfinished upload and restart(input CLEAN) or resume loading(press enter)? Please confirm: (n/CLEAN)")

if keyboard_input == 'CLEAN':
    # Abort every pending multipart upload on the destination bucket, then
    # forget them so the uploads below start from scratch.
    for unfinished in multipart_uploaded_list:
        s3_dest_client.abort_multipart_upload(
            Bucket=DesBucket,
            Key=unfinished["Key"],
            UploadId=unfinished["UploadId"],
        )
    multipart_uploaded_list = []
    logger.info('CLEAN FINISHED')
else:
    logger.info('You choose not to clean, now try to resume unfinished upload')

# Upload every file in the source list, fanning the work out across a thread
# pool; upload_file resumes from multipart_uploaded_list where possible.
with futures.ThreadPoolExecutor(max_workers=MaxParallelFile) as pool:
    for one_src_file in src_file_list:
        pool.submit(upload_file, one_src_file, des_file_list,
                    multipart_uploaded_list)

# Afterwards the source list and the destination folder's existing files are
# fetched again and compared size-by-size; record the elapsed wall time.
spent_time = int(time.time() - start_time)