def _me_download(
    self,
    url: str,
    localpath: str,
    cookies: Dict[str, Optional[str]],
    downloadparams: DownloadParams = DEFAULT_DOWNLOADPARAMS,
    done_callback: Optional[Callable[[Future], Any]] = None,
    encrypt_key: Optional[str] = None,
):
    """Download `url` to `localpath` using `MeDownloader`.

    Args:
        url: Remote resource URL, fetched with a GET request.
        localpath: Destination file path on the local disk.
        cookies: Cookie name -> value mapping serialized into the request's
            `Cookie` header; `None` values are sent as empty strings.
        downloadparams: Concurrency / quiet-mode settings for the download.
        done_callback: Optional callable invoked with the finished `Future`
            after the progress task (if any) has been removed.
        encrypt_key: Optional key passed through to `MeDownloader` for
            decrypting the downloaded content.
    """
    headers = {
        # BUG FIX: the header name was "Cookie " (trailing space), which
        # sends a malformed header that servers ignore, so authenticated
        # downloads silently failed. It must be exactly "Cookie".
        "Cookie": "; ".join(
            [f"{k}={v if v is not None else ''}" for k, v in cookies.items()]
        ),
        "User-Agent": USER_AGENT,
        "Connection": "Keep-Alive",
    }

    # Create a (not yet started) progress task unless quiet mode is on.
    task_id: Optional[TaskID] = None
    if not downloadparams.quiet:
        if not _progress._started:
            _progress.start()
        task_id = _progress.add_task("MeDownloader", start=False, title=localpath)

    def _wrap_done_callback(fut: Future):
        # Remove the progress row first, then hand off to the caller's callback.
        if task_id is not None:
            _progress.remove_task(task_id)
        if done_callback:
            done_callback(fut)

    def monit_callback(task_id: Optional[TaskID], offset: int):
        # Progress is reported as the last written byte offset (0-based),
        # hence `offset + 1` completed bytes.
        if task_id is not None:
            _progress.update(task_id, completed=offset + 1)

    def except_callback(task_id: Optional[TaskID]):
        # On error, rewind the progress bar so a retry starts from zero.
        if task_id is not None and progress_task_exists(task_id):
            _progress.reset(task_id)

    meDownloader = MeDownloader(
        "GET",
        url,
        headers=headers,
        max_workers=downloadparams.concurrency,
        callback=monit_callback,
        encrypt_key=encrypt_key,
    )

    if task_id is not None:
        # Total size is only known after MeDownloader has issued its request.
        length = len(meDownloader)
        _progress.update(task_id, total=length)
        _progress.start_task(task_id)

    meDownloader.download(
        Path(localpath),
        task_id=task_id,
        continue_=True,  # resume a partially downloaded file
        done_callback=_wrap_done_callback,
        except_callback=except_callback,
    )
def upload(
    api: BaiduPCSApi,
    from_to_list: List[FromTo],
    ondup: str = "overwrite",
    encrypt_password: bytes = b"",
    encrypt_type: EncryptType = EncryptType.No,
    max_workers: int = CPU_NUM,
    slice_size: int = DEFAULT_SLICE_SIZE,
    ignore_existing: bool = True,
    show_progress: bool = True,
):
    """Upload from_tos

    Args:
        max_workers (int): The number of concurrent workers
        slice_size (int): The size of slice for uploading slices.
        ignore_existing (bool): Ignoring these localpath which of remotepath exist.
        show_progress (bool): Show uploading progress.
    """
    excepts = {}
    # Bound the number of in-flight uploads; each worker releases the
    # semaphore in `sure_release` when its upload finishes.
    semaphore = Semaphore(max_workers)
    with _progress:
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futs = {}
            for from_to in from_to_list:
                semaphore.acquire()
                task_id = None
                if show_progress:
                    task_id = _progress.add_task(
                        "upload", start=False, title=from_to.from_
                    )
                fut = executor.submit(
                    sure_release,
                    semaphore,
                    upload_file,
                    api,
                    from_to,
                    ondup,
                    encrypt_password=encrypt_password,
                    encrypt_type=encrypt_type,
                    slice_size=slice_size,
                    ignore_existing=ignore_existing,
                    task_id=task_id,
                )
                futs[fut] = from_to

            # Collect per-file failures; successful uploads need no action.
            for fut in as_completed(futs):
                e = fut.exception()
                if e is not None:
                    from_to = futs[fut]
                    excepts[from_to] = e

    # Summary
    if excepts:
        table = Table(title="Upload Error", box=SIMPLE, show_edge=False)
        table.add_column("From", justify="left", overflow="fold")
        table.add_column("To", justify="left", overflow="fold")
        table.add_column("Error", justify="left")

        for from_to, e in sorted(excepts.items()):
            # BUG FIX: the table declares three columns, but the original
            # add_row passed only two values, shifting the error message
            # into the "To" column and leaving "Error" empty.
            table.add_row(from_to.from_, from_to.to_, Text(str(e), style="red"))

        _progress.console.print(table)
def upload(
    api: BaiduPCSApi,
    from_to_list: List[FromTo],
    ondup: str = "overwrite",
    encrypt_password: bytes = b"",
    encrypt_type: EncryptType = EncryptType.No,
    max_workers: int = CPU_NUM,
    slice_size: int = DEFAULT_SLICE_SIZE,
    ignore_existing: bool = True,
    show_progress: bool = True,
    rapiduploadinfo_file: Optional[str] = None,
    user_id: Optional[int] = None,
    user_name: Optional[str] = None,
    check_md5: bool = False,
):
    """Upload from_tos

    Args:
        max_workers (int): The number of concurrent workers
        slice_size (int): The size of slice for uploading slices.
        ignore_existing (bool): Ignoring these localpath which of remotepath exist.
        show_progress (bool): Show uploading progress.
        check_md5 (bool): To fix the content md5 after `combine_slices`
            `combine_slices` always not return correct content md5.
            To fix it, we need to use `rapid_upload_file` re-upload the content.
            Warning, if content length is large, it could take some minutes,
            e.g. it takes 5 minutes about 2GB.
    """
    # Record the rapid-upload info file only once, on the first call.
    global _rapiduploadinfo_file
    if _rapiduploadinfo_file is None:
        _rapiduploadinfo_file = rapiduploadinfo_file

    excepts = {}
    # Bound the number of in-flight uploads; each worker releases the
    # semaphore in `sure_release` when its upload finishes.
    semaphore = Semaphore(max_workers)
    with _progress:
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futs = {}
            for from_to in from_to_list:
                semaphore.acquire()
                task_id = None
                if show_progress:
                    task_id = _progress.add_task(
                        "upload", start=False, title=from_to.from_
                    )
                fut = executor.submit(
                    sure_release,
                    semaphore,
                    upload_file,
                    api,
                    from_to,
                    ondup,
                    encrypt_password=encrypt_password,
                    encrypt_type=encrypt_type,
                    slice_size=slice_size,
                    ignore_existing=ignore_existing,
                    task_id=task_id,
                    user_id=user_id,
                    user_name=user_name,
                    check_md5=check_md5,
                )
                futs[fut] = from_to

            # Collect per-file failures; successful uploads need no action.
            for fut in as_completed(futs):
                e = fut.exception()
                if e is not None:
                    from_to = futs[fut]
                    excepts[from_to] = e

    # Summary
    if excepts:
        table = Table(title="Upload Error", box=SIMPLE, show_edge=False)
        table.add_column("From", justify="left", overflow="fold")
        table.add_column("To", justify="left", overflow="fold")
        table.add_column("Error", justify="left")

        for from_to, e in sorted(excepts.items()):
            # BUG FIX: the table declares three columns, but the original
            # add_row passed only two values, shifting the error message
            # into the "To" column and leaving "Error" empty.
            table.add_row(from_to.from_, from_to.to_, Text(str(e), style="red"))

        _progress.console.print(table)