Example #1
    def _me_download(
        self,
        url: str,
        localpath: str,
        cookies: Dict[str, Optional[str]],
        downloadparams: DownloadParams = DEFAULT_DOWNLOADPARAMS,
        done_callback: Optional[Callable[[Future], Any]] = None,
        encrypt_key: Optional[str] = None,
    ):
        headers = {
            "Cookie": "; ".join(
                [f"{k}={v if v is not None else ''}" for k, v in cookies.items()]
            ),
            "User-Agent": USER_AGENT,
            "Connection": "Keep-Alive",
        }

        task_id: Optional[TaskID] = None
        if not downloadparams.quiet:
            if not _progress._started:
                _progress.start()
            task_id = _progress.add_task("MeDownloader", start=False, title=localpath)

        def _wrap_done_callback(fut: Future):
            if task_id is not None:
                _progress.remove_task(task_id)
            if done_callback:
                done_callback(fut)

        def monit_callback(task_id: Optional[TaskID], offset: int):
            if task_id is not None:
                _progress.update(task_id, completed=offset + 1)

        def except_callback(task_id: Optional[TaskID]):
            if task_id is not None and progress_task_exists(task_id):
                _progress.reset(task_id)

        meDownloader = MeDownloader(
            "GET",
            url,
            headers=headers,
            max_workers=downloadparams.concurrency,
            callback=monit_callback,
            encrypt_key=encrypt_key,
        )

        if task_id is not None:
            length = len(meDownloader)
            _progress.update(task_id, total=length)
            _progress.start_task(task_id)

        meDownloader.download(
            Path(localpath),
            task_id=task_id,
            continue_=True,
            done_callback=_wrap_done_callback,
            except_callback=except_callback,
        )
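
Example #1 guards every progress call with `task_id is not None`, and the later examples also check `progress_task_exists(task_id)`. That helper is not part of these excerpts; a minimal sketch, assuming `_progress` is a `rich.progress.Progress` instance, whose `task_ids` property lists the currently registered tasks:

from typing import Optional

from rich.progress import Progress, TaskID

_progress = Progress()


def progress_task_exists(task_id: Optional[TaskID]) -> bool:
    # A task "exists" while its id is still registered on the Progress
    # instance, i.e. it has not been removed with `remove_task`
    return task_id in _progress.task_ids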
Example #2
 def callback_for_slice(monitor: MultipartEncoderMonitor):
     if task_id is not None and progress_task_exists(task_id):
         _progress.update(task_id,
                          completed=slice_completed + monitor.bytes_read)
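
`MultipartEncoderMonitor` comes from `requests_toolbelt`: it wraps a `MultipartEncoder`, invokes the callback with itself after every read, and exposes the running `bytes_read` counter that `slice_completed + monitor.bytes_read` builds on. A minimal sketch of how such a callback gets wired into a request; the URL and form field are placeholders, not the real Baidu PCS endpoint:

import requests
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor


def post_with_progress(url: str, localpath: str, callback) -> requests.Response:
    encoder = MultipartEncoder(
        fields={"file": ("file", open(localpath, "rb"), "application/octet-stream")}
    )
    # The monitor calls `callback(monitor)` after each chunk is read from the encoder
    monitor = MultipartEncoderMonitor(encoder, callback)
    return requests.post(url, data=monitor, headers={"Content-Type": monitor.content_type})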
Example #3
def upload_file(
    api: BaiduPCSApi,
    from_to: FromTo,
    ondup: str,
    encrypt_password: bytes = b"",
    encrypt_type: EncryptType = EncryptType.No,
    slice_size: int = DEFAULT_SLICE_SIZE,
    ignore_existing: bool = True,
    task_id: Optional[TaskID] = None,
):
    _wait_start()

    localpath, remotepath = from_to

    assert exists(localpath), f"`{localpath}` does not exist"

    if ignore_existing:
        try:
            if api.exists(remotepath):
                print(f"`{remotepath}` already exists.")
                logger.debug("`upload`: remote file already exists")
                if task_id is not None and progress_task_exists(task_id):
                    _progress.remove_task(task_id)
                return
        except Exception as err:
            if task_id is not None and progress_task_exists(task_id):
                _progress.remove_task(task_id)
            raise err

    logger.debug("`upload`: encrypt_type: %s", encrypt_type)

    encrypt_io = encrypt_type.encrypt_io(open(localpath, "rb"),
                                         encrypt_password)
    # IO Length
    encrypt_io_len = total_len(encrypt_io)

    logger.debug("`upload`: encrypt_io_len: %s", encrypt_io_len)

    # Progress bar
    if task_id is not None and progress_task_exists(task_id):
        _progress.update(task_id, total=encrypt_io_len)
        _progress.start_task(task_id)

    def callback(monitor: MultipartEncoderMonitor):
        if task_id is not None and progress_task_exists(task_id):
            _progress.update(task_id, completed=monitor.bytes_read)

    slice_completed = 0

    def callback_for_slice(monitor: MultipartEncoderMonitor):
        if task_id is not None and progress_task_exists(task_id):
            _progress.update(task_id,
                             completed=slice_completed + monitor.bytes_read)

    if encrypt_io_len > 256 * constant.OneK:
        # Rapid Upload
        logger.debug("`upload`: rapid_upload starts")
        try:
            slice_md5, content_md5, content_crc32, encrypt_io_len = rapid_upload_params(
                encrypt_io)
            api.rapid_upload_file(
                slice_md5,
                content_md5,
                content_crc32,
                encrypt_io_len,
                remotepath,
                ondup=ondup,
            )
            if task_id is not None and progress_task_exists(task_id):
                _progress.update(task_id, completed=encrypt_io_len)
                _progress.remove_task(task_id)

            logger.debug("`upload`: rapid_upload success")
            return
        except BaiduPCSError as err:
            logger.debug("`upload`: rapid_upload fails")

            # 31079: '未找到文件MD5,请使用上传API上传整个文件。'
            # ("File MD5 not found; please upload the whole file via the upload API.")
            if err.error_code != 31079:
                if task_id is not None and progress_task_exists(task_id):
                    _progress.remove_task(task_id)

                logger.warning("`rapid_upload`: unknown error: %s", err)
                raise err
            else:
                logger.info("`rapid_upload`: %s does not exist on remote",
                            localpath)

                if task_id is not None and progress_task_exists(task_id):
                    _progress.reset(task_id)

    try:
        if encrypt_io_len < slice_size:
            # Upload file
            logger.debug("`upload`: upload_file starts")

            reset_encrypt_io(encrypt_io)

            retry(
                30,
                except_callback=lambda err, fail_count: (
                    logger.warning(
                        "`upload`: `upload_file`: error: %s, fail_count: %s",
                        err,
                        fail_count,
                    ),
                    _wait_start(),
                ),
            )(api.upload_file)(encrypt_io,
                               remotepath,
                               ondup=ondup,
                               callback=callback)

            logger.debug("`upload`: upload_file success")
        else:
            # Upload file slice
            logger.debug("`upload`: upload_slice starts")

            slice_md5s = []
            reset_encrypt_io(encrypt_io)

            while True:
                _wait_start()

                logger.debug("`upload`: upload_slice: slice_completed: %s",
                             slice_completed)

                size = min(slice_size, encrypt_io_len - slice_completed)
                if size == 0:
                    break

                data = encrypt_io.read(size) or b""
                io = BytesIO(data)

                logger.debug("`upload`: upload_slice: size should be %s == %s",
                             size, len(data))

                # Retry upload until success
                slice_md5 = retry(
                    -1,
                    except_callback=lambda err, fail_count: (
                        io.seek(0, 0),
                        logger.warning(
                            "`upload`: `upload_slice`: error: %s, fail_count: %s",
                            err,
                            fail_count,
                        ),
                        _wait_start(),
                    ),
                )(api.upload_slice)(io, callback=callback_for_slice)

                slice_md5s.append(slice_md5)
                slice_completed += size

            # Combine slices
            retry(
                30,
                except_callback=lambda err, fail_count: logger.warning(
                    "`upload`: `combine_slices`: error: %s, fail_count: %s",
                    err,
                    fail_count,
                ),
            )(api.combine_slices)(slice_md5s, remotepath, ondup=ondup)

            logger.debug("`upload`: upload_slice and combine_slices success")

        if task_id is not None and progress_task_exists(task_id):
            _progress.remove_task(task_id)
    except Exception as err:
        logger.warning("`upload`: error: %s", err)
        raise err
    finally:
        encrypt_io.close()
        if task_id is not None and progress_task_exists(task_id):
            _progress.reset(task_id)
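
The `retry` helper used throughout Example #3 is a decorator factory: `retry(times, except_callback=...)` wraps a callable, calls `except_callback(err, fail_count)` after each failure, and a negative `times` (as in the slice-upload loop) retries indefinitely. Its real definition lives elsewhere in the project; a minimal sketch consistent with how it is called here:

from typing import Any, Callable, Optional


def retry(times: int, except_callback: Optional[Callable[[Exception, int], Any]] = None):
    # times < 0 retries forever, matching `retry(-1, ...)` above
    def decorator(func):
        def wrapper(*args, **kwargs):
            fail_count = 0
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception as err:
                    fail_count += 1
                    if except_callback is not None:
                        except_callback(err, fail_count)  # may itself raise to abort
                    if 0 <= times <= fail_count:
                        raise

        return wrapper

    return decorator

Example #6 relies on the abort path: its `combine_slices` error handler re-raises fatal error codes from inside the callback to break out of an otherwise long retry loop.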
Example #4
 def callback(monitor: MultipartEncoderMonitor):
     if task_id is not None:
         _progress.update(task_id, completed=monitor.bytes_read)
Example #5
def upload_file(
    api: BaiduPCSApi,
    from_to: FromTo,
    ondup: str,
    slice_size: int = DEFAULT_SLICE_SIZE,
    ignore_existing: bool = True,
    task_id: Optional[TaskID] = None,
):
    localpath, remotepath = from_to

    assert exists(localpath), f"`{localpath}` does not exist"

    if ignore_existing:
        try:
            if api.exists(remotepath):
                print(f"`{remotepath}` already exists.")
                if task_id is not None:
                    _progress.remove_task(task_id)
                return
        except Exception as err:
            if task_id is not None:
                _progress.remove_task(task_id)
            raise err

    local_size = Path(localpath).stat().st_size

    if task_id is not None:
        _progress.update(task_id, total=local_size)
        _progress.start_task(task_id)

    def callback(monitor: MultipartEncoderMonitor):
        if task_id is not None:
            _progress.update(task_id, completed=monitor.bytes_read)

    slice_completed = 0

    def callback_for_slice(monitor: MultipartEncoderMonitor):
        if task_id is not None:
            _progress.update(task_id, completed=slice_completed + monitor.bytes_read)

    if local_size > 256 * constant.OneK:
        try:
            api.rapid_upload_file(localpath, remotepath, ondup=ondup)
            if task_id is not None:
                _progress.update(task_id, completed=local_size)
                _progress.remove_task(task_id)
            return
        except BaiduPCSError as err:
            # 31079: '未找到文件MD5,请使用上传API上传整个文件。'
            # ("File MD5 not found; please upload the whole file via the upload API.")
            if err.error_code != 31079:
                if task_id is not None:
                    _progress.remove_task(task_id)
                raise err
            else:
                if task_id is not None:
                    _progress.reset(task_id)

    try:
        if local_size < slice_size:
            api.upload_file(localpath, remotepath, ondup=ondup, callback=callback)
        else:
            slice_md5s = []
            with open(localpath, "rb") as fd:
                while True:
                    buf = fd.read(slice_size)
                    if not buf:
                        break

                    slice_md5 = api.upload_slice(buf, callback=callback_for_slice)
                    slice_md5s.append(slice_md5)
                    slice_completed += len(buf)

            api.combine_slices(slice_md5s, remotepath, ondup=ondup)
    finally:
        if task_id is not None:
            _progress.remove_task(task_id)
Example #6
def upload_file(
    api: BaiduPCSApi,
    from_to: FromTo,
    ondup: str,
    encrypt_password: bytes = b"",
    encrypt_type: EncryptType = EncryptType.No,
    slice_size: int = DEFAULT_SLICE_SIZE,
    ignore_existing: bool = True,
    task_id: Optional[TaskID] = None,
    user_id: Optional[int] = None,
    user_name: Optional[str] = None,
    check_md5: bool = False,
):
    _wait_start()

    localpath, remotepath = from_to

    assert exists(localpath), f"`{localpath}` does not exist"

    if ignore_existing:
        try:
            if api.exists(remotepath):
                print(f"`{remotepath}` already exists.")
                logger.debug("`upload`: remote file already exists")
                if task_id is not None and progress_task_exists(task_id):
                    _progress.remove_task(task_id)
                return
        except Exception as err:
            if task_id is not None and progress_task_exists(task_id):
                _progress.remove_task(task_id)
            raise err

    logger.debug(
        "`upload`: encrypt_type: %s, localpath: %s, remotepath: %s",
        encrypt_type,
        localpath,
        remotepath,
    )

    stat = Path(localpath).stat()
    local_ctime, local_mtime = int(stat.st_ctime), int(stat.st_mtime)

    encrypt_io = encrypt_type.encrypt_io(open(localpath, "rb"), encrypt_password)
    # IO Length
    encrypt_io_len = total_len(encrypt_io)

    logger.debug("`upload`: encrypt_io_len: %s", encrypt_io_len)

    # Progress bar
    if task_id is not None and progress_task_exists(task_id):
        _progress.update(task_id, total=encrypt_io_len)
        _progress.start_task(task_id)

    def callback(monitor: MultipartEncoderMonitor):
        if task_id is not None and progress_task_exists(task_id):
            _progress.update(task_id, completed=monitor.bytes_read)

    slice_completed = 0

    def callback_for_slice(monitor: MultipartEncoderMonitor):
        if task_id is not None and progress_task_exists(task_id):
            _progress.update(task_id, completed=slice_completed + monitor.bytes_read)

    slice256k_md5 = ""
    content_md5 = ""
    content_crc32 = 0
    io_len = 0

    if encrypt_io_len > 256 * constant.OneK:
        # Rapid Upload
        logger.debug("`upload`: rapid_upload starts")
        try:
            slice256k_md5, content_md5, content_crc32, io_len = rapid_upload_params(
                encrypt_io
            )
            api.rapid_upload_file(
                slice256k_md5,
                content_md5,
                0,  # not needed
                encrypt_io_len,
                remotepath,
                local_ctime=local_ctime,
                local_mtime=local_mtime,
                ondup=ondup,
            )

            if _rapiduploadinfo_file:
                save_rapid_upload_info(
                    _rapiduploadinfo_file,
                    slice256k_md5,
                    content_md5,
                    content_crc32,
                    io_len,
                    localpath=localpath,
                    remotepath=remotepath,
                    encrypt_password=encrypt_password,
                    encrypt_type=encrypt_type.value,
                    user_id=user_id,
                    user_name=user_name,
                )

            if task_id is not None and progress_task_exists(task_id):
                _progress.update(task_id, completed=encrypt_io_len)
                _progress.remove_task(task_id)

            logger.debug("`upload`: rapid_upload success, task_id: %s", task_id)
            return
        except BaiduPCSError as err:
            logger.warning("`upload`: rapid_upload fails")

            # 31079: '未找到文件MD5,请使用上传API上传整个文件。'
            # ("File MD5 not found; please upload the whole file via the upload API.")
            if err.error_code != 31079:
                if task_id is not None and progress_task_exists(task_id):
                    _progress.remove_task(task_id)

                logger.warning("`rapid_upload`: unknown error: %s", err)
                raise err
            else:
                logger.debug("`rapid_upload`: %s does not exist on remote", localpath)

                if task_id is not None and progress_task_exists(task_id):
                    _progress.reset(task_id)

    try:
        # Upload file slice
        logger.debug("`upload`: upload_slice starts")

        slice_md5s = []
        reset_encrypt_io(encrypt_io)

        while True:
            _wait_start()

            logger.debug("`upload`: upload_slice: slice_completed: %s", slice_completed)

            size = min(slice_size, encrypt_io_len - slice_completed)
            if size == 0:
                break

            data = encrypt_io.read(size) or b""
            io = BytesIO(data)

            logger.debug(
                "`upload`: upload_slice: size should be %s == %s", size, len(data)
            )

            # Retry upload until success
            slice_md5 = retry(
                -1,
                except_callback=lambda err, fail_count: (
                    io.seek(0, 0),
                    logger.warning(
                        "`upload`: `upload_slice`: error: %s, fail_count: %s",
                        err,
                        fail_count,
                        exc_info=err,
                    ),
                    _wait_start(),
                ),
            )(api.upload_slice)(io, callback=callback_for_slice)

            slice_md5s.append(slice_md5)
            slice_completed += size

        # Combine slices
        def _handle_combine_slices_error(err, fail_count):
            logger.warning(
                "`upload`: `combine_slices`: error: %s, fail_count: %s",
                err,
                fail_count,
                exc_info=err,
            )

            # If one of the following errors occurs, the slices need to be re-uploaded
            if isinstance(err, BaiduPCSError) and err.error_code in (
                31352,  # commit superfile2 failed
                31363,  # block miss in superfile2
            ):
                raise err

        retry(20, except_callback=_handle_combine_slices_error)(api.combine_slices)(
            slice_md5s,
            remotepath,
            local_ctime=local_ctime,
            local_mtime=local_mtime,
            ondup=ondup,
        )

        logger.debug(
            "`upload`: upload_slice and combine_slices success, task_id: %s", task_id
        )

        # `combine_slices` can not report the right content md5,
        # so we check the uploaded file on the server by hand.
        if check_md5:
            _check_md5(
                api,
                localpath,
                remotepath,
                slice256k_md5,
                content_md5,
                content_crc32,
                io_len,
                encrypt_password=encrypt_password,
                encrypt_type=encrypt_type.value,
                user_id=user_id,
                user_name=user_name,
            )

        if task_id is not None and progress_task_exists(task_id):
            _progress.remove_task(task_id)
    except Exception as err:
        logger.warning("`upload`: error: %s", err)
        raise err
    finally:
        encrypt_io.close()
        if task_id is not None and progress_task_exists(task_id):
            _progress.reset(task_id)
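
`rapid_upload_params` is also not shown. Judging by the names in Example #6 (`slice256k_md5`, `content_md5`, `content_crc32`, `io_len`), it appears to compute the MD5 of the first 256 KiB plus the MD5, CRC32, and length of the whole stream, which is what Baidu's rapid-upload API matches files by. A hypothetical reconstruction under that assumption:

import hashlib
import zlib
from typing import BinaryIO, Tuple


def rapid_upload_params(io: BinaryIO) -> Tuple[str, str, int, int]:
    # Assumed semantics: MD5 of the first 256 KiB, then MD5/CRC32/length of the full stream
    first = io.read(256 * 1024)
    slice256k_md5 = hashlib.md5(first).hexdigest()

    content_md5 = hashlib.md5(first)
    content_crc32 = zlib.crc32(first)
    io_len = len(first)
    while True:
        chunk = io.read(1 << 20)
        if not chunk:
            break
        content_md5.update(chunk)
        content_crc32 = zlib.crc32(chunk, content_crc32)
        io_len += len(chunk)

    return slice256k_md5, content_md5.hexdigest(), content_crc32 & 0xFFFFFFFF, io_len

Whatever the real implementation, it consumes the stream, which is why Examples #3 and #6 both call `reset_encrypt_io(encrypt_io)` before the actual upload.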
Example #7
 def monit_callback(task_id: Optional[TaskID], offset: int):
     if task_id is not None:
         _progress.update(task_id, completed=offset + 1)
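
Here `offset` is the zero-based position of the last byte written, hence `completed=offset + 1`. `MeDownloader`'s internals are not included in these excerpts; a purely hypothetical illustration of a streaming loop driving such a callback:

import requests


def stream_download(url: str, localpath: str, monit_callback, task_id=None,
                    chunk_size: int = 64 * 1024):
    # Hypothetical sketch: write the body chunk by chunk and report the
    # zero-based offset of the last byte written after each chunk
    offset = -1
    with requests.get(url, stream=True) as resp, open(localpath, "wb") as fd:
        for chunk in resp.iter_content(chunk_size=chunk_size):
            fd.write(chunk)
            offset += len(chunk)
            monit_callback(task_id, offset)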