Example #1
    def prepare_env_and_get_db_filepath(self) -> str:
        """
        逻辑说明
        假设key的md5为md5
        本地缓存文件路径为.db/md5{0:3}/md5.json
        文件内容为val_type的实例的json序列化结果
        :return: 是否是首次运行
        """
        from util import make_sure_dir_exists

        db_dir = ""
        db_file = ""

        if self.db_filepath != "":
            db_dir = os.path.dirname(self.db_filepath)
            db_file = self.db_filepath
        else:
            key_md5 = self.get_db_filename()

            db_dir = os.path.join(db_top_dir, key_md5[0:3])
            db_file = os.path.join(db_dir, key_md5)

        make_sure_dir_exists(db_dir)

        return db_file

    def upload_to_lanzouyun(self,
                            filepath,
                            target_folder,
                            history_file_prefix="",
                            also_upload_compressed_version=False,
                            only_upload_compressed_version=False) -> bool:
        if not only_upload_compressed_version:
            ok = self._upload_to_lanzouyun(filepath, target_folder,
                                           history_file_prefix)
            if not ok:
                return False

        if also_upload_compressed_version:
            make_sure_dir_exists('.cached')
            filename = os.path.basename(filepath)
            compressed_filepath = os.path.join(
                '.cached', self.get_compressed_version_filename(filename))
            compressed_history_file_prefix = f"{self.compressed_version_prefix}{history_file_prefix}"

            logger.info(
                color("bold_green") + f"创建压缩版本并上传 {compressed_filepath}")
            # Create the compressed version
            with open(f"{filepath}", "rb") as file_in:
                with lzma.open(f"{compressed_filepath}", "wb") as file_out:
                    file_out.writelines(file_in)
            # Upload it
            return self._upload_to_lanzouyun(compressed_filepath,
                                             target_folder,
                                             compressed_history_file_prefix)

        return True
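
All of these examples depend on a make_sure_dir_exists helper imported from util. Its implementation is not shown on this page; a minimal sketch, assuming it is just a thin wrapper around os.makedirs with exist_ok, could look like this:

import os


def make_sure_dir_exists(directory: str) -> None:
    # Create the directory (including any missing parents); do nothing if it already exists
    os.makedirs(directory, exist_ok=True)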
Example #3
    def __init__(self, novel: Novel, ext: str, out_path: str = 'out'):
        super(Output, self).__init__()
        if not novel.success:
            self.log.warning("Novel wasn't parsed successfully.")
        self.novel = novel
        self.ext = ext
        self.out_path = out_path
        self.slug_title = slugify(novel.title, lowercase=False)
        self.filename = f"{self.slug_title}.{self.ext}"
        self.path = os.path.join(out_path, self.novel.url.hostname)
        make_sure_dir_exists(self.path)
Example #4
def download_ide_act_desc_json(actId: str) -> str:
    first_two = str(actId[:2])
    act_cache_dir = f"{cached_dir}/ideActDesc/{first_two}"
    act_cache_file = f"{act_cache_dir}/{actId}.json"

    # Then fetch the activity info from the server
    url = f"https://comm.ams.game.qq.com/ide/page/{actId}"

    res = requests.get(url, timeout=1)
    if res.status_code != 200:
        return ""

    make_sure_dir_exists(act_cache_dir)
    with open(act_cache_file, "w", encoding="utf-8") as f:
        f.write(res.text)

    return act_cache_file
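
A hypothetical call (the activity id below is made up for illustration) caches the response under cached_dir and returns the cache file path, or an empty string when the request fails:

# "12345" is a placeholder actId, not a real activity
cache_file = download_ide_act_desc_json("12345")
if cache_file != "":
    with open(cache_file, encoding="utf-8") as f:
        act_desc = f.read()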
Example #5
def test():
    import json

    from util import make_sure_dir_exists

    dir_src = os.getcwd()
    test_root_dir = realpath("test/compress")

    make_sure_dir_exists(test_root_dir)
    os.chdir(test_root_dir)

    # Test file compression and decompression
    test_file_name = "test_file.json"
    test_compressed_file_name = test_file_name + ".7z"
    test_decompressed_file_name = test_compressed_file_name + ".json"
    with open(test_file_name, "w", encoding="utf-8") as f:
        json.dump("test_file_compress", f)
    compress_file_with_lzma(test_file_name, test_compressed_file_name)
    decompress_file_with_lzma(test_compressed_file_name,
                              test_decompressed_file_name)

    # Test directory compression and decompression
    test_folder_name = "test_folder"
    test_compressed_folder_name = test_folder_name + ".7z"
    make_sure_dir_exists(test_folder_name)
    with open(os.path.join(test_folder_name, "test.json"),
              "w",
              encoding="utf-8") as f:
        json.dump("test_folder_compress", f)

    compress_dir_with_bandizip(test_folder_name, test_compressed_folder_name,
                               dir_src)

    shutil.rmtree(test_folder_name)
    decompress_dir_with_bandizip(test_compressed_folder_name, dir_src)

    shutil.rmtree(test_root_dir, ignore_errors=True)

    # Test in-memory compression and decompression
    test_text = "this is a test 测试内容 压缩前"
    compressed_bytes = compress_in_memory_with_lzma(test_text.encode())
    decompressed = decompress_in_memory_with_lzma(compressed_bytes).decode()
    print(compressed_bytes)
    print(decompressed)
    print(test_text == decompressed)
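
compress_in_memory_with_lzma and decompress_in_memory_with_lzma are defined elsewhere in the project; a minimal sketch, assuming they are thin wrappers over the standard lzma module, might be:

import lzma


def compress_in_memory_with_lzma(data: bytes) -> bytes:
    # Compress a bytes payload entirely in memory
    return lzma.compress(data)


def decompress_in_memory_with_lzma(data: bytes) -> bytes:
    # Inverse of compress_in_memory_with_lzma
    return lzma.decompress(data)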
Example #6
def download_file(url: str,
                  download_dir=".cached/downloads",
                  filename="",
                  connect_timeout=10) -> str:
    download_dir = os.path.realpath(download_dir)
    filename = filename or os.path.basename(url)

    start_time = get_now()

    target_file_path = os.path.join(download_dir, filename)

    logger.info(f"开始下载 {url} 到 {target_file_path}")
    response = requests.get(url, stream=True, timeout=connect_timeout)

    if response.status_code != 200:
        raise Exception(f"下载失败,状态码 {response.status_code}")

    make_sure_dir_exists(download_dir)

    with open(target_file_path, "wb") as f:
        total_length_optional = response.headers.get("content-length")

        if total_length_optional is None:  # no content length header
            f.write(response.content)
        else:
            dl = 0
            total_length = int(total_length_optional)
            for data in response.iter_content(chunk_size=4096):
                # filter out keep-alive new lines
                if not data:
                    continue

                f.write(data)

                dl += len(data)
                show_progress(filename, total_length, dl)

    end_time = get_now()

    logger.info(color("bold_yellow") + f"下载完成,耗时 {end_time - start_time}")

    return target_file_path
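
show_progress is also a project helper not shown here; a minimal sketch of a compatible progress printer, assuming it only needs to write a single updating line to stdout, could be:

def show_progress(filename: str, total_length: int, downloaded: int) -> None:
    # Print a simple single-line progress indicator
    percent = downloaded / total_length * 100
    print(f"\r{filename}: {downloaded}/{total_length} bytes ({percent:.1f}%)", end="", flush=True)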
Example #7
def download_act_desc_js(actId: str) -> str:
    last_three = str(actId[-3:])
    act_cache_dir = f"{cached_dir}/actDesc/{last_three}/{actId}"
    act_cache_file = f"{act_cache_dir}/act.desc.js"

    # Then fetch the activity info from the server
    actUrls = [
        f"https://dnf.qq.com/comm-htdocs/js/ams/actDesc/{last_three}/{actId}/act.desc.js",
        f"https://apps.game.qq.com/comm-htdocs/js/ams/actDesc/{last_three}/{actId}/act.desc.js",
        f"https://apps.game.qq.com/comm-htdocs/js/ams/v0.2R02/act/{actId}/act.desc.js",
    ]
    for url in actUrls:
        res = requests.get(url, timeout=1)
        if res.status_code != 200:
            continue

        make_sure_dir_exists(act_cache_dir)
        with open(act_cache_file, "w", encoding="utf-8") as f:
            f.write(res.text)

        return act_cache_file

    return ""
Example #8
    def upload_to_lanzouyun(self,
                            filepath: str,
                            target_folder: Folder,
                            history_file_prefix="",
                            also_upload_compressed_version=False,
                            only_upload_compressed_version=False) -> bool:
        if not self.login_ok:
            logger.info("未登录,不能上传文件")
            return False

        if history_file_prefix == "":
            # No history file prefix was given; default to the current file name
            history_file_prefix = os.path.basename(filepath)

        if not only_upload_compressed_version:
            ok = self._upload_to_lanzouyun(filepath, target_folder,
                                           history_file_prefix)
            if not ok:
                return False

        if also_upload_compressed_version:
            make_sure_dir_exists('.cached')
            filename = os.path.basename(filepath)
            compressed_filepath = os.path.join(
                '.cached', self.get_compressed_version_filename(filename))
            compressed_history_file_prefix = f"{self.compressed_version_prefix}{history_file_prefix}"

            logger.info(
                color("bold_green") + f"创建压缩版本并上传 {compressed_filepath}")
            # Create the compressed version
            compress_file_with_lzma(filepath, compressed_filepath)
            # Upload it
            return self._upload_to_lanzouyun(compressed_filepath,
                                             target_folder,
                                             compressed_history_file_prefix)

        return True
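
compress_file_with_lzma replaces the inline lzma block seen in Example #1; a minimal sketch of it and its decompression counterpart, based on that inline code, could be:

import lzma


def compress_file_with_lzma(filepath: str, compressed_filepath: str) -> None:
    # Stream the source file through an lzma (xz) compressor to the target path
    with open(filepath, "rb") as file_in:
        with lzma.open(compressed_filepath, "wb") as file_out:
            file_out.writelines(file_in)


def decompress_file_with_lzma(compressed_filepath: str, filepath: str) -> None:
    # Inverse of compress_file_with_lzma
    with lzma.open(compressed_filepath, "rb") as file_in:
        with open(filepath, "wb") as file_out:
            file_out.writelines(file_in)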
Example #9
def test():
    from util import make_sure_dir_exists
    import json

    dir_src = os.getcwd()
    test_root_dir = realpath("test/compress")

    make_sure_dir_exists(test_root_dir)
    os.chdir(test_root_dir)

    # Test file compression and decompression
    test_file_name = "test_file.json"
    test_compressed_file_name = test_file_name + ".7z"
    test_decompressed_file_name = test_compressed_file_name + ".json"
    with open(test_file_name, 'w', encoding='utf-8') as f:
        json.dump("test_file_compress", f)
    compress_file_with_lzma(test_file_name, test_compressed_file_name)
    decompress_file_with_lzma(test_compressed_file_name,
                              test_decompressed_file_name)

    # Test directory compression and decompression
    test_folder_name = "test_folder"
    test_compressed_folder_name = test_folder_name + ".7z"
    make_sure_dir_exists(test_folder_name)
    with open(os.path.join(test_folder_name, "test.json"),
              'w',
              encoding='utf-8') as f:
        json.dump("test_folder_compress", f)

    compress_dir_with_bandizip(test_folder_name, test_compressed_folder_name,
                               dir_src)

    shutil.rmtree(test_folder_name)
    decompress_dir_with_bandizip(test_compressed_folder_name, dir_src)

    shutil.rmtree(test_root_dir, ignore_errors=True)
Example #10
os.system(set_title_cmd)

# First declare the paths of the directories that will be used
dir_src = os.path.realpath('.')
dir_all_release = os.path.realpath(os.path.join("releases"))
release_dir_name = f"DNF蚊子腿小助手_{version}_by风之凌殇"
release_7z_name = f'{release_dir_name}.7z'
dir_github_action_artifact = "_github_action_artifact"

# ---------------Build
# Invoke the build script
os.chdir(dir_src)
build()

# ---------------Clear out some historical data
make_sure_dir_exists(dir_all_release)
os.chdir(dir_all_release)
clear_github_artifact(dir_all_release, dir_github_action_artifact)

# ---------------Package
os.chdir(dir_src)
package(dir_src, dir_all_release, release_dir_name, release_7z_name,
        dir_github_action_artifact)

# ---------------Build incremental patches
create_patch_for_latest_n_version = 3

# ---------------Build the incremental packages
os.chdir(dir_all_release)
show_head_line(f"开始构建增量包,最多包含过去{create_patch_for_latest_n_version}个版本到最新版本的补丁",
               color("bold_yellow"))
Example #11
def release():
    # ---------------Preparation
    prompt = f"如需直接使用默认版本号:{now_version} 请直接按回车\n或手动输入版本号后按回车:"
    version = input(prompt) or now_version

    version_reg = r"\d+\.\d+\.\d+"

    if re.match(version_reg, version) is None:
        logger.info(f"版本号格式有误,正确的格式类似:1.0.0 ,而不是 {version}")
        pause_and_exit(-1)

    # Maximize the window
    change_console_window_mode_async(disable_min_console=True)

    version = "v" + version

    run_start_time = datetime.now()
    show_head_line(f"开始发布版本 {version}", color("bold_yellow"))

    set_title_cmd = f"title 发布 {version}"
    os.system(set_title_cmd)

    # First declare the paths of the directories that will be used
    dir_src = os.path.realpath(".")
    dir_all_release = os.path.realpath(os.path.join("releases"))
    release_dir_name = f"DNF蚊子腿小助手_{version}_by风之凌殇"
    release_7z_name = f"{release_dir_name}.7z"
    dir_github_action_artifact = "_github_action_artifact"

    # ---------------Build
    # Invoke the build script
    os.chdir(dir_src)
    build()

    # ---------------Clear out some historical data
    make_sure_dir_exists(dir_all_release)
    os.chdir(dir_all_release)
    clear_github_artifact(dir_all_release, dir_github_action_artifact)

    # ---------------Package
    os.chdir(dir_src)
    package(dir_src, dir_all_release, release_dir_name, release_7z_name, dir_github_action_artifact)

    # ---------------Build incremental patches
    create_patch_for_latest_n_version = 3

    # ---------------Build the incremental packages
    os.chdir(dir_all_release)
    show_head_line(f"开始构建增量包,最多包含过去{create_patch_for_latest_n_version}个版本到最新版本的补丁", color("bold_yellow"))
    create_patch(dir_src, dir_all_release, create_patch_for_latest_n_version, dir_github_action_artifact)

    # ---------------Get the patch path (done separately for easier debugging)
    os.chdir(dir_all_release)
    patch_file_name = create_patch(
        dir_src,
        dir_all_release,
        create_patch_for_latest_n_version,
        dir_github_action_artifact,
        get_final_patch_path_only=True,
    )

    # ---------------Mark the new version
    show_head_line("提交版本和版本变更说明,并同步到docs目录,用于生成github pages", color("bold_yellow"))
    os.chdir(dir_src)
    commit_new_version()

    # ---------------Upload to Lanzouyun
    show_head_line("开始上传到蓝奏云", color("bold_yellow"))
    os.chdir(dir_src)
    with open("upload_cookie.json") as fp:
        cookie = json.load(fp)
    os.chdir(dir_all_release)
    uploader = Uploader()
    uploader.login(cookie)
    if uploader.login_ok:
        logger.info("蓝奏云登录成功,开始上传压缩包")

        def path_in_src(filepath_relative_to_src: str) -> str:
            return os.path.realpath(os.path.join(dir_src, filepath_relative_to_src))

        realpath = os.path.realpath

        upload_info_list = [
            (
                uploader.folder_djc_helper,
                [
                    (realpath(release_7z_name), uploader.history_version_prefix),
                    (path_in_src("utils/auto_updater.exe"), ""),
                    (path_in_src("使用教程/使用文档.docx"), ""),
                    (path_in_src("使用教程/视频教程.txt"), ""),
                    (path_in_src("付费指引/付费指引.docx"), ""),
                    (path_in_src("utils/不要下载增量更新文件_这个是给自动更新工具使用的.txt"), ""),
                    (realpath(patch_file_name), uploader.history_patches_prefix),
                ],
            ),
            (
                uploader.folder_dnf_calc,
                [
                    (realpath(release_7z_name), uploader.history_version_prefix),
                ],
            ),
        ]

        logger.info(color("bold_green") + "具体上传列表如下:")
        for upload_folder, upload_list in upload_info_list:
            logger.info(color("bold_cyan") + f"\t{upload_folder.name}:")
            for local_filepath, _history_file_prefix in upload_list:
                logger.info(f"\t\t{local_filepath}")

            logger.info("\n")

        for upload_folder, upload_list in upload_info_list:
            for local_filepath, history_file_prefix in reversed(upload_list):
                # Iterate in reverse so that, within the same cloud folder, files listed first are uploaded last and therefore appear at the top of the folder listing
                total_try_count = 1
                for try_index in range_from_one(total_try_count):
                    upload_ok = uploader.upload_to_lanzouyun(
                        local_filepath, upload_folder, history_file_prefix=history_file_prefix
                    )
                    if upload_ok:
                        break

                    logger.warning(f"第{try_index}/{total_try_count}次尝试上传{local_filepath}失败,等待一会后重试")
                    if try_index < total_try_count:
                        count_down("上传到网盘", 5 * try_index)

    else:
        logger.error("蓝奏云登录失败")

    # ---------------Push the version to github
    # After packaging is done, add a git tag
    os.chdir(dir_src)
    show_head_line("开始推送到github", color("bold_yellow"))
    push_github(version)

    # ---------------Done
    logger.info("+" * 40)
    logger.info(color("bold_yellow") + f"{version} 发布完成,共用时{datetime.now() - run_start_time},请检查上传至蓝奏云流程是否OK")
    logger.info("+" * 40)

    os.system("PAUSE")
Example #12
    def download_file(self,
                      fileinfo: FileInFolder,
                      download_dir: str,
                      overwrite=True,
                      show_log=True,
                      download_only_if_server_version_is_newer=True) -> str:
        """
        下载最新版本压缩包到指定目录,并返回最终压缩包的完整路径
        """
        make_sure_dir_exists(download_dir)

        download_dir = os.path.realpath(download_dir)
        target_path = StrWrapper(os.path.join(download_dir, fileinfo.name))

        if download_only_if_server_version_is_newer and os.path.isfile(
                target_path.value):
            # Only re-download when the server version is newer than the existing local file
            # Lanzouyun displays upload times imprecisely, so shift the server time back by one minute. Otherwise, on the next check the same file's Lanzouyun time may show as "xx minutes ago"; parsing that can be off by up to one minute and would trigger an unnecessary re-download
            # For example, if this check-and-update runs at minute x second y and the next check runs at minute x+6 second y+10, parsing the Lanzouyun time then yields an upload time of minute x second y+10, causing an extra unnecessary download
            server_version_upload_time = parse_time(
                fileinfo.time) - timedelta(minutes=1)
            local_version_last_modify_time = parse_timestamp(
                os.stat(target_path.value).st_mtime)

            get_log_func(
                logger.info, show_log
            )(f"{fileinfo.name} 本地修改时间为:{local_version_last_modify_time} 网盘版本上传时间为:{server_version_upload_time}"
              )

            if server_version_upload_time <= local_version_last_modify_time:
                # No newer version for now, no need to retry
                get_log_func(
                    logger.info, show_log
                )(color("bold_cyan") +
                  f"当前设置了对比修改时间参数,网盘中最新版本 {fileinfo.name} 上传于{server_version_upload_time}左右,在当前版本{local_version_last_modify_time}之前,无需重新下载"
                  )
                return target_path.value

        def after_downloaded(file_name):
            """下载完成后的回调函数"""
            target_path.value = file_name
            get_log_func(logger.info, show_log)(f"最终下载文件路径为 {file_name}")

        get_log_func(logger.info, show_log)(f"即将开始下载 {target_path.value}")
        callback = None
        if show_log:
            callback = self.show_progress
        retCode = self.down_file_by_url(fileinfo.url,
                                        "",
                                        download_dir,
                                        callback=callback,
                                        downloaded_handler=after_downloaded,
                                        overwrite=overwrite)
        if retCode != LanZouCloud.SUCCESS:
            get_log_func(logger.error, show_log)(f"下载失败,retCode={retCode}")
            if retCode == LanZouCloud.NETWORK_ERROR:
                get_log_func(
                    logger.warning, show_log
                )(color("bold_yellow") +
                  ("蓝奏云api返回网络错误,这很可能是由于dns的问题导致的\n"
                   "分别尝试在浏览器中访问下列两个网页,是否一个打的开一个打不开?\n"
                   "https://fzls.lanzoux.com/s/djc-helper\n"
                   "https://fzls.lanzous.com/s/djc-helper\n"
                   "\n"
                   "如果是这样,请按照下面这个链接,修改本机的dns,使用阿里、腾讯、百度、谷歌dns中的任意一个应该都可以解决。\n"
                   "https://www.ypojie.com/9830.html\n"
                   "\n"
                   "如果两个都打不开,大概率是蓝奏云挂了-。-可选择忽略后面的弹框,继续运行旧版本,或者手动去QQ群或github下载最新版本"
                   ))
            raise Exception("下载失败")

        return target_path.value
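
StrWrapper and get_log_func are project helpers not shown here; minimal sketches consistent with their usage above (a mutable string holder for the download callback, and a switch between a real logger call and a no-op) might look like this:

class StrWrapper:
    # Mutable holder so the downloaded_handler callback can update the final path
    def __init__(self, value: str):
        self.value = value


def get_log_func(log_func, show_log: bool):
    # Return the real logging function when show_log is True, otherwise a no-op
    if show_log:
        return log_func
    return lambda *args, **kwargs: None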
Example #13
def build(disable_douban=False, enable_proxy=False, use_upx=True):
    # Initialize the relevant path variables
    venv_path = ".venv"
    pyinstaller_path = os.path.join(venv_path, "Scripts", "pyinstaller")

    # Initialize the venv and its dependencies
    init_venv_and_requirements(".venv", "requirements.txt", disable_douban,
                               enable_proxy)

    show_head_line(f"将使用.venv环境进行编译", color("bold_yellow"))

    temp_remove_file_dir = os.path.join(".cached", "build_temp_remove_files")
    site_packages_path = os.path.join(venv_path, "Lib", "site-packages")
    dep_files_to_remove_during_build = {
        "PyQt5/Qt5": [
            "Translations",
        ],
        "PyQt5/Qt5/bin": [
            "opengl32sw.dll",
            "libEGL.dll",
            "libGLESV2.dll",
            "Qt5Svg.dll",
            "Qt5Network.dll",
            "Qt5Qml.dll",
            "Qt5QmlModels.dll",
            "Qt5Quick.dll",
            "Qt5WebSockets.dll",
            "d3dcompiler_47.dll",
        ],
        "PyQt5/Qt5/plugins": [
            "iconengines/qsvgicon.dll",
            "imageformats/qsvg.dll",
            "imageformats/qwebp.dll",
            "platforms/qwebgl.dll",
        ]
    }
    logger.info(
        color("bold_green") +
        f"开始编译前先尝试移动这些确定用不到的库文件到临时目录 {temp_remove_file_dir},从而尽可能减少最终编译的大小")
    for parent_directory, file_or_directory_name_list in dep_files_to_remove_during_build.items(
    ):
        for file_or_directory_name in file_or_directory_name_list:
            path = os.path.join(site_packages_path, parent_directory,
                                file_or_directory_name)
            backup_path = os.path.join(temp_remove_file_dir, parent_directory,
                                       file_or_directory_name)

            if not os.path.exists(path):
                logger.warning(f"\t{path} 不存在,将跳过")
                continue

            # Move the file to the backup directory
            logger.info(f"\t开始移动 {path}")
            make_sure_dir_exists(os.path.dirname(backup_path))
            shutil.move(path, backup_path)

    # The actual build process
    build_configs = [
        ("main.py", "DNF蚊子腿小助手.exe", "utils/icons/DNF蚊子腿小助手.ico", ".",
         ["PyQt5"], []),
        ("auto_updater.py", "auto_updater.exe", "", "utils", ["PyQt5"], []),
        ("ark_lottery_special_version.py", "DNF蚊子腿小助手_集卡特别版.exe",
         "utils/icons/ark_lottery_special_version.ico", ".", ["PyQt5"], []),
        ("config_ui.py", "DNF蚊子腿小助手配置工具.exe", "utils/icons/config_ui.ico", ".",
         [], ["--noconsole"]),
    ]

    for idx, config in enumerate(build_configs):
        prefix = f"{idx + 1}/{len(build_configs)}"

        src_path, exe_name, icon_path, target_dir, exclude_modules, extra_args = config
        logger.info(color("bold_yellow") + f"{prefix} 开始编译 {exe_name}")

        cmd_build = [
            pyinstaller_path,
            '--name',
            exe_name,
            '-F',
            src_path,
        ]
        if icon_path != "":
            cmd_build.extend(['--icon', icon_path])
        for module in exclude_modules:
            cmd_build.extend(['--exclude-module', module])
        if use_upx:
            cmd_build.extend(['--upx-dir', "utils"])
        cmd_build.extend(extra_args)

        logger.info(f"{prefix} 开始编译 {exe_name},命令为:{' '.join(cmd_build)}")
        subprocess.call(cmd_build)

        logger.info(f"编译结束,进行善后操作")

        # Copy the binary
        logger.info(f"复制{exe_name}到目标目录{target_dir}")
        if not os.path.isdir(target_dir):
            os.mkdir(target_dir)
        target_path = os.path.join(target_dir, exe_name)
        shutil.copyfile(os.path.join("dist", exe_name), target_path)

        # Delete temporary files
        logger.info("删除临时文件")
        for directory in ["build", "dist", "__pycache__"]:
            shutil.rmtree(directory, ignore_errors=True)
        os.remove(f"{exe_name}.spec")

        filesize = os.path.getsize(target_path)
        logger.info(
            color("bold_green") +
            f"{prefix} 编译{exe_name}结束,最终大小为{human_readable_size(filesize)}")

    logger.info(color("bold_green") + f"编译完毕将库文件移动回来 - {site_packages_path}")
    for parent_directory, file_or_directory_name_list in dep_files_to_remove_during_build.items(
    ):
        for file_or_directory_name in file_or_directory_name_list:
            path = os.path.join(site_packages_path, parent_directory,
                                file_or_directory_name)
            backup_path = os.path.join(temp_remove_file_dir, parent_directory,
                                       file_or_directory_name)

            if not os.path.exists(backup_path):
                logger.warning(f"\t备份文件 {backup_path} 不存在,将跳过")
                continue

            # Move the backed-up file/directory back to its original location
            logger.info(f"开始还原备份文件/目录 {backup_path}")
            make_sure_dir_exists(os.path.dirname(path))
            shutil.move(backup_path, path)

    logger.info("done")