Example #1
    def __init__(self, djc_helper, lr: LoginResult):
        """
        :type djc_helper: DjcHelper
        :type lr: LoginResult
        """
        # Even if no DNF role is bound, let it through so the share reward can still be claimed
        roleinfo: Optional[RoleInfo] = None
        try:
            if "dnf" in djc_helper.bizcode_2_bind_role_map:
                roleinfo = djc_helper.bizcode_2_bind_role_map["dnf"].sRoleInfo
        except Exception:
            pass
        self.roleinfo: Optional[RoleInfo] = roleinfo

        self.djc_helper = djc_helper
        self.lr = lr

        self.cfg: AccountConfig = djc_helper.cfg
        self.common_cfg: CommonConfig = djc_helper.common_cfg
        self.zzconfig: ArkLotteryZzConfig = djc_helper.zzconfig

        self.g_tk = getACSRFTokenForAMS(lr.p_skey)
        self.urls = Urls()
        # Use the Qzone (QQ空间) login state for the card lottery activity
        self.headers = {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "User-Agent":
            "Mozilla/5.0 (Linux; Android 9; MIX 2 Build/PKQ1.190118.001; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/77.0.3865.120 MQQBrowser/6.2 TBS/045332 Mobile Safari/537.36 V1_AND_SQ_8.4.8_1492_YYB_D QQ/8.4.8.4810 NetType/WIFI WebP/0.3.0 Pixel/1080 StatusBarHeight/76 SimpleUISwitch/0 QQTheme/1000 InMagicWin/0",
            "Cookie": f"p_uin={self.lr.uin}; p_skey={self.lr.p_skey}; ",
            "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
        }
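getACSRFTokenForAMS is a project helper not shown here. As a hedged reference, the Qzone ACSRF token (g_tk) is commonly derived from p_skey with a djb-style rolling hash; a minimal sketch of that scheme, which the helper above likely resembles:

def get_acsrf_token_sketch(p_skey: str) -> int:
    # Common Qzone g_tk scheme (assumption: the project's helper follows it):
    # a djb-style rolling hash of p_skey, truncated to 31 bits.
    token = 5381
    for ch in p_skey:
        token += (token << 5) + ord(ch)
    return token & 0x7FFFFFFF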
Example #2
def show_accounts_status(cfg, ctx):
    logger.info("")
    _show_head_line("部分活动信息")
    logger.warning("如果一直卡在这一步,请在小助手目录下创建一个空文件:不查询活动.txt")
    Urls().show_current_valid_act_infos()

    logger.info("")
    _show_head_line("付费相关信息")
    user_buy_info = get_user_buy_info(cfg)
    show_buy_info(user_buy_info)

    if not has_any_account_in_normal_run(cfg):
        return
    _show_head_line(ctx)

    heads = ["序号", "账号名", "启用状态", "聚豆余额", "聚豆历史总数", "成就点", "心悦组队", "心悦G分", "编年史", "年史碎片", "守护者卡片", "马杰洛石头"]
    colSizes = [4, 12, 8, 8, 12, 6, 8, 8, 14, 8, 15, 10]

    logger.info(tableify(heads, colSizes))
    for _idx, account_config in enumerate(cfg.account_configs):
        idx = _idx + 1
        if not account_config.is_enabled():
            # Skip accounts that are not enabled
            continue

        djcHelper = DjcHelper(account_config, cfg.common)
        djcHelper.check_skey_expired()
        djcHelper.get_bind_role_list(print_warning=False)

        status = "启用" if account_config.is_enabled() else "未启用"

        djc_info = djcHelper.query_balance("查询聚豆概览", print_res=False)["data"]
        djc_allin, djc_balance = int(djc_info['allin']), int(djc_info['balance'])

        xinyue_info = djcHelper.query_xinyue_info("查询心悦成就点概览", print_res=False)
        teaminfo = djcHelper.query_xinyue_teaminfo(print_res=False)
        team_score = "无队伍"
        if teaminfo.id != "":
            team_score = f"{teaminfo.score}/20"
            fixed_team = djcHelper.get_fixed_team()
            if fixed_team is not None:
                team_score = f"[{fixed_team.id}]{team_score}"

        gpoints = djcHelper.query_gpoints()

        ui = djcHelper.query_dnf_helper_chronicle_info()
        levelInfo = f"LV{ui.level}({ui.currentExp}/{ui.levelExp})"
        chronicle_points = ui.point
        if ui.totalExp == 0:
            levelInfo = ""
            chronicle_points = ""

        majieluo_cards = djcHelper.query_majieluo_card_info()

        stone_count = djcHelper.query_stone_count()

        cols = [idx, account_config.name, status, djc_balance, djc_allin, xinyue_info.score, team_score, gpoints, levelInfo, chronicle_points, majieluo_cards, stone_count]
        logger.info(color("fg_bold_green") + tableify(cols, colSizes, need_truncate=True))
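tableify and colSizes come from the project's table helpers and are not shown here. A minimal sketch of what such a fixed-width row formatter might look like, assuming plain left-justified padding (the real helper likely also accounts for full-width CJK characters when truncating):

def tableify_sketch(cols, col_sizes, need_truncate=False):
    # Hypothetical stand-in for tableify(): render one table row by padding
    # (and optionally truncating) each cell to its configured width.
    cells = []
    for value, width in zip(cols, col_sizes):
        text = str(value)
        if need_truncate and len(text) > width:
            text = text[:width]
        cells.append(text.ljust(width))
    return " ".join(cells)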
Example #3
class Spider(object):
    def __init__(self):
        self.download = DefaultDownloader()
        self.urls = Urls()
        self.parse = Parser()
        self.filename = 'readnovel.json'
        self.save_file = SaveMethods()

    def crawl(self, url):
        self.urls.add_new_url(url)
        while self.urls.has_new_url() and self.urls.get_old_urls_size() < 100:
            try:
                new_url = self.urls.get_new_url()
                response = self.download.download(new_url, DEFAULT_USER_AGENT)
                data = self.parse.parse(response)
                pprint(data)
                self.save_file.save_to_json(self.filename, data)
            except Exception as e:
                print(f'[*] crawl exception {e}')
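A minimal way to drive the spider above; the seed URL is a placeholder, and crawling stops once 100 pages have been visited or no new URLs remain:

if __name__ == '__main__':
    spider = Spider()
    # Placeholder seed URL (assumption); parsed results are appended to readnovel.json.
    spider.crawl('https://www.example.com/')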
Example #4
    def test_load_dat_from_url(self):
        urls = Urls()
        # skip for now to save time
        return
        for i in range(5):
            url = urls.random['location']
            try:
                p = Profile(url)
            except:
                print(url)
                raise
Example #5
    def run(self, host: str = '127.0.0.1', port: int = 7000) -> None:
        # Create the listening socket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind((host, port))
        sock.listen()

        urls = Urls()
        try:
            print(f'Server is running on {host}:{port}\nPress Ctrl+C to quit')
            while True:
                client, address = sock.accept()

                # Receive and decode the request from the client
                req = client.recv(4096).decode('utf-8')
                request = HttpRequest(req, address[0])
                print(request)

                # Check url presence
                url_func = urls.check_url(request.url)
                if url_func == request.url:
                    render_url = request.url[len(Settings.static_url):]
                    response = render_static(render_url)

                elif url_func is None:
                    response = headers['404']

                else:
                    resp = url_func(request)
                    if resp.startswith(headers['redirect']):
                        response = resp
                    else:
                        response = headers['html'] + resp

                client.send(response.encode('utf-8'))
                client.close()
        except KeyboardInterrupt:
            sys.exit(0)
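The class that owns run() is not shown in this snippet. Assuming it has a no-argument constructor (Server is a hypothetical name), starting and probing it could look like this:

if __name__ == '__main__':
    # Hypothetical class name; only its run() method is shown above.
    server = Server()
    # In another terminal:  curl -v http://127.0.0.1:7000/
    server.run(host='127.0.0.1', port=7000)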
Example #6
def show_accounts_status(cfg, ctx):
    logger.info("")
    _show_head_line("部分活动信息")
    logger.warning("如果一直卡在这一步,请在小助手目录下创建一个空文件:不查询活动.txt")
    Urls().show_current_valid_act_infos()

    logger.info("")
    _show_head_line("付费相关信息")
    user_buy_info = get_user_buy_info(cfg)
    show_buy_info(user_buy_info)

    if not has_any_account_in_normal_run(cfg):
        return
    _show_head_line(ctx)

    # Fetch the per-account data
    rows = []
    if cfg.common.enable_multiprocessing:
        logger.warning(f"已开启多进程模式({cfg.get_pool_size()}),将开始并行拉取数据,请稍后")
        for row in get_pool().starmap(
                get_account_status,
            [(_idx + 1, account_config, cfg.common)
             for _idx, account_config in enumerate(cfg.account_configs)
             if account_config.is_enabled()]):
            rows.append(row)
    else:
        logger.warning("拉取数据中,请稍候")
        for _idx, account_config in enumerate(cfg.account_configs):
            idx = _idx + 1
            if not account_config.is_enabled():
                # Skip accounts that are not enabled
                continue

            rows.append(get_account_status(idx, account_config, cfg.common))

    # Print the results
    heads = [
        "序号", "账号名", "启用状态", "聚豆余额", "聚豆历史总数", "心悦类型", "成就点", "勇士币", "心悦组队",
        "赛利亚", "心悦G分", "编年史", "年史碎片"
    ]
    colSizes = [4, 12, 8, 8, 12, 8, 6, 6, 16, 12, 8, 14, 8]

    logger.info(tableify(heads, colSizes))
    for row in rows:
        logger.info(
            color("fg_bold_green") +
            tableify(row, colSizes, need_truncate=True))
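get_pool() is a project helper not shown here. A minimal sketch of a lazily created, shared process pool that would support the starmap call above (the function name matches the call site; the default size is an assumption):

import multiprocessing

_pool = None  # module-level cache so every caller shares one pool


def get_pool(pool_size: int = 4):
    # Hypothetical sketch: lazily create a shared pool so starmap can
    # fan the per-account queries out across processes.
    global _pool
    if _pool is None:
        _pool = multiprocessing.Pool(pool_size)
    return _pool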
Example #7
    def __init__(self):
        self.download = DefaultDownloader()
        self.urls = Urls()
        self.parse = Parser()
        self.filename = 'readnovel.json'
        self.save_file = SaveMethods()
Example #8
def makeUrls(suffix, key=0):
    return Urls(suffix, key)
Example #9
def show_activity_info():
    logger.info("")
    _show_head_line("部分活动信息")
    logger.warning("如果一直卡在这一步,请在小助手目录下创建一个空文件:不查询活动.txt")
    Urls().show_current_valid_act_infos()
Example #10
from config import Config
from urls import Urls
from updater import Updater
import time
import sys
from mysqlcli import MysqlCli
from rediscli import RedisCli
from dbhelper import DBHelper

if __name__ == "__main__":
    while True:
        config = Config('./updater-config.json')
        MysqlCli.initialize(config.mysql())
        RedisCli.initialize(config.redis())
        DBHelper.initialize()

        urls = Urls(config.urls())
        updater = Updater(urls)
        updater.start()
        updater.wait()
        updater.done()

        if config.one_shot() is True:
            sys.exit(0)

        time.sleep(config.interval())

        RedisCli.finalize()
        MysqlCli.finalize()
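Config is imported from config but not shown. A minimal sketch of the accessors this loop relies on, assuming updater-config.json simply mirrors them (the key names are assumptions):

import json


class ConfigSketch:
    # Hypothetical stand-in for Config: each accessor returns one
    # section or value of the JSON file passed to the constructor.
    def __init__(self, path: str):
        with open(path, encoding='utf-8') as f:
            self._data = json.load(f)

    def mysql(self):
        return self._data.get('mysql', {})

    def redis(self):
        return self._data.get('redis', {})

    def urls(self):
        return self._data.get('urls', [])

    def one_shot(self) -> bool:
        return bool(self._data.get('one_shot', False))

    def interval(self) -> int:
        return int(self._data.get('interval', 60))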
Example #11
    def urls(self):
        from urls import Urls
        return Urls()