Example #1
def scribe_ip_in_webdriver(url,
                           selector,
                           proxies_ip_list,
                           ok_tag,
                           next_pagination=None,
                           filter=None,
                           page_size=10):
    while True:
        proxy = random.choice(proxies_ip_list)
        logger.debug(url + '         ' + str(proxy))
        driver = utils.ready(proxy)
        try:
            driver.get(url)
            break
        except Exception as e:
            driver.quit()
            logger.debug(e)

    times = 0
    ip_list, fanqiang_ip_list = [], []
    first_server = ''
    while times <= page_size:
        tags = utils.wait_and_get_elements_until_ok(driver, ok_tag)
        if not tags:
            logger.warning('Not found: ' + ok_tag)
            driver.quit()
            return scribe_ip_in_webdriver(url, selector, proxies_ip_list,
                                          ok_tag, next_pagination, filter,
                                          page_size)
        soup = BeautifulSoup(driver.page_source, 'lxml')
        temp_first_server = soup.select(selector['trs'])[0]
        if temp_first_server.text == first_server:  # If the first server is unchanged after paging, we have reached the last page; stop looping.
            break
        else:
            first_server = temp_first_server.text  # store the text so the comparison above works on the next page
        new_ip_list, new_fanqiang_ip_list = scribe_ip_with_soup(soup,
                                                                selector,
                                                                filter=filter)
        ip_list.extend(new_ip_list)
        fanqiang_ip_list.extend(new_fanqiang_ip_list)
        if next_pagination:
            try:
                driver.find_element_by_css_selector(next_pagination).click()
                times = times + 1
            except exceptions.NoSuchElementException as e:
                logger.debug('NoSuchElementException:' + str(e))
                break
        else:
            break
    driver.quit()
    return ip_list, fanqiang_ip_list
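
A minimal sketch of how this scraper might be invoked. The URL, CSS selectors, and proxy list below are illustrative assumptions, not values from the original project; the selector keys mirror the ones the function reads ('trs', 'ip', 'port', 'proxy_type').

# Hypothetical invocation -- URL, selectors, and proxies are placeholders.
selector = {
    'trs': 'table tbody tr',            # rows of the proxy table
    'ip': 'td:nth-of-type(1)',          # cell holding the IP address
    'port': 'td:nth-of-type(2)',        # cell holding the port
    'proxy_type': 'td:nth-of-type(3)',  # cell holding http/https/socks
}
proxies = ['1.2.3.4:8080', '5.6.7.8:3128']
ip_list, fanqiang_ip_list = scribe_ip_in_webdriver('https://example.com/proxy-list',
                                                   selector,
                                                   proxies,
                                                   ok_tag='table',
                                                   next_pagination='a.next')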
Example #2
def blurSurf(surface, amt) -> pygame.Surface:
    """

    Blur the given surface by the given 'amount'.  Only values 1 and greater
    are valid.  Value 1 = no blur.
    """

    if amt < 1.0:
        amt = 1.0
        logger.warning("BLUR AMOUNT MUST BE GREATER THAN 1.0")

    scale = 1.0 / float(amt)
    surf_size = surface.get_size()
    scale_size = (int(surf_size[0] * scale), int(surf_size[1] * scale))
    surf = pygame.transform.smoothscale(surface, scale_size)
    surf = pygame.transform.smoothscale(surf, surf_size)
    return surf
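
A short usage sketch, assuming an initialized pygame display; the file name is a placeholder.

import pygame

pygame.init()
screen = pygame.display.set_mode((640, 480))
image = pygame.image.load("background.png").convert()  # placeholder asset
blurred = blurSurf(image, 8)  # larger amount = stronger blur; 1 = no blur
screen.blit(blurred, (0, 0))
pygame.display.flip()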
Example #3
def checkin_queue(context):
    job = context.job
    username, password, region, chat = (
        job.context.get("username"),
        job.context.get("password"),
        job.context.get("region"),
        job.context.get("chat"),
    )
    s = requests.Session()
    s.headers.update(
        {
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36"
        }
    )
    retry_count = 5
    message = context.bot.send_message(chat, f"Job: Running for {username}")
    for i in range(retry_count):
        result = login(s, username, password)
        if result:
            append_text = f"Login: {username} Successful!"
            logger.info(append_text)
            message = message.edit_text(f"{message.text}\n{append_text}")
            break
        else:
            append_text = f"Login: {username} Fail {i}"
            logger.warning(append_text)
            message = message.edit_text(f"{message.text}\n{append_text}")
    for i in range(retry_count):
        result = checkin(s, username, region)
        if result:
            append_text = f"Checkin: {username} Successful!"
            logger.info(append_text)
            message = message.edit_text(f"{message.text}\n{append_text}")
            break
        else:
            append_text = f"Checkin: {username} Fail {i}"
            logger.warning(append_text)
            message = message.edit_text(f"{message.text}\n{append_text}")
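
One way such a job could be scheduled, assuming a python-telegram-bot v12/13-style JobQueue (e.g. updater.job_queue); the time, credentials, and chat id below are placeholders.

import datetime

# Run the check-in callback once a day at 08:00 (placeholder values).
job_queue.run_daily(
    checkin_queue,
    time=datetime.time(hour=8, minute=0),
    context={
        "username": "alice",
        "password": "secret",
        "region": "example-region",
        "chat": 123456789,
    },
)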
Example #4
def checkin_queue(context):
    job = context.job
    username, password, region, chat = (
        job.context.get("username"),
        job.context.get("password"),
        job.context.get("region"),
        job.context.get("chat"),
    )
    s = requests.Session()
    s.headers.update({
        "User-Agent":
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36"
    })
    retry_count = 5
    for i in range(retry_count):
        try:
            if login(s, username, password):
                logger.info(f"Login: {username} Success!")
                break
        except Exception:
            # Swallow transient errors (network issues, parse failures) and retry.
            continue
        logger.warning(f"Login: {username} Fail {i}")
    for i in range(retry_count):
        try:
            if checkin(s, username, region):
                logger.info(f"Checkin: {username} Success!")
                context.bot.send_message(chat,
                                         f"Job: {username} succeeded!",
                                         disable_notification=True)
                return
        except Exception:
            # Swallow transient errors and retry.
            continue
        logger.warning(f"Checkin: {username} Fail {i}")
    context.bot.send_message(chat, f"Job: {username} failed! It will be retried within the next hour.")
    context.job_queue.run_once(
        checkin_queue,
        SystemRandom().randint(1800, 3600),
        context={
            "username": username,
            "password": password,
            "region": region,
            "chat": chat,
        },
    )
    logger.warning(f"Job: {username} fail -> run in next hour")
Example #5
def scribe_ip_with_soup(soup,
                        selector,
                        filter=None,
                        ip_list=None,
                        fanqiang_ip_list=None,
                        type_get='request'):
    # Avoid mutable default arguments: create fresh lists on each call.
    if ip_list is None:
        ip_list = []
    if fanqiang_ip_list is None:
        fanqiang_ip_list = []
    has_socks4 = False
    found_any = False
    trs = soup.select(selector['trs'])
    if filter:
        trs = [tr for tr in trs if filter(tr)]
    threads = []
    for tr in trs:
        try:
            if 'proxy_type' in selector:
                if selector['proxy_type'] in [
                        'http', 'Http', 'https', 'Https', 'socks4', 'Socks4',
                        'socks5', 'Socks5'
                ]:
                    proxy_type = selector['proxy_type'].lower()
                else:
                    if len(tr.select(selector['proxy_type'])) == 0:
                        logger.debug('proxy_type not found')
                        continue
                    logger.debug('Found proxy_type: ' + tr.select(
                        selector['proxy_type'])[0].text.strip().lower())
                    proxy_type = re.search(
                        r'(http|https|socks4|socks5|sock4|sock5)',
                        tr.select(selector['proxy_type'])
                        [0].text.strip().lower()).group()
                    if proxy_type == 'sock5':
                        proxy_type = 'socks5'
                    if proxy_type == 'sock4':
                        proxy_type = 'socks4'
            else:
                logger.warning('Could not determine proxy_type from selector: ' + str(selector))
                continue
            if proxy_type == 'socks4':
                logger.debug('socks4 - skipping')
                has_socks4 = True
                continue

            if 'ip' in selector and 'port' in selector:
                if len(tr.select(selector['ip'])) == 0 or len(
                        tr.select(selector['port'])) == 0:
                    logger.debug('ip or port not found')
                    continue
                logger.debug('Found ip: ' +
                             tr.select(selector['ip'])[0].text.strip())
                logger.debug('Found port: ' +
                             tr.select(selector['port'])[0].text.strip())
                ip = re.search(r'((?:\d+?\.){3}\d{1,3})',
                               tr.select(
                                   selector['ip'])[0].text.strip()).group()
                port = re.search(
                    r'((?!\.)\d+(?!\.))',
                    tr.select(selector['port'])[0].text.strip()).group()
                ip_with_port = ip + ':' + port
            elif 'ip_with_port' in selector:
                if len(tr.select(selector['ip_with_port'])) == 0:
                    logger.debug('ip_with_port not found')
                    continue
                ip_with_port_str = tr.select(
                    selector['ip_with_port'])[0].text.strip()
                ip = re.search(r'((?:\d+?\.){3}\d{1,3})',
                               ip_with_port_str).group()
                port = re.search(r'((?!\.)\d{4,5}(?!\.))',
                                 ip_with_port_str).group()
                ip_with_port = ip + ':' + port
                logger.debug('Found ip_with_port: ' + ip_with_port)
            else:
                logger.warning('Could not determine ip and port from selector: ' + str(selector))
                continue
        except AttributeError as e:
            logger.debug(e)
            continue
        except Exception as e:
            logger.warning(traceback.format_exc())
            continue

        if type_get != 'request':  # Validate with the webdriver; do not multithread -- opening too many browsers tends to freeze the machine.
            get_useful_ip(ip_with_port,
                          proxy_type,
                          ip_list,
                          fanqiang_ip_list,
                          type_get=type_get)
            found_any = True
            continue
        # Start a thread that validates the proxy and collects working entries.
        thread = threading.Thread(target=get_useful_ip,
                                  args=(ip_with_port, proxy_type, ip_list,
                                        fanqiang_ip_list),
                                  kwargs={'type_get': type_get})
        thread.start()
        threads.append(thread)
        found_any = True
    if not found_any and not has_socks4:
        logger.warning('No ip:port entries found for selector ' + str(selector))
    # Wait for all validation threads so the shared lists are complete before returning.
    for thread in threads:
        thread.join()
    logger.info('ip_list:' + str(ip_list))
    logger.info('fanqiang_ip_list:' + str(fanqiang_ip_list))
    return ip_list, fanqiang_ip_list
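
get_useful_ip is referenced here but not shown on this page. Below is a hypothetical sketch of what such a validator could look like, assuming it probes the proxy with requests and appends working entries to the shared lists; the test URLs, timeout, and overall behavior are assumptions, not the original implementation.

import requests

def get_useful_ip(ip_with_port, proxy_type, ip_list, fanqiang_ip_list,
                  type_get='request'):
    # Hypothetical validator: record the proxy if a probe request succeeds.
    # type_get is accepted only for interface parity with the callers above.
    proxies = {'http': proxy_type + '://' + ip_with_port,
               'https': proxy_type + '://' + ip_with_port}
    try:
        requests.get('http://httpbin.org/ip', proxies=proxies, timeout=10)
        ip_list.append(proxy_type + '://' + ip_with_port)
    except requests.RequestException:
        return
    try:
        # A proxy that also reaches an otherwise blocked site is kept separately.
        requests.get('https://www.google.com', proxies=proxies, timeout=10)
        fanqiang_ip_list.append(proxy_type + '://' + ip_with_port)
    except requests.RequestException:
        pass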
Example #6
    post = s.post(ARRSH_URL, data=data)
    logger.info("Checkin: Checkin...")
    soup = BeautifulSoup(post.content, "lxml")
    return (True if "success" in str(
        soup.find("div", attrs={"class": "form-group"})) else False)


if __name__ == "__main__":
    USERNAME = sys.argv[1] if len(sys.argv) >= 2 else input("Input username:")
    PASSWORD = sys.argv[2] if len(sys.argv) >= 3 else input("Input password:")
    retry_count = 5
    for i in range(retry_count):
        result = login()
        if result:
            logger.info("Login: Successful!")
            break
        else:
            logger.warning(f"Login: {i} Try")
    for i in range(retry_count):
        result = checkin()
        if result:
            logger.info("Checkin: Successful!")
            break
        else:
            logger.warning(f"Checkin: {i} Try")
Example #7
async def user(username, password, region, retry_count=5):
    async def login(client, username, password):
        def detect(Img):
            logger.info(f"Detect: Detecting...")
            i = transform(Img).unsqueeze(0)
            predict_label1, predict_label2 = model(i)
            predict_label = LabeltoStr(
                [
                    np.argmax(predict_label1.data.numpy()[0]),
                    np.argmax(predict_label2.data.numpy()[0]),
                ]
            )
            logger.info(f"Detect: Result {predict_label}")
            return predict_label

        home = await client.get(DK_URL)
        soup = BeautifulSoup(home.content, "lxml")
        captcha = await client.get(CAPTCHA_URL)
        valid_code = detect(Image.open(captcha))
        execution = soup.find("input", attrs={"type": "hidden", "name": "execution"})
        data = {
            "username": username,
            "password": password,
            "validateCode": valid_code,
            "execution": execution.get("value"),
            "_eventId": "submit",
            "geolocation": "",
        }
        post = await client.post(home.url, data=data)
        logger.info(f"Login: {username} Login...")
        return post.url == DK_URL

    async def checkin(client, username, region):
        data = {
            "xgh": username,
            "lon": "",
            "lat": "",
            "region": region,
            "rylx": 4,
            "status": 0,
            "sflx": 1,
        }
        await client.post(CHECKIN_URL, data=data)
        logger.info(f"Checkin: {username} Checkin...")
        home = await client.get(DK_URL)
        soup = BeautifulSoup(home.content, "lxml")
        return "success" in str(
            soup.find("div", attrs={"class": "form-group"}))

    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36"
    }
    async with httpx.AsyncClient(headers=headers, timeout=None, verify=False) as client:
        for i in range(retry_count):
            result = await login(client, username, password)
            if result:
                logger.info("Login: Successful!")
                break
            else:
                logger.warning(f"Login: {i} Try")
        for i in range(retry_count):
            result = await checkin(client, username, region)
            if result:
                logger.info("Checkin: Successful!")
                break
            else:
                logger.warning(f"Checkin: {i} Try")
Example #8
def error(update, context):
    # Log any error raised while handling an update.
    logger.warning(f"Update {update} caused error {context.error}")