Example #1
def download_page_index(url, **kargs):
    with open("cookies.pkl", "rb") as f:
        cookies = pickle.load(f)
    # kargs['university_id'] = '52ac2e99747aec013fcf4e6f'
    # kargs['year'] = 2019
    # kargs['wenli'] = 2
    # kargs['page_num'] = 3
    newHeaders = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Authorization': '4063523 fadinKtTMMPz/uDnv27CnTgDMcoFK9i8+pZKlqlmf8IXYXnNuD7cBlB9G3oJIOXk',
        'Channel': 'www.wmzy.com pc',
        'Connection': 'keep-alive',
        'Content-Length': '221',  # requests recomputes this from the actual request body
        'Content-Type': 'application/json',
        'Host': 'www.wmzy.com',
        'Origin': 'https://www.wmzy.com',
        'Referer': 'https://www.wmzy.com/web/school?type=2&sch_id=' + kargs['sch_id'],
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-origin',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36',
        'x-requested-with': 'XMLHttpRequest'
    }
    cookie_jar = RequestsCookieJar()
    # "batch" selects the first vs. second admission batch; "diploma_id" selects undergraduate vs. junior college
    payload = {
        "sch_id": kargs['sch_id'],
        "stu_province_id": "130000000000",
        "enroll_unit_id": kargs['sch_id'],
        "enroll_adm_type": 2
    }
    for c in cookies:
        cookie_jar.set(c['name'], c['value'], domain="wmzy.com")
    page = requests.post(url,
                         cookies=cookie_jar,
                         headers=newHeaders,
                         json=payload)
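    # The response body is JSON; BeautifulSoup only yields the raw text that json.loads then parses (page.json() should be equivalent)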
    soup = BeautifulSoup(page.text, 'html.parser', from_encoding='utf-8')
    site_json = json.loads(soup.text)
    result = site_json['data']['drop_box']
    print('Progress:', kargs['page_num'])
    return result
Example #2
def session_request(self,
                    method,
                    url,
                    params=None,
                    data=None,
                    headers=None,
                    cookies=None,
                    files=None,
                    auth=None,
                    timeout=conf.timeout if 'timeout' in conf else None,
                    allow_redirects=True,
                    proxies=None,
                    hooks=None,
                    stream=None,
                    verify=False,
                    cert=None,
                    json=None):
    # Create the Request.
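    # Combine the session's cookies with the request-level cookies (or the configured default) into one jar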
    merged_cookies = merge_cookies(
        merge_cookies(RequestsCookieJar(), self.cookies), cookies
        or (conf.cookie if 'cookie' in conf else None))

    req = Request(
        method=method.upper(),
        url=url,
        headers=merge_setting(
            headers, conf.http_headers if 'http_headers' in conf else {}),
        files=files,
        data=data or {},
        json=json,
        params=params or {},
        auth=auth,
        cookies=merged_cookies,
        hooks=hooks,
    )
    prep = self.prepare_request(req)

    proxies = proxies or (conf.proxies if 'proxies' in conf else {})

    settings = self.merge_environment_settings(prep.url, proxies, stream,
                                               verify, cert)

    # Send the request.
    send_kwargs = {
        'timeout': float(timeout) if timeout is not None else None,
        'allow_redirects': allow_redirects,
    }
    send_kwargs.update(settings)
    resp = self.send(prep, **send_kwargs)

    if resp.encoding == 'ISO-8859-1':
        encodings = get_encodings_from_content(resp.text)
        if encodings:
            encoding = encodings[0]
        else:
            encoding = resp.apparent_encoding

        resp.encoding = encoding

    return resp
Example #3
 def config_requests(self):
     # Fetch the cookies from the Chrome webdriver and save them for requests
     cookies = driver.get_cookies()
     # print("cookies from se before the update:\n", se.cookies)
     with open(C_Path + "\\Pixiv" + r'\cookies.json', 'w') as fp:
         json.dump(cookies, fp)
     # try:
     #     with open(C_Path +"\\Pixiv" + r'\cookies.json', 'w') as fp:
     #         json.dump(cookies, fp)
     # except:
     #     print("Failed to store the Chrome cookies!\n")
     # Rebuild the saved cookies as a RequestsCookieJar
     jar = RequestsCookieJar()
     with open(C_Path + "\\Pixiv" + r'\cookies.json', 'r') as fp:
         cookies = json.load(fp)
         for cookie in cookies:
             jar.set(cookie['name'], cookie['value'])
     # try:
     #     jar = RequestsCookieJar()
     #     with open(C_Path +"\\Pixiv" + r'\cookies.json', 'r') as fp:
     #         cookies = json.load(fp)
     #         for cookie in cookies:
     #             jar.set(cookie['name'], cookie['value'])
     # except:
     #     print("Failed to read the cookies!\n")
     se.cookies = jar  # attach the jar to the session
Example #4
def get_data(cookie, offset):
    """
    递归获取数据
    :param cookie: cookie信息,需要拼接url,uuid=cookie
    :param offset: offset值
    """
    # 真正获取数据的接口
    url = "https://apimobile.meituan.com/group/v4/poi/pcsearch/10?uuid=%s&userid=-1&limit=32&offset=%s&cateId=-1&q=塘桥" % (
        cookie, offset)
    print("真正获取数据的url:%s" % url)
    cookie_jar = RequestsCookieJar()
    cookie_jar.set("uuid", cookie)
    # Attach the cookie to the request
    res2 = s.get(url, headers=headers, cookies=cookie_jar)
    # Parse the response body into a dict
    dicts = json.loads(res2.text)
    # searchResult holds the records actually shown; use it to decide whether to keep paging
    search_result = dicts["data"]["searchResult"]
    if search_result:
        with open("result.json", mode="a") as f:
            # Append the raw response to the output file
            f.write(res2.text)
            f.write("\n")
            # Sleep 3-12 seconds at random to mimic normal user behaviour
            time.sleep(random.randint(3, 12))
            # Recurse with the offset increased by 32
            get_data(cookie, offset + 32)
    else:
        print("Finished fetching data...")
Example #5
    def _get_zabbix_graph(self, item_id, zabbix_host, zabbix_user, zabbix_passwd, data_dir):
        """

        :param item_id: zabbix item id
        :param zabbix_host: zabbix ip addr
        :param zabbix_user: zabbix admin username
        :param zabbix_passwd: zabbix admin passwd
        :param data_dir: zabbix graph storage directory
        :return: local absolute zabbix graph path name
        """
        # Create a requests session
        session = requests.Session()

        # Headers for the login request
        loginheaders = {
            "Host": zabbix_host,
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36',
            'Referer': 'http://{}/zabbix/index.php'.format(zabbix_host)
        }

        # Login form payload
        payload = {
            "name": zabbix_user,
            "password": zabbix_passwd,
            "autologin": 1,
            "enter": "Sign in",
        }

        try:
            # Log in with the session
            login_ret = session.post(url='http://{}/zabbix/index.php'.format(zabbix_host),
                                     headers=loginheaders,
                                     data=payload)
            # Grab the cookies returned by the login
            cookies = login_ret.cookies

            # Initialize a jar and copy the login cookies into it, scoped to the /zabbix path
            jar = RequestsCookieJar()
            for item in cookies.iteritems():
                jar.set(item[0], item[1], domain='{}'.format(zabbix_host), path='/zabbix')

            # Fetch the rendered graph image with the authenticated cookies
            graph_response = requests.get('http://{}/zabbix/chart.php?period=7200&width=600&time=600&itemids={}'.format(zabbix_host, item_id), cookies=jar)

            # Build the local file name and path for the image
            local_time_str = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
            graph_name = 'zabbix_' + local_time_str + '.png'

            graph_path = os.path.join(data_dir, graph_name)

            # Save the image in binary mode using the absolute path
            with open(graph_path, 'wb') as f:
                f.write(graph_response.content)

            # Return the path of the saved image
            return graph_path

        except Exception:
            raise Exception("get zabbix graph failed")
Example #6
 def __init__(self, url=None, pool=True):
     """
     初始化过期时间
     :param pool:
     """
     if url:
         self.URL = url  # 初始化cookie请求的url
     self.cookies = RequestsCookieJar()
     self.user_agent = UserAgent().random
     self.header = {
         "Host": "kns.cnki.net",
         "Connection": "keep-alive",
         "Cache-Control": "max-age=0",
         "Upgrade-Insecure-Requests": "1",
         "Accept":
         "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
         "Accept-Encoding": "gzip,deflate",
         "Accept-Language": "zh-CN,zh;q=0.9",
         "User-Agent": self.user_agent
     }
     self.proxymanager = ProxyManager(pool=False)
     self.timeout = 10
     self.pool = pool  # whether to use the cookie pool: True or False
     if self.pool:
         self.get_pool()
     else:
         self.set_cookie()
Example #7
def getCookiesFromTxt1():
    jar = RequestsCookieJar()
    with open("./templates/cookies/cookies.txt", "r") as fp:
        cookies = json.load(fp)
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])
    return jar
Example #8
    def where_cookies_in_domain(self, cookies):
        cj = RequestsCookieJar()
        for c in cookies:
            if c.domain in self.source.cookie_domains:
                cj.set(c.name, c.value, domain=c.domain, path=c.path)

        return cj
Example #9
def load_cookies():
    try:
        with open(Project.cookies, 'rb') as f:
            cookies = requests.utils.cookiejar_from_dict(pickle.load(f))
    except:
        cookies = RequestsCookieJar()
    return cookies
Example #10
def get_score(cookies):
    try:
        jar = RequestsCookieJar()
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])
        total = requests.get("https://pc-api.xuexi.cn/open/api/score/get", cookies=jar,
                             headers={'Cache-Control': 'no-cache'}).content.decode("utf8")
        total = int(json.loads(total)["data"]["score"])
        each1 = requests.get("https://pc-api.xuexi.cn/open/api/score/today/queryrate", cookies=jar,
                             headers={'Cache-Control': 'no-cache'}).content.decode(
            "utf8")
        each1 = json.loads(each1)["data"]["dayScoreDtos"]
        each1 = [int(i["currentScore"]) for i in each1 if i["ruleId"] in [1, 2, 9, 1002, 1003, 6, 5, 4]]
        each = [0, 0, 0, 0, 0, 0, 0, 0]
        each[0] = each1[0]
        each[1] = each1[1]
        each[2] = each1[5]
        each[3] = each1[6]
        each[4] = each1[7]
        each[5] = each1[4]
        each[6] = each1[3]
        each[7] = each1[2]
        return total, each
    except:
        print("=" * 120)
        print("get_video_links获取失败")
        print("=" * 120)
        raise
Example #11
def search_sagun(form_data):
    with requests.session() as s:
        # Re-use the cookies captured when download_captcha_img was called
        cookies = requests.cookies.merge_cookies(RequestsCookieJar(),
                                                 session['cookies'])
        s.cookies = cookies

        url = 'https://safind.scourt.go.kr/sf/servlet/SFSuperSvl'
        # response: requests.models.Response = session.post(url, data=form_data, cookies=session.cookies)

        response: requests.models.Response = s.post(url, data=form_data)

        if response.status_code == 200:
            bs = BeautifulSoup(response.text, "html.parser")
        else:
            # Todo: handle the failure case
            pass

        # Todo: preprocessing code

        # return bs
        # return BeautifulSoup
        print(response)
        return response.text
Example #12
    def prepare_request(self, request):
        cookies = request.cookies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies)

        # Set environment's basic authentication if not explicitly set.
        # auth = request.auth
        # if self.trust_env and not auth and not self.auth:
        #     auth = get_netrc_auth(request.url)

        p = PreparedRequest()
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers,
                                  self.headers,
                                  dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(request.auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p
Example #13
    def authorize(self):
        c = RequestsCookieJar()
        ui = util.guid()
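        # The generated 'ui' value is sent both as a cookie and in the form payload below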
        c.set('ui', ui, path='/', domain='graph.qq.com')
        self.session.cookies.update(c)

        cgi_url = 'https://graph.qq.com/oauth2.0/authorize'

        headers = {
            'User-Agent': util.get_user_agents(),
            'Referer': 'https://graph.qq.com/oauth2.0/show?which=Login&display=pc&response_type=code&client_id=100497308&redirect_uri=https%3A%2F%2Fy.qq.com%2Fportal%2Fwx_redirect.html%3Flogin_type%3D1%26surl%3Dhttps%253A%252F%252Fy.qq.com%252Fportal%252Fprofile.html%2523stat%253Dy_new.top.user_pic%2526stat%253Dy_new.top.pop.logout%26use_customer_cb%3D0&state=state&display=pc',
            'Content-Type': 'application/x-www-form-urlencoded',
        }

        payload = {
            'response_type': 'code',
            'client_id': '100497308',
            'redirect_uri': 'https://y.qq.com/portal/wx_redirect.html?login_type=1&surl=https%3A%2F%2Fy.qq.com%2F%23&use_customer_cb=0',
            'scope': '',
            'state': 'state',
            'switch': '',
            'from_ptlogin': '******',
            'src': '1',
            'update_auth': '1',
            'openapi': '80901010',
            'g_tk': util.get_g_tk(self.session.cookies.get('p_skey')),
            'auth_time': str(int(time.time())),
            'ui': ui
        }
        urlencode = parse.urlencode(payload)
        post = self.session.post(cgi_url, headers=headers, data=urlencode)
        return post
Example #14
    def test_is_authenticated_false_if_session_cookies_empty(self):
        user = User(email="",
                    password="",
                    username="",
                    session_cookies=RequestsCookieJar())

        self.assertFalse(AuthService().is_authenticated(user=user))
Example #15
def requests_utils(url: str, method: str, **kwargs):

    req_method = method.upper()

    if req_method not in __switcher.keys():
        return

    try:
        func = __switcher.get(req_method)
        # Build the cookie jar from cookie_list; the list is cleared below so stale cookies are not reused
        cookie_jar = RequestsCookieJar()
        if cookie_list:
            for item in cookie_list:
                cookie_jar.set(item['name'],
                               item['value'],
                               domain=item['domain'])
        cookie_list.clear()
        # Each request is independent, so the headers must be set every time
        with func(url,
                  headers=headers,
                  cookies=cookie_jar,
                  timeout=10,
                  **kwargs) as resp:
            __http_log(resp.request, resp)
    except requests.exceptions.RequestException as e:
        log_error(e)
        raise e
    except:
        raise
Example #16
    def __init__(self,
                 api_key: str,
                 batch_size: int = DEFAULT_BATCH_SIZE,
                 timeout: int = DEFAULT_TIMEOUT,
                 cookie_jar_file: Optional[str] = DEFAULT_COOKIE_FILE,
                 username: str = None,
                 password: str = None,
                 get_today: Callable[[], date] = date.today):
        self.__api_key = api_key
        self.__batch_size = batch_size if Verify.batch_size(
            batch_size) else DEFAULT_BATCH_SIZE
        self.__timeout = max(0, timeout) or DEFAULT_TIMEOUT
        self.__cookie_jar_file = cookie_jar_file

        self.__username = username
        self.__password = password
        self.__login_rejected = False
        self.__prevent_multiple_login_lock = RLock()

        self.__sessions = SessionFactory(unsafe=['ips_password'])
        self.__cookies = RequestsCookieJar()
        self.__with_cookie_jar('rb',
                               lambda f: self.__cookies.update(pickle.load(f)))
        # no error, since cookie file probably doesn't exist; we'll try to write it later and log any error then

        self.__get_today = get_today
Example #17
    def test_merge_cookies_is_called_identically_to_the_original_implementation(
        self,
        _,
        mock_ReqCookieJar,
        mock_cookiejar_from_dict,
        mock_merge_cookies,
        __,
        ___,
        ____,
    ):
        """Assert that :func:`requests.session.merge_cookies` is called exactly twice.

        Furthermore, assert the first call's result is passed to the second
        instance of the call, like so::

            session_cookies = merge_cookies(RequestsCookieJar(), self.cookies)
            merged_cookies = merge_cookies(session_cookies, cookies)

        """
        given_cookies = {'hello': 'there'}
        mock_merge_cookies.return_value = mock_merge_cookies
        mock_cookiejar_from_dict.return_value = given_cookies
        req = BitexRequest(cookies=given_cookies)
        req.method = "get"
        self.session.prepare_request(req)

        mock_merge_cookies.has_calls([
            mock.call(mock_merge_cookies, {'hello': 'there'}),
            mock.call(RequestsCookieJar(), self.session.cookies),
        ])
        assert mock_merge_cookies.call_count == 2
Example #18
def main():
    # Check username and password
    global USERNAME, PASSWORD
    if not USERNAME: USERNAME = input('请输入学号:')
    if not PASSWORD: PASSWORD = input('请输入密码:')

    req = requests.Session()
    cookie_jar = RequestsCookieJar()
    login_payload = {'username': USERNAME, 'password': PASSWORD}
    url = 'http://dj.cs.ustc.edu.cn/admin/index/login.html'

    # Open Login
    print('正在登录: %s' % url)
    r = req.post(url, data=login_payload, allow_redirects=False)
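    # Keep the cookies set by the login response; they are passed explicitly on every later request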
    cookie_jar.update(r.cookies)
    # print(cookie_jar.items())

    # Now set url to index.html
    url = 'http://dj.cs.ustc.edu.cn/admin/index/index.html'
    r = req.get(url, cookies=cookie_jar)

    # Now we have got the page. We should know what '待办事项' refers to
    dashboard_page = etree.HTML(r.text)
    iframe_link_path = dashboard_page.xpath(
        "//*[@id='draggable']/div[2]/div[1]/dl[1]/dd[2]/a/@data-param")
    assert (len(iframe_link_path) == 1)
    iframe_link = DOMAIN + iframe_link_path[0]

    todo_events = []
    r = req.get(iframe_link, cookies=cookie_jar)
    assert (r.status_code == 200)
    events_page = etree.HTML(r.text)
    events = events_page.xpath("//div[@class='bDiv']/table/tbody/tr")
    for i in range(len(events)):
        event_name = events_page.xpath(
            "//div[@class='bDiv']/table/tbody/tr[%d]/td[1]/text()" %
            (i + 1))[0]
        event_status = events_page.xpath(
            "//div[@class='bDiv']/table/tbody/tr[%d]/td[5]/text()" %
            (i + 1))[0].strip()
        event_link = events_page.xpath(
            "//div[@class='bDiv']/table/tbody/tr[%d]/td[6]/a/@href" %
            (i + 1))[0]
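        # '已办理' is the page's "already handled" status; anything else is treated as pending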
        if event_status != '已办理':
            event_status = '\033[1;31m未办理\033[0m'
            todo_events.append((event_name, event_link))
        print('%s\t%s' % (event_name, event_status))

    print('=========================')
    for event in todo_events:
        sys.stdout.write('正在办理 %s' % event[0])
        event_full_link = DOMAIN + event[1]
        r = req.get(event_full_link, cookies=cookie_jar)
        commit_page = etree.HTML(r.text)
        commit_path = commit_page.xpath("//div[@class='bot']/a[1]/@href")[0]
        commit_url = DOMAIN + commit_path
        r = req.get(commit_url, cookies=cookie_jar)
        print(r.status_code == 200 and '成功' or '失败')

    return 0
Example #19
 def cookies(self) -> RequestsCookieJar:
     jar = RequestsCookieJar()
     for name, cookie_dict in self['cookies'].items():
         cookie_dict = dict(cookie_dict)  # copy so pop() below does not mutate the stored mapping
         jar.set_cookie(
             create_cookie(name, cookie_dict.pop('value'), **cookie_dict))
     jar.clear_expired_cookies()
     return jar
Example #20
    def web_login(self):
        # Open the browser
        self.browser.get('https://lagou.com')
        time.sleep(5)

        # Log in
        loginbutton = self.browser.find_element_by_xpath('//*[@id="lg_tbar"]/div/div[2]/ul/li[3]/a')
        loginbutton.click()

        self.browser.find_element_by_xpath(
            '/html/body/div[2]/div[1]/div/div/div[2]/div[3]/div[1]/div/div[1]/form/div[1]/div/input').send_keys(
            self.username)
        self.browser.find_element_by_xpath(
            '/html/body/div[2]/div[1]/div/div/div[2]/div[3]/div[1]/div/div[1]/form/div[2]/div/input').send_keys(
            self.password)
        # time.sleep(3)
        # self.browser.find_element_by_xpath('/html/body/div[2]/div[1]/div/div/div[2]/div[3]/div[2]/div[2]/div[2]').click()

        # time.sleep(20)
        # Wait for the manual captcha verification
        input("Press Enter once the verification is complete")
        selenium_cookies = self.browser.get_cookies()

        # Convert the cookies into a RequestsCookieJar
        cookies = RequestsCookieJar()
        for cookie in selenium_cookies:
            cookies.set(cookie['name'], cookie['value'])
        self.cookies = cookies
Example #21
    def setUp(self):
        self.xls_name = 'V_gomeplus.xlsx'
        self.V = fs_datadevices.fs_datadevice(self.xls_name)
        self.cf = ConfigParser.SafeConfigParser()
        self.path = 'E:\\ZygTest\\Data\\result\\'
        conf_name = 'config.ini'
        self.cf.readfp(codecs.open(self.path + conf_name, 'rb', 'utf_8'))
        self.search_url = []
        self.searchpub_url = []
        self.searchimg_url = []
        for option in self.cf.options('options'):
            self.url1 = self.cf.get(
                'options', option) + self.V.Rxls_URL().get('GetSearch')
            self.search_url.append(self.url1)

        for option in self.cf.options('options'):
            self.url2 = self.cf.get(
                'options',
                option) + self.V.Rxls_URL().get('GetSearchPublisher')
            self.searchpub_url.append(self.url2)

        for option in self.cf.options('options'):
            self.url3 = self.cf.get(
                'options', option) + self.V.Rxls_URL().get('GetSearchImage')
            self.searchimg_url.append(self.url3)

        self.dict_params = self.V.Rxls_Data()
        self.cookies = RequestsCookieJar()
Example #22
    def login(self):
        browser_cookies = {}
        if os.path.isfile(COOKIE_PATH):
            with open(COOKIE_PATH, 'rb') as f:
                browser_cookies = pickle.load(f)
        else:
            print("😎 Starting browser login..., please fill the login form")
            browser = webdriver.Chrome(executable_path="./vendor/chromedriver")
            try:
                # browser login
                login_url = "https://leetcode.com/accounts/login"
                browser.get(login_url)

                WebDriverWait(browser, 24 * 60 * 3600).until(
                    lambda driver: driver.current_url.find("login") < 0
                )
                browser_cookies = browser.get_cookies()
                with open(COOKIE_PATH, 'wb') as f:
                    pickle.dump(browser_cookies, f)
                print("🎉 Login successfully")

            except Exception as e:
                print(f"🤔 Login Failed: {e}, please try again")
                exit()

        cookies = RequestsCookieJar()
        for item in browser_cookies:
            cookies.set(item['name'], item['value'])

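            # Also expose the csrftoken cookie as the x-csrftoken request header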
            if item['name'] == 'csrftoken':
                self.session.headers.update({
                    "x-csrftoken": item['value']
                })

        self.session.cookies.update(cookies)
Example #23
def test_cookie_jar_auth():
    session = AuthSession()
    jar = RequestsCookieJar()
    jar.set('TEST_COOKIE', 'BADCOOKIE')
    session.credentials.set_cookie_jar(jar)
    service = pyvo.dal.TAPService('http://example.com/tap', session)
    service.run_async("SELECT * FROM ivoa.obscore")
Example #24
def cookies_transfer(driver):
    # Collect all Selenium cookies into one jar, then merge it into the session
    cookie_jar = RequestsCookieJar()
    for cookie in driver.get_cookies():
        cookie_jar.set(cookie['name'],
                       cookie['value'],
                       domain=cookie['domain'])
    s.cookies.update(cookie_jar)
Example #25
def get_page_params(url, cookies):
    parms = {}
    request_url = base_url_bl + url
    jar = RequestsCookieJar()
    cookies_str = ""
    for key, value in cookies.items():
        cookies_str = cookies_str + key + "=" + value + ";"
        jar.set(key, value)

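    # Note: cookies_str is built above but never used; only the jar is sent with the request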
    header = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
    }
    response = requests.get(request_url, cookies=jar, headers=header)

    html = etree.HTML(response.text)
    __VIEWSTATE = html.xpath("//input[@id='__VIEWSTATE']/@value")[0]
    __EVENTVALIDATION = html.xpath(
        "//input[@id='__EVENTVALIDATION']/@value")[0]
    __VIEWSTATEGENERATOR = html.xpath(
        "//input[@id='__VIEWSTATEGENERATOR']/@value")[0]
    parms['__VIEWSTATE'] = __VIEWSTATE
    parms['__EVENTVALIDATION'] = __EVENTVALIDATION
    parms['__VIEWSTATEGENERATOR'] = __VIEWSTATEGENERATOR
    return parms
Example #26
def go_refund_tuikuan(params, cookies, url):
    ret = {'code': "error", 'message': ''}
    try:
        request_url = base_url_bl + url
        jar = RequestsCookieJar()
        cookies_str = ""
        for key, value in cookies.items():
            cookies_str = cookies_str + key + "=" + value + ";"
            jar.set(key, value)

        header = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
        }
        response = requests.post(request_url,
                                 cookies=jar,
                                 data=params,
                                 headers=header)
        if response.status_code == 200:
            if response.text.find("申请退款成功") != -1:
                ret['code'] = 'ok'
                ret['message'] = ''
        print(response.text)
    except:

        print(traceback.format_exc())
        logger.info('%s url ' % (traceback.format_exc()))
        ret['code'] = 'error'
        ret['message'] = '申请失败'
    return ret
Example #27
def check_login(cookie):
    try:
        url = base_url_bl + "/User/Charge.aspx"
        jar = RequestsCookieJar()
        cookies_str = ""
        for key, value in cookie.items():
            cookies_str = cookies_str + key + "=" + value + ";"
            jar.set(key, value)

        # jar.set("ASP.NET_SessionId", "xgvmk1stokeiiwah3fjdb2ye")
        # jar.set(".ASPXAUTH", "518BE5C9561C4B8FE94A5E2FC94F5E1CE4878549F234B5A255A01C2D4AA72E7F74D097AFB3812C56BECDA75CE25D82ECD133A43ED8F891A89DF31181C321BB57FB1FBEB559373EE83503A98D052F715E1D610B95A626F326CE9664CA952C6D60CCB3C0AEB351488DFBB442E6C2F921EFC7F147FA94341F5536C1A3DAE55EAC06")
        header = {
            # the cookie value itself is omitted here ![(☆_☆)/~~]
            # "Cookie": "ASP.NET_SessionId=pg1lpw5gm1owdfjnx0il22hf&.ASPXAUTH=1411FAF265576FA799C33E4D989698497B677D1318CC59F9B2E4A581CA5E5E46B7E139A0E1B8347A3BEC2F51956F12D0765CA4B8011E83DB082634DCF496B8890EEE4BCD08E43E9D844B3A8D6D4681A95E0168358EB8DB21C2BC781F659A3D332CAD94904A21FE773106F060925CE88186D792931E4E21702342F1213EE8E8D6",
            # 'content-type': 'charset=gbk',
            # 'origin': 'https://wuliu.taobao.com',
            "User-Agent":
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
        }

        response = requests.get(url, cookies=jar, headers=header)
        if response.text.find("帐户余额") != -1:
            return True
        return False
    except:
        logger.info('%s url ' % (traceback.format_exc()))
    return False
Example #28
def cnblogs():
    """通过cookies跳过登录验证"""
    url = 'https://account.cnblogs.com/signin?returnUrl=https:%2F%2Fwww.cnblogs.com%2F'
    header = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36"
    }

    s = requests.session()
    r = s.get(url, headers=header)

    c = RequestsCookieJar()
    c.set(
        ".Cnblogs.AspNetCore.Cookies",
        "CfDJ8L-rpLgFVEJMgssCVvNUAjvl6g97AtVxWdY0r6GWTszKAnZdDoV-OYeqazuwiOg0_JqcfAIGRb1ie"
        "DRjzQCf0e9u8r5qW_yhpttyzMKlJj3Qn3tCuh0j55sfPANvBwYfAtxL8_HEAs1h-FAHXD-B_3bdfI4ysaAgQtecJyoakssP8ppwM80F"
        "JwORaYJl5KgD5qXQkP6NqDMzDyT4zcQJMQSBD9hZwK-IiVvHe2mvIhnHk4ZNk_Eitytl7Ihv9UXl14-vyo7yGEo22qlIa3Z9A5"
        "zvaTpsda0ZNduwcOpfPQQ9aCcOE4uDEvU3wB2u5_u-xWI6J7dm5Ur55KR-s5In46aCt3Pjop2NfSXypn1Y0x5SQeYDHwzo5roM9qGnUlu_FMo"
        "QjZbrAehKjpDWM25vro8WKCrcZnqkkKfsnbN1W6aiwmAsSOigGD7ZexG3WY107g1fppEylW9LIQ0d71YViT3fk0zdF2cqHu41l3Iof2I0BbskW9ES"
        "-SnvHpA8jaQQJIyZW_F1xlpunvAAcc6FN1iuaxnvXB556dCSIhYP2sfdo6UrFSMMoVLoFuOvOw4fJQ"
    )
    c.set(
        ".CNBlogsCookie",
        "87FB9B609C0F14DFFE6705DD38C9AD6586B37AF63D0FF8F0DFD069863AD9644606C51336961878C0C0FE124C5D4C03211B053"
        "AF276233AAE4E22FEC98A2069DD6465705331D14EA8EF6C2482C87D9F734997048F")

    s.cookies.update(c)
    print('Full cookie jar:', s.cookies)

    url2 = "https://www.cnblogs.com/97xiaolai/"
    res = s.get(url2)
    return res.text
Example #29
def session_request(self,
                    method,
                    url,
                    params=None,
                    data=None,
                    headers=None,
                    cookies=None,
                    files=None,
                    auth=None,
                    timeout=None,
                    allow_redirects=True,
                    proxies=None,
                    hooks=None,
                    stream=None,
                    verify=False,
                    cert=None,
                    json=None):
    conf = CONF.get("requests", {})
    if timeout is None and "timeout" in conf:
        timeout = conf["timeout"]
    merged_cookies = merge_cookies(
        merge_cookies(RequestsCookieJar(), self.cookies), cookies
        or (conf["cookie"] if "cookie" in conf else None))

    req = Request(
        method=method.upper(),
        url=url,
        headers=merge_setting(headers,
                              conf["headers"] if "headers" in conf else {}),
        files=files,
        data=data or {},
        json=json,
        params=params or {},
        auth=auth,
        cookies=merged_cookies,
        hooks=hooks,
    )
    prep = self.prepare_request(req)
    proxies = proxies or (conf["proxies"] if "proxies" in conf else {})

    settings = self.merge_environment_settings(prep.url, proxies, stream,
                                               verify, cert)

    send_kwargs = {
        "timeout": timeout,
        "allow_redirects": allow_redirects,
    }
    send_kwargs.update(settings)
    resp = self.send(prep, **send_kwargs)

    if resp.encoding == "ISO-8859-1":
        encodings = get_encodings_from_content(resp.text)
        if encodings:
            encoding = encodings[0]
        else:
            encoding = resp.apparent_encoding

        resp.encoding = encoding
    return resp
Example #30
def get_score(cookies):
    try:
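        # Intended to raise the default connection retry count; note that HTTPAdapter captures
        # DEFAULT_RETRIES as a default argument at import time, so this assignment may have no effect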
        requests.adapters.DEFAULT_RETRIES = 5
        jar = RequestsCookieJar()
        for cookie in cookies:
            jar.set(cookie['name'], cookie['value'])
        total_json = requests.get("https://pc-api.xuexi.cn/open/api/score/get",
                                  cookies=jar,
                                  headers={
                                      'Cache-Control': 'no-cache'
                                  }).content.decode("utf8")
        total = int(json.loads(total_json)["data"]["score"])
        #userId = json.loads(total_json)["data"]["userId"]
        user_info = requests.get("https://pc-api.xuexi.cn/open/api/user/info",
                                 cookies=jar,
                                 headers={
                                     'Cache-Control': 'no-cache'
                                 }).content.decode("utf8")
        userId = json.loads(user_info)["data"]["uid"]
        userName = json.loads(user_info)["data"]["nick"]
        score_json = requests.get(
            "https://pc-api.xuexi.cn/open/api/score/today/queryrate",
            cookies=jar,
            headers={
                'Cache-Control': 'no-cache'
            }).content.decode("utf8")
        today_json = requests.get(
            "https://pc-api.xuexi.cn/open/api/score/today/query",
            cookies=jar,
            headers={
                'Cache-Control': 'no-cache'
            }).content.decode("utf8")
        today = 0
        today = int(json.loads(today_json)["data"]["score"])
        dayScoreDtos = json.loads(score_json)["data"]["dayScoreDtos"]
        rule_list = [1, 2, 9, 1002, 1003, 6, 5, 4]
        score_list = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  # length 10
        for i in dayScoreDtos:
            for j in range(len(rule_list)):
                if i["ruleId"] == rule_list[j]:
                    score_list[j] = int(i["currentScore"])
        # articles read, videos watched, login, article time, video time, daily quiz, weekly quiz, special quiz
        scores = {}
        scores["article_num"] = score_list[0]  # 0 articles read
        scores["video_num"] = score_list[1]  # 1 videos watched
        scores["login"] = score_list[2]  # 7 login
        scores["article_time"] = score_list[3]  # 6 article reading time
        scores["video_time"] = score_list[4]  # 5 video watching time
        scores["daily"] = score_list[5]  # 2 daily quiz
        scores["weekly"] = score_list[6]  # 3 weekly quiz
        scores["zhuanxiang"] = score_list[7]  # 4 special quiz

        scores["today"] = today  # 8 today's score
        return userId, total, scores, userName
    except:
        print("=" * 60)
        print("get_score 获取失败")
        print("=" * 60)
        raise