コード例 #1
0
def session():
    """Return the lazily-created module-wide requests Session.

    On first use, builds the Session, restores cookies via
    venmo.cookies.load(), and registers _save_cookies so the jar is
    persisted at interpreter exit.
    """
    global _session
    if _session:
        return _session
    _session = Session()
    _session.cookies = venmo.cookies.load()
    atexit.register(_save_cookies)
    return _session
コード例 #2
0
ファイル: utils.py プロジェクト: Kuppey/Shiinabot
def scrape_site(url, cookie_file="", ses=False, is_rss=False):
    """Open *url* in a RoboBrowser with a preconfigured requests Session.

    Args:
        url: Page (or RSS feed) to open.
        cookie_file: Optional LWP cookie-jar path to attach to the session.
        ses: When True, also return the underlying Session.
        is_rss: When True, parse the response as XML instead of HTML.

    Returns:
        The RoboBrowser, or (browser, session) when *ses* is True.
    """
    from http.cookiejar import LWPCookieJar
    from robobrowser import RoboBrowser
    from requests import Session
    s = Session()
    if cookie_file:
        s.cookies = LWPCookieJar(cookie_file)
        try:
            s.cookies.load(ignore_discard=True)
        except OSError:
            # Cookies don't exist yet (FileNotFoundError) or the jar is
            # unreadable (http.cookiejar.LoadError, an OSError subclass).
            # Narrowed from a bare except so real bugs still surface.
            pass
    s.headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; rv:39.0)'
    s.headers['Accept'] = 'text/html'
    s.headers['Connection'] = 'keep-alive'
    parser = 'xml' if is_rss else 'html5lib'
    browser = RoboBrowser(session=s, parser=parser)
    browser.open(url)
    if ses:
        return browser, s
    return browser
コード例 #3
0
ファイル: singletons.py プロジェクト: thelastnode/venmo
def session():
    """Return the shared requests Session, creating it on first call.

    The first call restores cookies via cookies.load() and registers
    _save_cookies for persistence at exit.
    """
    global _session
    if _session:
        return _session
    _session = Session()
    _session.cookies = cookies.load()
    atexit.register(_save_cookies)
    return _session
コード例 #4
0
ファイル: web.py プロジェクト: KNCheung/ProjectEuler
 def getSession(self):
     """Build a requests Session with a desktop-browser User-Agent and
     this instance's LWP cookie jar (loaded from disk when present)."""
     sess = Session()
     ua = (r'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:42.0) '
           r'Gecko/20100101 Firefox/42.0')
     sess.headers.update({'User-Agent': ua})
     sess.cookies = LWPCookieJar(self.cookieFile)
     if os.path.exists(self.cookieFile):
         sess.cookies.load()
     return sess
コード例 #5
0
 def http(self, path: Path) -> Path:
     """Fetch self.url and write the response body to *path*.

     When self.cookies is set it is installed on the session first.
     Returns *path*.
     """
     sess = Session()
     if self.cookies:
         sess.cookies = cookiejar_from_dict(self.cookies)
     resp = sess.get(self.url)
     path.write_bytes(resp.content)
     return path
コード例 #6
0
ファイル: saml.py プロジェクト: umich-iam/awscli-login
def saml_login(url: str,
               jar: LWPCookieJar,
               username: str = None,
               password: str = None,
               headers: Headers = None) -> bytes:
    """
    Generates and posts a SAML AuthNRequest to an IdP.

    Args:
        url: ECP endpoint URL for the IdP.
        jar: Cookie jar installed on the request session (cast to a
            RequestsCookieJar for requests' benefit).
        username: Username to provide to the IdP.
        password: Password to provide to the IdP.
        headers: optional headers to provide to the IdP.

    Returns:
        The SOAP response from the IdP.

    Raises:
        InvalidSOAP: if the IdP response is not parseable XML.
    """
    s = Session()
    s.cookies = cast(RequestsCookieJar, jar)
    s.headers.update({'Content-Type': 'text/xml', 'charset': 'utf-8'})

    envelope = authn_request()
    # Only send HTTP basic auth when both credentials are present.
    auth = (username, password) if username and password else None
    logger.debug("POST %s\nheaders: %s\npayload %s" % (url, headers, envelope))
    r = s.post(url, data=envelope, headers=headers, auth=auth)
    logger.debug("POST returned: %s" % r.content)

    r.raise_for_status()
    try:
        # raise_if_saml_failed parses r.content; XMLSyntaxError from that
        # parse is mapped to InvalidSOAP below.
        raise_if_saml_failed(r.content)
    except XMLSyntaxError:
        raise InvalidSOAP(url)
    return r.content
コード例 #7
0
ファイル: librusSession.py プロジェクト: mLingoTeam/mLibroAPI
def create(username, password):
    """Authenticate via getCookie and wrap the client token in a Session.

    Returns None when authentication fails; otherwise a dict with the
    configured 'session' and the raw client 'token'.
    """
    tokens = getCookie.get(username, password)
    if tokens is None:
        return None
    sess = Session()
    sess.cookies = cookiejar_from_dict({'DZIENNIKSID': tokens['clientToken']})
    return {'session': sess, 'token': tokens['clientToken']}
コード例 #8
0
def login(s: requests.Session, username, password, cookie_file: Path):
    """Log in to app.ucas.ac.cn, reusing cached cookies when still valid.

    Reads *cookie_file* (a JSON cookie dict) if present and skips the
    login POST when the cached cookie still works; otherwise posts the
    credentials and, on success, saves the fresh cookies back to
    *cookie_file*.
    """
    if cookie_file.exists():
        # Decode the cookie cache explicitly as UTF-8 so reading does not
        # depend on the platform's locale encoding (the write path below
        # uses 'u8', i.e. UTF-8).
        cookie = json.loads(cookie_file.read_text(encoding='utf-8'))
        s.cookies = requests.utils.cookiejar_from_dict(cookie)
        # Probe whether the cached cookie is still valid.
        if get_daily(s) == False:
            print("cookie失效,进入登录流程")
        else:
            print("cookie有效,跳过登录环节")
            return

    payload = {"username": username, "password": password}
    r = s.post("https://app.ucas.ac.cn/uc/wap/login/check", data=payload)

    if r.json().get('m') != "操作成功":
        print("登录失败")
        message(api_key, sender_email, sender_email_passwd, receiver_email,
                "健康打卡登录失败", "登录失败")

    else:
        print("登录成功")
        with open(cookie_file, 'w', encoding='u8') as f:
            f.write(json.dumps(requests.utils.dict_from_cookiejar(r.cookies)))
            print("cookies 保存完成,文件名为 {}".format(cookie_file))
コード例 #9
0
ファイル: atcoder.py プロジェクト: yuuki3655/atcoder-tools
def load_cookie_to(session: requests.Session, cookie_path: Optional[str] = None):
    """Attach an LWP cookie jar at *cookie_path* (or the default path)
    to *session*, loading it from disk when the file exists.

    Returns True when cookies were loaded, False otherwise.
    """
    path = cookie_path or default_cookie_path
    session.cookies = LWPCookieJar(path)
    if not os.path.exists(path):
        return False
    session.cookies.load()
    logger.info(
        "Loaded session from {}".format(os.path.abspath(path)))
    return True
コード例 #10
0
 def http(self, path: Path) -> Path:
     """Download the source file over HTTP into *path* and return it.

     Cookies supplied at class initialization, if any, are installed on
     the request session first.
     """
     http_session = Session()
     if self.cookies:
         http_session.cookies = cookiejar_from_dict(self.cookies)
     path.write_bytes(http_session.get(self.url).content)
     return path
コード例 #11
0
def load_cookie_to(session: requests.Session, cookie_path: Optional[str] = None):
    """Point *session* at an on-disk LWP cookie jar and load it if present.

    Returns True if an existing cookie file was loaded, else False.
    """
    target = cookie_path if cookie_path else default_cookie_path
    jar = LWPCookieJar(target)
    session.cookies = jar
    if os.path.exists(target):
        jar.load()
        logging.info(
            "Loaded session from {}".format(os.path.abspath(target)))
        return True
    return False
コード例 #12
0
def with_cookiejar(session: requests.Session, path: pathlib.Path = default_cookie_path) -> Generator[requests.Session, None, None]:
    """Yield *session* with an LWP cookie jar at *path* attached, then
    persist the jar (with mode 0600) once the caller is finished."""
    jar = http.cookiejar.LWPCookieJar(str(path))  # type: ignore
    session.cookies = jar  # type: ignore
    if path.exists():
        log.status('load cookie from: %s', path)
        jar.load()
    yield session
    log.status('save cookie to: %s', path)
    path.parent.mkdir(parents=True, exist_ok=True)
    jar.save()
    path.chmod(0o600)  # NOTE: to make secure a little bit
コード例 #13
0
ファイル: utils.py プロジェクト: Okonorn/AcePictureBot
def scrape_site(url, cookie_file=""):
    """Open *url* in a RoboBrowser, re-logging in to sankakucomplex or
    gelbooru first when the saved cookies are missing or expired.

    The requests Session is kept in the module-level global ``s``.
    Returns the RoboBrowser on success, or False when opening fails.
    """
    global s
    s = Session()
    s.headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; rv:39.0)'
    s.headers['Accept'] = 'text/html'
    s.headers['Connection'] = 'keep-alive'
    if cookie_file:
        s.cookies = LWPCookieJar(cookie_file)
        try:
            s.cookies.load()
            if not s.cookies._cookies:
                # Cookies have expired
                raise Exception
        # NOTE(review): (FileNotFoundError, Exception) is redundant —
        # Exception already covers FileNotFoundError, so this catches
        # every exception, including the deliberate raise above.
        except (FileNotFoundError, Exception):
            if os.path.exists(cookie_file):
                os.remove(cookie_file)
            browser = RoboBrowser(session=s,
                                  parser='html5lib',
                                  timeout=10)
            # Re-login to the site matching the target URL; credentials
            # come from the module-level website_logins mapping. The
            # login URLs and form field names are redacted ("******")
            # in this source.
            if "sankakucomplex.com" in url:
                url_login = "******"
                form_num = 0
                form_user = "******"
                form_password = "******"
                username = website_logins['sankakucomplex_username']
                password = website_logins['sankakucomplex_password']
                browser.open(url_login)
                form = browser.get_form(form_num)
                form[form_user].value = username
                form[form_password].value = password
                browser.submit_form(form)
                s.cookies.save()
            elif "gelbooru.com" in url:
                url_login = "******"
                form_num = 0
                form_user = "******"
                form_password = "******"
                username = website_logins['gelbooru_username']
                password = website_logins['gelbooru_password']
                browser.open(url_login)
                form = browser.get_form(form_num)
                form[form_user].value = username
                form[form_password].value = password
                browser.submit_form(form)
                s.cookies.save()
    browser = RoboBrowser(session=s,
                          parser='html5lib',
                          timeout=10)
    try:
        browser.open(url)
        return browser
    except:
        # TODO: find what exceptions happens here
        # NOTE(review): printf is not a Python builtin — presumably a
        # module-level helper; confirm it exists.
        printf("[WARNING] TIMEOUT WITH WEBSITE: {0}".format(url))
        return False
コード例 #14
0
ファイル: sessions.py プロジェクト: emq412-3T0/httpie
def get_response(
    requests_session: requests.Session,
    session_name: str,
    config_dir: Path,
    args: argparse.Namespace,
    read_only=False,
) -> requests.Response:
    """Like `client.get_responses`, but applies permanent
    aspects of the session to the request.

    Args:
        requests_session: the live requests session used for the call.
        session_name: either a filesystem path (contains the OS path
            separator) or a bare name resolved under
            config_dir/<sessions dir>/<hostname>/<name>.json.
        config_dir: httpie configuration directory.
        args: parsed command-line arguments.
        read_only: when True, an existing session file is not updated
            with the response cookies.

    Returns:
        The requests.Response of the performed request.
    """
    from .client import make_requests_kwargs, dump_request
    if os.path.sep in session_name:
        path = os.path.expanduser(session_name)
    else:
        # Derive the host directory from the Host header or the URL,
        # stripping any userinfo ("user@host").
        hostname = (args.headers.get('Host', None)
                    or urlsplit(args.url).netloc.split('@')[-1])
        if not hostname:
            # HACK/FIXME: httpie-unixsocket's URLs have no hostname.
            hostname = 'localhost'

        # host:port => host_port
        hostname = hostname.replace(':', '_')
        path = (config_dir / SESSIONS_DIR_NAME / hostname /
                (session_name + '.json'))

    session = Session(path)
    session.load()

    kwargs = make_requests_kwargs(args, base_headers=session.headers)
    if args.debug:
        dump_request(kwargs)
    session.update_headers(kwargs['headers'])

    if args.auth_plugin:
        session.auth = {
            'type': args.auth_plugin.auth_type,
            'raw_auth': args.auth_plugin.raw_auth,
        }
    elif session.auth:
        kwargs['auth'] = session.auth

    # Seed the live session with the cookies persisted in the session file.
    requests_session.cookies = session.cookies

    try:
        response = requests_session.request(**kwargs)
    except Exception:
        raise
    else:
        # Existing sessions with `read_only=True` don't get updated.
        if session.is_new() or not read_only:
            session.cookies = requests_session.cookies
            session.save()
        return response
コード例 #15
0
 def createSession(self, cookies):
     """
     Creates a global session to be used by all requests.

     Mounts a large HTTPS connection pool and installs the instance
     headers plus the supplied cookies.
     :param cookies: cookies to attach to the session.
     :return: the configured Session.
     """
     sess = Session()
     sess.mount('https://',
                adapters.HTTPAdapter(pool_connections = 1000, pool_maxsize = 5000))
     sess.headers = self.headers
     sess.cookies = cookies
     return sess
コード例 #16
0
 def createSession(self, cookies):
     """
     Creates a global session to be used by all requests.
     :param cookies: cookies to install on the session.
     :return: the configured Session.
     """
     pooled_adapter = adapters.HTTPAdapter(pool_connections=1000,
                                           pool_maxsize=5000)
     sess = Session()
     sess.mount('https://', pooled_adapter)
     sess.headers = self.headers
     sess.cookies = cookies
     return sess
コード例 #17
0
def with_cookiejar(session: requests.Session,
                   path: str) -> Generator[requests.Session, None, None]:
    """Yield *session* backed by an LWP cookie jar at *path* (or the
    default path), then save the jar with 0600 permissions."""
    jar_path = path or default_cookie_path
    session.cookies = http.cookiejar.LWPCookieJar(jar_path)  # type: ignore
    if os.path.exists(jar_path):
        log.status('load cookie from: %s', jar_path)
        session.cookies.load()  # type: ignore
    yield session
    log.status('save cookie to: %s', jar_path)
    parent = os.path.dirname(jar_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    session.cookies.save()  # type: ignore
    os.chmod(jar_path, 0o600)  # NOTE: to make secure a little bit
コード例 #18
0
ファイル: utils.py プロジェクト: Dimpsy/AcePictureBot
def scrape_site(url, cookie_file=""):
    """Open *url* in a RoboBrowser, re-authenticating to sankakucomplex
    or gelbooru first when the saved cookies are missing or expired.

    The requests Session lives in the module-level global ``s``.
    Returns the RoboBrowser on success, or False when opening fails.
    """
    global s
    s = Session()
    s.headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; rv:39.0)'
    s.headers['Accept'] = 'text/html'
    s.headers['Connection'] = 'keep-alive'
    if cookie_file:
        s.cookies = LWPCookieJar(cookie_file)
        try:
            s.cookies.load()
            if not s.cookies._cookies:
                # Cookies have expired
                raise Exception
        # NOTE(review): (FileNotFoundError, Exception) is redundant —
        # Exception already covers it, so every failure lands here.
        except (FileNotFoundError, Exception):
            if os.path.exists(cookie_file):
                os.remove(cookie_file)
            browser = RoboBrowser(session=s, parser='html5lib', timeout=10)
            # Log back in to whichever site the target URL belongs to;
            # credentials come from the module-level website_logins map.
            # Login URLs and form field names are redacted ("******").
            if "sankakucomplex.com" in url:
                url_login = "******"
                form_num = 0
                form_user = "******"
                form_password = "******"
                username = website_logins['sankakucomplex_username']
                password = website_logins['sankakucomplex_password']
                browser.open(url_login)
                form = browser.get_form(form_num)
                form[form_user].value = username
                form[form_password].value = password
                browser.submit_form(form)
                s.cookies.save()
            elif "gelbooru.com" in url:
                url_login = "******"
                form_num = 0
                form_user = "******"
                form_password = "******"
                username = website_logins['gelbooru_username']
                password = website_logins['gelbooru_password']
                browser.open(url_login)
                form = browser.get_form(form_num)
                form[form_user].value = username
                form[form_password].value = password
                browser.submit_form(form)
                s.cookies.save()
    browser = RoboBrowser(session=s, parser='html5lib', timeout=10)
    try:
        browser.open(url)
        return browser
    except:
        # TODO: find what exceptions happens here
        # NOTE(review): printf is not a builtin — presumably a module
        # helper; confirm it exists.
        printf("[WARNING] TIMEOUT WITH WEBSITE: {0}".format(url))
        return False
コード例 #19
0
    def http(self, path: Path) -> Path:
        """Download the source file using HTTP.

        Installs self.cookies (when set) on a fresh session, POSTs
        self.post_data when present (GET otherwise), and writes the
        response body to *path*. Returns *path*.
        """
        sess = Session()
        if self.cookies:
            sess.cookies = cookiejar_from_dict(self.cookies)
        if self.post_data:
            resp = sess.post(self.url, data=self.post_data)
        else:
            resp = sess.get(self.url)
        path.write_bytes(resp.content)
        return path
コード例 #20
0
def get_session():
    """
    :desc: Builds session from the saved cookies, if present.
           Otherwise, a new session is created.
    :return: requests.Session object
    """
    sess = Session()
    if not os.path.exists(COOKIES_FILE_PATH):
        return sess
    sess.cookies = LWPCookieJar(filename=COOKIES_FILE_PATH)
    sess.cookies.load(ignore_discard=True, ignore_expires=True)
    return sess
コード例 #21
0
ファイル: scraper.py プロジェクト: Masrt200/Scraper-CTF
def connect(url):
    """Return an authenticated requests Session for the target site.

    Reuses a pickled cookie cache when present (unless --dynamic-login);
    otherwise performs a fresh nonce-based form login and writes the
    cache. Exits the process on a missing cache or bad credentials.
    Relies on module globals: cache_path, args, username, password, and
    the lc/rt/og/bd color codes.
    """
    #using stored session cache for connection
    if os.path.exists(cache_path + ".cache") and not args.dynamic_login:
        if not args.submit:
            download()
        cj = cookies.RequestsCookieJar()
        # SECURITY NOTE(review): pickle.load executes arbitrary code if
        # the cache file is tampered with; acceptable only because the
        # cache is written locally below.
        with open(cache_path + ".cache", "rb") as f:
            cook = pickle.load(f)
        cj._cookies = cook
        s = Session()
        s.cookies = cj
        return s
    elif not os.path.exists(cache_path + ".cache") and not args.dynamic_login:
        print(lc + ".cache missing! create a new session!!" + rt)
        sys.exit()
    #new connection
    else:
        with Session() as s:
            print(lc + bd + "Connecting...\r" + rt, end='')

            try:
                site = s.get(url + '/login')
            except:
                # NOTE(review): bare except also swallows KeyboardInterrupt.
                print(
                    og + bd +
                    "Failed to establish a new connection! Name or service not known"
                    + rt)
                sys.exit()

            # Scrape the login nonce (CSRF token) out of the login form.
            bs_content = bs(site.content, "html.parser")
            token = bs_content.find("input", {"name": "nonce"})["value"]
            login_data = {
                "name": username,
                "password": password,
                "nonce": token
            }
            resp = s.post(url + '/login', login_data)

            if b'Your username or password is incorrect' in resp.content:
                print(og + bd + "your username or password is incorrect!!" +
                      rt)
                sys.exit()

            print(lc + bd + "\x1b[2KConnected!!" + rt)

            # Cache the raw cookie dict for future runs.
            with open(cache_path + ".cache", 'wb') as f:
                pickle.dump(s.cookies._cookies, f)

            return s
コード例 #22
0
def login(s: requests.Session, username, password, cookie_file: Path):
    """Log in to app.ucas.ac.cn, preferring the cached cookie file.

    When *cookie_file* holds a still-valid cookie the login POST is
    skipped entirely; otherwise the credentials are POSTed (with retries
    and timeouts) and fresh cookies are written back on success.
    Failures are reported through message().
    """
    if cookie_file.exists():
        cookie = json.loads(cookie_file.read_text(encoding='utf-8'))
        s.cookies = requests.utils.cookiejar_from_dict(cookie)
        # Probe whether the cached cookie is still valid.
        if get_daily(s) == False:
            print("cookie失效,进入登录流程")
        else:
            print("cookie有效,跳过登录环节")
            return

    payload = {
        "username": username,
        "password": password
    }

    # Retry transient connection failures up to 3 times.
    s.mount('http://', HTTPAdapter(max_retries=3))
    s.mount('https://', HTTPAdapter(max_retries=3))

    try:
        # 30s connect timeout and 30s read timeout.
        r = s.post("https://app.ucas.ac.cn/uc/wap/login/check", data=payload, timeout=(30, 30))
    except requests.exceptions.RequestException as e:
        print("服务器连接异常", e)
        message(api_key, sender_email, sender_email_passwd, receiver_email,
                tg_bot_token, tg_chat_id, "健康打卡失败", "服务器连接异常,建议手动检查疫情防控打卡页面是否能够正常加载")
        # BUGFIX: `r` is unbound on this path; without this return the
        # code below raised NameError instead of reporting the failure.
        return

    if r.status_code != 200:
        print("服务器返回状态异常")
        message(api_key, sender_email, sender_email_passwd, receiver_email,
                tg_bot_token, tg_chat_id, "健康打卡失败", "服务器返回状态异常,建议手动检查疫情防控打卡页面是否能够正常加载")

    if r.json().get('m') != "操作成功":
        print("登录失败")
        message(api_key, sender_email, sender_email_passwd, receiver_email,
                tg_bot_token, tg_chat_id, "健康打卡登录失败", "登录失败")

    else:
        cookie_file.write_text(json.dumps(requests.utils.dict_from_cookiejar(r.cookies), indent=2), encoding='utf-8')
        print("登录成功,cookies 保存在文件 {},下次登录将优先使用cookies".format(cookie_file))
コード例 #23
0
    def create_session(self):
        """
        Build a requests Session that retries on HTTP 502.
        """
        retry_policy = Retry(
            status=5,
            backoff_factor=0.3,
            status_forcelist=(502, ),
            # CAUTION: adding 'POST' to this list which is not technically idempotent
            method_whitelist=("POST", "HEAD", "TRACE", "GET", "PUT", "OPTIONS",
                              "DELETE"),
        )
        sess = Session()
        sess.mount("https://", HTTPAdapter(max_retries=retry_policy))
        sess.cookies = cookiejar_from_dict({"token_v2": self.token_v2})
        return sess
コード例 #24
0
def send_athorization(mac_id, minutes, site):
    """Authorize a guest MAC on a UniFi controller for *minutes* minutes.

    Logs in with the module-level USERNAME/PASSWORD, issues the
    authorize-guest command for *site*, then logs out. Cookies are
    persisted to "test.cookies". Always returns True.
    """
    sesh = Session()
    sesh.cookies = LWPCookieJar(filename="test.cookies")
    sesh.cookies.save()

    #  login
    sesh.post(urljoin(UNIFI_SERVER, "api/login"),
              json={"username": USERNAME, "password": PASSWORD})
    sesh.cookies.save()

    # post to api
    sesh.post(urljoin(UNIFI_SERVER, 'api/s/' + site + '/cmd/stamgr'),
              json={'cmd': 'authorize-guest', 'mac': mac_id, 'minutes': minutes})

    # logout
    sesh.post(urljoin(UNIFI_SERVER, 'logout'))
    return True
コード例 #25
0
ファイル: utils.py プロジェクト: MegaRobotMan/AcePictureBot
def scrape_site(url, cookie_file=""):
    """Open *url* in a RoboBrowser backed by the shared global session.

    Loads cookies from *cookie_file* when given (a missing or corrupt
    cookie file is ignored). Returns the browser on success, or False
    when the page cannot be opened.
    """
    global s
    s = Session()
    if cookie_file:
        s.cookies = LWPCookieJar(cookie_file)
        try:
            s.cookies.load(ignore_discard=True)
        except OSError:
            # Cookies don't exist yet: LWPCookieJar.load raises
            # FileNotFoundError/LoadError, both OSError subclasses.
            # Narrowed from a bare except so real bugs still surface.
            pass
    s.headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; rv:39.0)'
    s.headers['Accept'] = 'text/html'
    s.headers['Connection'] = 'keep-alive'
    browser = RoboBrowser(session=s, parser='html5lib', timeout=15)
    try:
        browser.open(url)
        return browser
    except Exception:
        # TODO: find what exceptions happens here
        # Narrowed from a bare except so Ctrl-C still interrupts.
        # NOTE(review): printf is not a builtin — presumably a module
        # helper; confirm it exists.
        printf("[WARNING] TIMEOUT WITH WEBSITE: {0}".format(url))
        return False
コード例 #26
0
def get_f139_session():
    """Return a requests Session preloaded with the locally cached f139
    cookies; on any load failure the session is returned without
    cookies so the caller can re-login and refresh the cache."""
    sess = Session()
    cookie_path = './data/f139_cookies.config'
    if not os.path.exists('./data'):
        os.makedirs('./data')
    try:
        if not os.path.exists(cookie_path):
            logging.info("从本地加载cookies失败")
            return sess
        with open(cookie_path, 'rb') as fh:
            sess.cookies = pickle.load(fh)
        logging.info("从本地加载cookies成功")
    except FileNotFoundError:
        logging.error("从本地加载cookies失败,原因是:{}".format("FileNotFoundError"))
    except Exception as e:
        logging.error("从本地加载cookies失败,原因是:{}".format(e))
    return sess
コード例 #27
0
ファイル: mal_list.py プロジェクト: Dimpsy/AcePictureBot
def scrape_site(url, cookie_file=""):
    """Open *url* with RoboBrowser using the shared global session,
    restoring cookies from *cookie_file* when supplied.

    Returns the browser, or False if the page could not be opened.
    """
    global s
    s = Session()
    if cookie_file:
        s.cookies = LWPCookieJar(cookie_file)
        try:
            s.cookies.load(ignore_discard=True)
        except OSError:
            # Cookies don't exist yet: LWPCookieJar.load raises
            # FileNotFoundError/LoadError, both OSError subclasses.
            # Narrowed from a bare except so real bugs still surface.
            pass
    s.headers['User-Agent'] = 'Mozilla/5.0 (X11; Ubuntu; rv:39.0)'
    s.headers['Accept'] = 'text/html'
    s.headers['Connection'] = 'keep-alive'
    browser = RoboBrowser(session=s,
                          parser='html5lib',
                          timeout=15)
    try:
        browser.open(url)
        return browser
    except Exception:
        # Narrowed from a bare except so Ctrl-C still interrupts.
        print("[WARNING] TIMEOUT WITH WEBSITE: {0}".format(url))
        return False
コード例 #28
0
    def _create_session(token_v2: str = "") -> Session:
        """
        Helper method for creating a session object for API requests.

        Arguments
        ---------
        token_v2 : str, optional
            Token to use for creating User session.
            Defaults to empty string.

        Returns
        -------
        Session
            initialised Session object.
        """
        retry_policy = Retry(
            total=5,
            backoff_factor=0.3,
            status_forcelist=(502,),  # retry on 502
            # CAUTION: adding 'POST' to this list which is not technically idempotent
            method_whitelist=(
                "POST", "HEAD", "TRACE", "GET", "PUT", "OPTIONS", "DELETE",
            ),
        )
        sess = Session()
        sess.mount("https://", HTTPAdapter(max_retries=retry_policy))
        sess.cookies = cookiejar_from_dict({"token_v2": token_v2})
        return sess
コード例 #29
0
ファイル: looters.py プロジェクト: hashark/InstaLooter
    def _init_session(cls, session=None):
        # type: (Optional[Session]) -> Session
        """Initialise the given session and load class cookies to its jar.

        Arguments:
            session (~requests.Session, optional): a `requests`
                session, or `None` to create a new one.

        Returns:
            ~requests.Session: an initialised session instance.

        """
        if session is not None:
            return session
        session = Session()
        # Attach the class-wide on-disk cookie jar and drop stale entries.
        session.cookies = LWPCookieJar(
            cls._cachefs.getsyspath(cls._COOKIE_FILE))
        jar = typing.cast(FileCookieJar, session.cookies)
        try:
            jar.load()
        except IOError:
            # No cookie file yet — start with an empty jar.
            pass
        jar.clear_expired_cookies()
        return session
コード例 #30
0
def api(url, method='GET', headers=None, data=None):
    """Call the REST API, reusing the module-level cookie session when
    one exists; otherwise fall back to HTTP basic auth.

    When a response carries Set-Cookie, a new session is created from
    those cookies for subsequent calls. Returns the parsed JSON body
    (or the raw text when parsing fails) on HTTP 200, else None.
    """
    global session

    if session:
        log_debug(f"AUTHENTICATED REQUEST: {url}")
        r = session.request(method, url=url, headers=headers, data=data)
    else:
        log_debug(f"PUBLIC REQUEST: {url}")
        r = requests.request(method,
                             url=url,
                             headers=headers,
                             auth=HTTPBasicAuth(API_USER, API_PASS),
                             data=data)

    log_debug('\n'.join(f'{k}: {v}' for k, v in r.headers.items()))

    if 'Set-Cookie' in r.headers:
        session = Session()

        # Parse the Set-Cookie header into name/value pairs.
        my_cookie = SimpleCookie()
        my_cookie.load(r.headers['Set-Cookie'])

        cookies = {key: morsel.value for key, morsel in my_cookie.items()}
        log_json(cookies)
        session.cookies = cookiejar_from_dict(cookies)

    if r.status_code == 200:
        try:
            return get_json(r.text)
        except:
            # NOTE(review): bare except — body isn't valid JSON, so the
            # raw text is returned as a deliberate best-effort fallback.
            log_info(r.text)
            return r.text
    else:
        log_error(f"API: {url} returns: {r.status_code}")

    return None
コード例 #31
0
ファイル: librusSession.py プロジェクト: mLingoTeam/mLibroAPI
def create_with_token(token):
    """Return a requests Session whose cookie jar carries the given
    Librus DZIENNIKSID client token."""
    sess = Session()
    sess.cookies = cookiejar_from_dict({'DZIENNIKSID': token})
    return sess
コード例 #32
0
ファイル: RemoteLogin.py プロジェクト: JoeRiker/CodeRepo
# Check whether a saved Aruba support session token is still valid and,
# when it is, persist the session cookies to disk for later reuse.
import requests.packages.urllib3
from requests import Session
from requests.auth import HTTPBasicAuth
import http.cookiejar

# NOTE(review): credentials are redacted placeholders in this source.
username = '******'
password = '******'
cookiefile = 'D:\\biscotto.txt'

# Suppress the insecure-request warning triggered by verify=False below.
requests.packages.urllib3.disable_warnings()
cj = http.cookiejar.LWPCookieJar()
s = Session()
s.cookies = cj
url = 'https://supporto.aruba.it/Askme/rest/authentication/token'
req = s.get(url, verify=False, auth=HTTPBasicAuth(username, password))

# print(req.status_code)
if req.status_code == 401:
    print('Sessione Scaduta o non valida')
else:
    print('Sessione Valida')
    # Persist the jar, keeping discardable and expired entries so the
    # file mirrors the live session exactly.
    cj.save(filename=cookiefile, ignore_discard=True, ignore_expires=True)
    # s.cookies.save()
    # print(s.cookies)
コード例 #33
0
# Zhihu crawling scratch script: build an authenticated requests session
# from hard-coded browser cookies and fetch member/topic data.
# NOTE(review): `requests` (for requests.cookies) and `nx` (networkx)
# are used below but not imported in this snippet — presumably imported
# elsewhere; confirm.
from requests import Session
from pandas.io.json import json_normalize

from crawler import ZhihuCrawler

# Authentication cookies captured from a logged-in browser session.
cookies = {
  'z_c0': '"2|1:0|10:1523854628|4:z_c0|92:Mi4xNV8wSUFRQUFBQUFBd0dDM2V5UnpEU1lBQUFCZ0FsVk5KSHZCV3dCWUg2c2FVSE9kWW01a2RYVzRoa2xBWUNsWDlR|1bc25bbaa821e106c1ff059a8781dca41e53ea70ad1cebb02aa332b789a92ebf"',
  '_xsrf': '97250175-8ea7-415e-8e7f-b924ee86247'
}
user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
# Copy the cookie dict into a RequestsCookieJar attached to the session.
jar = requests.cookies.RequestsCookieJar()
for k, v in cookies.items():
  jar.set(k, v)
sess = Session()
sess.cookies = jar
sess.headers['User-Agent'] = user_agent
g = nx.DiGraph()

craw = ZhihuCrawler(sess=sess)
craw.member_info('li-mu-23')

data = craw.subscribed_topics('chen-bi-luo-74')
df = json_normalize(data)

def draw(g, **kwargs):
  """Render graph *g* with a spring layout, labelling nodes by their
  'name' attribute.

  NOTE(review): relies on module-level `plt`, `nx` and `topics` (node
  sizes scale with `topics.weight`) — none are defined in this snippet;
  confirm they exist at module scope.
  """
  fig, ax = plt.subplots(1, 1, figsize=(8, 8))
  pos = nx.spring_layout(g, k=0.01, **kwargs)
  labels = dict(g.nodes('name'))
  nx.draw(g, labels=labels, node_size=100 * topics.weight, pos=pos, ax=ax,
          font_size=12, node_color='skyblue')
コード例 #34
0
ファイル: singletons.py プロジェクト: robgarbanati/venmo
def session():
    """Return the shared requests Session, creating it (with persisted
    cookies restored) on first use."""
    global _session
    if _session:
        return _session
    _session = Session()
    _session.cookies = cookies.load()
    return _session
コード例 #35
0
 def get_session(self):
     """Build a Session and, unless --no-cookie was passed, attach the
     cookie returned by self.get_cookie().

     NOTE(review): this method never returns the session it builds —
     it implicitly returns None. The snippet looks truncated; confirm
     a ``return session`` is not missing.
     """
     session = Session()
     if not self.args.no_cookie:
         if cookie := self.get_cookie():
             session.cookies = cookie
コード例 #36
0
 def get_session(self) -> Session:
     """Return a fresh Session, seeded with self.cookies when set."""
     sess = Session()
     if self.cookies is not None:
         sess.cookies = self.cookies
     return sess
コード例 #37
0
    logging.debug("cookie_value=%s", cookie_value)
    logging.debug("sessid=%s", sessid)

    # if authorized
    if 'success' == auth_result:
        logging.info("Authentication succeed!")
        sess_id = sessid.split('=')
        # initializing
        req.params = {
            'type': 'sale',
            'mode': 'init',
            'version': conf['version'],
            'sessid': sess_id[1]
        }
        prepped = sess.prepare_request(req)
        sess.cookies = saved_cookies
        resp = sess.send(prepped)
        logging.debug("init resp.text=%s", resp.text)
        (zip_enabled, file_limit, sessid, version) = resp.text.split()

        # query orders from site
        sess_id = sessid.split('=')
        req.params = {
            'type': 'sale',
            'mode': 'query',
            'version': conf['version'],
            'sessid': sess_id[1]
        }
        req.method = 'GET'
        prepped = sess.prepare_request(req)
        sess.cookies = saved_cookies
コード例 #38
0
def get_orders(site, site_user, site_pword, version):
    """Fetch new sale orders from a 1C-Bitrix exchange endpoint.

    Performs the checkauth/init/query/success handshake against
    /bitrix/admin/1c_exchange.php and, when a non-trivial XML document
    comes back, writes it under 01-xml/ and returns its text.

    Returns:
        The joined XML lines, or None when there are no orders or any
        step fails.
    """
    if site.endswith('arc.world'):
        verify_flag = False
        proto = 'http://'
    else:
        verify_flag = True
        proto = 'https://'

    url = proto + site + '/bitrix/admin/1c_exchange.php'

    sess = Session()
    sess.headers['Connection'] = 'close'
    sess.verify = verify_flag
    sess.keep_alive = True

    # authentication
    req = Request(
        'GET',
        url,
        auth=(site_user, site_pword),
    )
    req.params = {'type': 'sale', 'mode': 'checkauth', 'version': version}
    prepped = sess.prepare_request(req)
    resp = None
    # BUGFIX: initialise up front — previously xml_lines was assigned
    # only on the success/exception paths, so a failed authentication
    # raised NameError at the final return.
    xml_lines = None
    try:
        resp = sess.send(prepped)
        logging.debug("checkauth prepped sent")
        logging.debug("checkauth resp.status_code=%s", resp.status_code)
        logging.debug("checkauth resp.text=%s", resp.text)

        resp.raise_for_status()

        saved_cookies = resp.cookies
        logging.debug("resp.cookies=%s", str(resp.cookies))

        # The endpoint answers with whitespace-separated fields.
        (auth_result, cookie_file, cookie_value, sessid) = resp.text.split()
        logging.debug("Parsed by =")
        logging.debug("auth_result=%s", auth_result)
        logging.debug("cookie_file=%s", cookie_file)
        logging.debug("cookie_value=%s", cookie_value)
        logging.debug("sessid=%s", sessid)

        # if authorized
        if 'success' == auth_result:
            logging.info("Authentication succeed!")
            sess_id = sessid.split('=')
            # initializing
            req.params = {
                'type': 'sale',
                'mode': 'init',
                'version': version,
                'sessid': sess_id[1]
            }
            prepped = sess.prepare_request(req)
            sess.cookies = saved_cookies
            resp = sess.send(prepped)
            logging.debug("init resp.text=%s", resp.text)
            (zip_enabled, file_limit, sessid, version) = resp.text.split()

            # query orders from site
            sess_id = sessid.split('=')
            req.params = {
                'type': 'sale',
                'mode': 'query',
                'version': version,
                'sessid': sess_id[1]
            }
            req.method = 'GET'
            prepped = sess.prepare_request(req)
            sess.cookies = saved_cookies
            resp = sess.send(prepped)
            xml_from_site = resp.text

            if 200 == resp.status_code:
                # acknowledge receipt so the site marks the batch delivered
                req.params = {
                    'type': 'sale',
                    'mode': 'success',
                    'version': version,
                    'sessid': sess_id[1]
                }
                prepped = sess.prepare_request(req)
                sess.cookies = saved_cookies
                resp = sess.send(prepped)
                logging.debug("success resp.text=%s", resp.text)

                xml_orders = get_xml_list_fixed(xml_from_site.splitlines())
                logging.debug("len(xml_orders)=%s", len(xml_orders))

                if len(xml_orders) <= 4:
                    xml_lines = None
                    logging.debug('empty xml, just header. Skip.')
                else:
                    xml_lines = u"\n".join(xml_orders)
                    fname_templ = site + "-%Y-%m-%d_%H-%M-%S"
                    xml_fname = time.strftime("01-xml/orders-" + fname_templ +
                                              ".xml")
                    # BUGFIX: open()'s 3rd positional arg is buffering, so
                    # open(xml_fname, 'w', 'utf-8') raised TypeError; pass
                    # the encoding by keyword and use a context manager.
                    with open(xml_fname, 'w', encoding='utf-8') as xmlf:
                        xmlf.write(xml_lines)
                    logging.info("wrote xml file: %s", xml_fname)

    except Exception:
        # BUGFIX: logging.critical("exception=%s", exc_info=True) supplied
        # no argument for %s and itself failed to format; exc_info alone
        # attaches the traceback.
        logging.critical("exception", exc_info=True)
        xml_lines = None
    return xml_lines