Example #1
    def __init__(self):
        self.articles = []
        self.query = None
        self.cjar = MozillaCookieJar()

        # If we have a cookie file, load it:
        if ScholarConf.COOKIE_JAR_FILE and \
           os.path.exists(ScholarConf.COOKIE_JAR_FILE):
            try:
                self.cjar.load(ScholarConf.COOKIE_JAR_FILE,
                               ignore_discard=True)
                ScholarUtils.log('info', 'loaded cookies file')
            except Exception as msg:
                ScholarUtils.log('warn',
                                 'could not load cookies file: %s' % msg)
                self.cjar = MozillaCookieJar()  # Just to be safe

        self.opener = build_opener(HTTPCookieProcessor(self.cjar))
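        # All requests made by this querier go through the opener, so cookies
        # collected above are sent back automatically on later requests.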
        self.settings = None  # Last settings object, if any
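
A possible counterpart for persisting the jar (a sketch only; it assumes the same ScholarConf/ScholarUtils helpers seen above):

    def save_cookies(self):
        # Hypothetical companion method: write the cookie jar back to disk so the
        # session can be reused on the next run.
        if ScholarConf.COOKIE_JAR_FILE:
            try:
                self.cjar.save(ScholarConf.COOKIE_JAR_FILE, ignore_discard=True)
                ScholarUtils.log('info', 'saved cookies file')
            except Exception as msg:
                ScholarUtils.log('warn',
                                 'could not save cookies file: %s' % msg)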
Example #2
def cmr_download(urls):
    """Download files from list of urls."""
    if not urls:
        return

    url_count = len(urls)
    print('Downloading {0} files...'.format(url_count))
    credentials = None

    for index, url in enumerate(urls, start=1):
        print(url)
        if not credentials and urlparse(url).scheme == 'https':
            credentials = get_credentials(url)

        filename = url.split('/')[-1]
        print('{0}/{1}: {2}'.format(
            str(index).zfill(len(str(url_count))), url_count, filename))
        # -- don't overwrite existing files ------------------------------------------------
        if os.path.exists(filename):
            print("already downloaded")
        else:
            # -- end of overwrite modification ----------------------------------------------
            try:
                print("url:" + url)
                # In Python 3 we could eliminate the opener and just do 2 lines:
                # resp = requests.get(url, auth=(username, password))
                # open(filename, 'wb').write(resp.content)
                req = Request(url)
                if credentials:
                    req.add_header('Authorization',
                                   'Basic {0}'.format(credentials))
                opener = build_opener(HTTPCookieProcessor())
                data = opener.open(req).read()
                open(filename, 'wb').write(data)
            except HTTPError as e:
                print('HTTP error {0}, {1}'.format(e.code, e.reason))
            except URLError as e:
                print('URL error: {0}'.format(e.reason))
            except IOError:
                raise
            except KeyboardInterrupt:
                quit()
Example #3
    def __init__(self, username, password):
        self._cookie_jar = CookieJar()
        self._opener = build_opener(HTTPCookieProcessor(self._cookie_jar))

        self.username = username
        self.session = self._login(username, password)
        self.devices = self._devices()

        self.master = None
        for d in self.devices:
            if d.master is not None:
                self.master = d.master
                break

        if not self.master:
            raise ValueError("couldn't find master unit")

        self.ws = websocket.create_connection(self.API_URL_SOCKET)
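        # Authenticate the newly opened websocket with the API key obtained from
        # the HTTP login above.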
        self.ws.send(
            json.dumps({
                "jsonrpc": "2.0",
                "id": 3,
                "method": "srvWebSocketAuth",
                "params": {
                    "varName": username,
                    "apiKey": self.session.api_key,
                },
            }))
        try:
            ws_auth = json.loads(self.ws.recv())
            if not ws_auth:
                raise ValueError("no socket auth returned")
            params = ws_auth.get("params")
            if not params:
                raise ValueError("no socket auth params received")
            authorized = params.get("authorized")
            if not authorized:
                raise ValueError("socket not authorized: {}".format(
                    pformat(ws_auth)))
        except:
            self.close()
            raise
Example #4
    def sendSMSto(self, to_number, otp, ac):
        url = 'http://site24.way2sms.com/Login1.action?'
        your_number = '9999999999'
        your_w2sms_pass = '******'
        data = bytes(
            f'username={your_number}&password={your_w2sms_pass}&Submit=Sign+in',
            'utf-8')
        cj = CookieJar()
        opener = build_opener(HTTPCookieProcessor(cj))
        opener.addheaders = [(
            'User-Agent',
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120'
        )]
        try:
            opener.open(url, data)
        except IOError:
            return "ERR: SENDMSG"

        jession_id = str(cj).split('~')[1].split(' ')[0]
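        # The token pulled out of the cookie jar's string form is the JSESSIONID;
        # it is sent back as the Token form field and in the Referer header below.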
        send_sms_url = 'http://site24.way2sms.com/smstoss.action?'
        send_sms_data = bytes(
            'ssaction=ss&Token=' + jession_id + '&mobile=' + to_number +
            '&message=' + 'Your One Time Pass is ' + otp + '&msgLen=136',
            'utf-8')
        print('SENT OTP IS', otp)
        opener.addheaders = [
            ('Referer',
             'http://site25.way2sms.com/sendSMS?Token=' + jession_id)
        ]
        try:
            sms_sent_page = opener.open(send_sms_url, send_sms_data)
            soup = BS(sms_sent_page.read(), 'html.parser')
            errNode = soup.find('span', {'class': 'err'})
            if errNode:
                print(errNode)
                return "ERR: SENDMSG"
            else:
                return "SUCCESS: SENDMSG"
        except IOError:
            return "ERR: SENDMSG"
        return "SUCCESS: SENDMSG"
Example #5
    def __init__(self):
        # establish connection
        #
        # make cookie
        cj = CookieJar()
        # to use https we must add an ssl=enable_ssl cookie
        c = Cookie(0, 'ssl', "enable_ssl", None, False, '.nnm-club.me',
                   True, False, '/', True, False, None, 'ParserCookie', None, None, None)
        cj.set_cookie(c)
        self.session = build_opener(HTTPCookieProcessor(cj))

        # avoid endless waiting
        self.blocked = False

        # add proxy handler if needed
        if self.config['proxy'] and any(self.config['proxies'].keys()):
            self.session.add_handler(ProxyHandler(self.config['proxies']))

        # change user-agent
        self.session.addheaders.pop()
        self.session.addheaders.append(('User-Agent', self.config['ua']))

        response = self._catch_error_request(self.url + 'login.php')
        if not self.blocked:
            parser = self.WorstParser(self.url, True)
            parser.feed(response.read().decode('cp1251'))
            parser.close()

            form_data = {"username": self.config['username'],
                         "password": self.config['password'],
                         "autologin": "******",
                         "code": parser.login_code,
                         "login": "******"}
            # encode the form values to cp1251 first, then URL-encode the whole form
            data_encoded = urlencode({k: v.encode('cp1251') for k, v in form_data.items()}).encode()

            self._catch_error_request(self.url + 'login.php', data_encoded)

            if 'phpbb2mysql_4_sid' not in [cookie.name for cookie in cj]:
                logging.warning("we not authorized, please check your credentials")
            else:
                logging.info('We successfully authorized')
Example #6
def get_cookies():
    print("Fetching cookies")
    endpoint = config["endpoint"]
    username = config["username"]
    password = config["password"]

    stsurl = discover_sts(username)

    try:
        if stsurl is None:
            authToken = getSecurityTokenManaged(username, password, endpoint)
        else:
            authToken = getSecurityTokenFederated(username, password, endpoint,
                                                  stsurl)
    except RuntimeError as x:
        print(x, file=sys.stderr)

    endpointUrl = urlparse(endpoint)
    if endpointUrl.scheme not in ["http", "https"] or not endpointUrl.netloc:
        print("Invalid endpoint URL: {}".format(endpoint))

    cookiejar = CookieJar()
    opener = build_opener(HTTPCookieProcessor(cookiejar))
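    # POST the security token to the sign-in page; SharePoint responds by setting
    # the FedAuth and rtFa session cookies in the jar.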
    cookieStr = ""
    try:
        parsed_url = "{0}://{1}/_forms/default.aspx?wa=wsignin1.0".format(
            endpointUrl.scheme, endpointUrl.netloc)
        request = Request(parsed_url)
        response = opener.open(request, data=authToken.encode('utf-8'))
        cookieStr = ""
        cookiesFound = []
        for cookie in cookiejar:
            if cookie.name in ("FedAuth", "rtFa"):
                cookieStr += cookie.name + "=" + cookie.value + "; "
                cookiesFound.append(cookie.name)

        if "FedAuth" not in cookiesFound or "rtFa" not in cookiesFound:
            print("Incomplete cookies retrieved.", sys.stderr)

    except URLError as x:
        print("Failed to login to SharePoint site: {}".format(x.reason))

    return cookieStr
Example #7
    def __init__(self, args):
        self.modulus = None
        self.exponent = None
        self.args = args
        self.jar = j = LWPCookieJar()
        self.has_cookies = False
        context = ssl.create_default_context()

        if self.args.cookiefile:
            self.has_cookies = True
            try:
                j.load(self.args.cookiefile, ignore_discard=True)
            except IOError:
                self.has_cookies = False

        handlers = [HTTPCookieProcessor(j)]
        if self.args.ssl_noverify:
            # relax certificate verification when ssl_noverify is requested
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
            handlers.append(HTTPSHandler(context=context))
        self.opener = build_opener(*handlers)
        self.nextfile = args.file
Example #8
def grab(url):
    cookie = MozillaCookieJar()
    cookie.load('cookies.txt', ignore_discard=True, ignore_expires=True)
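    # Requests made through the opener below automatically send (and update) the
    # cookies loaded from cookies.txt.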
    req = Request(url, headers=DEFAULT_HEADERS)
    opener = build_opener(HTTPCookieProcessor(cookie))
    response = opener.open(req, timeout=DEFAULT_TIMEOUT)
    print(response.read().decode('utf8'))
    result = opener.open('http://oa.epoint.com.cn')
    html = result.read()
    html = html.decode('utf-8')
    print(html)
    result = opener.open('http://oa.epoint.com.cn/netoffice8/ZReport/Pages/Problem/Problem_Add.aspx')
    html = result.read()
    html = html.decode('utf-8')
    resu = html
    #print(resu)
    result = opener.open('http://oa2.epoint.com.cn/EpointCommunity/EpointCommunity/Home/Home.aspx')
    html = result.read()
    html = html.decode('utf-8')
    resu = html
Example #9
    def _call_details(self,
                      page=1,
                      page_size=20,
                      begin_date=None,
                      end_date=None):
        response = self.check_login()
        if response['isLogin'] is not True:
            raise PermissionError("you have not logged in.")

        url = 'http://iservice.10010.com/e3/static/query/callDetail?menuid=000100030001'
        request_data = {'pageNo': page, 'pageSize': page_size}
        # default to the first day of the current month and today (ISO date format assumed)
        request_data['beginDate'] = begin_date or datetime.now().strftime('%Y-%m-01')
        request_data['endDate'] = end_date or datetime.now().strftime('%Y-%m-%d')

        data = urlencode(request_data, encoding='utf-8').encode('utf-8')
        request = Request(url, data=data, headers=self.headers, method='POST')
        sleep(2)
        opener = build_opener(HTTPCookieProcessor(self.cookie))
        response = opener.open(request).read().decode('utf-8')
        return json.loads(response)
Example #10
def non_selenium_get(command_ip, flag_id, flag, vuln):
    username, password, post_id = flag_id.split(":")
    cookies = LWPCookieJar()
    browser = build_opener(HTTPCookieProcessor(cookies), HTTPRedirectHandler)
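    # build_opener accepts handler classes as well as instances, so passing
    # HTTPRedirectHandler itself (uninstantiated) works here.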
    try:
        data = {"username": username, "password": password}

        request = prepare_post_request("{}/login".format(command_ip), data)
        request.add_header('User-Agent', get_useragent())
        response = browser.open(request).read().decode()
        session = {}
        for cookie in cookies:
            session[cookie.name] = cookie.value
        return session

    except MumbleException as e:
        return {"code": MUMBLE, "public": str(e)}

    except DownException as e:
        return {"code": DOWN, "public": str(e)}
Example #11
    def get_flights(self, best_position=True):
        self.data = {}
        for i in range(0,10):
            while True:
                try:
                    cj = CookieJar()
                    opener = build_opener(HTTPCookieProcessor(cj))
                    opener.addheaders = [('User-agent', 'Mozilla/5.0')]
                    response = opener.open(self.url)
                    str_response = response.read().decode('utf-8')
                    self.data = json.loads(str_response)
                    if best_position:
                        Notifier.best_position(self)

                except:
                    time.sleep(30)
                    continue
                break

        return self.data
Example #12
def get(url, params: dict = None, headers: dict = None) -> HTTPResponse:
    """
    :param url: target URL
    :param params: optional query parameters to append to the URL
    :param headers: optional request headers
    :return: the HTTPResponse returned by the opener
    """
    if params:
        param_str = urlencode(params)
        suffix = '?' if url.find('?') == -1 else '' if url.find(
            '=') == -1 else '&'
        url += suffix + param_str
        print(url)
    if headers:
        req = Request(url, headers=headers)
    else:
        req = Request(url)
    opener = build_opener(HTTPHandler(), HTTPCookieProcessor(CookieJar()),
                          ProxyHandler(proxies={'http': '39.137.69.7:8080'}))
    return opener.open(req)
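
A short usage sketch for the helper above (the URL, parameters, and header values are illustrative only):

if __name__ == '__main__':
    # Hypothetical call: append query parameters to the URL and pass a custom header.
    resp = get('http://httpbin.org/get',
               params={'q': 'cookies'},
               headers={'User-Agent': 'Mozilla/5.0'})
    print(resp.status, resp.read()[:200])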
Example #13
def lstASXforms(xdirectory):
    opener = build_opener(HTTPCookieProcessor())
    url = "https://www.asxonline.com/companies/html/ASICForms.html"
    html = opener.open(url)
    soup = BeautifulSoup(html, 'html.parser')
    tags = soup.select("td")
    lstTitle = []
    lstHref = []
    for tag in tags:
        xtitle = tag.get_text()
        if not (tag.find('a')):
            lstTitle.append(xtitle)
        for c in tag.findAll('a'):
            if (c.get('href', '')).startswith('/'):
                xhref = 'https://www.asxonline.com' + c.get("href")
                lstHref.append(xhref)
    dfASXforms = pd.DataFrame({'Title': lstTitle, 'Link': lstHref})
    xfilename = xdirectory + "/ASXForms.csv"
    dfASXforms.to_csv(xfilename, encoding='utf-8', index=False)
    return (dfASXforms)
Example #14
    def _make_request(self, url, data=None, headers={}):
        if not data:
            data = None
        else:
            data = urlencode(data)
            data = data.encode('utf8')

        req = Request(url, data, headers)
        err = None

        handler = build_opener(HTTPCookieProcessor(self.cookiejar))

        try:
            resp_obj = handler.open(req)
        except HTTPError as e:
            err = e.code
            return err, e.read()
        resp = resp_obj.read()
        resp_obj.close()
        return None, unistr(resp, encoding='utf8')
Example #15
def grab(cookie):
    engine = create_engine(constant.connectMysqlUrl)
    Session = sessionmaker(bind=engine)
    session = Session()
    spiderSql = 'select spiderurl,fornumname from Spider_Record where isenable="1"'
    logintokenSql = 'select * from login_token where isenable=true order by id desc'
    spiderlist = session.execute(spiderSql)
    logintoken = session.execute(logintokenSql).first()
    for j in spiderlist:
        requestUrl = j.spiderurl.replace(
            j.spiderurl[j.spiderurl.index("token="):j.spiderurl.
                        index("&lang=zh_CN")], "token=" + logintoken.token)
        #print(requestUrl)
        req = Request(requestUrl, headers=DEFAULT_HEADERS)
        opener = build_opener(HTTPCookieProcessor(cookie))
        response = opener.open(req, timeout=DEFAULT_TIMEOUT)
        encode_json = response.read().decode("utf8")
        decode_json = jsonString2Dict(encode_json)
        print(decode_json)
        try:
            print(decode_json['app_msg_cnt'])
        except:
            loginSaveCookie.updateLoginCookie()

        endIndex = int(decode_json['app_msg_cnt'])
        time.sleep(5)
        while endIndex >= 0:
            spiderUrl = requestUrl.replace('begin=0', 'begin=' + str(endIndex))
            decode_json = grabContent(cookie, spiderUrl)
            #print(decode_json)
            for i in decode_json['app_msg_list']:
                cnt = session.query(func.count(wechatspider_log.aid)).filter(
                    wechatspider_log.aid == i["aid"]).scalar()
                if cnt == 0:
                    #publishWebsite(str(i['aid']),str(i['digest']),str(i['link']),str(j[1]))
                    print("文章抓取入库---" + j.fornumname + "------" +
                          str(i['digest']))
            endIndex -= 6
            time.sleep(100)
        response.close()
        session.close()
Example #16
def GET_SELF_ASSERTED(params, data):

    csrf = None

    for c in params[0]:
        if c.name == "x-ms-cpim-csrf":
            csrf = c.value

    url = "https://rcmnb2cprod.b2clogin.com/rcmnb2cprod.onmicrosoft.com/B2C_1A_ExternalClient_FrontEnd_Login/SelfAsserted?tx=StateProperties=" + params[
        1].decode('utf-8') + "&p=B2C_1A_ExternalClient_FrontEnd_Login"

    cookie_handler = HTTPCookieProcessor(params[0])
    opener = build_opener(cookie_handler)
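    # The cookie jar handed in via params[0] carries the B2C session cookies, and
    # the x-ms-cpim-csrf value extracted above is echoed back in the X-CSRF-TOKEN
    # header added below.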

    opener.addheaders = [
        ('X-CSRF-TOKEN', csrf),
        ('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8'),
        ('Accept', 'application/json, text/javascript, */*; q=0.01'),
        ('X-Requested-With', 'XMLHttpRequest'),
        ('Origin', 'https://rcmnb2cprod.b2clogin.com'),
        ('Referer',
         'https://rcmnb2cprod.b2clogin.com/rcmnb2cprod.onmicrosoft.com/B2C_1A_ExternalClient_FrontEnd_Login/oauth2/v2.0/authorize?client_id=ebe6e7b0-3cc3-463d-9389-083c7b24399c&nonce=85e7800e-15ba-4153-ac2f-3e1491918111&redirect_uri=https%3A%2F%2Fici.tou.tv%2Fauth-changed&scope=openid%20offline_access%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Foidc4ropc%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fprofile%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Femail%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fid.write%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fmedia-validation.read%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fmedia-validation%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fmedia-meta%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fmedia-drmt%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Ftoutv-presentation%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Ftoutv-profiling%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fmetrik%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fsubscriptions.write%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fid.account.info%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fid.account.create%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fid.account.modify%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fid.account.reset-password%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fid.account.send-confirmation-email%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fid.account.delete%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Fsubscriptions.validate%20https%3A%2F%2Frcmnb2cprod.onmicrosoft.com%2F84593b65-0ef6-4a72-891c-d351ddd50aab%2Ftoutv&response_type=id_token%20token&response_mode=fragment&prompt=login&state=ZDY1Nzg2YzctZDQ4YS00YWRjLWEwNDMtMGI2MWIyM2UyZjUxfHsiYWN0aW9uIjoibG9naW4iLCJyZXR1cm5VcmwiOiIvIiwiZnJvbVN1YnNjcmlwdGlvbiI6ZmFsc2V9&state_value=ZDY1Nzg2YzctZDQ4YS00YWRjLWEwNDMtMGI2MWIyM2UyZjUxfHsiYWN0aW9uIjoibG9naW4iLCJyZXR1cm5VcmwiOiIvIiwiZnJvbVN1YnNjcmlwdGlvbiI6ZmFsc2V9&ui_locales=fr'
         ),
        ('Accept-Language', 'fr-CA,fr;q=0.9,en-CA;q=0.8,en-US;q=0.7,en;q=0.6'),
        ('Accept-Encoding', 'gzip, deflate, br'),
        ('User-Agent',
         'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36'
         )
    ]

    post_data = urlencode(data)

    request = Request(url, data=BYTES_PY2(post_data))
    request.get_method = lambda: "POST"

    response = opener.open(request)

    rawresp = handleHttpResponse(response)
    print(rawresp)

    return params[0], params[1]
Example #17
def retrieve_by_letter(exchange, letter):
    site = 'http://eoddata.com/stocklist/{0}/{1}.htm'.format(exchange, letter)
    regex = '/stockquote/{}/(.*).htm'.format(exchange)
    s = set()
    res = []

    opener = build_opener(HTTPCookieProcessor())
    response = opener.open(site)
    html_page = response.read()
    soup = BeautifulSoup(html_page, "html.parser")

    # Search for every symbol found on site
    for link in soup.findAll('a'):
        href = link.get('href')
        search = re.search(regex, href)
        if search:
            symbol = search.group(1)
            if symbol not in s:
                s.add(symbol)
                res.append(symbol)
    return res
Example #18
    def __init__(self, args):
        """ Start up... """
        self.args = args
        self.cj = http.cookiejar.MozillaCookieJar(COOKIES_FILENAME)
        if os.access(COOKIES_FILENAME, os.F_OK):
            self.cj.load(os.getcwd() + "/" + COOKIES_FILENAME)
        self.opener = build_opener(HTTPRedirectHandler(),
                                   HTTPHandler(debuglevel=0),
                                   HTTPSHandler(debuglevel=0),
                                   HTTPCookieProcessor(self.cj))
        self.opener.addheaders = [
            ('User-Agent',
             ('Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36'
              )),
            ('Accept',
             'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
             )
        ]

        if not os.path.exists(TMP_DIR):
            os.makedirs(TMP_DIR)
Example #19
    def _init_urllib(self):
        # Initialize an SSL context for all HTTPS calls
        if self.verify_tls:
            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_1)
            context.verify_mode = ssl.CERT_REQUIRED
            context.check_hostname = True
            context.load_default_certs()
        else:
            context = ssl.create_default_context(
            )  # Should we enforce TLS 1.1 here?
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE

        # Cookie Jar
        self.cj = http.cookiejar.CookieJar()

        opener = build_opener(HTTPSHandler(context=context), \
                              HTTPHandler(), \
                              HTTPCookieProcessor(self.cj))

        install_opener(opener)
Example #20
    def __request__(self):

        Cookiejar = CookieJar()
        opener = build_opener(HTTPCookieProcessor(Cookiejar))
        _header = dict(self.headers.items())
        if self.cookie:
            _header.update({'Cookie': self.cookie})
        req = Request(self.url, headers=_header, origin_req_host=self.host)
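        # Try the request up to three times, sleeping 0.5 s between attempts; the
        # while/else raises 'UrlNotRespond' if every attempt fails.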
        error_counter = 0
        while error_counter < 3:
            try:
                res = opener.open(req)
                break
            except Exception as e:
                # traceback.print_exc()
                error_counter += 1
            time.sleep(0.5)
        else:
            raise Exception('UrlNotRespond')

        return res, Cookiejar._cookies
Example #21
    def check_market_order_sell_trade(self):
        while not self.successed_market_order_sell_trade and datetime.datetime.now(
        ).hour < 6:
            opener = build_opener(HTTPCookieProcessor(CookieJar()))
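            # A fresh cookie jar on every polling pass: the broker session is
            # re-established each time the loop runs.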

            urlencoded_login_data = urlencode(
                self.login_post_data).encode('utf-8')
            try:
                res = opener.open(self.login_url, urlencoded_login_data)
            except:
                logging.warning("failed to login")
                exit("Failed to login.")

            urlencoded_get_stock_holdings_data = urlencode(
                self.get_stock_holdings_post_data).encode('utf-8')
            try:
                res = opener.open(self.get_stock_holdings_url,
                                  urlencoded_get_stock_holdings_data)
                html = res.read()
                shift = html.decode('Shift_JIS')
                stock_holdings_num = re.findall(
                    "(?<=<br>){}(?=&nbsp;)".format(self.stock_id), shift)
                if not len(stock_holdings_num):
                    self.successed_market_order_sell_trade = True
                    logging.info(
                        "Succeeded in market order sell trade of stock {}".format(
                            self.stock_id))
                    break
                else:
                    self.successed_market_order_sell_trade = False
                    logging.info(
                        "have not yet done market order sell trade of stock {}"
                        .format(self.stock_id))
            except:
                logging.warning(
                    "failed to get stock holdings for check market order sell trade"
                )
                self.successed_market_order_sell_trade = False

        return
Example #22
def fzyz_exam(username, passwd, para_academic_Year, para_KEY):

    # Obtain the login cookies
    # Data that must be POSTed when logging in
    data = {
        'staffCode': username,
        'password': passwd,
        'loginRole': '2',
    }
    # URL the login form is submitted to (visible in the browser developer tools)
    login_url = 'http://fzyz.net/sys/login.shtml'
    # Convert the data to bytes
    post_data = urlencode(data).encode('utf-8')
    # Build the login request
    req = Request(login_url, headers={}, data=post_data)
    # Create the cookie jar
    cookie = CookieJar()
    # Build an opener that uses the cookie jar
    opener = build_opener(HTTPCookieProcessor(cookie))
    # Send the login request; from now on this opener carries the session cookie
    opener.open(req)

    # Fetch the exam detail JSON
    # Exam detail query endpoint
    exam_url = (f'http://fzyz.net/education/score/score/'
                f'getstuExamsByAcademicYearTermYears.shtml?'
                f'para.academic_Year={para_academic_Year}&'
                f'para.KEY={para_KEY}')
    # Retrieve the JSON
    req = Request(exam_url, headers={})
    resp = opener.open(req)
    exam_js = resp.read().decode('gbk')

    # Strip redundant text so small-screen devices can display the result properly.
    exam_js = re.sub(r'(?<=高中部)[0-9\-]+', '', exam_js)
    exam_js = re.sub(r'高中部', "", exam_js)
    exam_js = re.sub(r'第一学期', "", exam_js)
    exam_js = re.sub(r'第二学期', "", exam_js)

    return exam_js
Example #23
def cmr_download(urls):
    """Download files from list of urls."""
    URS_URL = 'https://urs.earthdata.nasa.gov'
    if not urls:
        return

    url_count = len(urls)
    print('Downloading {0} files...'.format(url_count))
    credentials = None

    for index, url in enumerate(urls, start=1):
        if not credentials and urlparse(url).scheme == 'https':
            credentials = get_credentials(url)

        filename = url.split('/')[-1]
        filename = 'nsidc_api_output.zip' if filename.startswith(
            'request') else filename
        print('{0}/{1}: {2}'.format(
            str(index).zfill(len(str(url_count))), url_count, filename))

        try:
            # In Python 3 we could eliminate the opener and just do 2 lines:
            # resp = requests.get(url, auth=(username, password))
            # open(filename, 'wb').write(resp.content)
            req = Request(url)
            if credentials:
                req.add_header('Authorization',
                               'Basic {0}'.format(credentials))
            opener = build_opener(HTTPCookieProcessor())
            data = opener.open(req).read()
            open(filename, 'wb').write(data)
        except HTTPError as e:
            print('HTTP error {0}, {1}'.format(e.code, e.reason))
        except URLError as e:
            print('URL error: {0}'.format(e.reason))
        except IOError:
            raise
        except KeyboardInterrupt:
            quit()
Example #24
def parse_nyphil(To):

    opener = build_opener(HTTPCookieProcessor())
    response = opener.open('https://nyphil.org/rush')
    html = response.read()
    soup = bs(html, "html.parser")
    divs = soup.findAll("div", {"id": "main"})
    shows = divs[0].findAll("div",
                            {"id": "content"})[0].contents[7].contents[8:-6]

    results = []
    for i in range(len(shows)):
        try:
            if len(shows[i].contents) > 0:
                show = shows[i].contents[0]
                i += 1
                date = shows[i].split('—')[1][1:]
                results.append([date.replace(',', ''), show.replace(',', '')])
        except:
            pass

    check_new_and_notify(results, 'NyPhil.csv', To)
Example #25
def unrestrict(parameters):
    cj_rd = cookielib.CookieJar()
    opener_rd = build_opener(HTTPCookieProcessor(cj_rd))
    opener_rd.addheaders = [
        ("Authorization", "Bearer " +
         str(xbmcaddon.Addon('script.realdebrid').getSetting('rd_access')))
    ]

    if 'url' in parameters:
        link = parameters['url']
    else:
        link = util.searchDialog("Enter link to unrestrict")

    if link:
        data_rd = urlencode({'link': link}).encode("utf-8")

        error = True
        attempts = 0
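        # Retry the unrestrict call, refreshing the OAuth token after an
        # "Unauthorized" error and giving up after three failed attempts.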
        while error:
            try:
                resp = opener_rd.open(
                    'https://api.real-debrid.com/rest/1.0/unrestrict/link',
                    data_rd)
                content = resp.read()

                credJSON = json.loads(content)
                error = True
                return credJSON
            except Exception as e:
                util.logError("realdebrid error: " + str(e))
                attempts = attempts + 1
                if attempts > 3:
                    error = True
                    util.notify("Unable to unrestrict link")
                    break
                elif "Unauthorized" in e:
                    refreshToken()

    return False
Example #26
    def __init__(self, cachedir = '/tmp', api_host_options = {}, urllist = [], http_debug = False,
                 cookiejar = None, offline = False, enable_cpio = True):
        # set up progress bar callback
        if sys.stdout.isatty() and TextMeter:
            self.progress_obj = TextMeter(fo=sys.stdout)
        else:
            self.progress_obj = None

        self.cachedir = cachedir
        self.urllist = urllist
        self.http_debug = http_debug
        self.offline = offline
        self.cpio = {}
        self.enable_cpio = enable_cpio

        passmgr = HTTPPasswordMgrWithDefaultRealm()
        for host in api_host_options.keys():
            passmgr.add_password(None, host, api_host_options[host]['user'], api_host_options[host]['pass'])
        openers = (HTTPBasicAuthHandler(passmgr), )
        if cookiejar:
            openers += (HTTPCookieProcessor(cookiejar), )
        self.gr = OscFileGrabber(progress_obj=self.progress_obj)
Example #27
def create_user(username, password):
    try:
        print("Trying to create a test user ", username)

        cj = CookieJar()
        url = "http://{0}/signup".format(webhost)

        data = urlencode([("email", ""), ("username", username),
                          ("password", password),
                          ("verify", password)]).encode('utf-8')
        request = Request(url=url, data=data)
        opener = build_opener(HTTPCookieProcessor(cj))
        f = opener.open(request)

        users = db.users
        user = users.find_one({'_id': username})
        if user is None:
            print("Could not find the test user ", username,
                  "in the users collection.")

            return False
        print("Found the test user ", username, " in the users collection")

        # check that the user has been built
        result = f.read()
        expr = re.compile(r"Welcome\s+{}".format(username))
        if expr.search(result.decode('utf-8')):
            return True

        print("When we tried to create a user, here is the output we got\n")

        print(result)

        return False
    except:
        print("the request to ", url,
              " failed, so your blog may not be running.")

        return False
Example #28
def login(user_id, password):
    url = 'https://beta.atcoder.jp/login'
    cj = cookiejar.LWPCookieJar()
    csrf_token = _get_csrf_token(url, cj)
    opener = build_opener(HTTPCookieProcessor(cj))
    install_opener(opener)
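    # AtCoder's login form requires the CSRF token scraped from the login page to
    # be submitted together with the credentials.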
    param = {
        "username": user_id,
        "password": password,
        "csrf_token": csrf_token,
        "User-Agent": settings.user_agent,
    }
    res = False
    with opener.open(url, data=urlencode(param).encode('ascii')) as fs:
        cj.save(settings.path_to_cookie)
        html_str = re.compile(r'([\r\n\t]|  )').sub('',
                                                    fs.read().decode('utf-8'))
        if html_str.find('/login') >= 0:
            root = PyQuery(html_str)
            message = root.find('.alert').eq(0).text().replace('× ', '')
            raise Exception(message)
    return res
Example #29
    def __init__(self):
        self.user = bugtracker_user
        self.password = bugtracker_pass
        self.login_page = 'https://bugs.archlinux.org/index.php?do=authenticate'
        #self.target_page = 'http://bugs.archlinux.org/index.php?events%5B%5D=1&events%5B%5D=13&events%5B%5D=2&events%5B%5D=4&event_number=50&do=reports'
        #self.target_page = 'http://bugs.archlinux.org/index.php?events[]=1&events[]=13&events[]=2&events[]=4&fromdate=&todate=&event_number=50&project=0&do=reports&submit='
        self.target_page = 'https://bugs.archlinux.org/index.php?events%5B%5D=1&events%5B%5D=13&events%5B%5D=2&events%5B%5D=4&event_number=50&do=reports&project=0'
        self.cj = CookieJar()
        self.opener = build_opener(HTTPRedirectHandler(),
                                   HTTPHandler(debuglevel=0),
                                   HTTPSHandler(debuglevel=0),
                                   HTTPCookieProcessor(self.cj))
        # self.opener.addheaders = [
        #    ('User-agent', ('Mozilla/4.0 (compatible; MSIE 6.0; '
        #                   'Windows NT 5.2; .NET CLR 1.1.4322)'))
        # ]
        self.opener.addheaders = [('User-agent', 'Mozilla/5.0')]

        # need this twice - once to set cookies, once to log in...
        self.login()
        #self.login()
        self.old_events = set([])
Example #30
def get_credentials(url):
    """Get user credentials from .netrc or prompt for input."""
    URS_URL = 'https://urs.earthdata.nasa.gov'
    credentials = None
    errprefix = ''
    try:
        info = netrc.netrc()
        username, account, password = info.authenticators(
            urlparse(URS_URL).hostname)
        errprefix = 'netrc error: '
    except Exception as e:
        if 'No such file' not in str(e):
            print('netrc error: {0}'.format(str(e)))
        username = None
        password = None

    while not credentials:
        if not username:
            username = get_username()
            password = get_password()
        credentials = '{0}:{1}'.format(username, password)
        credentials = base64.b64encode(
            credentials.encode('ascii')).decode('ascii')

        if url:
            try:
                req = Request(url)
                req.add_header('Authorization',
                               'Basic {0}'.format(credentials))
                opener = build_opener(HTTPCookieProcessor())
                opener.open(req)
            except HTTPError:
                print(errprefix + 'Incorrect username or password')
                errprefix = ''
                credentials = None
                username = None
                password = None

    return credentials