def test_redirection_full_path():
    """Redirecting to a fully-qualified URL must replace every URL component."""
    target = 'http://google.de/index'
    redirected = URL(url_full).redirect(target)
    expected = URL(target)
    # every slot of the redirected URL must match a freshly-parsed target
    for slot in URL.__slots__:
        assert getattr(redirected, slot) == getattr(expected, slot)
    assert str(expected) == target
def test_set_safe_encoding():
    """quoting_safe controls which characters survive percent-encoding,
    both via subclassing and via mutating the class attribute directly."""
    class SafeModURL(URL):
        quoting_safe = '*'
    surl = '/path/to/something?param=value&other=*'
    # BUG FIX: the expected strings were mojibake-damaged — '&para' had been
    # HTML-unescaped into the pilcrow sign '¶'; restore the literal '&param'.
    assert URL(surl).query_string == 'other=%2A&param=value'
    assert SafeModURL(surl).query_string == 'other=*&param=value'
    URL.quoting_safe = '*'
    assert URL(surl).query_string == 'other=*&param=value'
    # reset class-level state so later tests see the default behaviour
    URL.quoting_safe = ''
def crawler():
    """Worker loop: pop (num, url) tasks from the shared task_list and fetch
    each URL over one reused HTTP connection until the list is empty."""
    http = HTTPClient.from_url(URL(initial_url))
    while True:
        try:
            num, url = task_list.pop()
        except IndexError:
            # no work left — this worker is done
            return
        request = URL(url)
        response = http.get(request.request_uri)
        process_response(response.read())
def print_friend_username(http, friend_id):
    """Fetch one friend record over the shared HTTP client and print
    '<username> :<name>' (or just the name when no username exists)."""
    friend_url = URL('/' + str(friend_id))
    friend_url['access_token'] = Token
    # the greenlet blocks here until the response arrives
    response = http.get(friend_url.request_uri)
    assert response.status_code == 200
    friend = json.load(response)
    # BUG FIX: dict.has_key() does not exist on Python 3 (this block already
    # uses f-strings) — use the 'in' operator instead.
    # BUG FIX: the username lives on the friend record, not on the URL object
    # (the old code checked friend but printed friend_url['username']).
    if 'username' in friend:
        print(f"{friend['username']} :{friend['name']}")
    else:
        print(f"{friend['name']}")

# run at most 20 greenlets concurrently
pool = gevent.pool.Pool(20)
for item in data:
    friend_id = item['id']
    pool.spawn(print_friend_username, http, friend_id)
# BUG FIX: gevent Pool has no .json(); wait for all spawned greenlets with .join()
pool.join()
http.close()
def send_notification(access_token, fb_id, template, href, ref):
    """
    Send a Facebook Games Notification.
    https://developers.facebook.com/docs/games/notifications/

    :param access_token: App access token
    :param fb_id: FB user id
    :param template: Message to send
    :param href: relative path of the target (using GET params),
        e.g. index.html?gift_id=123
    :param ref: used to separate notifications into groups
    :returns: dict {'success': True} on success
    :raises: :class:`GraphAPIError`
    """
    endpoint = URL('/{0}/notifications'.format(fb_id))
    form = {
        'access_token': access_token,
        'template': template,
        'href': href,
        'ref': ref,
    }
    http = HTTPClient.from_url(FACEBOOK_URL)
    try:
        response = http.post(endpoint.request_uri, body=urlencode(form))
        content = _parse_json(response.read())
    except Exception as e:
        raise GraphAPIError(e)
    finally:
        # always release the connection, success or failure
        http.close()
    # the Graph API reports failures inside the JSON body as well
    if content and isinstance(content, dict) and content.get('error'):
        raise GraphAPIError(content['error'])
    return content
def HttpDownloadFile(configurl, writetopath):
    """Download a JSON config over plain HTTP and write it to a local file.

    Tries up to 10 times, cycling through the candidate URLs and downgrading
    https to http before each attempt.

    :param configurl: sequence of candidate config URLs
    :param writetopath: local file path to write the payload to
    :return: True after the first successful download, False when all
             10 attempts fail
    """
    Log.Write(configurl, writetopath)
    maxlen = len(configurl)
    for i in range(0, 10):  # FIX: xrange is Python 2-only
        fixurl = configurl[i % maxlen]
        Log.Write("oldurl", fixurl)
        fixurl = fixurl.replace("https", "http")
        Log.Write("fixurl", fixurl)
        url = URL(fixurl)
        try:
            http = HTTPClient.from_url(url)
            response = http.get(url.request_uri)
            CHUNK_SIZE = 1024 * 16  # 16KB
            data = response.read(CHUNK_SIZE)
            sumdata = data
            while data:
                data = response.read(CHUNK_SIZE)
                sumdata += data
            # validate the payload is well-formed JSON before persisting it
            json.loads(sumdata)
            with open(writetopath, 'w') as f:
                f.write(sumdata)
            Log.Write("success")
            # BUG FIX: stop after the first success instead of downloading
            # the same config up to 10 times
            return True
        except Exception as e:  # FIX: 'except Exception, e' is Python 2-only
            Log.Write("fail", i, e)
            gevent.sleep(1)
            continue
    # BUG FIX: the old code returned True even when every attempt failed
    return False
def get_access_token_from_code(code, redirect_uri, app_id, app_secret):
    """Get an access token from the "code" returned from an OAuth dialog.

    Returns a dict containing the user-specific access token and its
    expiration date (if applicable).
    """
    # The response is a key-value query string (not JSON), so we query the
    # endpoint directly instead of going through GraphAPI.request().
    url = URL("https://graph.facebook.com/oauth/access_token")
    url.query.update({
        "code": code,
        "redirect_uri": redirect_uri,
        "client_id": app_id,
        "client_secret": app_secret,
    })
    http = HTTPClient.from_url(url)
    content = http.get(url.request_uri).read()
    query_str = parse_qs(content)
    if "access_token" not in query_str:
        # error responses come back as a JSON document instead
        raise GraphAPIError(json.loads(content))
    result = {"access_token": query_str["access_token"][0]}
    if "expires" in query_str:
        result["expires"] = query_str["expires"][0]
    return result
def get_app_access_token(app_id, app_secret):
    """Get the access_token for the app.

    This token can be used for insights and creating test users.

    :param app_id: retrieved from the developer page
    :param app_secret: retrieved from the developer page
    :returns: the application access_token string
    """
    args = {
        'grant_type': 'client_credentials',
        'client_id': app_id,
        'client_secret': app_secret
    }
    url = URL("https://graph.facebook.com/oauth/access_token")
    url.query.update(args)
    http = HTTPClient.from_url(url)
    resp = http.get(url.request_uri)
    content = resp.read()
    query_str = parse_qs(content)
    # BUG FIX: parse_qs maps each key to a *list* of values; return the token
    # string itself, matching the docstring and get_access_token_from_code().
    return query_str['access_token'][0]
def call_jsonrpc_api(method, params=None, endpoint=None, auth=None, abort_on_error=False):
    """POST a JSON-RPC 2.0 request to counterpartyd.

    :param method: JSON-RPC method name
    :param params: method params dict (defaults to {})
    :param endpoint: RPC endpoint URL; defaults to config.COUNTERPARTYD_RPC
    :param auth: optional (username, password) tuple for HTTP basic auth
    :param abort_on_error: kept for interface compatibility (unused here)
    :raises Exception: wrapping any transport-level failure
    """
    if not endpoint:
        endpoint = config.COUNTERPARTYD_RPC
    if not auth:
        auth = config.COUNTERPARTYD_AUTH
    if not params:
        params = {}
    payload = {
        "id": 0,
        "jsonrpc": "2.0",
        "method": method,
        "params": params,
    }
    headers = {
        'Content-Type': 'application/json',
        'Connection': 'close',  # no keepalive
    }
    if auth:
        # auth should be a (username, password) tuple, if specified
        headers['Authorization'] = http_basic_auth_str(auth[0], auth[1])
    try:
        u = URL(endpoint)
        client = HTTPClient.from_url(u, connection_timeout=JSONRPC_API_REQUEST_TIMEOUT,
                                     network_timeout=JSONRPC_API_REQUEST_TIMEOUT)
        r = client.post(u.request_uri, body=json.dumps(payload), headers=headers)
    except Exception as e:  # BUG FIX: 'except Exception, e' is Python 2-only syntax
        raise Exception("Got call_jsonrpc_api request error: %s" % e)
    # NOTE(review): the response 'r' is neither read nor returned — this block
    # looks truncated; confirm against the original source before relying on it.
def extend_access_token(self, app_id, app_secret):
    """
    Extends the expiration time of a valid OAuth access token.

    See <https://developers.facebook.com/roadmap/offline-access-removal/
    #extend_token>

    :param app_id: application id from the developer page
    :param app_secret: application secret from the developer page
    :returns: dict with 'access_token' (and 'expires' when present) parsed
        from the key-value response; otherwise the JSON-parsed body
    :raises GraphAPIError: on any transport error or an 'error' payload
    """
    http = HTTPClient.from_url(FACEBOOK_URL, connection_timeout=self.timeout)
    args = {
        "client_id": app_id,
        "client_secret": app_secret,
        "grant_type": "fb_exchange_token",
        "fb_exchange_token": self.access_token,
    }
    path = URL('/oauth/access_token')
    path.query.update(args)  # add GET params to url
    try:
        resp = http.get(path.request_uri)
        content = resp.read()
        # success responses are key-value pairs, not JSON
        query_str = parse_qs(content)
        if "access_token" in query_str:
            result = {"access_token": query_str["access_token"][0]}
            if "expires" in query_str:
                result["expires"] = query_str["expires"][0]
            # NOTE: returning from inside try still runs the finally block
            return result
        # no token in the query string: the body is a JSON (likely error) doc
        content = _parse_json(content)
    except Exception as e:
        raise GraphAPIError(e)
    finally:
        http.close()
    # JSON bodies that carry an 'error' key are surfaced as exceptions
    if content and isinstance(content, dict) and content.get("error"):
        raise GraphAPIError(content["error"])
    return content
def __init__(self, appType: Optional[str] = None, secondary: bool = False):
    """Select the LINE application profile for *appType* and open the
    HTTP client against the corresponding host."""
    # appType -> (ApplicationType member, secondary flag). An unknown or
    # missing appType leaves appName/secondary unset, exactly like the
    # original if/elif chain (Server() will then fail on the missing attr).
    profiles = {
        'IOS': (ApplicationType.IOS, True),
        'IOSIPAD': (ApplicationType.IOSIPAD, False),
        'ANDROIDLITE': (ApplicationType.ANDROIDLITE, True),
        'DESKTOPWIN': (ApplicationType.DESKTOPWIN, False),
        'DESKTOPMAC': (ApplicationType.DESKTOPMAC, False),
        'CHROMEOS': (ApplicationType.CHROMEOS, False),
    }
    if appType in profiles:
        self.appName, self.secondary = profiles[appType]
    self.server = Server(self.appName, self.secondary)
    self.callback = Callback(self.__defaultCallback)
    self.systemName = None
    self.__concurrency = 30
    url = URL(self.server.LINE_HOST_DOMAIN)
    self.__client = HTTPClient(url.host, url.port,
                               concurrency=self.__concurrency, ssl=True,
                               connection_timeout=180.0, network_timeout=180.0)
# NOTE(review): this definition is visibly truncated — it ends at a dangling
# 'else:' with no body, so the image validation/saving logic is missing here.
# Also note the mutable default argument formats=['png'] (shared across calls)
# — worth fixing once the full function body is available.
def fetch_image(url, folder, filename, max_size=20 * 1024, formats=['png'], dimensions=(48, 48), fetch_timeout=1):
    """Fetch an image from *url* (reading at most *max_size* bytes), for
    saving under *folder*/*filename*. Presumably the truncated remainder
    validates format/dimensions and writes the file — confirm upstream."""
    def make_data_dir(subfolder):
        # ensure config.DATA_DIR/subfolder exists and return its path
        path = os.path.join(config.DATA_DIR, subfolder)
        if not os.path.exists(path):
            os.makedirs(path)
        return path
    try:
        #fetch the image data
        try:
            u = URL(url)
            client_kwargs = {
                'connection_timeout': fetch_timeout,
                'network_timeout': fetch_timeout,
                'insecure': True
            }
            if u.scheme == "https":
                # skip certificate verification (matches 'insecure' above)
                client_kwargs['ssl_options'] = {
                    'cert_reqs': gevent.ssl.CERT_NONE
                }
            client = HTTPClient.from_url(u, **client_kwargs)
            r = client.get(u.request_uri, headers={'Connection': 'close'})
            raw_image_data = r.read(max_size)  #read up to max_size
        except Exception, e:  # Python 2-only syntax, kept byte-identical
            raise Exception("Got fetch_image request error: %s" % e)
        else:
def from_url(cls, url, **kw):
    """Alternate constructor: build a client from *url* (str or URL),
    enabling SSL when the scheme is https."""
    target = url if isinstance(url, URL) else URL(url)
    use_ssl = target.scheme == PROTO_HTTPS
    if not use_ssl:
        # ssl_options are meaningless on a plain-HTTP client
        kw.pop('ssl_options', None)
    return cls(target.host, port=target.port, ssl=use_ssl, **kw)
def rpc(method, params=None, abort_on_error=False):
    """POST a JSON-RPC 2.0 request to the configured blockchain service.

    Credentials embedded in the endpoint URL (scheme://user:pass@host) are
    extracted and sent as an HTTP basic-auth header instead.

    :param method: JSON-RPC method name
    :param params: method params list (defaults to [])
    :param abort_on_error: kept for interface compatibility (unused here)
    :raises Exception: wrapping any transport-level failure
    """
    endpoint = config.BLOCKCHAIN_SERVICE_CONNECT
    auth = None
    # split "scheme://user:pass@host" into a bare endpoint plus (user, pass)
    m = re.search('(.*?//)(.*?):(.*?)@(.*)', endpoint)
    if m:
        endpoint = m.group(1) + m.group(4)
        auth = (m.group(2), m.group(3))
    if not params:
        params = []
    payload = {
        "id": 0,
        "jsonrpc": "2.0",
        "method": method,
        "params": params,
    }
    headers = {
        'Content-Type': 'application/json',
        'Connection': 'close',  # no keepalive
    }
    if auth:
        # auth should be a (username, password) tuple, if specified
        headers['Authorization'] = util.http_basic_auth_str(auth[0], auth[1])
    try:
        u = URL(endpoint)
        client = HTTPClient.from_url(
            u, connection_timeout=JSONRPC_API_REQUEST_TIMEOUT,
            network_timeout=JSONRPC_API_REQUEST_TIMEOUT)
        r = client.post(u.request_uri, body=json.dumps(payload), headers=headers)
    except Exception as e:  # BUG FIX: 'except Exception, e' is Python 2-only syntax
        raise Exception("Got call_jsonrpc_api request error: %s" % e)
    # NOTE(review): the response 'r' is neither read nor returned — this block
    # looks truncated; confirm against the original source.
def get_url(url, abort_on_error=False, is_json=True, fetch_timeout=5, auth=None, post_data=None):
    """Fetch *url* with a short timeout, skipping TLS certificate checks.

    :param post_data: If not None, do a POST request, with the passed data
        (which should be in the correct string format already)
    :param is_json: when POSTing, send a JSON content-type header
    :param auth: optional (username, password) tuple for HTTP basic auth
    :param abort_on_error: kept for interface compatibility (unused here)
    :raises Exception: wrapping any transport-level failure
    """
    headers = {
        'Connection': 'close',
    }  # no keepalive
    if auth:
        # auth should be a (username, password) tuple, if specified
        headers['Authorization'] = http_basic_auth_str(auth[0], auth[1])
    try:
        u = URL(url)
        client_kwargs = {
            'connection_timeout': fetch_timeout,
            'network_timeout': fetch_timeout,
            'insecure': True
        }
        if u.scheme == "https":
            # disable certificate verification (matches 'insecure' above)
            client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE}
        client = HTTPClient.from_url(u, **client_kwargs)
        if post_data is not None:
            if is_json:
                headers['content-type'] = 'application/json'
            r = client.post(u.request_uri, body=post_data, headers=headers)
        else:
            r = client.get(u.request_uri, headers=headers)
    except Exception as e:  # BUG FIX: 'except Exception, e' is Python 2-only syntax
        raise Exception("Got get_url request error: %s" % e)
    # NOTE(review): the response 'r' is neither read nor returned — this block
    # looks truncated; confirm against the original source.
def get_fixture(self, sport_id, leagues_ids):
    """Fetch fixtures for *sport_id*/*leagues_ids* from the Pinnacle v1
    fixtures feed and map them into FixtureModel documents.

    Uses and updates self.last_since_id['last_fixture'] for incremental
    ('since') polling. Live-bet and non-main fixtures are skipped.

    :returns: list of FixtureModel documents (empty when the feed is empty)
    :raises Exception: when an expected key is missing from the response
    """
    req = URL(self.fixtures_v1.format(sport_id, leagues_ids))
    since = self.last_since_id.get('last_fixture')
    if since:
        # incremental fetch: only events changed after the stored marker
        req['since'] = since
    response = self.http.get(req.request_uri, headers=self.auth_headers)
    data = self.read_json(response)
    # ev is pre-bound so the except clause below can report the last event
    fixtures_list, ev = [], None
    if not data:
        return fixtures_list
    try:
        for (league, ev) in self.get_event_pairs(data):
            ev = Fixture(**ev)
            if not self.is_main_fixture(ev) or\
                    self.is_live_bet(ev):
                continue
            home_id, away_id, tournament_id = self.\
                get_fixture_ids(league['id'], ev)
            document = FixtureModel.get_document(
                ev.id, ev.home, ev.away, self.parse_date(ev.starts),
                league['name'], home_id, away_id, tournament_id)
            fixtures_list.append(document)
    except KeyError:
        raise Exception(
            "Error occured during processing fixtures." +
            "Event data: {}.\nPinnacle response: {}".format(ev, data))
    # save since ID
    self.last_since_id['last_fixture'] = data['last']
    return fixtures_list
def test_path_only():
    """Parsing a bare path leaves host empty and port unset, while the path
    and query parameters are still extracted."""
    url = URL(url_path_only)
    assert url.host == ''
    # IDIOM FIX: compare against None with 'is', not '=='
    assert url.port is None
    assert url.path == '/path/to/something'
    assert url['param'] == 'value'
    assert url['other'] == 'true'
def get_odds(self, sport_id, leagues_ids, oddsFormat="Decimal"):
    """Fetch odds for *sport_id*/*leagues_ids* from the Pinnacle v1 odds
    feed and map full-game periods into Odds documents.

    Uses and updates self.last_since_id['last_odds'] for incremental
    ('since') polling. Events missing any of spreads/moneyline/totals
    (special odds) are skipped.

    :param oddsFormat: odds representation requested from the API
    :returns: list of Odds documents (empty when the feed is empty)
    :raises Exception: when an expected key is missing from the response
    """
    req = URL(self.odds_v1.format(sport_id, leagues_ids, oddsFormat))
    since = self.last_since_id.get('last_odds')
    if since:
        # incremental fetch: only events changed after the stored marker
        req['since'] = since
    response = self.http.get(req.request_uri, headers=self.auth_headers)
    data = self.read_json(response)
    odds_list = []
    if not data:
        return odds_list
    try:
        for (_, ev) in self.get_event_pairs(data, "leagues"):
            event = Event(**ev)
            period = self.get_full_game_period(event)
            if not period:
                continue
            period = self.modify_odds_moneyline(period)
            # ignore special odds
            if not all([period.spreads, period.moneyline, period.totals]):
                continue
            document = Odds.get_document(event.id, datetime.utcnow(),
                                         period.spreads, period.moneyline,
                                         period.totals)
            odds_list.append(document)
    except KeyError:
        raise Exception("Error occured during processing odds." +
                        " Pinnacle response: {}".format(data))
    except TypeError:
        # malformed event payload: log it and keep whatever was collected
        self.logger.error("Invalid Event: {}".format(ev))
    # save since ID
    self.last_since_id['last_odds'] = data['last']
    return odds_list
def fire_custom_event(api_key, api_secret, api_method, environment_id, api_call_data):
    """Sign and fire a custom API event against API_URL.

    :param api_key: API key identifier (string)
    :param api_secret: shared secret used for the HMAC-SHA256 signature
    :param api_method: API action name
    :param environment_id: target environment identifier
    :param api_call_data: extra query parameters merged into the request
    :returns: (url, status_code, body) tuple
    """
    timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
    # BUG FIX: the old code encoded api_key/api_secret to bytes up front and
    # then mixed them with str in ":".join(...), which raises TypeError on
    # Python 3. Keep str values for the query parameters and encode only for
    # the HMAC computation; decode the base64 signature back to str so it
    # urlencodes cleanly.
    message = ":".join([api_method, api_key, timestamp]).encode("utf-8")
    signature = base64.b64encode(
        hmac.new(api_secret.encode("utf-8"), message, hashlib.sha256).digest()
    ).decode("ascii")
    params = {
        "Action": api_method,
        "EnvID": environment_id,
        "Version": API_VERSION,
        "AuthVersion": API_AUTH_VERSION,
        "Timestamp": timestamp,
        "KeyID": api_key,
        "Signature": signature,
    }
    params.update(api_call_data)
    url = URL(API_URL)
    for k, v in params.items():
        url[k] = v
    http = HTTPClient.from_url(url)
    response = http.get("/?" + url.query_string)
    code = response.status_code
    body = response.read()
    http.close()
    return url, code, body
def postRequest(url, files=None, fields=None):
    """POST *fields* and *files* to *url* as multipart/form-data.

    :param url: target URL string
    :param files: optional list of file tuples for the multipart body
    :param fields: optional list of (name, value) form fields
    :returns: the HTTP response object
    """
    # BUG FIX: mutable default arguments ([]) are shared across calls;
    # use None sentinels and substitute fresh lists per call.
    files = [] if files is None else files
    fields = [] if fields is None else fields
    content_type, body = encode_multipart_formdata(fields=fields, files=files)
    headers = {'Content-Type': content_type, 'Accept': '*/*'}
    url = URL(url)
    http = HTTPClient.from_url(url)
    response = http.request('POST', url.request_uri, body=body, headers=headers)
    return response
def test_simple_url():
    """A fully-specified URL exposes each parsed component."""
    parsed = URL(url_full)
    assert parsed.host == 'getgauss.com'
    assert parsed.port == 80
    assert parsed.path == '/subdir/file.py'
    assert parsed.fragment == 'frag'
    # query parameters are available via item access
    assert parsed['param'] == 'value'
    assert parsed['other'] == 'true'
def test_redirection_abs_path():
    """Redirecting to an absolute path keeps host/port but resets
    path, query, and fragment."""
    base = URL(url_full)
    moved = base.redirect('/test.html')
    assert (moved.host, moved.port) == (base.host, base.port)
    assert moved.path == '/test.html'
    assert moved.query == {}
    assert moved.fragment == ''
def test_empty():
    """A URL built with no arguments defaults to empty http://:80."""
    blank = URL()
    assert blank.host == ''
    assert blank.netloc == ''
    assert blank.port == 80
    assert blank.query == {}
    assert blank.fragment == ''
    # serializes to the degenerate-but-valid 'http:///'
    assert str(blank) == 'http:///'
def get_leagues(self, sport_id):
    """ Returns all sports leagues with the status whether
    they currently have lines or not.
    """
    request = URL(self.leagues_v2.format(sport_id))
    response = self.http.get(request.request_uri, headers=self.auth_headers)
    return self.read_json(response)
def get_sports(self):
    """ Returns all sports with the status whether
    they currently have lines or not.
    """
    request = URL(self.sports_v2)
    response = self.http.get(request.request_uri, headers=self.auth_headers)
    return self.read_json(response)
def get():
    """Run GET_QUERY against the SPARQL endpoint and return the raw
    JSON-results body."""
    params = urllib.urlencode({
        "query": GET_QUERY,
        "format": "application/sparql-results+json",
    })
    # hoist the endpoint expression (it was built twice before), and drop the
    # unused URL(...) object and the unused response handle
    endpoint = SPARQL_ENDPOINT + "?" + params
    _, content = Http().request(endpoint)
    return content
def verify_login(token):
    """Verify a Kuaiyong login token against the remote service.

    :param token: login token string to verify
    :returns: the parsed response object
    """
    sig = md5(APP_KEY + token).hexdigest()
    url = '%stokenKey=%s&sign=%s' % (KUAIYONG_URL, token, sig)
    logger.debug('kuaiyong url:%s', url)
    url = URL(url)
    http = HTTPClient(url.host, port=url.port)
    try:
        # SECURITY: eval() on a network response executes arbitrary code if
        # the endpoint is ever compromised — replace with json.loads() or
        # ast.literal_eval() once the response format is confirmed.
        response = eval(http.get(url.request_uri).read())
    finally:
        # ROBUSTNESS FIX: close the connection even when the request raises
        http.close()
    return response
def make_stream_request(url):
    """Open a streaming GET request to *url* with short timeouts and TLS
    verification disabled (reads fetch_timeout / gevent from enclosing scope)."""
    try:
        u = URL(url)
        client_kwargs = {
            'connection_timeout': fetch_timeout,
            'network_timeout': fetch_timeout,
            'insecure': True
        }
        if u.scheme == "https":
            # disable certificate verification (matches 'insecure' above)
            client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE}
        client = HTTPClient.from_url(u, **client_kwargs)
        r = client.get(u.request_uri, headers={'Connection': 'close'})
    except Exception as e:  # BUG FIX: 'except Exception, e' is Python 2-only syntax
        data = (False, "Got exception: %s" % e)
    # NOTE(review): neither 'r' nor 'data' is returned — the function appears
    # truncated; confirm against the original source.
def __init__(self, hostname, id):
    """Bind this worker to one remote host and open a single-connection
    HTTP client with 10-second timeouts."""
    self.id = id
    self.ntrans = 0  # transaction counter, starts at zero
    self._initial_datetime = datetime.datetime.now()
    self.url = URL("http://{}/".format(hostname))
    self.http = HTTPClient.from_url(self.url,
                                    concurrency=1,
                                    network_timeout=10,
                                    connection_timeout=10)
def __init__(self):
    """Initialize the DB connection, Dragonpay configuration, and the
    pooled HTTP client (https in prod, http otherwise)."""
    self.pool = Pool(1000)  # used for spawning threads
    values = Configuration.values()
    # -- initializes DB --
    db_cfg = values['mysql-db']['sms_api_config']
    self.dbconn = SQLUtils(
        host=db_cfg['host'],
        port=db_cfg['port'],
        database=db_cfg['db'],
        user=db_cfg['user'],
        password=db_cfg['password'])
    print('-------')
    print('DB:')
    # BUG FIX: the host/port labels were swapped in the old log output
    print('host: %s' % db_cfg['host'])
    print('port: %s' % db_cfg['port'])
    print('db: %s' % db_cfg['db'])
    print('user: %s' % db_cfg['user'])
    # SECURITY FIX: never print credentials to stdout
    print('password: ******')
    print('-------')
    # -- initializes dragonpay related config --
    dragonpay = values['dragonpay']
    self.api_url = dragonpay['api_url']
    self.merchant_id = dragonpay['merchant_id']
    self.secret_key = dragonpay['secret_key']
    self.host = dragonpay['host']
    self.api_get_txn_status_url = dragonpay['api_get_txn_status_url']
    self.uri = dragonpay['uri']
    parse_command_line()
    # production talks to Dragonpay over TLS; everything else uses plain http
    scheme = 'https' if options.config == 'prod' else 'http'
    url = URL('%s://%s' % (scheme, dragonpay['host']))
    self.http_conn = HTTPClient.from_url(url, concurrency=10)