def test_redirection_full_path():
    """A redirect to a full URL must replace every component of the original."""
    target_plain = 'http://google.de/index'
    original = URL(url_full)
    redirected = original.redirect(target_plain)
    target = URL(target_plain)
    # Component-by-component comparison over everything URL stores.
    for attr in URL.__slots__:
        assert getattr(redirected, attr) == getattr(target, attr)
    # Round-trip sanity: the target serializes back to its plain form.
    assert str(target) == target_plain
def test_redirection_abs_path():
    """An absolute-path redirect keeps host/port but resets path, query, fragment."""
    original = URL(url_full)
    redirected = original.redirect('/test.html')
    assert redirected.host == original.host
    assert redirected.port == original.port
    assert redirected.path == '/test.html'
    assert redirected.query == {}
    assert redirected.fragment == ''
def test_redirection_rel_path():
    """A relative redirect resolves against the original URL's directory."""
    original = URL(url_full)
    for target in ('test.html?key=val', 'folder/test.html?key=val'):
        redirected = original.redirect(target)
        assert redirected.host == original.host
        assert redirected.port == original.port
        # Path stays under the original directory and ends with the
        # redirect's path part (query string stripped off).
        assert redirected.path.startswith('/subdir/')
        assert redirected.path.endswith(target.split('?', 1)[0])
        assert redirected.query == {'key': 'val'}
        assert redirected.fragment == ''
def get_url(url, abort_on_error=False, is_json=True, fetch_timeout=5):
    """Issue a GET request to ``url`` with connection and read timeouts.

    TLS certificate verification is deliberately disabled here
    (``insecure=True`` plus ``CERT_NONE``) — the caller accepts that risk.
    Any transport error is re-raised wrapped in a generic Exception.

    NOTE(review): as written this snippet neither returns nor reads the
    response; ``abort_on_error`` and ``is_json`` are unused — confirm
    against the original call sites.
    """
    headers = {'Connection': 'close'}  # no keepalive
    try:
        u = URL(url)
        client_kwargs = {'connection_timeout': fetch_timeout,
                         'network_timeout': fetch_timeout,
                         'insecure': True}
        if u.scheme == "https":
            client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE}
        client = HTTPClient.from_url(u, **client_kwargs)
        r = client.get(u.request_uri, headers=headers)
    except Exception as e:  # fixed: "except Exception, e" is Python-2-only syntax
        raise Exception("Got get_url request error: %s" % e)
def print_friend_username(http, friend_id):
    """Fetch a friend record over ``http`` and print its username.

    Falls back to a "has no username" message when the record lacks one.
    Fixed Python-2-only constructs: ``dict.has_key`` and print statements
    (both removed in Python 3; the parenthesized form works on 2 and 3).
    """
    friend_url = URL('/' + str(friend_id))
    friend_url['access_token'] = TOKEN
    # the greenlet will block until a connection is available
    response = http.get(friend_url.request_uri)
    assert response.status_code == 200
    friend = json.load(response)
    if 'username' in friend:  # has_key() was removed in Python 3
        print('%s: %s' % (friend['username'], friend['name']))
    else:
        print('%s has no username.' % friend['name'])
def verify_login(session, appid):
    """Login verification: validate ``session``/``appid`` against the TBT service.

    Returns the parsed response dict from the remote server.
    """
    import ast  # local import: used only for safe literal parsing below
    url = "%ssession=%s&appid=%s" % (TBT_URL, session, appid)
    url = URL(url)
    http = HTTPClient(url.host, port=url.port)
    # SECURITY FIX: the response body is a Python dict literal; the original
    # used eval(), which would execute arbitrary code supplied by the remote
    # server. literal_eval parses literals only.
    response = ast.literal_eval(http.get(url.request_uri).read())
    http.close()
    return response
def request(method, url, level=0):
    """Perform an HTTP request, transparently following up to 3 redirects.

    :param method: HTTPClient method name, e.g. 'get' or 'post'
    :param level: current redirect depth (internal; callers pass 0)
    :return: (status_code, body) of the final response
    """
    url = URL(url)
    http = HTTPClient.from_url(url, concurrency=10)
    response = getattr(http, method)(url.request_uri)
    if response.status_code in (301, 302) and level < 3:
        # Single Location header expected; [loc] = ... asserts exactly one.
        [loc] = [v for n, v in response.headers if n.lower() == 'location']
        http.close()  # FIX: the client was leaked on every redirect hop
        return request(method, loc, level + 1)
    result = response.status_code, response.read()
    http.close()
    return result
def __login(passport):
    """Verify a player passport against the MA server.

    :return: stringified dict — {'result': True, 'account_id': "'<passport>'"}
             on success, {'result': False} otherwise.
    """
    import ast  # local import: used only for safe literal parsing below
    logger.info('player login passport:%s' % passport)
    domain = '%s:%s' % (SERVER_MA_URL, SERVERS_MA_WEBPORT)
    url = '%s/verify?passport=%s' % (domain, passport)
    url = URL(url)
    http = HTTPClient(url.host, port=url.port)
    # SECURITY FIX: parse the dict-literal response with literal_eval instead
    # of eval(), which would execute arbitrary code from the remote server.
    response = ast.literal_eval(http.get(url.request_uri).read())
    http.close()
    if response.get('result') is True:
        return str({'result': True, 'account_id': '\'%s\'' % passport})
    return str({'result': False})
def beehive_ping(self, subsystem=None, vassal=None):
    """Ping beehive instance

    :param subsystem: limit the ping to this subsystem (requires *vassal*)
    :param vassal: limit the ping to this vassal (requires *subsystem*)
    """
    inventory_path = u'%s/inventories/%s' % (self.ansible_path, self.environment)
    library_path = u'%s/library/beehive/' % (self.ansible_path)
    runner = Runner(inventory=inventory_path, verbosity=self.verbosity,
                    module=library_path)
    hosts, host_vars = runner.get_inventory_with_vars(u'beehive')
    host_vars = runner.variable_manager.get_vars(runner.loader, host=hosts[0])
    instances = host_vars.get(u'instance')

    # Build the [subsystem, vassal] pairs to ping: either the one requested,
    # or every configured instance (names are '<subsystem>-<vassal>').
    if subsystem is not None and vassal is not None:
        vassals = [[subsystem, vassal]]
    else:
        vassals = [instance.split(u'-') for instance in instances]

    resp = []
    for vassal in vassals:
        port = instances.get(u'%s-%s' % tuple(vassal)).get(u'port')
        for host in hosts:
            url = URL(u'http://%s:%s/v1.0/server/ping/' % (host, port))
            http = HTTPClient(url.host, port=url.port)
            try:
                # issue a get request and read the full response body
                response = http.get(url.request_uri)
                response.status_code
                json.loads(response.read())  # body must be valid json
                http.close()
                if response.status_code == 200:
                    resp.append({u'subsystem': vassal[0], u'instance': vassal[1],
                                 u'host': host, u'port': port,
                                 u'ping': True, u'status': u'UP'})
                else:
                    # reachable but not answering 200: instance up, ping failed
                    resp.append({u'subsystem': vassal[0], u'instance': vassal[1],
                                 u'host': host, u'port': port,
                                 u'ping': False, u'status': u'UP'})
            except gevent.socket.error as ex:
                # connection-level failure: instance considered down
                resp.append({u'subsystem': vassal[0], u'instance': vassal[1],
                             u'host': host, u'port': port,
                             u'ping': False, u'status': u'DOWN'})
    self.result(resp, headers=[u'subsystem', u'instance', u'host', u'port',
                               u'ping', u'status'])
def __init__(self, app_type: ApplicationType = ApplicationType.IOS,
             concurrency: int = 30, secondary: bool = False):
    """Create a TLS client bound to LEGY_HOST with default timeouts."""
    self.header_builder = HeaderBuilder(app_type, secondary=secondary)
    self.__concurrency = concurrency
    legy = URL(LEGY_HOST)
    self.__client = HTTPClient(legy.host, legy.port,
                               concurrency=self.__concurrency,
                               ssl=True,
                               connection_timeout=DEFAULT_TIMEOUT,
                               network_timeout=DEFAULT_TIMEOUT)
def delete_request(self, user_id, request_id):
    """Deletes the Request with the given ID for the given user.

    :raises GraphAPIError: when the Graph API response carries an "error".
    The HTTP connection is now always closed — the original leaked it
    whenever GraphAPIError was raised, since close() came after the raise.
    """
    path = URL('/%s_%s' % (request_id, user_id))
    path['access_token'] = self.access_token
    http = HTTPClient.from_url(FACEBOOK_URL, connection_timeout=self.timeout)
    try:
        resp = http.delete(path.request_uri)
        content = _parse_json(resp.read())
    finally:
        http.close()
    if content and isinstance(content, dict) and content.get("error"):
        raise GraphAPIError(content["error"])
    return content
def __init__(self, url, connection_count=1, connection_timeout=60.0,
             network_timeout=60.0, verbose=False):
    """Build a plain-http client stub for ``url`` (host, optionally host:port)."""
    self._last_request_id = None
    parsed = URL("http://" + url)
    self._parsed_url = parsed
    self._client_stub = HTTPClient.from_url(
        parsed,
        concurrency=connection_count,
        connection_timeout=connection_timeout,
        network_timeout=network_timeout)
    self.verbose = verbose
def __init__(self, url, connection_count=1, connection_timeout=60.0,
             network_timeout=60.0, verbose=False, max_greenlets=None):
    """Build a plain-http client stub plus a gevent pool sized by max_greenlets."""
    parsed = URL("http://" + url)
    self._parsed_url = parsed
    self._client_stub = HTTPClient.from_url(
        parsed,
        concurrency=connection_count,
        connection_timeout=connection_timeout,
        network_timeout=network_timeout)
    self._pool = gevent.pool.Pool(max_greenlets)
    self._verbose = verbose
def make_stream_request(url):
    """Open a GET request to ``url`` with connection/read timeouts.

    TLS verification is deliberately disabled (``insecure`` + ``CERT_NONE``).
    Transport errors are captured as a (False, message) tuple in ``data``
    rather than raised.  Relies on the closure variable ``fetch_timeout``.
    """
    try:
        u = URL(url)
        client_kwargs = {
            'connection_timeout': fetch_timeout,
            'network_timeout': fetch_timeout,
            'insecure': True
        }
        if u.scheme == "https":
            client_kwargs['ssl_options'] = {
                'cert_reqs': gevent.ssl.CERT_NONE
            }
        client = HTTPClient.from_url(u, **client_kwargs)
        r = client.get(u.request_uri, headers={'Connection': 'close'})
    except Exception as e:  # fixed: "except Exception, e" is Python-2-only syntax
        data = (False, "Got exception: %s" % e)
def get_token(self):
    """Authenticate and cache the auth token and storage URL.

    Sends the x-auth-user / x-auth-key headers, then reads the token and
    storage URL back from the response headers.  The temporary client is
    closed and discarded afterwards.
    """
    url = URL(self.auth_url)
    headers = {
        'x-auth-user': self.auth_user,
        'x-auth-key': self.auth_key,
    }
    self.http = HTTPClient.from_url(url, headers=headers)
    response = self.http.get(url.request_uri)
    assert response.status_code == 200
    self.token = response['x-auth-token']
    self.storage_url = response['x-storage-url']
    self.http.close()
    self.http = None
def _go_http_internal(self, http_request, http_response):
    """
    Perform an http request

    :param http_request: HttpRequest
    :type http_request: HttpRequest
    :param http_response: HttpResponse
    :type http_response: HttpResponse
    """
    try:
        # Resolve which implementation to use for this request.
        impl = http_request.force_http_implementation
        if impl == HttpClient.HTTP_IMPL_AUTO:
            # Fallback gevent (urllib3 issue with latest uwsgi, gevent 1.1.1)
            impl = HttpClient.HTTP_IMPL_URLLIB3
            # impl = HttpClient.HTTP_IMPL_GEVENT

        # Parse the target uri
        url = URL(http_request.uri)
        SolBase.sleep(0)

        # proxy + https forces urllib3
        if http_request.http_proxy_host and url.scheme == PROTO_HTTPS:
            # Fallback gevent (urllib3 issue with latest uwsgi, gevent 1.1.1)
            impl = HttpClient.HTTP_IMPL_URLLIB3
            # impl = HttpClient.HTTP_IMPL_GEVENT

        logger.debug("Http using impl=%s", impl)

        # Dispatch to the selected implementation
        if impl == HttpClient.HTTP_IMPL_GEVENT:
            self._go_gevent(http_request, http_response)
            SolBase.sleep(0)
        elif impl == HttpClient.HTTP_IMPL_URLLIB3:
            self._go_urllib3(http_request, http_response)
            SolBase.sleep(0)
        else:
            raise Exception("Invalid force_http_implementation")
    except Exception as e:
        # This is not an underlying http exception, we raise without storing in http_response
        logger.warning("Ex=%s", SolBase.extostr(e))
        raise
def fetch_image(url, folder, filename, max_size=20*1024, formats=['png'], dimensions=(48, 48), fetch_timeout=1):
    """Download an image, validate its format/size/mode, and save it.

    Returns True on success; logs a warning and returns False on any failure.
    NOTE: ``formats`` is a mutable default — kept for interface compatibility;
    it is only read here, never mutated.  Fixed the Python-2-only
    ``except Exception, e`` syntax; the original text of this snippet was
    also truncated mid-``else:`` and has been completed.
    """
    def make_data_dir(subfolder):
        # Ensure config.DATA_DIR/subfolder exists and return its path.
        path = os.path.join(config.DATA_DIR, subfolder)
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    try:
        # fetch the image data
        client = None  # guard: finally must not touch an unbound name
        try:
            u = URL(url)
            client_kwargs = {'connection_timeout': fetch_timeout,
                             'network_timeout': fetch_timeout,
                             'insecure': True}
            if u.scheme == "https":
                client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE}
            client = HTTPClient.from_url(u, **client_kwargs)
            r = client.get(u.request_uri, headers={'Connection': 'close'})
            raw_image_data = r.read(max_size)  # read up to max_size
        except Exception as e:
            raise Exception("Got fetch_image request error: %s" % e)
        else:
            if r.status_code != 200:
                raise Exception("Bad status code returned from fetch_image: '%s'" % (r.status_code))
        finally:
            if client is not None:
                client.close()

        # decode image data: the payload is bytes, so BytesIO (not StringIO)
        try:
            image = Image.open(io.BytesIO(raw_image_data))
        except Exception as e:
            raise Exception("Unable to parse image data at: %s" % url)
        if image.format.lower() not in formats:
            raise Exception("Image is not a PNG: %s (got %s)" % (url, image.format))
        if image.size != dimensions:
            raise Exception("Image size is not 48x48: %s (got %s)" % (url, image.size))
        if image.mode not in ['RGB', 'RGBA']:
            raise Exception("Image mode is not RGB/RGBA: %s (got %s)" % (url, image.mode))
        imagePath = make_data_dir(folder)
        imagePath = os.path.join(imagePath, filename + '.' + image.format.lower())
        image.save(imagePath)
        # HACK: shells out with an unquoted path — fine for internally
        # generated filenames, unsafe if filename ever comes from users.
        os.system("exiftool -q -overwrite_original -all= %s" % imagePath)  # strip all metadata, just in case
        return True
    except Exception as e:
        logging.warning(e)  # logging.warn is a deprecated alias (removed in 3.13)
        return False
def put(self, container, name=None, content=None, concurrent=False):
    """PUT an object (``container/name``) or create a container (no name).

    Writes '.' per success and 'E<code>' per failure to stdout; failures
    bump ``self.err_count``.  The cached client is kept open between calls
    when ``concurrent`` is True.  Fixed the Python-2-only print statement.
    """
    put_url = '%s/%s' % (self.storage_url, container)
    if name:
        put_url = '%s/%s' % (put_url, name)
    if content:
        headers = {
            'Content-Length': str(len(content)),
            'x-auth-token': self.token,
            'Content-Type': 'application/octet-stream'
        }
    else:
        # Container creation / empty object: no body
        headers = {'Content-Length': '0', 'x-auth-token': self.token}
    url = URL(put_url)
    if self.http is None:
        self.http = HTTPClient.from_url(
            url, headers=headers, headers_type=dict,
            concurrency=self.concurrency,
            connection_timeout=self.connect_timeout,
            network_timeout=self.network_timeout)
    response = self.http.request('PUT', url.request_uri, body=content, headers=headers)
    if response.status_code not in [201, 202]:
        self.err_count += 1
        sys.stdout.write('E%s' % response.status_code)
        sys.stdout.flush()
        print(response.headers)  # fixed: was a Python-2-only print statement
        return
    sys.stdout.write('.')
    sys.stdout.flush()
    if concurrent is False:
        self.http.close()
        self.http = None
def fql(self, query, args=None, post_args=None):
    """FQL query.

    Example query: "SELECT affiliations FROM user WHERE uid = me()"
    """
    http = HTTPClient.from_url(FACEBOOK_FQL_URL, connection_timeout=self.timeout)
    args = args or {}
    if self.access_token:
        # The token travels with whichever parameter set is actually sent.
        target = post_args if post_args is not None else args
        target["access_token"] = self.access_token
    # A dict query is a multiquery; a string is a single query.
    if not isinstance(query, basestring):
        args["queries"] = query
        fql_method = 'fql.multiquery'
    else:
        args["query"] = query
        fql_method = 'fql.query'
    path = URL('/method/%s' % fql_method)
    args["format"] = "json"
    path.query.update(args)  # add GET params to url
    try:
        if post_args:
            resp = http.post(path.request_uri, body=urlencode(post_args))
        else:
            resp = http.get(path.request_uri)
        content = _parse_json(resp.read())
    except Exception as e:
        raise GraphAPIError(e)
    finally:
        http.close()
    if content and isinstance(content, dict) and content.get("error"):
        raise GraphAPIError(content["error"])
    return content
def request(self, path, args=None, post_args=None):
    """Fetches the given path in the Graph API.

    We translate args to a valid query string. If post_args is given,
    we send a POST request to the given path with the given arguments.
    """
    http = HTTPClient.from_url(FACEBOOK_URL, connection_timeout=self.timeout)
    args = args or {}
    if not path.startswith('/'):
        path = '/%s' % path
    path = URL(path)
    if self.access_token:
        # The token travels with whichever parameter set is actually sent.
        target = post_args if post_args is not None else args
        target["access_token"] = self.access_token
    path.query.update(args)  # add GET params to url
    try:
        if post_args:
            resp = http.post(path.request_uri, body=urlencode(post_args))
        else:
            resp = http.get(path.request_uri)
        content = resp.read()
        if 'image' in resp['content-type']:
            # Binary image payloads are wrapped rather than json-decoded.
            content = {
                'data': content,
                'mime-type': resp['content-type'],
                'url': path.request_uri,
            }
        else:
            content = _parse_json(content)
    except Exception as e:
        raise GraphAPIError(e)
    finally:
        http.close()
    if content and isinstance(content, dict) and content.get("error"):
        raise GraphAPIError(content["error"])
    return content
def fetch_image(url, folder, filename, max_size=20*1024, formats=['png'], dimensions=(48, 48), fetch_timeout=1):
    """Download an image, validate its format/size/mode, and save it.

    Returns True on success; logs a warning and returns False on any failure.
    NOTE: ``formats`` is a mutable default — kept for interface compatibility;
    it is only read here, never mutated.

    Fixes: (1) image bytes must go through io.BytesIO, not io.StringIO
    (StringIO rejects bytes, so decoding always failed); (2) ``client`` could
    be unbound in the inner ``finally`` when URL()/from_url() raised, masking
    the real error with a NameError; (3) logging.warn is a deprecated alias.
    """
    def make_data_dir(subfolder):
        # Ensure config.DATA_DIR/subfolder exists and return its path.
        path = os.path.join(config.DATA_DIR, subfolder)
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    try:
        # fetch the image data
        client = None  # guard: finally must not touch an unbound name
        try:
            u = URL(url)
            client_kwargs = {'connection_timeout': fetch_timeout,
                             'network_timeout': fetch_timeout,
                             'insecure': True}
            if u.scheme == "https":
                client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE}
            client = HTTPClient.from_url(u, **client_kwargs)
            r = client.get(u.request_uri, headers={'Connection': 'close'})
            raw_image_data = r.read(max_size)  # read up to max_size
        except Exception as e:
            raise Exception("Got fetch_image request error: %s" % e)
        else:
            if r.status_code != 200:
                raise Exception("Bad status code returned from fetch_image: '%s'" % (r.status_code))
        finally:
            if client is not None:
                client.close()

        # decode image data: the payload is bytes, so BytesIO (not StringIO)
        try:
            image = Image.open(io.BytesIO(raw_image_data))
        except Exception as e:
            raise Exception("Unable to parse image data at: %s" % url)
        if image.format.lower() not in formats:
            raise Exception("Image is not a PNG: %s (got %s)" % (url, image.format))
        if image.size != dimensions:
            raise Exception("Image size is not 48x48: %s (got %s)" % (url, image.size))
        if image.mode not in ['RGB', 'RGBA']:
            raise Exception("Image mode is not RGB/RGBA: %s (got %s)" % (url, image.mode))
        imagePath = make_data_dir(folder)
        imagePath = os.path.join(imagePath, filename + '.' + image.format.lower())
        image.save(imagePath)
        # HACK: shells out with an unquoted path — fine for internally
        # generated filenames, unsafe if filename ever comes from users.
        os.system("exiftool -q -overwrite_original -all= %s" % imagePath)  # strip all metadata, just in case
        return True
    except Exception as e:
        logging.warning(e)  # logging.warn is a deprecated alias (removed in 3.13)
        return False
def test_beehive(self, server, port):
    """Test redis instance

    :param server: host name
    :param port: server port [default=6379]
    """
    url = URL(u'http://%s:%s/v1.0/server/ping/' % (server, port))
    http = HTTPClient(url.host, port=url.port)
    # issue a get request and read the full response before closing
    response = http.get(url.request_uri)
    response.status_code
    res = json.loads(response.read())
    http.close()
    # ping succeeds iff the service reports an 'ok' status
    resp = res[u'status'] == u'ok'
    self.logger.info(u'Ping beehive %s : %s' % (url.request_uri, resp))
    self.json = u'Ping beehive %s : %s' % (url.request_uri, resp)
def get_url(url, abort_on_error=False, is_json=True, fetch_timeout=30, retries=0):
    """GET ``url`` with timeouts, retrying up to 3 times (2s apart) on error.

    :param abort_on_error: raise on non-200 responses instead of parsing them
    :param is_json: json-decode the body; otherwise return raw bytes
    :param retries: internal retry counter — callers leave it at 0

    TLS verification is deliberately disabled (``insecure`` + ``CERT_NONE``).
    Fix: ``client`` is pre-initialized to None; previously, if URL()/from_url()
    raised, the ``finally`` hit an unbound ``client`` and the NameError masked
    both the real error and the retry path.
    """
    headers = {'Connection': 'close'}  # no keepalive
    client = None
    try:
        u = URL(url)
        client_kwargs = {'connection_timeout': fetch_timeout,
                         'network_timeout': fetch_timeout,
                         'insecure': True}
        if u.scheme == "https":
            client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE}
        client = HTTPClient.from_url(u, **client_kwargs)
        r = client.get(u.request_uri, headers=headers)
    except Exception as e:
        if retries > 2:
            raise Exception("Got get_url request error: %s" % e)
        else:
            time.sleep(2)
            return get_url(url, abort_on_error, is_json, fetch_timeout, retries+1)
    else:
        if r.status_code != 200 and abort_on_error:
            raise Exception("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.read()))
        result = json.loads(r.read()) if is_json else r.read()
    finally:
        if client is not None:
            client.close()
    return result
def sql(self, sql, parse_json=True, do_post=True):
    """Execute ``sql`` on the CartoDB server.

    Set ``parse_json`` to False to get the raw response body.
    Short queries may go as GET (when ``do_post`` is False); everything
    else is POSTed.
    """
    url = URL(self.resource_url)
    # depending on query size do a POST or GET
    if len(sql) < self.MAX_GET_QUERY_LEN and not do_post:
        url['q'] = sql
        resp = self.req(url)
    else:
        resp = self.req(url, 'POST', body={'q': sql})
    content = resp.read()
    if resp.status_code == 200:
        return json.loads(content) if parse_json else content
    if resp.status_code == 400:
        raise self.CartoDBException(json.loads(content)['error'])
    if resp.status_code == 500:
        raise self.CartoDBException('internal server error')
    return None
def call_jsonrpc_api(method, params=None, endpoint=None, auth=None, abort_on_error=False):
    """POST a JSON-RPC 2.0 call to counterpartyd and return the decoded reply.

    :param auth: optional (username, password) tuple for HTTP basic auth
    :param abort_on_error: raise on non-200 responses and on RPC-level errors

    Fix: ``client`` is pre-initialized to None; previously, if URL()/from_url()
    raised, the ``finally`` hit an unbound ``client`` and the NameError masked
    the real transport error.
    """
    if not endpoint:
        endpoint = config.COUNTERPARTYD_RPC
    if not auth:
        auth = config.COUNTERPARTYD_AUTH
    if not params:
        params = {}
    payload = {
        "id": 0,
        "jsonrpc": "2.0",
        "method": method,
        "params": params,
    }
    headers = {
        'Content-Type': 'application/json',
        'Connection': 'close',  # no keepalive
    }
    if auth:  # auth should be a (username, password) tuple, if specified
        headers['Authorization'] = http_basic_auth_str(auth[0], auth[1])
    client = None
    try:
        u = URL(endpoint)
        client = HTTPClient.from_url(u, connection_timeout=JSONRPC_API_REQUEST_TIMEOUT,
                                     network_timeout=JSONRPC_API_REQUEST_TIMEOUT)
        r = client.post(u.request_uri, body=json.dumps(payload), headers=headers)
    except Exception as e:
        raise Exception("Got call_jsonrpc_api request error: %s" % e)
    else:
        if r.status_code != 200 and abort_on_error:
            raise Exception("Bad status code returned from counterpartyd: '%s'. result body: '%s'." % (r.status_code, r.read()))
        result = json.loads(r.read())
    finally:
        if client is not None:
            client.close()
    if abort_on_error and 'error' in result:
        raise Exception("Got back error from server: %s" % result['error'])
    return result
def get_cdkey_gift_1123(data, player):
    """Verify a CD-key with the remote cdkey service and build the reply proto.

    :param data: serialized CdkeyRequest protobuf
    :param player: player whose id is sent along with the key
    :return: serialized CdkeyResqonse protobuf
    """
    import ast  # local import: used only for safe literal parsing below
    request = cdkey_pb2.CdkeyRequest()
    request.ParseFromString(data)
    response = cdkey_pb2.CdkeyResqonse()
    url = '%s/verify?area_id=%s&uid=%s&code=%s&token=%s' % \
        (CDKEY_URL, SERVER_NO, player.base_info.id, request.cdkey, SERVER_TOKEN)
    logger.debug('cdkey url:%s', url)
    url = URL(url)
    http = HTTPClient(url.host, port=url.port)
    # SECURITY FIX: the response body is a Python dict literal; the original
    # used eval(), which would execute arbitrary code from the remote server.
    url_response = ast.literal_eval(http.get(url.request_uri).read())
    http.close()
    logger.debug('cdkey url result:%s', url_response)
    response.res.result = True
    response.res.result_no = url_response.get('success')
    response.res.message = str(url_response.get('message'))
    return response.SerializeToString()
def make_stream_request(url):
    """Fetch ``url`` and record the outcome in the shared ``completed_urls``.

    On success ``data`` is (True, payload) — the payload json-decoded when the
    closure variable ``is_json`` is set, raw otherwise; on any failure it is
    (False, message).  Fires ``per_request_complete_callback`` per url and
    ``completed_callback`` once every url in ``urls`` has completed.
    Relies on closure variables: fetch_timeout, max_fetch_size, is_json,
    per_request_complete_callback, completed_urls, urls, completed_callback.

    Fix: ``client`` is pre-initialized to None; previously, if URL()/from_url()
    raised, the ``finally`` hit an unbound ``client`` and the NameError masked
    the captured error tuple.
    """
    client = None
    try:
        u = URL(url)
        client_kwargs = {'connection_timeout': fetch_timeout,
                         'network_timeout': fetch_timeout,
                         'insecure': True}
        if u.scheme == "https":
            client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE}
        client = HTTPClient.from_url(u, **client_kwargs)
        r = client.get(u.request_uri, headers={'Connection': 'close'})
    except Exception as e:
        data = (False, "Got exception: %s" % e)
    else:
        if r.status_code != 200:
            data = (False, "Got non-successful response code of: %s" % r.status_code)
        else:
            try:
                # read up to max_fetch_size
                raw_data = r.read(max_fetch_size)
                if is_json:  # try to convert to JSON
                    try:
                        data = json.loads(raw_data)
                    except Exception as e:
                        data = (False, "Invalid JSON data: %s" % e)
                    else:
                        data = (True, data)
                else:  # keep raw
                    data = (True, raw_data)
            except Exception as e:
                data = (False, "Request error: %s" % e)
    finally:
        if client is not None:
            client.close()
    if per_request_complete_callback:
        per_request_complete_callback(url, data)
    completed_urls[url] = data
    if len(completed_urls) == len(urls):  # all done, trigger callback
        return completed_callback(completed_urls)
def __init__(self, url: str, headers: Dict[str, str], concurrency: int = 30,
             client: Optional[HTTPClient] = None):
    """Wrap (or create) an SSL HTTPClient for ``url``.

    When the url path is the long-polling endpoint, the connection is
    opened eagerly and flushes reconnect; otherwise a plain flush is used.
    """
    self.__url = URL(url)
    self.__custom_headers = headers
    self.__concurrency = concurrency
    if client:
        self.client = client
    else:
        self.client = HTTPClient(self.__url.host, self.__url.port,
                                 concurrency=self.__concurrency, ssl=True)
    if self.__url.path == LONG_POLLING_V4_PATH:
        self.open()
        self.flush = self.__flush_and_reconnect
    else:
        self.flush = self.__flush
    self.__wbuf = BytesIO()
def get_base_url(self):
    """Build a URL carrying only this client's scheme, host and port."""
    base = URL()
    base.host = self.host
    base.port = self.port
    # PROTO_HTTPS/PROTO_HTTP are protocol-string constants (truthy), so the
    # ternary is equivalent to the original and/or idiom.
    base.scheme = PROTO_HTTPS if self.ssl else PROTO_HTTP
    return base
} oauthlib_consumer = oauthlib.Consumer(APP_ID, APP_SECRET) token = oauthlib.Token(token_info['oauth_token'], token_info['oauth_token_secret']) params = { 'oauth_version': "1.0", 'oauth_nonce': oauthlib.generate_nonce(), 'oauth_timestamp': int(time.time()), 'oauth_token': token.key, 'oauth_consumer_key': oauthlib_consumer.key, 'locations': '-122.75,36.8,-121.75,37.8' # San Francisco } url = URL('https://stream.twitter.com/1/statuses/filter.json') req = oauthlib.Request.from_consumer_and_token(oauthlib_consumer, token=token, http_url=str(url), http_method='POST') signature_method = oauthlib.SignatureMethod_HMAC_SHA1() req = oauthlib.Request(method="POST", url=str(url), parameters=params) req.sign_request(signature_method, oauthlib_consumer, token) http = HTTPClient.from_url(url) response = http.request('POST', url.request_uri, body=req.to_postdata(), headers={ 'Content-Type':
def get_base_url(self):
    """Build a URL carrying only this client's scheme, host and port."""
    base = URL()
    base.host = self.host
    base.port = self.port
    base.scheme = 'https' if self.ssl else 'http'
    return base
def test_consistent_reparsing():
    """str(URL(s)) must parse back to an attribute-identical URL."""
    for source in (url_full, url_path_only):
        first = URL(source)
        second = URL(str(first))
        for attr in URL.__slots__:
            assert getattr(second, attr) == getattr(first, attr)
def test_equality():
    """URLs differing only in scheme are unequal; identical URLs are equal."""
    assert URL('http://example.com/') == URL('http://example.com/')
    assert URL('https://example.com/') != URL('http://example.com/')