def test_make_headers(self):
    """Each make_headers() keyword yields its corresponding lowercase header."""
    cases = [
        (dict(accept_encoding=True), {'accept-encoding': 'gzip,deflate'}),
        (dict(accept_encoding='foo,bar'), {'accept-encoding': 'foo,bar'}),
        (dict(accept_encoding=['foo', 'bar']), {'accept-encoding': 'foo,bar'}),
        (dict(accept_encoding=True, user_agent='banana'),
         {'accept-encoding': 'gzip,deflate', 'user-agent': 'banana'}),
        (dict(user_agent='banana'), {'user-agent': 'banana'}),
        (dict(keep_alive=True), {'connection': 'keep-alive'}),
        (dict(basic_auth='foo:bar'), {'authorization': 'Basic Zm9vOmJhcg=='}),
        (dict(proxy_basic_auth='foo:bar'),
         {'proxy-authorization': 'Basic Zm9vOmJhcg=='}),
        (dict(disable_cache=True), {'cache-control': 'no-cache'}),
    ]
    for kwargs, expected in cases:
        self.assertEqual(make_headers(**kwargs), expected)
def test_make_headers(self):
    """make_headers() builds the expected header dict for each keyword."""
    expectations = (
        ({'accept_encoding': True}, {'accept-encoding': 'gzip,deflate'}),
        ({'accept_encoding': 'foo,bar'}, {'accept-encoding': 'foo,bar'}),
        ({'accept_encoding': ['foo', 'bar']}, {'accept-encoding': 'foo,bar'}),
        ({'accept_encoding': True, 'user_agent': 'banana'},
         {'accept-encoding': 'gzip,deflate', 'user-agent': 'banana'}),
        ({'user_agent': 'banana'}, {'user-agent': 'banana'}),
        ({'keep_alive': True}, {'connection': 'keep-alive'}),
        ({'basic_auth': 'foo:bar'}, {'authorization': 'Basic Zm9vOmJhcg=='}),
        ({'proxy_basic_auth': 'foo:bar'},
         {'proxy-authorization': 'Basic Zm9vOmJhcg=='}),
    )
    for kwargs, expected in expectations:
        self.assertEqual(make_headers(**kwargs), expected)
def check_stock_proxy_manager(url, proxy=None, count=0):
    """Fetch `url` (directly or through `proxy`) and return its 'hasStock' flag.

    `proxy`, when given, is a (scheme, host, port) triple. `count` is only
    used to label the progress messages. Returns False on any failure.
    """
    if proxy is None:
        manager = PoolManager(timeout=5, cert_reqs='CERT_REQUIRED',
                              ca_certs=certifi.where())
    else:
        proxy_url = "%s://%s:%s" % (proxy[0], proxy[1], proxy[2])
        manager = ProxyManager(proxy_url, timeout=5, cert_reqs='CERT_REQUIRED',
                               ca_certs=certifi.where())

    # Browser-like headers; presumably the target site rejects obvious bots.
    headers = util.make_headers(
        accept_encoding='gzip, deflate',
        keep_alive=True,
        user_agent="Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:47.0) Gecko/20100101 Firefox/47.0",
    )
    headers.update({
        'Accept-Language': "en-US,en;q=0.5",
        'Connection': 'keep-alive',
        'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    })

    try:
        response = manager.request('GET', url, preload_content=False,
                                   headers=headers)
        content = json.loads(response.data)
        print("%s - Connect Success!" % count)
        return content['hasStock']
    except Exception as ex:
        # Any request, JSON or missing-key failure counts as "no stock".
        print("%s - Connect Error!" % count)
        return False
def getAmqpStats(helper):
    """Query the RabbitMQ management API for channel stats and print a table.

    Args:
        helper: plugin helper object; only `helper.args.debug` is read here.

    Prints a PrettyTable of (connection name, user, unacked count, 0) per
    channel; any failure is caught and printed (best-effort, never raises).
    """
    from urllib3 import PoolManager, util
    try:
        username = "******"
        password = "******"
        headers = util.make_headers(basic_auth=username + ":" + password)
        http = PoolManager()
        # NOTE(review): the URL has no scheme ("upsilon:15672/..."); confirm
        # urllib3 accepts this or prefix "http://".
        r = http.request("GET", "upsilon:15672/api/channels", headers=headers)
        channels = json.loads(r.data)

        tbl = PrettyTable(["Connection", "username", "Unacked", "Publish"])
        if helper.args.debug:
            # BUG FIX: converted Python 2 print statements to print() calls
            # (the originals are syntax errors under Python 3).
            print(json.dumps(channels, indent=4))
        for conn in channels:
            # Publish count is hard-coded to 0 in the original; kept as-is.
            tbl.add_row([conn['name'], conn['user'],
                         conn['messages_unacknowledged'], 0])
        print(tbl)
    except Exception as e:
        # Deliberate best-effort: report the error instead of crashing.
        print(str(e))
def check_ip(ip_info, port_info, type):
    """Return True when the proxy (type, ip, port) can fetch and JSON-parse
    the Hermes stock-check endpoint, False otherwise."""
    check_url = "https://bck.hermes.com/product-page?locale=us_en&productsku=H056289CC18"
    ip_url = "%s://%s:%s" % (type, ip_info, port_info)
    manager = ProxyManager(ip_url, timeout=10, cert_reqs='CERT_REQUIRED',
                           ca_certs=certifi.where())

    # Browser-like request headers.
    headers = util.make_headers(
        accept_encoding='gzip, deflate',
        keep_alive=True,
        user_agent="Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:47.0) Gecko/20100101 Firefox/47.0",
    )
    headers['Accept-Language'] = "en-US,en;q=0.5"
    headers['Connection'] = 'keep-alive'
    headers['Accept'] = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"

    try:
        reply = manager.request('GET', check_url, preload_content=False,
                                headers=headers)
        body = reply.data
        print(body)
        json.loads(body)  # only validates that the payload is JSON
    except Exception:
        return False
    return True
def _get_zip_deploy_headers(username, password, cmd_mock_client):
    """Build the HTTP headers for a Kudu zip-deploy request.

    Combines basic-auth credentials with the octet-stream content type,
    a no-cache directive, the az CLI user agent, and the client request id
    taken from `cmd_mock_client`.
    """
    from urllib3.util import make_headers
    from azure.cli.core.util import get_az_user_agent

    headers = make_headers(basic_auth='{0}:{1}'.format(username, password))
    headers.update({
        'Content-Type': 'application/octet-stream',
        'Cache-Control': 'no-cache',
        'User-Agent': get_az_user_agent(),
        'x-ms-client-request-id':
            cmd_mock_client.data['headers']['x-ms-client-request-id'],
    })
    return headers
def openshift_login(self):
    """Log in to OpenShift via the OAuth2 challenge flow and return a token.

    Steps: (1) GET the authorization URL with basic-auth credentials to
    obtain an authorization code in a 302 redirect, (2) POST that code to
    the token endpoint, (3) return the resulting access token string.
    Calls self.fail_request(...) on any unexpected status code.
    """
    os_oauth = OAuth2Session(client_id='openshift-challenging-client')
    authorization_url, state = os_oauth.authorization_url(
        self.openshift_auth_endpoint, state="1",
        code_challenge_method='S256')
    # Encode "username:password" into an Authorization: Basic ... header.
    auth_headers = make_headers(basic_auth='{0}:{1}'.format(
        self.auth_username, self.auth_password))

    # Request authorization code using basic auth credentials
    ret = os_oauth.get(
        authorization_url,
        headers={'X-Csrf-Token': state,
                 'authorization': auth_headers.get('authorization')},
        verify=self.con_verify_ca,
        allow_redirects=False)  # the code arrives in a 302 Location header

    if ret.status_code != 302:
        self.fail_request("Authorization failed.", method='GET',
                          url=authorization_url, reason=ret.reason,
                          status_code=ret.status_code)

    # In here we have `code` and `state`, I think `code` is the important one
    # parse_qs returns lists; keep only the first value for each key.
    qwargs = {}
    for k, v in parse_qs(urlparse(ret.headers['Location']).query).items():
        qwargs[k] = v[0]
    qwargs['grant_type'] = 'authorization_code'

    # Using authorization code given to us in the Location header of the
    # previous request, request a token
    ret = os_oauth.post(
        self.openshift_token_endpoint,
        headers={
            'Accept': 'application/json',
            'Content-Type': 'application/x-www-form-urlencoded',
            # This is just base64 encoded 'openshift-challenging-client:'
            'Authorization': 'Basic b3BlbnNoaWZ0LWNoYWxsZW5naW5nLWNsaWVudDo='
        },
        data=urlencode(qwargs), verify=self.con_verify_ca)

    if ret.status_code != 200:
        self.fail_request("Failed to obtain an authorization token.",
                          method='POST', url=self.openshift_token_endpoint,
                          reason=ret.reason, status_code=ret.status_code)

    return ret.json()['access_token']
def __init__(self, all_pages=False, congress_search=101, session_search=1, only_congress_summary=False, update_mode=True):
    """Initialize the Senate roll-call crawler.

    Args:
        all_pages: when True, crawl every page rather than only new votes.
        congress_search: congress number to crawl (int or str; stored as str).
        session_search: session number to crawl (int or str; stored as str).
        only_congress_summary: restrict crawling to per-congress summaries.
        update_mode: whether to run in update mode.

    Side effects: opens an HTMLSession and calls get_roll_call_lists()
    (network I/O) during construction.
    """
    # headers and http probably won't be used
    # NOTE(review): user agent reads "Mozill" (sic) — presumably a typo for
    # "Mozilla"; left unchanged since it is a runtime string.
    self.headers = util.make_headers(
        accept_encoding='gzip, deflate',
        keep_alive=True,
        user_agent="Mozill/5.0 (X11; Linux x86_64; rv:47.0) Gecko/20100101 Firefox/47.0",
    )
    self.http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                    ca_certs=certifi.where())
    self.sesh = HTMLSession()

    self.senate_page_base = 'https://www.senate.gov'
    self.senate_new_votes_page = 'https://www.senate.gov/legislative/votes_new.htm'
    self.get_roll_call_lists()
    self.all_pages = all_pages

    # BUG FIX: the original tested `type(x) != 'str'` — a type object
    # compared against the string 'str', which is always True. The net
    # effect (coercing to str) is unchanged, but isinstance states the
    # intent and avoids the always-true comparison.
    if not isinstance(congress_search, str):
        congress_search = str(congress_search)
    if not isinstance(session_search, str):
        session_search = str(session_search)
    self.wanted_congress = congress_search
    self.wanted_session = session_search
    self.only_congress_summary = only_congress_summary

    # need to make variable to keep directory stuff together
    self.path_to_data = 'D:/Programming/PolySci/Polysci-Backend/prototyping/data_storage/'
    self.senate_data_path_top = self.path_to_data + 'senate_data/'
    self.xml_summary_direc = '../data_storage/senate_data/crawled_data/xml_summaries/'
    self.summaries_with_links = '../data_storage/senate_data/crawled_data/link_summaries/'
    self.senate_vote_page_direc = '../data_storage/senate_data/crawled_data/full_vote_sets/'

    self.skip_writing_to_database = False
    self.update_mode = update_mode
    if self.skip_writing_to_database:
        print(
            'WILL NOT WRITE TO DATABASE, HAVE SET VARIABLE TO PREVENT WRITING'
        )
    return
def __init__(self, base_url=None, proxy_url=None, headers={}, json_encoder=None):
    # type: (Optional[str], Optional[str], Mapping[str, str], Optional[Type[json.JSONEncoder]]) -> None
    """Create a synchronous sender backed by a urllib3 pool manager.

    Caller-supplied `headers` are layered on top of keep-alive/gzip
    defaults and passed to the superclass; a ProxyManager is used when a
    proxy URL is configured, otherwise a plain PoolManager.
    """
    # NOTE(review): `headers={}` is a mutable default; harmless here only
    # because it is never mutated (it is merged into base_headers).
    base_headers = util.make_headers(keep_alive=True, accept_encoding=True)
    # Explicit caller headers override the defaults above.
    base_headers.update(headers)
    super(SyncSender, self).__init__(
        base_url=base_url, proxy_url=proxy_url, headers=base_headers,
        json_encoder=json_encoder)
    options = dict(
        block=True,  # block when the pool is exhausted instead of erroring
        maxsize=self.max_pool_size,
    )
    # self.proxy_url is set by the superclass __init__ above.
    if self.proxy_url is not None:
        self.pool_manager = poolmanager.ProxyManager(self.proxy_url, **options)  # type: poolmanager.PoolManager
    else:
        self.pool_manager = poolmanager.PoolManager(**options)
def test_make_headers(self):
    """make_headers maps each keyword argument to its expected header."""
    checks = [
        (dict(accept_encoding=True), {"accept-encoding": "gzip,deflate"}),
        (dict(accept_encoding="foo,bar"), {"accept-encoding": "foo,bar"}),
        (dict(accept_encoding=["foo", "bar"]), {"accept-encoding": "foo,bar"}),
        (dict(accept_encoding=True, user_agent="banana"),
         {"accept-encoding": "gzip,deflate", "user-agent": "banana"}),
        (dict(user_agent="banana"), {"user-agent": "banana"}),
        (dict(keep_alive=True), {"connection": "keep-alive"}),
        (dict(basic_auth="foo:bar"), {"authorization": "Basic Zm9vOmJhcg=="}),
    ]
    for kwargs, want in checks:
        self.assertEqual(make_headers(**kwargs), want)
def postREST(url, data, posttype='params', auth=None, attempts=1):
    """POST `data` to `url` via the module-level urllib3 `manager`.

    Args:
        url: target URL.
        data: payload; JSON-encoded body when posttype == 'raw', form
            fields when posttype == 'params'.
        posttype: 'raw' or 'params' (any other value sends nothing and
            triggers the retry path).
        auth: optional dict like {'basic': {'username': ..., 'password': ...}}.
        attempts: current attempt number (used internally for retries).

    Returns:
        The urllib3 response object.

    Raises:
        PostDataException: on request errors, or after 3 failed attempts.
    """
    print("Posting to " + url + " with data: " + str(data))
    hdrs = None
    params = None
    encoded_body = None
    if auth:
        if 'basic' in auth:
            auth_str = auth['basic']['username'] + ":" + auth['basic']['password']
            hdrs = URLUtil.make_headers(basic_auth=auth_str)
    if posttype == 'raw':
        encoded_body = json.dumps(data)
    elif posttype == 'params':
        # params = urllib.parse.urlencode(data).encode('ascii')
        params = data
    response = None
    try:
        if encoded_body:
            response = manager.request('POST', url, body=encoded_body, headers=hdrs, timeout=30)
        elif params:
            response = manager.request('POST', url, fields=params, headers=hdrs, timeout=30)
    except ValueError as e:
        raise PostDataException("Could not post data for url: " + str(url) + " ERROR: " + str(e))
    except urllib.error.HTTPError as e:
        raise PostDataException("Could not post data for url: " + str(url) + " ERROR: " + str(e))
    except urllib.error.URLError as e:
        raise PostDataException("Could not post data for url: " + str(url) + " ERROR: " + str(e))
    except URLExceptions.ProtocolError:
        # Transient protocol error: fall through to the retry logic below.
        pass
    except URLExceptions.ReadTimeoutError:
        # Read timeout: fall through to the retry logic below.
        pass
    except Exception as e:
        raise PostDataException("Could not post data for url: " + str(url) + " ERROR: " + str(e))
    if not response:
        if attempts == 3:
            raise PostDataException("Could not post data after 3 attempts for url: " + str(url))
        # BUG FIX: the original retried via the undefined name
        # `postRestParams` (a NameError at runtime), passed `params` instead
        # of the original `data`/`posttype`, discarded the retry's result and
        # returned True. Recurse into this function and propagate its result.
        time.sleep(3)
        return postREST(url, data, posttype, auth, attempts + 1)
    return response
def __init__(
    self,
    base_url: str,
    port: int,
    active_item_column_map: Optional[ColumnsMap] = None,
    no_active_item_ignore_time: Optional[float] = 0.25,
    playlist_ref: Optional[PlaylistRef] = None,
    playlist_items_column_map: Optional[ColumnsMap] = None,
    offset: Optional[int] = 0,
    count: Optional[int] = 1000000,
    username: Optional[str] = None,
    password: Optional[str] = None,
):
    """Set up a beefweb event-stream listener (not yet connected).

    Builds the request headers (optionally with basic auth), the per-topic
    info handlers, and the /api/query/updates URL with its query string.
    No network I/O happens here.
    """
    self.player_state = None
    """
    A PlayerState object representing the current state of the player
    including the currently active item.  Set to None when the listener is
    not connected.
    """
    self.playlists = None
    """
    A Playlists object representing the current playlists.  Set to None
    when the listener is not connected.
    """
    # Connection/session state; populated when the listener connects.
    self._connection = None
    self._event_reader = None
    self._session = None
    self._no_active_item_ignore_time = no_active_item_ignore_time
    self._player_state_update_timeout = None
    # Default to plain HTTP when the caller omits a scheme.
    if not base_url.lower().startswith("http"):
        base_url = "http://" + base_url
    user_pass = None
    if username is not None and password is not None:
        # NOTE(review): the join separator appears redacted ("******");
        # make_headers expects "username:password" — confirm upstream.
        user_pass = "******".join((username, password))
    # make_headers(basic_auth=None) yields no authorization entry.
    self._headers = make_headers(basic_auth=user_pass)
    # The beefweb server treats header names as case sensitive...
    if "authorization" in self._headers:
        self._headers["Authorization"] = self._headers.pop("authorization")
    if active_item_column_map is None:
        active_item_column_map = self._default_column_map
    if playlist_items_column_map is None:
        playlist_items_column_map = self._default_column_map
    # One handler per event topic: how to parse it and where to attach it.
    self._handlers = {
        "player": InfoHandler(
            return_type=PlayerState,
            attach_attr="player_state",
            column_map=active_item_column_map,
            callbacks=set(),
        ),
        "playlistItems": InfoHandler(
            return_type=PlaylistItems,
            attach_attr=None,
            column_map=playlist_items_column_map,
            callbacks=set(),
        ),
        "playlists": InfoHandler(
            return_type=Playlists,
            attach_attr="playlists",
            column_map=None,
            callbacks=set(),
        ),
    }
    # Query parameters for the updates endpoint.
    param_dict = {
        "player": param_value_to_str(True),
        "trcolumns": param_value_to_str(active_item_column_map),
        "playlists": param_value_to_str(True),
    }
    # Only subscribe to playlist-item updates when a playlist was requested.
    if playlist_ref is not None:
        param_dict.update({
            "playlistItems": param_value_to_str(True),
            "plref": param_value_to_str(playlist_ref),
            "plcolumns": param_value_to_str(playlist_items_column_map),
            "plrange": f"{offset}:{count}",
        })
    self._url = (
        f"{base_url}:{port}/api/query/updates?{urlencode(param_dict)}")
proxy_bypass, urlunparse, basestring, integer_types, is_py3, proxy_bypass_environment, getproxies_environment, Mapping) from .cookies import cookiejar_from_dict from .structures import CaseInsensitiveDict from .exceptions import ( InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) NETRC_FILES = ('.netrc', '_netrc') DEFAULT_CA_BUNDLE_PATH = certs.where() DEFAULT_PORTS = {'http': 80, 'https': 443} # Ensure that ', ' is used to preserve previous delimiter behavior. DEFAULT_ACCEPT_ENCODING = ", ".join( re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"]) ) if sys.platform == 'win32': # provide a proxy_bypass version on Windows without DNS lookups def proxy_bypass_registry(host): try: if is_py3: import winreg else: import _winreg as winreg except ImportError: return False
InvalidHeader, InvalidURL, UnrewindableBodyError, ) from .structures import CaseInsensitiveDict NETRC_FILES = (".netrc", "_netrc") DEFAULT_CA_BUNDLE_PATH = certs.where() DEFAULT_PORTS = {"http": 80, "https": 443} # Ensure that ', ' is used to preserve previous delimiter behavior. DEFAULT_ACCEPT_ENCODING = ", ".join( re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])) if sys.platform == "win32": # provide a proxy_bypass version on Windows without DNS lookups def proxy_bypass_registry(host): try: import winreg except ImportError: return False try: internetSettings = winreg.OpenKey( winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Internet Settings", )
# Can get the password from the oc install log PASSWORD = '******' # Gather authorization APIs info oauth_server_info = requests.get( '{}/.well-known/oauth-authorization-server'.format(HOST), verify=False).json() #print(oauth_server_info) openshift_oauth = OAuth2Session(client_id='openshift-challenging-client') authorization_url, state = openshift_oauth.authorization_url( oauth_server_info['authorization_endpoint'], state="1", code_challenge_method='S256') basic_auth_header = make_headers(basic_auth='{}:{}'.format(USERNAME, PASSWORD)) # Request auth using simple credentials challenge_response = openshift_oauth.get( authorization_url, headers={ 'X-Csrf-Token': state, 'authorization': basic_auth_header.get('authorization') }, verify=False, allow_redirects=False) #print(challenge_response) qwargs = { k: v[0]
try: # Python 3 from urllib.parse import urlparse except ImportError: from urlparse import urlparse from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool from urllib3.util import get_host, make_headers from .structure import CookieSession __all__ = ("opener", "RegularOpener", "SecureOpener") #UA = "Mozilla/5.0 (X11; Linux x86_64; rv:14.0) Gecko/20100101 Firefox/14.0.1" UA = None headers = make_headers(keep_alive=True, accept_encoding=["gzip", "deflate"], user_agent=UA) headers["cookie"] = "" def opener(url, **opener_kwargs): o = urlparse(url) print(o) if not o.scheme and not o.netloc: url = "http://" + url return opener(url, **opener_kwargs) openers_by_scheme = {"http": RegularOpener, "https": SecureOpener} return openers_by_scheme[o.scheme](url, **opener_kwargs) class RegularOpener(HTTPConnectionPool): def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1, block=False, headers=headers): try: