def assert_request_equal(self, expected, real_request):
    # Compare an expected (method, path[, body[, headers]]) tuple against a
    # captured request dict.
    # NOTE(review): mutates ``real_request`` by adding bookkeeping keys
    # ('expected', 'key', 'expected_value', 'value', 'extra_headers') used
    # only for the %-formatted error messages below.
    method, path = expected[:2]
    # An absolute expected path (one with a scheme) is matched against the
    # full URL; otherwise only the path component is compared.
    if urlparse(path).scheme:
        match_path = real_request['full_path']
    else:
        match_path = real_request['path']
    self.assertEqual((method, path), (real_request['method'], match_path))
    if len(expected) > 2:
        body = expected[2]
        real_request['expected'] = body
        err_msg = 'Body mismatch for %(method)s %(path)s, ' \
            'expected %(expected)r, and got %(body)r' % real_request
        self.orig_assertEqual(body, real_request['body'], err_msg)
    if len(expected) > 3:
        # Header comparison is case-insensitive; every expected header must
        # be present with the expected value ...
        headers = CaseInsensitiveDict(expected[3])
        for key, value in headers.items():
            real_request['key'] = key
            real_request['expected_value'] = value
            real_request['value'] = real_request['headers'].get(key)
            err_msg = (
                'Header mismatch on %(key)r, '
                'expected %(expected_value)r and got %(value)r '
                'for %(method)s %(path)s %(headers)r' % real_request)
            self.orig_assertEqual(value, real_request['value'], err_msg)
        # ... and no headers beyond the expected set may appear.
        real_request['extra_headers'] = dict(
            (key, value) for key, value in real_request['headers'].items()
            if key not in headers)
        if real_request['extra_headers']:
            self.fail('Received unexpected headers for %(method)s '
                      '%(path)s, got %(extra_headers)r' % real_request)
def _check_creds(self, creds):
    """Normalize credentials into the dict shape the API expects.

    ``creds`` may be a dict, or a string that is either a filesystem path
    to a file of ``key: value`` lines or such content directly.

    :returns: dict with either ``companyId`` + ``psk`` or
        ``loginSessionId`` + ``profileId``
    :raises TypeError: if ``creds`` is neither a dict nor a string
    :raises ValueError: if only half of a credential pair is supplied,
        or no recognized pair is present
    """
    d = CaseInsensitiveDict()
    if isinstance(creds, dict):
        d.update(creds)
    elif isinstance(creds, basestring):
        if os.path.exists(creds):
            # Fix: the original used the removed ``file()`` builtin and
            # never closed the handle; use open() in a context manager.
            with open(creds, "r") as fp:
                creds = fp.read()
        for line in creds.splitlines():
            if ":" in line:
                k, v = line.split(":", 1)
                d[k.strip()] = v.strip()
    else:
        raise TypeError("unsupported type for credentials data")
    # "CID" is accepted as an alias for "companyId".
    if "companyId" not in d and "CID" in d:
        d["companyId"] = d["CID"]
    if "companyId" in d and "psk" not in d:
        raise ValueError("psk is required when companyId is provided")
    elif "psk" in d and "companyId" not in d:
        raise ValueError("companyId is required when psk is provided")
    elif "companyId" in d and "psk" in d:
        return {"companyId": int(d["companyId"]), "psk": str(d["psk"])}
    elif "loginSessionId" in d and "profileId" not in d:
        raise ValueError("profileId is required when loginSessionId is "
                         "provided")
    elif "profileId" in d and "loginSessionId" not in d:
        raise ValueError("loginSessionId is required when profileId is "
                         "provided")
    elif "loginSessionId" in d and "profileId" in d:
        return {"loginSessionId": str(d["loginSessionId"]),
                "profileId": int(d["profileId"])}
    else:
        raise ValueError("either companyId+psk or "
                         "loginSessionId+profileId must be provided")
def send_document(url, data, timeout=10, *args, **kwargs):
    """Send ``data`` to ``url`` via HTTP POST.

    Additional ``*args`` and ``**kwargs`` are forwarded to
    ``requests.post``.

    :arg url: Full url to send to, including protocol
    :arg data: POST data to send (dict)
    :arg timeout: Seconds to wait for response (defaults to 10)
    :returns: Tuple of status code (int or None) and error (exception
        class instance or None)
    """
    logger.debug("send_document: url=%s, data=%s, timeout=%s",
                 url, data, timeout)
    # Start from our default headers, layering caller-supplied ones on top.
    headers = CaseInsensitiveDict({'User-Agent': USER_AGENT})
    if "headers" in kwargs:
        headers.update(kwargs.get("headers"))
    kwargs["data"] = data
    kwargs["timeout"] = timeout
    kwargs["headers"] = headers
    try:
        response = requests.post(url, *args, **kwargs)
    except RequestException as ex:
        logger.debug("send_document: exception %s", ex)
        return None, ex
    logger.debug("send_document: response status code %s",
                 response.status_code)
    return response.status_code, None
def assert_request_equal(self, expected, real_request):
    # Assert that a captured request matches an expected
    # (method, path[, body[, headers]]) tuple.
    # NOTE(review): mutates ``real_request`` with helper keys used purely
    # for the %-formatted error messages.
    method, path = expected[:2]
    # Absolute expected paths (with a scheme) match against the full URL.
    if urlparse(path).scheme:
        match_path = real_request["full_path"]
    else:
        match_path = real_request["path"]
    self.assertEqual((method, path), (real_request["method"], match_path))
    if len(expected) > 2:
        body = expected[2]
        real_request["expected"] = body
        # Adjacent string literals concatenate into a single format string.
        err_msg = "Body mismatch for %(method)s %(path)s, " "expected %(expected)r, and got %(body)r" % real_request
        self.orig_assertEqual(body, real_request["body"], err_msg)
    if len(expected) > 3:
        # Headers compare case-insensitively; all expected headers must be
        # present with matching values ...
        headers = CaseInsensitiveDict(expected[3])
        for key, value in headers.items():
            real_request["key"] = key
            real_request["expected_value"] = value
            real_request["value"] = real_request["headers"].get(key)
            err_msg = (
                "Header mismatch on %(key)r, "
                "expected %(expected_value)r and got %(value)r "
                "for %(method)s %(path)s %(headers)r" % real_request
            )
            self.orig_assertEqual(value, real_request["value"], err_msg)
        # ... and no headers beyond the expected set may appear.
        real_request["extra_headers"] = dict(
            (key, value)
            for key, value in real_request["headers"].items()
            if key not in headers
        )
        if real_request["extra_headers"]:
            self.fail(
                "Received unexpected headers for %(method)s "
                "%(path)s, got %(extra_headers)r" % real_request
            )
class DDWRT(Router):
    # Router backend that scrapes a DD-WRT info page over HTTP.

    def __init__(self, conf, hostnames):
        # ``hostnames`` maps MAC address -> preferred hostname; MAC keys
        # compare case-insensitively.
        self.hostnames = CaseInsensitiveDict()
        self.hostnames.update(hostnames)
        self.conf = conf
        self.auth = self.conf.auth()

    def clients(self):
        """
        Receives all currently logged in users in a wifi network.

        :rtype : list
        :return: Returns a list of dicts, containing the following keys:
            mac, ipv4, seen, hostname
        """
        clients = self._get_clients_raw()
        clients_json = []
        for client in clients:
            client_hostname_from_router = client[0]
            client_ipv4 = client[1].strip()
            client_mac = client[2].strip().upper()
            # Prefer the configured hostname for this MAC, falling back to
            # the name the router reported.
            client_hostname = self.hostnames.get(client_mac, client_hostname_from_router).strip()
            client_connections = int(client[3].strip())
            # Clients with less than 20 connections are considered offline
            if client_connections < 20:
                continue
            clients_json.append({
                'mac': client_mac,
                'ipv4': client_ipv4,
                'seen': int(time.time()),
                'hostname': client_hostname,
            })
        logger.debug('The router got us {} clients.'.format(len(clients_json)))
        logger.debug(str(clients_json))
        return clients_json

    def _get_clients_raw(self):
        # Fetch the router's internal info page and parse it into ARP rows.
        info_page = self.conf.internal()
        response = requests.get(info_page, auth=self.auth)
        logger.info('Got response from router with code {}.'.format(response.status_code))
        return DDWRT._convert_to_clients(response.text) or []

    @staticmethod
    def _convert_to_clients(router_info_all):
        # Split router info in lines and filter empty info
        router_info_lines = filter(None, router_info_all.split("\n"))
        # Get key / value of router info
        router_info_items = dict()
        for item in router_info_lines:
            key, value = item[1:-1].split("::")  # Remove curly braces and split
            router_info_items[key.strip()] = value.strip()
        # Get client info as a list
        arp_table = utils.groupn(router_info_items['arp_table'].replace("'", "").split(","), 4)
        # NOTE(review): dhcp_leases is computed but never used — confirm
        # whether it should be merged into the returned data or removed.
        dhcp_leases = utils.groupn(router_info_items['dhcp_leases'].replace("'", "").split(","), 5)
        return arp_table if (len(arp_table) > 0) else []
def prepare_response(self, cached):
    """Verify our vary headers match and construct a real urllib3
    HTTPResponse object.

    Returns ``None`` when the cached response carried ``Vary: *``, since
    we cannot decide whether it is suitable for this request.
    """
    vary = cached.get("vary", {})
    if "*" in vary:
        return

    raw_body = cached["response"].pop("body")

    response_headers = CaseInsensitiveDict(data=cached['response']['headers'])
    # The stored body is already de-chunked, so a stale
    # 'Transfer-Encoding: chunked' header must not survive.
    if response_headers.get('transfer-encoding', '') == 'chunked':
        response_headers.pop('transfer-encoding')
    cached['response']['headers'] = response_headers

    try:
        stream = io.BytesIO(raw_body)
    except TypeError:
        # A v1 (pickle) cache entry serialized by Python 2 unpickles its
        # byte-string body as a Python 3 unicode string, which BytesIO
        # rejects; re-encode it back to bytes.
        stream = io.BytesIO(raw_body.encode('utf8'))

    return HTTPResponse(
        body=stream,
        preload_content=False,
        **cached["response"]
    )
class Attachment(object):
    """Wrap a MIME part, exposing decoded headers and content."""

    def __init__(self, part):
        charset = part.encoding or 'utf-8'
        decoded = {}
        for name, value in part.headers.items():
            decoded[name.decode(charset)] = value.decode(charset)
        self.headers = CaseInsensitiveDict(decoded)
        self.content_type = self.headers.get('Content-Type', None)
        self.content_id = self.headers.get('Content-ID', None)
        self.content_location = self.headers.get('Content-Location', None)
        self._part = part

    def __repr__(self):
        return '<Attachment(%r, %r)>' % (self.content_id, self.content_type)

    @cached_property
    def content(self):
        """Return the content of the attachment

        :rtype: bytes or str
        """
        transfer_encoding = self.headers.get('Content-Transfer-Encoding', None)
        payload = self._part.content
        if transfer_encoding == 'base64':
            return base64.b64decode(payload)
        if transfer_encoding == 'binary':
            return payload.strip(b'\r\n')
        return payload
class MockResponse(object):
    """
    Mock response object with a status code and some content
    """

    def __init__(self, status_code, content=None, headers=None):
        self.status_code = status_code
        # Fall back to an empty body when no content was supplied.
        self.content = content if content else ''
        self.headers = CaseInsensitiveDict()
        if headers:
            self.headers.update(headers)

    def raise_for_status(self):
        # Mirror requests.Response.raise_for_status: raise only on 4xx/5xx.
        if 400 <= self.status_code < 500:
            raise requests.HTTPError(
                '%s Client Error: ...' % self.status_code, response=self)
        if 500 <= self.status_code < 600:
            raise requests.HTTPError(
                '%s Server Error: ...' % self.status_code, response=self)

    def json(self, **kwargs):
        return json.loads(self.content)
def test_get(self):
    cid = CaseInsensitiveDict()
    cid["spam"] = "oneval"
    cid["SPAM"] = "blueval"
    # The later assignment wins regardless of key casing.
    for lookup in ("spam", "SPAM", "sPam"):
        self.assertEqual(cid.get(lookup), "blueval")
    self.assertEqual(cid.get("notspam", "default"), "default")
def test_preserve_last_key_case(self):
    cid = CaseInsensitiveDict({"Accept": "application/json",
                               "user-Agent": "requests"})
    # Re-setting an existing key with different casing adopts the new casing.
    cid.update({"ACCEPT": "application/json"})
    cid["USER-AGENT"] = "requests"
    expected = frozenset(["ACCEPT", "USER-AGENT"])
    assert frozenset(pair[0] for pair in cid.items()) == expected
    assert frozenset(cid.keys()) == expected
    assert frozenset(cid) == expected
def set_extra_headers(self, headers):
    """Store custom headers, routing Reply-To into its dedicated field."""
    extra = CaseInsensitiveDict(headers)
    # Reply-To has a dedicated top-level field in the payload.
    if 'Reply-To' in extra:
        self.data["ReplyTo"] = extra.pop('Reply-To')
    self.data["Headers"] = [{"Name": name, "Value": value}
                            for name, value in extra.items()]
def test_lower_items(self):
    cid = CaseInsensitiveDict({
        'Accept': 'application/json',
        'user-Agent': 'requests',
    })
    # lower_items() yields (lowercased_key, value) pairs.
    observed = frozenset(key for key, _ in cid.lower_items())
    assert observed == frozenset(['accept', 'user-agent'])
def test_get(self):
    cid = CaseInsensitiveDict()
    cid['spam'] = 'oneval'
    cid['SPAM'] = 'blueval'
    # Every casing resolves to the most recently stored value.
    for key in ('spam', 'SPAM', 'sPam'):
        self.assertEqual(cid.get(key), 'blueval')
    self.assertEqual(cid.get('notspam', 'default'), 'default')
def test_get(self):
    cid = CaseInsensitiveDict()
    cid['spam'] = 'oneval'
    cid['SPAM'] = 'blueval'
    # Lookups are case-insensitive and return the last value written.
    assert all(cid.get(k) == 'blueval' for k in ('spam', 'SPAM', 'sPam'))
    assert cid.get('notspam', 'default') == 'default'
def test_get(self):
    cid = CaseInsensitiveDict()
    cid["spam"] = "oneval"
    cid["SPAM"] = "blueval"
    # The second write overwrites the first, whatever the key casing.
    for variant in ["spam", "SPAM", "sPam"]:
        assert cid.get(variant) == "blueval"
    assert cid.get("notspam", "default") == "default"
def test_copy(self):
    original = CaseInsensitiveDict({
        'Accept': 'application/json',
        'user-Agent': 'requests',
    })
    duplicate = original.copy()
    # The copy starts out equal but is independent of the original.
    assert original == duplicate
    original['changed'] = True
    assert original != duplicate
def test_preserve_key_case(self):
    cid = CaseInsensitiveDict({
        'Accept': 'application/json',
        'user-Agent': 'requests',
    })
    # Keys keep exactly the casing they were inserted with.
    expected = frozenset(['Accept', 'user-Agent'])
    assert frozenset(pair[0] for pair in cid.items()) == expected
    assert frozenset(cid.keys()) == expected
    assert frozenset(cid) == expected
def test_setdefault(self):
    cid = CaseInsensitiveDict({'Spam': 'blueval'})
    # Existing key (any casing): the stored value is returned unchanged.
    self.assertEqual(cid.setdefault('spam', 'notblueval'), 'blueval')
    # Missing key: the default is inserted and returned.
    self.assertEqual(cid.setdefault('notspam', 'notblueval'), 'notblueval')
def test_fixes_649(self):
    """__setitem__ should behave case-insensitively."""
    cid = CaseInsensitiveDict()
    writes = [('spam', 'oneval'), ('Spam', 'twoval'),
              ('sPAM', 'redval'), ('SPAM', 'blueval')]
    for key, value in writes:
        cid[key] = value
    # Only the last write survives, stored under its own casing.
    assert cid['spam'] == 'blueval'
    assert cid['SPAM'] == 'blueval'
    assert list(cid.keys()) == ['SPAM']
def test_fixes_649(self):
    """__setitem__ should behave case-insensitively."""
    cid = CaseInsensitiveDict()
    cid["spam"] = "oneval"
    cid["Spam"] = "twoval"
    cid["sPAM"] = "redval"
    cid["SPAM"] = "blueval"
    # Every case variant reads back the final value; one key remains.
    assert cid["spam"] == cid["SPAM"] == "blueval"
    assert list(cid.keys()) == ["SPAM"]
def request(self, method, url, accept_json=False, headers=None,
            params=None, json=None, data=None, files=None, **kwargs):
    """Issue an HTTP request against the Watson service.

    ``url`` is appended to ``self.url``. With ``accept_json`` the parsed
    JSON body is returned; otherwise the raw response object is.
    Raises WatsonException for non-2xx responses and for JSON bodies
    whose 'status' field is 'ERROR'.
    """
    full_url = self.url + url

    input_headers = _remove_null_values(headers) if headers else {}

    # Default headers; caller-supplied headers override them.
    headers = CaseInsensitiveDict(
        {'user-agent': 'watson-developer-cloud-python-' + __version__})
    if accept_json:
        headers['accept'] = 'application/json'
    headers.update(input_headers)

    # Remove keys with None values
    params = _remove_null_values(params)
    json = _remove_null_values(json)
    data = _remove_null_values(data)
    files = _remove_null_values(files)

    # Support versions of requests older than 2.4.2 without the json input
    if not data and json is not None:
        data = json_import.dumps(json)
        headers.update({'content-type': 'application/json'})

    auth = None
    if self.username and self.password:
        auth = (self.username, self.password)
    if self.api_key is not None:
        if params is None:
            params = {}
        # The AlchemyAPI gateway uses a different query-parameter name.
        if url.startswith('https://gateway-a.watsonplatform.net/calls'):
            params['apikey'] = self.api_key
        else:
            params['api_key'] = self.api_key

    response = requests.request(method=method, url=full_url,
                                cookies=self.jar, auth=auth,
                                headers=headers,
                                params=params, data=data, files=files,
                                **kwargs)

    if 200 <= response.status_code <= 299:
        if accept_json:
            response_json = response.json()
            # Some services report errors inside a 2xx body; surface them
            # as exceptions and rewrite the status code accordingly.
            if 'status' in response_json and response_json['status'] == 'ERROR':
                response.status_code = 400
                error_message = 'Unknown error'
                if 'statusInfo' in response_json:
                    error_message = response_json['statusInfo']
                if error_message == 'invalid-api-key':
                    response.status_code = 401
                raise WatsonException('Error: ' + error_message)
            return response_json
        return response
    else:
        if response.status_code == 401:
            error_message = 'Unauthorized: Access is denied due to ' \
                            'invalid credentials'
        else:
            error_message = self._get_error_message(response)
        raise WatsonException(error_message)
def test_preserve_last_key_case(self):
    cid = CaseInsensitiveDict({
        'Accept': 'application/json',
        'user-Agent': 'requests',
    })
    # Overwriting via update() or __setitem__ adopts the newest casing.
    cid.update({'ACCEPT': 'application/json'})
    cid['USER-AGENT'] = 'requests'
    expected = frozenset(['ACCEPT', 'USER-AGENT'])
    assert frozenset(item[0] for item in cid.items()) == expected
    assert frozenset(cid.keys()) == expected
    assert frozenset(cid) == expected
def verify_signature(self, query_parameters):
    """Verify the signature provided with the query parameters.

    http://docs.shopify.com/api/tutorials/oauth

    example usage::

        from shopify_trois import Credentials
        from shopify_trois.engines import Json as Shopify
        from urllib.parse import parse_qsl

        credentials = Credentials(
            api_key='your_api_key',
            scope=['read_orders'],
            secret='your_app_secret'
        )
        shopify = Shopify(shop_name="your_store_name",
                          credentials=credentials)

        query_parameters = parse_qsl(
            "code=238420989938cb70a609f6ece2e2586b"
            "&shop=yourstore.myshopify.com&timestamp=1373382939"
            "&signature=6fb122e33c21851c465345b8cb97245e")

        if not shopify.verify_signature(query_parameters):
            raise Exception("invalid signature")

        credentials.code = dict(query_parameters).get('code')
        shopify.setup_access_token()

    :returns: Returns True if the signature is valid.
    """
    import hmac

    params = CaseInsensitiveDict(query_parameters)
    signature = params.pop("signature", None)
    if signature is None:
        # No signature supplied — cannot possibly match.
        return False

    # Canonicalize: sorted "key=value" pairs, concatenated, prefixed
    # with the shared secret.
    calculated = ["%s=%s" % (k, v) for k, v in params.items()]
    calculated.sort()
    calculated = "".join(calculated)
    calculated = "{secret}{calculated}".format(
        secret=self.credentials.secret,
        calculated=calculated
    )

    md5 = hashlib.md5()
    md5.update(calculated.encode('utf-8'))
    produced = md5.hexdigest()

    # Fix: compare in constant time so the check does not leak timing
    # information about the expected digest to an attacker.
    return hmac.compare_digest(produced, signature)
def __init__(self, email, password, device_id):
    # Log into Google Play Music, reusing a cached auth token when one
    # exists on disk, and initialize the playback queue state.
    # NOTE(review): block structure reconstructed from collapsed source —
    # confirm nesting of the cached-token branch against the original file.
    self.__gmusic = Mobileclient()
    self.__email = email
    self.__device_id = device_id
    self.logged_in = False
    self.queue = list()
    self.queue_index = -1
    self.play_queue_order = list()
    self.play_modes = TizEnumeration(["NORMAL", "SHUFFLE"])
    self.current_play_mode = self.play_modes.NORMAL
    self.now_playing_song = None
    userdir = os.path.expanduser('~')
    # Per-account token cache file under ~/.config/tizonia/.
    tizconfig = os.path.join(userdir, ".config/tizonia/." + email + ".auth_token")
    auth_token = ""
    if os.path.isfile(tizconfig):
        with open(tizconfig, "r") as f:
            auth_token = pickle.load(f)
    if auth_token:
        # 'Keep track of the auth token' workaround. See:
        # https://github.com/diraimondo/gmusicproxy/issues/34#issuecomment-147359198
        print_msg("[Google Play Music] [Authenticating] : " \
                  "'with cached auth token'")
        self.__gmusic.android_id = device_id
        self.__gmusic.session._authtoken = auth_token
        self.__gmusic.session.is_authenticated = True
        try:
            # Cheap API call to validate the cached token.
            self.__gmusic.get_registered_devices()
        except CallFailure:
            # The token has expired. Reset the client object
            print_wrn("[Google Play Music] [Authenticating] : " \
                      "'auth token expired'")
            self.__gmusic = Mobileclient()
            auth_token = ""
    if not auth_token:
        attempts = 0
        print_nfo("[Google Play Music] [Authenticating] : " \
                  "'with user credentials'")
        # Retry the credential login up to three times.
        while not self.logged_in and attempts < 3:
            self.logged_in = self.__gmusic.login(email, password, device_id)
            attempts += 1
        # Persist the fresh token for next time (truncate any old content).
        with open(tizconfig, "a+") as f:
            f.truncate()
            pickle.dump(self.__gmusic.session._authtoken, f)
    self.library = CaseInsensitiveDict()
    self.song_map = CaseInsensitiveDict()
    self.playlists = CaseInsensitiveDict()
    self.stations = CaseInsensitiveDict()
def string_to_sign(self, request): """ Generates the string we need to sign on. Params: - request The request object Returns String ready to be signed on """ # We'll use case insensitive dict to store the headers h = CaseInsensitiveDict() # Add the hearders h.update(request.headers) # If we have an 'x-amz-date' header, # we'll try to use it instead of the date if b'x-amz-date' in h or 'x-amz-date' in h: date = '' else: # No x-amz-header, we'll generate a date date = h.get('Date') or self._get_date() # Set the date header request.headers['Date'] = date # A fix for the content type header extraction in python 3 # This have to be done because requests will try to set # application/www-url-encoded header if we pass bytes as the content, # and the content-type is set with a key that is b'Content-Type' and # not 'Content-Type' content_type = '' if b'Content-Type' in request.headers: # Fix content type content_type = h.get(b'Content-Type') del request.headers[b'Content-Type'] request.headers['Content-Type'] = content_type # The string we're about to generate # There's more information about it here: # http://docs.aws.amazon.com/AmazonS3/latest/dev/ # RESTAuthentication.html#ConstructingTheAuthenticationHeader msg = [ # HTTP Method request.method, # MD5 If provided h.get(b'Content-MD5', '') or h.get('Content-MD5', ''), # Content type if provided content_type or h.get('Content-Type', ''), # Date date, # Canonicalized special amazon headers and resource uri self._get_canonicalized_amz_headers(h) + self._get_canonicalized_resource(request) ] # join with a newline and return return '\n'.join(msg)
def string_to_sign(self, request):
    """Build the canonical string to sign for an AWS-style request.

    Layout (method, MD5, content type, date, canonicalized amz headers +
    resource) follows the S3 REST authentication specification.

    :param request: the outgoing request object; its ``Date`` and
        ``Content-Type`` headers may be normalized as a side effect
    :returns: newline-joined string ready for signing
    """
    # Case-insensitive view of the request headers.
    h = CaseInsensitiveDict()
    h.update(request.headers)

    # Prefer an existing 'x-amz-date' header (bytes or str key) over a
    # generated Date header.
    if b'x-amz-date' in h or 'x-amz-date' in h:
        date = ''
    else:
        date = h.get('Date') or self._get_date()

    # Fix: the Date header was assigned twice in a row; set it once.
    request.headers['Date'] = date

    # A fix for the content type header extraction in python 3.
    # Requests will try to set an application/www-url-encoded header if we
    # pass bytes as the content and the content-type key is b'Content-Type'
    # rather than 'Content-Type'.
    content_type = ''
    if b'Content-Type' in request.headers:
        # Re-key the header under the str key.
        content_type = h.get(b'Content-Type')
        del request.headers[b'Content-Type']
        request.headers['Content-Type'] = content_type

    msg = [
        request.method,
        h.get(b'Content-MD5', '') or h.get('Content-MD5', ''),
        content_type or h.get('Content-Type', ''),
        date,
        self._get_canonicalized_amz_headers(h) +
        self._get_canonicalized_resource(request)
    ]
    return '\n'.join(msg)
def http_get(self, url, headers=None):
    """
    returns (status, header, content)
    status: int
    header: dict
    content: string
    """
    # Merge caller headers into a case-insensitive dict so the
    # 'user-agent' presence check below ignores casing.
    hdrs = CaseInsensitiveDict()
    if headers:
        hdrs.update(headers)
    # Supply a default User-Agent unless the caller provided one.
    if not 'user-agent' in hdrs:
        hdrs['user-agent'] = USER_AGENT
    # NOTE: Python 2 print statement — this snippet is Python-2-only.
    print "http_get:", url
    r = requests.get(url, headers=hdrs)
    return (r.status_code, r.headers, r.text)
def __init__(self, status_code, content=None, headers=None):
    """Capture a status code, an optional body, and optional headers."""
    self.status_code = status_code
    # Fall back to an empty body when no content was supplied.
    self.content = content if content else ''
    self.headers = CaseInsensitiveDict()
    if headers:
        self.headers.update(headers)
def generate_request(method, url, body):
    """
    Generate our own custom request, so we can calculate digest auth.

    :param method: HTTP method name (any case; uppercased here)
    :param url: absolute URL for the request
    :param body: optional payload; JSON-serialized when truthy
    :returns: a prepared ``PreparedRequest`` signed with Sauthc1
    """
    method = method.upper()
    url = url
    files = []
    json_string = None
    headers = CaseInsensitiveDict({
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json',
        'User-Agent': 'stormpath-flask/0.4.4 flask/0.10.1 stormpath-sdk-python/2.4.5 python/2.7.6 Linux/LinuxMint (Linux-3.13.0-37-generic-x86_64-with-LinuxMint-17.1-rebecca)'
    })
    # Content-Length must reflect the JSON-serialized body.
    if body:
        headers.update({'Content-Length': str(len(json.dumps(body)))})
    params = OrderedDict()
    # Signer credentials come from the environment.
    auth = Sauthc1Signer(
        id=os.environ.get('STORMPATH_API_KEY_ID'),
        secret=os.environ.get('STORMPATH_API_KEY_SECRET'))
    cookies = RequestsCookieJar()
    hooks = {'response': []}
    pr = PreparedRequest()
    if body:
        json_body = json.dumps(body)
    else:
        json_body = None
    pr.prepare(
        method=method.upper(),
        url=url,
        files=files,
        data=json_body,
        json=json_string,
        headers=headers,
        params=params,
        auth=auth,
        cookies=cookies,
        hooks=hooks,
    )
    return pr
class TestCaseInsensitiveDict:
    # pytest-style test suite for CaseInsensitiveDict.

    @pytest.fixture(autouse=True)
    def setup(self):
        """
        CaseInsensitiveDict instance with "Accept" header.
        """
        self.case_insensitive_dict = CaseInsensitiveDict()
        self.case_insensitive_dict['Accept'] = 'application/json'

    def test_list(self):
        # Iteration yields the originally-cased keys.
        assert list(self.case_insensitive_dict) == ['Accept']

    # Reusable parametrize decorator: all casings of the stored key.
    possible_keys = pytest.mark.parametrize('key', ('accept', 'ACCEPT', 'aCcEpT', 'Accept'))

    @possible_keys
    def test_getitem(self, key):
        assert self.case_insensitive_dict[key] == 'application/json'

    @possible_keys
    def test_delitem(self, key):
        # Deletion works through any casing of the key.
        del self.case_insensitive_dict[key]
        assert key not in self.case_insensitive_dict

    def test_lower_items(self):
        assert list(self.case_insensitive_dict.lower_items()) == [('accept', 'application/json')]

    def test_repr(self):
        assert repr(self.case_insensitive_dict) == "{'Accept': 'application/json'}"

    def test_copy(self):
        # copy() returns a distinct but equal mapping.
        copy = self.case_insensitive_dict.copy()
        assert copy is not self.case_insensitive_dict
        assert copy == self.case_insensitive_dict

    @pytest.mark.parametrize(
        'other, result', (
            ({'AccePT': 'application/json'}, True),
            ({}, False),
            (None, False)
        )
    )
    def test_instance_equality(self, other, result):
        # Equality is case-insensitive and tolerates non-mapping operands.
        assert (self.case_insensitive_dict == other) is result
def __init__(self, status, headers):
    # Keep the raw status and wrap headers for case-insensitive lookup.
    self.status = status
    self.headers = CaseInsensitiveDict(headers)
def get_rich_item(self, item):
    """Enrich a raw mailing-list message item into an indexable dict."""
    eitem = {}

    self.copy_raw_fields(self.RAW_FIELDS_COPY, item, eitem)

    # The real data
    message = CaseInsensitiveDict(item['data'])

    # Fields that are the same in message and eitem
    copy_fields = ["Date", "Subject", "Message-ID"]
    for f in copy_fields:
        if f in message:
            eitem[f] = message[f]
        else:
            eitem[f] = None
    # Fields which names are translated
    map_fields = {"Subject": "Subject_analyzed"}
    for fn in map_fields:
        if fn in message:
            eitem[map_fields[fn]] = message[fn]
        else:
            eitem[map_fields[fn]] = None

    # Enrich dates
    eitem["email_date"] = str_to_datetime(
        item["metadata__updated_on"]).isoformat()
    eitem["list"] = item["origin"]

    # Truncate long subjects so they fit in a keyword field.
    if 'Subject' in message and message['Subject']:
        eitem['Subject'] = eitem['Subject'][:self.KEYWORD_MAX_LENGTH]

    # Root message (a message with no In-Reply-To starts a thread)
    if 'In-Reply-To' in message:
        eitem["root"] = False
    else:
        eitem["root"] = True

    # Part of the body is needed in studies like kafka_kip
    eitem["body_extract"] = ""
    # Size of the message
    eitem["size"] = None
    if 'plain' in message['body']:
        eitem["body_extract"] = "\n".join(
            message['body']['plain'].split("\n")[:MAX_LINES_FOR_VOTE])
        eitem["size"] = len(message['body']['plain'])

    # Time zone, extracted from the message date's UTC offset hours.
    try:
        message_date = str_to_datetime(message['Date'])
        eitem["tz"] = int(message_date.strftime("%z")[0:3])
    except Exception:
        eitem["tz"] = None

    identity = self.get_sh_identity(message['from'])
    eitem["mbox_author_domain"] = self.get_identity_domain(identity)

    # Optional enrichments: identities (sortinghat) and project mapping.
    if self.sortinghat:
        eitem.update(self.get_item_sh(item))

    if self.prjs_map:
        eitem.update(self.get_item_project(eitem))

    self.add_repository_labels(eitem)
    self.add_metadata_filter_raw(eitem)
    eitem.update(self.get_grimoire_fields(message['Date'], "message"))

    return eitem
def send_raw_request(cli_ctx, method, url, headers=None, uri_parameters=None,  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
                     body=None, skip_authorization_header=False, resource=None,
                     output_file=None,
                     generated_client_request_id_name='x-ms-client-request-id'):
    """Send a raw HTTP request through the CLI's auth/telemetry pipeline.

    Headers and URI parameters arrive as "key=value" strings or JSON
    blobs; an Authorization bearer token is derived from the profile
    unless one is supplied or skipped. Raises CLIError on non-2xx/3xx.
    """
    import uuid
    from requests import Session, Request
    from requests.structures import CaseInsensitiveDict

    # Parse each header argument either as a JSON object or a k=v pair.
    result = CaseInsensitiveDict()
    for s in headers or []:
        try:
            temp = shell_safe_json_parse(s)
            result.update(temp)
        except CLIError:
            key, value = s.split('=', 1)
            result[key] = value
    headers = result

    # If Authorization header is already provided, don't bother with the token
    if 'Authorization' in headers:
        skip_authorization_header = True

    # Handle User-Agent
    agents = [get_az_rest_user_agent()]

    # Borrow AZURE_HTTP_USER_AGENT from msrest
    # https://github.com/Azure/msrest-for-python/blob/4cc8bc84e96036f03b34716466230fb257e27b36/msrest/pipeline/universal.py#L70
    _ENV_ADDITIONAL_USER_AGENT = 'AZURE_HTTP_USER_AGENT'
    import os
    if _ENV_ADDITIONAL_USER_AGENT in os.environ:
        agents.append(os.environ[_ENV_ADDITIONAL_USER_AGENT])

    # Custom User-Agent provided as command argument
    if 'User-Agent' in headers:
        agents.append(headers['User-Agent'])
    headers['User-Agent'] = ' '.join(agents)

    if generated_client_request_id_name:
        headers[generated_client_request_id_name] = str(uuid.uuid4())

    # try to figure out the correct content type
    if body:
        try:
            _ = shell_safe_json_parse(body)
            if 'Content-Type' not in headers:
                headers['Content-Type'] = 'application/json'
        except Exception:  # pylint: disable=broad-except
            pass

    # add telemetry
    headers['CommandName'] = cli_ctx.data['command']
    if cli_ctx.data.get('safe_params'):
        headers['ParameterSetName'] = ' '.join(cli_ctx.data['safe_params'])

    # URI parameters parse the same way the headers did above.
    result = {}
    for s in uri_parameters or []:
        try:
            temp = shell_safe_json_parse(s)
            result.update(temp)
        except CLIError:
            key, value = s.split('=', 1)
            result[key] = value
    uri_parameters = result or None

    endpoints = cli_ctx.cloud.endpoints

    # If url is an ARM resource ID, like
    # /subscriptions/xxx/resourcegroups/xxx?api-version=2019-07-01,
    # default to Azure Resource Manager:
    # https://management.azure.com + /subscriptions/xxx/resourcegroups/xxx?api-version=2019-07-01
    if '://' not in url:
        url = endpoints.resource_manager.rstrip('/') + url

    # Replace common tokens with real values. It is for smooth experience
    # if users copy and paste the url from Azure Rest API doc
    from azure.cli.core._profile import Profile
    profile = Profile(cli_ctx=cli_ctx)
    if '{subscriptionId}' in url:
        url = url.replace('{subscriptionId}',
                          cli_ctx.data['subscription_id'] or
                          profile.get_subscription_id())

    # Prepare the Bearer token for `Authorization` header
    if not skip_authorization_header and url.lower().startswith('https://'):
        # Prepare `resource` for `get_raw_token`
        if not resource:
            # If url starts with ARM endpoint, like `https://management.azure.com/`,
            # use `active_directory_resource_id` for resource, like
            # `https://management.core.windows.net/`.
            # This follows the same behavior as
            # `azure.cli.core.commands.client_factory._get_mgmt_service_client`
            if url.lower().startswith(endpoints.resource_manager.rstrip('/')):
                resource = endpoints.active_directory_resource_id
            else:
                # Otherwise scan all known cloud endpoints for a prefix match.
                from azure.cli.core.cloud import CloudEndpointNotSetException
                for p in [x for x in dir(endpoints) if not x.startswith('_')]:
                    try:
                        value = getattr(endpoints, p)
                    except CloudEndpointNotSetException:
                        continue
                    if isinstance(value, six.string_types) and \
                            url.lower().startswith(value.lower()):
                        resource = value
                        break
        if resource:
            # Prepare `subscription` for `get_raw_token`
            # If this is an ARM request, try to extract subscription ID from the URL.
            # But there are APIs which don't require subscription ID, like
            # /subscriptions, /tenants
            # TODO: In the future when multi-tenant subscription is supported,
            # we won't be able to uniquely identify the token from subscription anymore.
            token_subscription = None
            if url.lower().startswith(endpoints.resource_manager.rstrip('/')):
                token_subscription = _extract_subscription_id(url)
            if token_subscription:
                logger.debug('Retrieving token for resource %s, subscription %s',
                             resource, token_subscription)
                token_info, _, _ = profile.get_raw_token(
                    resource, subscription=token_subscription)
            else:
                logger.debug('Retrieving token for resource %s', resource)
                token_info, _, _ = profile.get_raw_token(resource)
            token_type, token, _ = token_info
            headers = headers or {}
            headers['Authorization'] = '{} {}'.format(token_type, token)
        else:
            logger.warning(
                "Can't derive appropriate Azure AD resource from --url to acquire an access token. "
                "If access token is required, use --resource to specify the resource")

    # https://requests.readthedocs.io/en/latest/user/advanced/#prepared-requests
    s = Session()
    req = Request(method=method, url=url, headers=headers,
                  params=uri_parameters, data=body)
    prepped = s.prepare_request(req)

    # Merge environment settings into session
    settings = s.merge_environment_settings(
        prepped.url, {}, None, not should_disable_connection_verify(), None)
    _log_request(prepped)
    r = s.send(prepped, **settings)
    _log_response(r)

    if not r.ok:
        reason = r.reason
        if r.text:
            reason += '({})'.format(r.text)
        raise CLIError(reason)
    if output_file:
        # Stream the body to disk in small chunks.
        with open(output_file, 'wb') as fd:
            for chunk in r.iter_content(chunk_size=128):
                fd.write(chunk)
    return r
def init_payload(self):
    """Reset the outgoing payload containers."""
    self.data = {}  # {field: [multiple, values]}
    self.files = {}
    # headers keys are case-insensitive
    self.data['headers'] = CaseInsensitiveDict()
def aws_invoke(app, gateway_input, server_name='localhost', server_port='5000',
               http_protocol='HTTP/1.1', TLS=True, block_headers=True):
    """Run an AWS API Gateway proxy event through a WSGI *app* and translate
    the WSGI response back into the gateway's response format.

    :param app: Flask application (must expose ``wsgi_app``).
    :param gateway_input: API Gateway proxy-integration event (dict).
    :param server_name: WSGI SERVER_NAME fallback when no Host header exists.
    :param server_port: WSGI SERVER_PORT fallback when no X-Forwarded-Port.
    :param http_protocol: value for SERVER_PROTOCOL.
    :param TLS: assume the ``https`` scheme when X-Forwarded-Proto is absent.
    :param block_headers: forwarded to ``add_headers`` to filter headers.
    :return: dict with ``statusCode``, ``headers``, ``body`` and
        ``isBase64Encoded``, as expected by API Gateway.
    """
    headers = CaseInsensitiveDict(gateway_input.get('headers', {}))
    requestContext = gateway_input.get('requestContext')
    queryStringParameters = gateway_input.get('queryStringParameters', {})

    # Client IP: prefer the first X-Forwarded-For hop, then the gateway's
    # identity block (the whole requestContext may be absent).
    clientIp = headers.get('x-forwarded-for')
    if clientIp is None:
        clientIp = requestContext.get(
            'identity', {}).get('sourceIp') if requestContext is not None else ''
    else:
        clientIp = clientIp.split(',')[0]

    environ = {
        'REQUEST_METHOD': gateway_input.get('httpMethod', 'GET').upper(),
        'SCRIPT_NAME': '',
        'PATH_INFO': gateway_input.get('path', '/'),
        'QUERY_STRING': urlencode(queryStringParameters)
                        if queryStringParameters is not None else '',
        'SERVER_NAME': headers.get('host', server_name),
        'SERVER_PORT': headers.get('x-forwarded-port', server_port),
        'SERVER_PROTOCOL': http_protocol,
        'SERVER_SOFTWARE': 'flask-serverless',
        'REMOTE_ADDR': clientIp,
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': headers.get('x-forwarded-proto',
                                       'https' if TLS else 'http'),
        'wsgi.input': None,
        'wsgi.errors': sys.stderr,
        'wsgi.multiprocess': True,
        'wsgi.multithread': False,
        'wsgi.run_once': True,
        # BUG FIX: the old code used requestContext['path'] and crashed with a
        # TypeError when the event had no requestContext, even though the
        # clientIp logic above explicitly tolerates requestContext being None.
        'HTTP_X_AWS_PATH': (requestContext or {}).get('path', '')
    }

    if environ['REQUEST_METHOD'] in ('POST', 'PUT'):
        contentType = headers.get('content-type', 'application/octet-stream')
        parsedContentType = parse_options_header(contentType)
        raw = gateway_input.get('body')
        if raw is None or gateway_input.get('isBase64Encoded', False):
            body = b64decode(raw) if raw is not None else None
        else:
            body = raw.encode(parsedContentType[1].get('charset', 'utf-8'))
        add_body(environ, body, contentType)

    add_headers(environ, headers, block_headers)

    response = Response.from_app(app.wsgi_app, environ)
    gateway_output = {
        'headers': dict(response.headers),
        'statusCode': response.status_code,
    }

    compressed = response.headers.get('Content-Encoding') == 'gzip'
    responseType = parse_options_header(
        response.headers.get('Content-Type', 'application/octet-stream'))
    # Only return raw text when the payload is uncompressed and known to be
    # textual; everything else is base64-encoded for the gateway.
    if not compressed and ('charset' in responseType[1]
                           or responseType[0] in textTypes
                           or responseType[0][0:5] == 'text/'):
        gateway_output['body'] = response.data.decode(
            responseType[1].get('charset', 'utf-8'))
        gateway_output['isBase64Encoded'] = False
    else:
        gateway_output['body'] = b64encode(response.data).decode('utf-8')
        gateway_output['isBase64Encoded'] = True
    return gateway_output
def test_docstring_example(self):
    """A key stored once is retrievable under any casing, yet iteration
    yields the originally supplied casing."""
    d = CaseInsensitiveDict()
    d['Accept'] = 'application/json'
    stored = d['aCCEPT']
    assert stored == 'application/json'
    assert list(d) == ['Accept']
def test_kwargs_init(self):
    """Keyword-argument construction stores each key, matched ignoring case."""
    mapping = CaseInsensitiveDict(FOO='foo', BAr='bar')
    assert len(mapping) == 2
    for key in ('foo', 'bar'):
        assert key in mapping
def test_mapping_init(self):
    """Construction from a plain dict stores each key, matched ignoring case."""
    mapping = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})
    assert len(mapping) == 2
    for key in ('foo', 'bar'):
        assert key in mapping
def get_or_create_headers():
    """Ensure ``request.headers`` is set and return it."""
    current = request.headers
    # Always reassign (as the original did), substituting an empty
    # case-insensitive mapping only when no headers exist yet.
    request.headers = CaseInsensitiveDict() if current is None else current
    return request.headers
class Blink():
    """Class to initialize communication.

    Top-level entry point: holds credentials, auth state, and the collection
    of sync modules / cameras discovered for the account.
    """

    def __init__(self, username=None, password=None, refresh_rate=REFRESH_RATE):
        """
        Initialize Blink system.

        :param username: Blink username (usually email address)
        :param password: Blink password
        :param refresh_rate: Refresh rate of blink information.
                             Defaults to 15 (seconds)
        """
        self._username = username
        self._password = password
        # Auth state, populated by get_auth_token().
        self._token = None
        self._auth_header = None
        self._host = None
        self.account_id = None
        self.network_ids = []
        self.urls = None
        # Sync modules keyed by network name; names matched case-insensitively.
        self.sync = CaseInsensitiveDict({})
        self.region = None
        self.region_id = None
        # Timestamp (epoch seconds) of the last refresh; used for throttling.
        self.last_refresh = None
        self.refresh_rate = refresh_rate
        self.session = None
        self.networks = []
        # Merged camera mapping across all sync modules.
        self.cameras = CaseInsensitiveDict({})
        self._login_url = LOGIN_URL

    @property
    def auth_header(self):
        """Return the authentication header."""
        return self._auth_header

    def start(self):
        """
        Perform full system setup.

        Method logs in and sets auth token, urls, and ids for future requests.
        Essentially this is just a wrapper function for ease of use.
        """
        # Prompt interactively only when credentials were not supplied.
        if self._username is None or self._password is None:
            self.login()
        else:
            self.get_auth_token()
        networks = self.get_ids()
        # One BlinkSyncModule per onboarded network.
        for network_name, network_id in networks.items():
            sync_module = BlinkSyncModule(self, network_name, network_id)
            sync_module.start()
            self.sync[network_name] = sync_module
        self.cameras = self.merge_cameras()

    def login(self):
        """Prompt user for username and password."""
        # NOTE(review): the lines below appear corrupted/redacted in this
        # source (the literal '******' sequences are not valid Python and the
        # password prompt / auth call are missing) — restore this method from
        # version control before use.
        self._username = input("Username:"******"Password:"******"Login successful!")
        return True
        _LOGGER.warning("Unable to login with %s.", self._username)
        return False

    def get_auth_token(self):
        """Retrieve the authentication token from Blink."""
        if not isinstance(self._username, str):
            raise BlinkAuthenticationException(ERROR.USERNAME)
        if not isinstance(self._password, str):
            raise BlinkAuthenticationException(ERROR.PASSWORD)
        login_url = LOGIN_URL
        self.session = create_session()
        response = api.request_login(self, login_url,
                                     self._username, self._password)
        if response.status_code == 200:
            response = response.json()
            # 'region' is expected to hold exactly one id -> name pair.
            (self.region_id, self.region), = response['region'].items()
        else:
            _LOGGER.debug(
                ("Received response code %s "
                 "when authenticating, "
                 "trying new url"), response.status_code
            )
            # Fall back to the backup login endpoint with placeholder region.
            login_url = LOGIN_BACKUP_URL
            response = api.request_login(self, login_url,
                                         self._username, self._password)
            self.region_id = 'piri'
            self.region = "UNKNOWN"
        # NOTE(review): on the backup path above, `response` is still a raw
        # HTTP response object (it is never .json()'d), so the subscript
        # below presumably fails — confirm against the api helper.
        self._host = "{}.{}".format(self.region_id, BLINK_URL)
        self._token = response['authtoken']['authtoken']
        self._auth_header = {'Host': self._host,
                             'TOKEN_AUTH': self._token}
        self.networks = response['networks']
        self.urls = BlinkURLHandler(self.region_id)
        self._login_url = login_url
        return self._auth_header

    def get_ids(self):
        """Set the network ID and Account ID."""
        response = api.request_networks(self)
        # Look for only onboarded network, flag warning if multiple
        # since it's unexpected
        all_networks = []
        network_dict = {}
        for network, status in self.networks.items():
            if status['onboarded']:
                all_networks.append('{}'.format(network))
                network_dict[status['name']] = network
        # For the first onboarded network we find, grab the account id
        for resp in response['networks']:
            if str(resp['id']) in all_networks:
                self.account_id = resp['account_id']
                break
        self.network_ids = all_networks
        return network_dict

    def refresh(self, force_cache=False):
        """
        Perform a system refresh.

        :param force_cache: Force an update of the camera cache
        """
        # Refresh is throttled by refresh_rate unless explicitly forced.
        if self.check_if_ok_to_update() or force_cache:
            for sync_name, sync_module in self.sync.items():
                _LOGGER.debug("Attempting refresh of sync %s", sync_name)
                sync_module.refresh(force_cache=force_cache)

    def check_if_ok_to_update(self):
        """Check if it is ok to perform an http request.

        Returns True (and records the time) when at least ``refresh_rate``
        seconds have passed since the previous successful check.
        """
        current_time = int(time.time())
        last_refresh = self.last_refresh
        if last_refresh is None:
            last_refresh = 0
        if current_time >= (last_refresh + self.refresh_rate):
            self.last_refresh = current_time
            return True
        return False

    def merge_cameras(self):
        """Merge all sync camera dicts into one."""
        combined = CaseInsensitiveDict({})
        for sync in self.sync:
            combined = merge_dicts(combined, self.sync[sync].cameras)
        return combined
def merge_cameras(self):
    """Collect every sync module's cameras into a single mapping."""
    merged = CaseInsensitiveDict({})
    for _name, module in self.sync.items():
        merged = merge_dicts(merged, module.cameras)
    return merged
def test_len(self):
    """Overwriting a key under a different casing must not grow the dict."""
    d = CaseInsensitiveDict({'a': 'a', 'b': 'b'})
    d['A'] = 'a'
    assert len(d) == 2
def headers(self):
    """Lazily cache headers from the response; never return None."""
    cached = self._headers
    if cached is None and self.response:
        cached = self.response.headers
        self._headers = cached
    # Fall back to an empty mapping when nothing was cached.
    return cached or CaseInsensitiveDict()
def headers(self, new_headers: CaseInsensitiveDict):
    """Store *new_headers* as a plain dict with lower-cased keys."""
    self.response_headers = dict(new_headers.lower_items())
def request(self, method, url, params=None, data=None, files=None,
            headers=None, ajax=False, stream=None):
    """Issue an HTTP request with the site's browser-like default headers.

    :rtype: requests.models.Response
    """
    # Browser-like defaults; caller-supplied headers are merged in last and
    # therefore win over everything below.
    _headers = CaseInsensitiveDict({
        'Accept': 'text/html,application/xhtml+xml,application/'
                  'xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'es-ES,es;q=0.8',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'Host': self.host,
        'Origin': self.home_page[:-1],
        'Referer': self.home_page,
        'User-Agent': self.registry.get(Registry.Key.USER_AGENT),
    })
    if method.upper() == 'POST':
        _headers['Content-Type'] = ('application/x-www-form-urlencoded; '
                                    'charset=UTF-8')
    if ajax:
        # AJAX endpoints return JSON and need the CSRF token when present.
        _headers['Accept'] = 'application/json'
        csrftoken = self.http.cookies.get('csrftoken', domain=self.host)
        if csrftoken:
            _headers['X-CSRFToken'] = csrftoken
        _headers['X-Requested-With'] = 'XMLHttpRequest'
    if headers:
        _headers.update(headers)
    response = self.http.request(method, url, params=params, data=data,
                                 headers=_headers, files=files, timeout=60,
                                 proxies=self.proxies, stream=stream)
    response.raise_for_status()
    # Persist any cookies the server handed back.
    self.registry.update(Registry.Key.COOKIES, response.cookies)
    return response
def test_update_retains_unchanged(self):
    """update() must leave keys it does not mention untouched."""
    d = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})
    d.update({'foo': 'newfoo'})
    assert d['bar'] == 'bar'
def __init__(self, **kwargs):
    """Initialize the instance and set up the channel-name lookup table."""
    super().__init__(**kwargs)
    # Channel-name lookup; keys are matched case-insensitively even though
    # the annotation advertises a plain Dict[str, str] interface.
    self._channel_name_map: Dict[str, str] = CaseInsensitiveDict()
def test_iter(self):
    """Iteration yields the originally-cased keys."""
    d = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})
    expected = frozenset(['Spam', 'Eggs'])
    assert frozenset(iter(d)) == expected
class Config(object):
    """
    This class is responsible for:

      1) Building and giving access to `effective_configuration` from:
         * `Config.__DEFAULT_CONFIG` -- some sane default values
         * `dynamic_configuration` -- configuration stored in DCS
         * `local_configuration` -- configuration from `config.yml` or environment

      2) Saving and loading `dynamic_configuration` into 'patroni.dynamic.json' file
         located in local_configuration['postgresql']['data_dir'] directory.
         This is necessary to be able to restore `dynamic_configuration` if DCS
         was accidentally wiped

      3) Loading of configuration file in the old format and converting it into
         new format

      4) Mimicking some of the `dict` interfaces to make it possible to work with
         it as with the old `config` object.
    """

    # Prefix for every environment variable Patroni understands.
    PATRONI_ENV_PREFIX = 'PATRONI_'
    # Env var that may carry the full YAML configuration inline.
    PATRONI_CONFIG_VARIABLE = PATRONI_ENV_PREFIX + 'CONFIGURATION'

    __CACHE_FILENAME = 'patroni.dynamic.json'
    # Sane defaults; keys present here are the only top-level values that may
    # be overridden from DCS (see _safe_copy_dynamic_configuration).
    __DEFAULT_CONFIG = {
        'ttl': 30, 'loop_wait': 10, 'retry_timeout': 10,
        'maximum_lag_on_failover': 1048576,
        'check_timeline': False,
        'master_start_timeout': 300,
        'synchronous_mode': False,
        'synchronous_mode_strict': False,
        'standby_cluster': {
            'create_replica_methods': '',
            'host': '',
            'port': '',
            'primary_slot_name': '',
            'restore_command': '',
            'archive_cleanup_command': '',
            'recovery_min_apply_delay': ''
        },
        'postgresql': {
            'bin_dir': '',
            'use_slots': True,
            # Seed postgresql.parameters with the defaults of every
            # command-line option Patroni knows about.
            'parameters': CaseInsensitiveDict({p: v[0] for p, v in ConfigHandler.CMDLINE_OPTIONS.items()})
        },
        'watchdog': {
            'mode': 'automatic',
        }
    }

    def __init__(self):
        self._modify_index = -1
        self._dynamic_configuration = {}

        self.__environment_configuration = self._build_environment_configuration()

        # Patroni reads the configuration from the command-line argument if it exists, otherwise from the environment
        self._config_file = len(sys.argv) >= 2 and os.path.isfile(sys.argv[1]) and sys.argv[1]
        if self._config_file:
            self._local_configuration = self._load_config_file()
        else:
            config_env = os.environ.pop(self.PATRONI_CONFIG_VARIABLE, None)
            self._local_configuration = config_env and yaml.safe_load(config_env) or self.__environment_configuration
            if not self._local_configuration:
                print('Usage: {0} config.yml'.format(sys.argv[0]))
                print('\tPatroni may also read the configuration from the {0} environment variable'.
                      format(self.PATRONI_CONFIG_VARIABLE))
                sys.exit(1)

        self.__effective_configuration = self._build_effective_configuration({}, self._local_configuration)
        self._data_dir = self.__effective_configuration['postgresql']['data_dir']
        self._cache_file = os.path.join(self._data_dir, self.__CACHE_FILENAME)
        # Restore the last known dynamic configuration from the on-disk cache.
        self._load_cache()
        self._cache_needs_saving = False

    @property
    def config_file(self):
        # Path of config.yml, or False when configured from the environment.
        return self._config_file

    @property
    def dynamic_configuration(self):
        # Deep copy so callers cannot mutate our internal state.
        return deepcopy(self._dynamic_configuration)

    def check_mode(self, mode):
        """Return the boolean value of dynamic configuration key *mode*."""
        return bool(parse_bool(self._dynamic_configuration.get(mode)))

    def _load_config_file(self):
        """Loads config.yaml from filesystem and applies some values which were set via ENV"""
        with open(self._config_file) as f:
            config = yaml.safe_load(f)
            patch_config(config, self.__environment_configuration)
            return config

    def _load_cache(self):
        """Load the cached dynamic configuration from disk, if present."""
        if os.path.isfile(self._cache_file):
            try:
                with open(self._cache_file) as f:
                    self.set_dynamic_configuration(json.load(f))
            except Exception:
                logger.exception('Exception when loading file: %s', self._cache_file)

    def save_cache(self):
        """Atomically write the dynamic configuration cache (tmp file + move)."""
        if self._cache_needs_saving:
            tmpfile = fd = None
            try:
                (fd, tmpfile) = tempfile.mkstemp(prefix=self.__CACHE_FILENAME, dir=self._data_dir)
                with os.fdopen(fd, 'w') as f:
                    fd = None
                    json.dump(self.dynamic_configuration, f)
                tmpfile = shutil.move(tmpfile, self._cache_file)
                self._cache_needs_saving = False
            except Exception:
                logger.exception('Exception when saving file: %s', self._cache_file)
            # Best-effort cleanup when the write failed part-way through.
            if fd:
                try:
                    os.close(fd)
                except Exception:
                    logger.error('Can not close temporary file %s', tmpfile)
            if tmpfile and os.path.exists(tmpfile):
                try:
                    os.remove(tmpfile)
                except Exception:
                    logger.error('Can not remove temporary file %s', tmpfile)

    # configuration could be either ClusterConfig or dict
    def set_dynamic_configuration(self, configuration):
        if isinstance(configuration, ClusterConfig):
            if self._modify_index == configuration.modify_index:
                return False  # If the index didn't changed there is nothing to do
            self._modify_index = configuration.modify_index
            configuration = configuration.data

        if not deep_compare(self._dynamic_configuration, configuration):
            try:
                # Rebuild first; commit the new dynamic config only on success.
                self.__effective_configuration = self._build_effective_configuration(configuration,
                                                                                     self._local_configuration)
                self._dynamic_configuration = configuration
                self._cache_needs_saving = True
                return True
            except Exception:
                logger.exception('Exception when setting dynamic_configuration')

    def reload_local_configuration(self, dry_run=False):
        """Re-read config.yml; with *dry_run* only report whether it changed."""
        if self.config_file:
            try:
                configuration = self._load_config_file()
                if not deep_compare(self._local_configuration, configuration):
                    new_configuration = self._build_effective_configuration(self._dynamic_configuration, configuration)
                    if dry_run:
                        return not deep_compare(new_configuration, self.__effective_configuration)
                    self._local_configuration = configuration
                    self.__effective_configuration = new_configuration
                    return True
                else:
                    logger.info('No configuration items changed, nothing to reload.')
            except Exception:
                logger.exception('Exception when reloading local configuration from %s', self.config_file)
                if dry_run:
                    raise

    @staticmethod
    def _process_postgresql_parameters(parameters, is_local=False):
        # Filter postgresql.parameters: options managed via the command line
        # are only kept when they come from DCS and pass their validator.
        return {name: value for name, value in (parameters or {}).items()
                if name not in ConfigHandler.CMDLINE_OPTIONS or
                not is_local and ConfigHandler.CMDLINE_OPTIONS[name][1](value)}

    def _safe_copy_dynamic_configuration(self, dynamic_configuration):
        """Merge the DCS configuration onto the defaults, whitelisting keys."""
        config = deepcopy(self.__DEFAULT_CONFIG)

        for name, value in dynamic_configuration.items():
            if name == 'postgresql':
                for name, value in (value or {}).items():
                    if name == 'parameters':
                        config['postgresql'][name].update(self._process_postgresql_parameters(value))
                    elif name not in ('connect_address', 'listen', 'data_dir', 'pgpass', 'authentication'):
                        config['postgresql'][name] = deepcopy(value)
            elif name == 'standby_cluster':
                for name, value in (value or {}).items():
                    if name in self.__DEFAULT_CONFIG['standby_cluster']:
                        config['standby_cluster'][name] = deepcopy(value)
            elif name in config:  # only variables present in __DEFAULT_CONFIG allowed to be overriden from DCS
                if name in ('synchronous_mode', 'synchronous_mode_strict'):
                    config[name] = value
                else:
                    config[name] = int(value)
        return config

    @staticmethod
    def _build_environment_configuration():
        """Assemble a configuration dict from PATRONI_* environment variables."""
        ret = defaultdict(dict)

        def _popenv(name):
            # Pop (consume) PATRONI_<NAME> so it is read only once.
            return os.environ.pop(Config.PATRONI_ENV_PREFIX + name.upper(), None)

        for param in ('name', 'namespace', 'scope'):
            value = _popenv(param)
            if value:
                ret[param] = value

        def _fix_log_env(name, oldname):
            # Map legacy log variable names onto the new PATRONI_LOG_* scheme.
            value = _popenv(oldname)
            name = Config.PATRONI_ENV_PREFIX + 'LOG_' + name.upper()
            if value and name not in os.environ:
                os.environ[name] = value

        for name, oldname in (('level', 'loglevel'), ('format', 'logformat'), ('dateformat', 'log_datefmt')):
            _fix_log_env(name, oldname)

        def _set_section_values(section, params):
            for param in params:
                value = _popenv(section + '_' + param)
                if value:
                    ret[section][param] = value

        _set_section_values('restapi', ['listen', 'connect_address', 'certfile', 'keyfile'])
        _set_section_values('postgresql', ['listen', 'connect_address', 'config_dir', 'data_dir', 'pgpass', 'bin_dir'])
        _set_section_values('log', ['level', 'format', 'dateformat', 'max_queue_size',
                                    'dir', 'file_size', 'file_num', 'loggers'])

        def _parse_dict(value):
            # Accept both "{a: 1}" and the brace-less shorthand "a: 1".
            if not value.strip().startswith('{'):
                value = '{{{0}}}'.format(value)
            try:
                return yaml.safe_load(value)
            except Exception:
                logger.exception('Exception when parsing dict %s', value)
                return None

        value = ret.get('log', {}).pop('loggers', None)
        if value:
            value = _parse_dict(value)
            if value:
                ret['log']['loggers'] = value

        def _get_auth(name, params=None):
            # Collect <name>_USERNAME/<name>_PASSWORD (or the given params).
            ret = {}
            for param in params or _AUTH_ALLOWED_PARAMETERS[:2]:
                value = _popenv(name + '_' + param)
                if value:
                    ret[param] = value
            return ret

        restapi_auth = _get_auth('restapi')
        if restapi_auth:
            ret['restapi']['authentication'] = restapi_auth

        authentication = {}
        for user_type in ('replication', 'superuser', 'rewind'):
            entry = _get_auth(user_type, _AUTH_ALLOWED_PARAMETERS)
            if entry:
                authentication[user_type] = entry

        if authentication:
            ret['postgresql']['authentication'] = authentication

        def _parse_list(value):
            # Accept both YAML list syntax and the comma-separated shorthand.
            if not (value.strip().startswith('-') or '[' in value):
                value = '[{0}]'.format(value)
            try:
                return yaml.safe_load(value)
            except Exception:
                logger.exception('Exception when parsing list %s', value)
                return None

        for param in list(os.environ.keys()):
            if param.startswith(Config.PATRONI_ENV_PREFIX):
                # PATRONI_(ETCD|CONSUL|ZOOKEEPER|EXHIBITOR|...)_(HOSTS?|PORT|..)
                name, suffix = (param[8:].split('_', 1) + [''])[:2]
                if suffix in ('HOST', 'HOSTS', 'PORT', 'USE_PROXIES', 'PROTOCOL', 'SRV', 'URL', 'PROXY',
                              'CACERT', 'CERT', 'KEY', 'VERIFY', 'TOKEN', 'CHECKS', 'DC', 'CONSISTENCY',
                              'REGISTER_SERVICE', 'SERVICE_CHECK_INTERVAL', 'NAMESPACE', 'CONTEXT',
                              'USE_ENDPOINTS', 'SCOPE_LABEL', 'ROLE_LABEL', 'POD_IP', 'PORTS', 'LABELS') and name:
                    value = os.environ.pop(param)
                    if suffix == 'PORT':
                        value = value and parse_int(value)
                    elif suffix in ('HOSTS', 'PORTS', 'CHECKS'):
                        value = value and _parse_list(value)
                    elif suffix == 'LABELS':
                        value = _parse_dict(value)
                    elif suffix in ('USE_PROXIES', 'REGISTER_SERVICE'):
                        value = parse_bool(value)
                    if value:
                        ret[name.lower()][suffix.lower()] = value

        if 'etcd' in ret:
            ret['etcd'].update(_get_auth('etcd'))

        users = {}
        for param in list(os.environ.keys()):
            if param.startswith(Config.PATRONI_ENV_PREFIX):
                name, suffix = (param[8:].rsplit('_', 1) + [''])[:2]
                # PATRONI_<username>_PASSWORD=<password>, PATRONI_<username>_OPTIONS=<option1,option2,...>
                # CREATE USER "<username>" WITH <OPTIONS> PASSWORD '<password>'
                if name and suffix == 'PASSWORD':
                    password = os.environ.pop(param)
                    if password:
                        users[name] = {'password': password}
                        options = os.environ.pop(param[:-9] + '_OPTIONS', None)
                        options = options and _parse_list(options)
                        if options:
                            users[name]['options'] = options
        if users:
            ret['bootstrap']['users'] = users

        return ret

    def _build_effective_configuration(self, dynamic_configuration, local_configuration):
        """Merge DCS (dynamic) and local configuration into one effective dict."""
        config = self._safe_copy_dynamic_configuration(dynamic_configuration)
        for name, value in local_configuration.items():
            if name == 'postgresql':
                for name, value in (value or {}).items():
                    if name == 'parameters':
                        config['postgresql'][name].update(self._process_postgresql_parameters(value, True))
                    elif name != 'use_slots':  # replication slots must be enabled/disabled globally
                        config['postgresql'][name] = deepcopy(value)
            elif name not in config or name in ['watchdog']:
                config[name] = deepcopy(value) if value else {}

        # restapi server expects to get restapi.auth = 'username:password'
        if 'authentication' in config['restapi']:
            config['restapi']['auth'] = '{username}:{password}'.format(**config['restapi']['authentication'])

        # special treatment for old config
        # 'exhibitor' inside 'zookeeper':
        if 'zookeeper' in config and 'exhibitor' in config['zookeeper']:
            config['exhibitor'] = config['zookeeper'].pop('exhibitor')
            config.pop('zookeeper')

        pg_config = config['postgresql']
        # no 'authentication' in 'postgresql', but 'replication' and 'superuser'
        if 'authentication' not in pg_config:
            pg_config['use_pg_rewind'] = 'pg_rewind' in pg_config
            pg_config['authentication'] = {u: pg_config[u] for u in ('replication', 'superuser') if u in pg_config}
        # no 'superuser' in 'postgresql'.'authentication'
        if 'superuser' not in pg_config['authentication'] and 'pg_rewind' in pg_config:
            pg_config['authentication']['superuser'] = pg_config['pg_rewind']

        # handle setting additional connection parameters that may be available
        # in the configuration file, such as SSL connection parameters
        for name, value in pg_config['authentication'].items():
            pg_config['authentication'][name] = {n: v for n, v in value.items() if n in _AUTH_ALLOWED_PARAMETERS}

        # no 'name' in config
        if 'name' not in config and 'name' in pg_config:
            config['name'] = pg_config['name']

        # Propagate a few top-level values down into the postgresql section.
        updated_fields = (
            'name',
            'scope',
            'retry_timeout',
            'synchronous_mode',
            'synchronous_mode_strict',
        )
        pg_config.update({p: config[p] for p in updated_fields if p in config})
        return config

    def get(self, key, default=None):
        # dict-like read access to the effective configuration.
        return self.__effective_configuration.get(key, default)

    def __contains__(self, key):
        return key in self.__effective_configuration

    def __getitem__(self, key):
        return self.__effective_configuration[key]

    def copy(self):
        return deepcopy(self.__effective_configuration)
def test_setdefault(self):
    """setdefault() finds existing keys case-insensitively."""
    d = CaseInsensitiveDict({'Spam': 'blueval'})
    existing = d.setdefault('spam', 'notblueval')
    assert existing == 'blueval'
    added = d.setdefault('notspam', 'notblueval')
    assert added == 'notblueval'
from requests.structures import CaseInsensitiveDict
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.support.event_firing_webdriver import EventFiringWebDriver
from . import drivers

LOGGER = logging.getLogger(__name__)

# Map of user-facing browser/provider names to WebDriver factories.
# Case-insensitive so callers may write "chrome", "Chrome", "CHROME", etc.
# Cloud providers (BrowserStack, SauceLabs, ...) all route through
# webdriver.Remote with provider-specific capabilities supplied elsewhere.
SUPPORTED_DRIVERS = CaseInsensitiveDict({
    "BrowserStack": webdriver.Remote,
    "CrossBrowserTesting": webdriver.Remote,
    "Chrome": webdriver.Chrome,
    "Edge": webdriver.Edge,
    "Firefox": webdriver.Firefox,
    "IE": webdriver.Ie,
    "PhantomJS": webdriver.PhantomJS,
    "Remote": webdriver.Remote,
    "Safari": webdriver.Safari,
    "SauceLabs": webdriver.Remote,
    "TestingBot": webdriver.Remote,
})

# Appium support is registered only when the optional dependency is installed.
try:
    from appium import webdriver as appiumdriver
    SUPPORTED_DRIVERS["Appium"] = appiumdriver.Remote
except ImportError:
    pass  # Appium is optional.
# (final assertion of a parametrized test whose definition is above this chunk)
assert select_proxy(url, proxies) == expected


@pytest.mark.parametrize('value, expected', (('foo="is a fish", bar="as well"', {
    'foo': 'is a fish',
    'bar': 'as well'
}), ('key_without_value', {
    'key_without_value': None
})))
def test_parse_dict_header(value, expected):
    # Quoted values are unquoted; bare keys map to None.
    assert parse_dict_header(value) == expected


@pytest.mark.parametrize('value, expected', (
    (CaseInsensitiveDict(), None),
    (CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'
                          }), 'utf-8'),
    # text/* without an explicit charset falls back to RFC 2616's ISO-8859-1.
    (CaseInsensitiveDict({'content-type': 'text/plain'}), 'ISO-8859-1'),
))
def test_get_encoding_from_headers(value, expected):
    assert get_encoding_from_headers(value) == expected


@pytest.mark.parametrize('value, length', (
    ('', 0),
    ('T', 1),
    ('Test', 4),
))
def test_iter_slices(value, length):
    # Slice size 1 => one slice per character.
    assert len(list(iter_slices(value, 1))) == length
import urllib2 except: from urllib.request import Request, urlopen from io import StringIO from urllib.parse import unquote import requests from requests.structures import CaseInsensitiveDict from bs4 import BeautifulSoup import base64 baseUrl = "https://dailysport.pw/" chnUrl = "https://dailysport.pw/c" headers = CaseInsensitiveDict() headers[ "User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36" headers["Upgrade-Insecure-Requests"] = "1" js = ''' <script> function myFnc(item){ console.log(item.id); window.parent.ifrFcn(item.id); } function myMove(){ console.log("moving"); if ( window.location !== window.parent.location ) { window.parent.ifMove(); } else { // The page is not in an iframe
def set_configuration_setting(
        self, configuration_setting, match_condition=MatchConditions.Unconditionally, **kwargs
):
    # type: (ConfigurationSetting, MatchConditions, dict) -> ConfigurationSetting

    """Add or update a ConfigurationSetting.

    Creates the setting when no setting with the same key and label exists,
    otherwise updates it, honoring the optional etag *match_condition*.

    :param configuration_setting: the ConfigurationSetting to be added
        (if not exists) or updated (if exists) to the service
    :type configuration_setting: :class:`ConfigurationSetting`
    :param match_condition: the match condition to use upon the etag
    :type MatchConditions: :class:`MatchConditions`
    :keyword dict headers: if "headers" exists, its value (a dict) will be
        added to the http request header
    :return: The ConfigurationSetting returned from the service
    :rtype: :class:`ConfigurationSetting`
    :raises: :class:`ResourceReadOnlyError`, :class:`ResourceModifiedError`, \
        :class:`HttpRequestError`

    Example

    .. code-block:: python

        config_setting = ConfigurationSetting(
            key="MyKey",
            label="MyLabel",
            value="my set value",
            content_type="my set content type",
            tags={"my set tag": "my set tag value"}
        )
        returned_config_setting = client.set_configuration_setting(config_setting)
    """
    key_value = KeyValue(
        key=configuration_setting.key,
        label=configuration_setting.label,
        content_type=configuration_setting.content_type,
        value=configuration_setting.value,
        tags=configuration_setting.tags,
    )
    custom_headers = CaseInsensitiveDict(kwargs.get("headers"))

    # 409 always means the setting is locked; a 412 precondition failure is
    # surfaced as a different error depending on the requested condition.
    error_map = {409: ResourceReadOnlyError}
    condition_errors = {
        MatchConditions.IfNotModified: ResourceModifiedError,
        MatchConditions.IfModified: ResourceNotModifiedError,
        MatchConditions.IfPresent: ResourceNotFoundError,
        MatchConditions.IfMissing: ResourceExistsError,
    }
    precondition_error = condition_errors.get(match_condition)
    if precondition_error is not None:
        error_map[412] = precondition_error

    key_value_set = self._impl.put_key_value(
        entity=key_value,
        key=key_value.key,
        label=key_value.label,
        if_match=prep_if_match(configuration_setting.etag, match_condition),
        if_none_match=prep_if_none_match(configuration_setting.etag, match_condition),
        headers=custom_headers,
        error_map=error_map,
    )
    return ConfigurationSetting._from_key_value(key_value_set)
def request(self, method, url, headers=None, params=None, data=None,
            files=None, data_json=None, accept_json=True, json_ver=None,
            ua_default=True, **kwargs):
    """
    Make a request to Rest API.

    :param method: HTTP verb ('GET', 'POST', ...).
    :param url: path appended to ``self.url``.
    :param headers: optional extra headers; never mutated (see below).
    :param accept_json: send JSON Accept/Content-Type headers.
    :param json_ver: optional API version added to the Accept header.
    :param ua_default: start from the SDK's default user-agent header set.
    @return Return response object.
    """
    full_url = '%s/%s' % (self.url, url.strip('/'))
    # BUG FIX: the old signature used the mutable default `headers={}`; with
    # ua_default=False the code below then update()d the caller's dict (or
    # the shared default) in place. Default to None and always work on a copy.
    input_headers = _remove_null_values(headers) if headers else {}
    if ua_default:
        headers = CaseInsensitiveDict(
            {'user-agent': 'azion-sdk-python-' + __version__})
    else:
        headers = dict(headers) if headers else {}
    if accept_json:
        if json_ver:
            h_accept = 'application/json; version={}'.format(json_ver)
        else:
            h_accept = 'application/json'
        headers.update({"accept": h_accept})
        headers.update({"Content-Type": "application/json"})
    try:
        # Attach the session token when one is available.
        if self.token_sess is not None:
            headers.update(
                {'Authorization': "Token {}".format(self.token_sess)})
    except ServiceException as e:
        raise ServiceException(
            "Unable to get Session token. ERROR: {}".format(e))
    # Caller-supplied headers win over the defaults set above.
    headers.update(input_headers)

    # Remove keys with None values
    params = _remove_null_values(params)
    params = _cleanup_param_values(params)
    data = _remove_null_values(data)
    files = _remove_null_values(files)

    try:
        response = requests.request(method=method,
                                    url=full_url,
                                    headers=headers,
                                    params=params,
                                    data=data,
                                    json=data_json,
                                    **kwargs)
    except Exception:
        logger.error("ERROR requesting uri(%s) payload(%s)" % (url, data))
        raise

    return response
def forward(self, method):
    """Proxy the current request to the backend, letting registered
    listeners rewrite the URL/request/response along the way.

    :param method: HTTP verb of the incoming request.
    """
    data = self.data_bytes
    forward_headers = CaseInsensitiveDict(self.headers)

    # force close connection
    if forward_headers.get('Connection', '').lower() != 'keep-alive':
        self.close_connection = 1

    # Strip scheme+host when the client sent an absolute-form request line.
    path = self.path
    if '://' in path:
        path = '/' + path.split('://', 1)[1].split('/', 1)[1]

    # Listeners may redirect the request to a different backend URL.
    forward_url = self.proxy.forward_url
    for listener in self._listeners():
        if listener:
            forward_url = listener.get_forward_url(
                method, path, data, forward_headers) or forward_url

    proxy_url = '%s%s' % (forward_url, path)

    target_url = self.path
    if '://' not in target_url:
        target_url = '%s%s' % (forward_url, target_url)

    # update original "Host" header (moto s3 relies on this behavior)
    if not forward_headers.get('Host'):
        forward_headers['host'] = urlparse(target_url).netloc
    if 'localhost.atlassian.io' in forward_headers.get('Host'):
        forward_headers['host'] = 'localhost'
    forward_headers['X-Forwarded-For'] = self.build_x_forwarded_for(
        forward_headers)

    try:
        response = None
        modified_request = None

        # update listener (pre-invocation). A listener may short-circuit with
        # a full Response, a dict (serialized to JSON), a replacement
        # Request, an int status code, or True to pass through unchanged.
        for listener in self._listeners():
            if not listener:
                continue
            listener_result = listener.forward_request(
                method=method, path=path, data=data, headers=forward_headers)
            if isinstance(listener_result, Response):
                response = listener_result
                break
            if isinstance(listener_result, dict):
                response = Response()
                response._content = json.dumps(listener_result)
                response.status_code = 200
                break
            elif isinstance(listener_result, Request):
                modified_request = listener_result
                data = modified_request.data
                forward_headers = modified_request.headers
                break
            elif listener_result is not True:
                # get status code from response, or use Bad Gateway status code
                code = listener_result if isinstance(listener_result, int) else 503
                self.send_response(code)
                self.send_header('Content-Length', '0')
                # allow pre-flight CORS headers by default
                self._send_cors_headers()
                self.end_headers()
                return

        # perform the actual invocation of the backend service
        if response is None:
            forward_headers['Connection'] = forward_headers.get(
                'Connection') or 'close'
            data_to_send = self.data_bytes
            request_url = proxy_url
            if modified_request:
                if modified_request.url:
                    request_url = '%s%s' % (forward_url, modified_request.url)
                data_to_send = modified_request.data

            response = self.method(request_url,
                                   data=data_to_send,
                                   headers=forward_headers,
                                   stream=True)

            # prevent requests from processing response body
            if not response._content_consumed and response.raw:
                response._content = response.raw.read()

        # update listener (post-invocation)
        if self.proxy.update_listener:
            kwargs = {
                'method': method,
                'path': path,
                'data': data,
                'headers': forward_headers,
                'response': response
            }
            if 'request_handler' in inspect.getargspec(
                    self.proxy.update_listener.return_response)[0]:
                # some listeners (e.g., sqs_listener.py) require additional details like the original
                # request port, hence we pass in a reference to this request handler as well.
                kwargs['request_handler'] = self
            updated_response = self.proxy.update_listener.return_response(
                **kwargs)
            if isinstance(updated_response, Response):
                response = updated_response

        # copy headers and return response
        self.send_response(response.status_code)

        content_length_sent = False
        for header_key, header_value in iteritems(response.headers):
            # filter out certain headers that we don't want to transmit
            if header_key.lower() not in ('transfer-encoding', 'date', 'server'):
                self.send_header(header_key, header_value)
                content_length_sent = content_length_sent or header_key.lower(
                ) == 'content-length'
        if not content_length_sent:
            self.send_header(
                'Content-Length',
                '%s' % len(response.content) if response.content else 0)

        # allow pre-flight CORS headers by default
        self._send_cors_headers(response)

        self.end_headers()
        if response.content and len(response.content):
            self.wfile.write(to_bytes(response.content))
    except Exception as e:
        trace = str(traceback.format_exc())
        conn_errors = ('ConnectionRefusedError', 'NewConnectionError',
                       'Connection aborted', 'Unexpected EOF',
                       'Connection reset by peer')
        # NOTE: the generator variable deliberately scans conn_errors; it
        # shadows the exception name `e` only inside the genexp scope.
        conn_error = any(e in trace for e in conn_errors)
        error_msg = 'Error forwarding request: %s %s' % (e, trace)
        if 'Broken pipe' in trace:
            LOG.warn(
                'Connection prematurely closed by client (broken pipe).')
        elif not self.proxy.quiet or not conn_error:
            LOG.error(error_msg)
            if os.environ.get(ENV_INTERNAL_TEST_RUN):
                # During a test run, we also want to print error messages, because
                # log messages are delayed until the entire test run is over, and
                # hence we are missing messages if the test hangs for some reason.
                print('ERROR: %s' % error_msg)
        self.send_response(502)  # bad gateway
        self.end_headers()
        # force close connection
        self.close_connection = 1
    finally:
        try:
            self.wfile.flush()
        except Exception as e:
            LOG.warning('Unable to flush write file: %s' % e)
def test_getitem(self):
    """__getitem__ ignores the casing of the lookup key."""
    d = CaseInsensitiveDict({'Spam': 'blueval'})
    for key in ('spam', 'SPAM'):
        assert d[key] == 'blueval'
def set_headers(self):
    """Build the default header set used by the CLI."""
    from requests.structures import CaseInsensitiveDict
    default_headers = CaseInsensitiveDict({'User-Agent': "CLI"})
    return default_headers
def test_iterable_init(self):
    """Construction from key/value pairs behaves like dict()."""
    d = CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')])
    assert len(d) == 2
    for key in ('foo', 'bar'):
        assert key in d
def test_delitem(self):
    """Deletion accepts any casing of the stored key."""
    d = CaseInsensitiveDict()
    d['Spam'] = 'someval'
    del d['sPam']
    assert 'spam' not in d
    assert len(d) == 0