def params(self, collapse=True):
    """Return the query parameters of the split url as a dictionary.

    :param collapse: when True (the default) a query name that appears
        multiple times keeps only the last value seen; when False the
        repeated name maps to a list of every value provided, while
        names seen once still map to a plain (non-list) value.
    """
    if not self.query:
        return {}
    pairs = parse.parse_qsl(self.query)
    if collapse:
        # dict() over the pair list keeps the last value per name.
        return dict(pairs)
    collected = {}
    for name, value in pairs:
        if name not in collected:
            collected[name] = value
        elif isinstance(collected[name], list):
            collected[name].append(value)
        else:
            # Second occurrence: promote the single value to a list.
            collected[name] = [collected[name], value]
    return collected
def __init__(self, base, relative=None):
    """Build a request URL from ``base`` plus an optional ``relative`` part.

    :param base: either a URL string, or another RequestBase whose split
        parts, headers, token state and config are copied.
    :param relative: optional URL joined onto the base: its path is
        appended via ``_join_plex`` and its query string merged after the
        base's; when the relative part has no query string, all base
        query parameters except 'X-Plex-Token' are stripped.
    """
    self._has_token = False
    self._url = None
    self._url_parts = None
    self._loaded = False
    self._xml = None
    self._url_parts = None
    self._headers = None
    self._config = None
    if isinstance(base, six.string_types):
        base_url = base
        self._url_parts = list(parse.urlsplit(base_url))
    elif isinstance(base, RequestBase):
        base_url = base.url
        self._has_token = base.has_token
        # Copy the parts list so later mutation does not affect `base`.
        self._url_parts = base._url_parts[:]
        self._headers = base._headers
        self._config = base.config
    if relative:
        scheme, netloc, path, qs, fragment = parse.urlsplit(relative)
        if path:
            self._url_parts[2] = _join_plex(self._url_parts[2], path)
        if qs:
            # Merge the relative query parameters after the base's.
            data = parse.parse_qsl(self._url_parts[3]) + parse.parse_qsl(qs)
            self._url_parts[3] = parse.urlencode(data)
        else:
            # Strip of all non-token parts
            data = parse.parse_qsl(self._url_parts[3])
            self._url_parts[3] = parse.urlencode(
                [(x, y) for x, y in data if x == 'X-Plex-Token'])
    # Detect a token supplied directly in the query string.
    if not self._has_token:
        self._has_token = 'X-Plex-Token' in parse.parse_qs(self._url_parts[3])
    self._url = parse.urlunsplit(self._url_parts)
def oauth_dance(server, consumer_key, key_cert_data, print_tokens=False, verify=None):
    """Interactively perform the three-legged OAuth 1.0a dance against a
    server and return the resulting credentials.

    :param server: base URL of the server, e.g. ``https://jira.example.com``.
    :param consumer_key: OAuth consumer key registered on the server.
    :param key_cert_data: RSA private key data used to sign requests.
    :param print_tokens: when True, print tokens and the authorization URL
        instead of opening a browser.
    :param verify: TLS verification flag; defaults to True for https URLs.
    :returns: dict with access token, secret, consumer key and key cert.
    """
    if verify is None:
        verify = server.startswith('https')

    # step 1: get request tokens
    oauth = OAuth1(
        consumer_key, signature_method=SIGNATURE_RSA, rsa_key=key_cert_data)
    r = requests.post(
        server + '/plugins/servlet/oauth/request-token', verify=verify, auth=oauth)
    # The token endpoint answers with a form-encoded body.
    request = dict(parse_qsl(r.text))
    request_token = request['oauth_token']
    request_token_secret = request['oauth_token_secret']
    if print_tokens:
        print("Request tokens received.")
        print(" Request token: {}".format(request_token))
        print(" Request token secret: {}".format(request_token_secret))

    # step 2: prompt user to validate
    auth_url = '{}/plugins/servlet/oauth/authorize?oauth_token={}'.format(
        server, request_token)
    if print_tokens:
        print(
            "Please visit this URL to authorize the OAuth request:\n\t{}".format(auth_url))
    else:
        webbrowser.open_new(auth_url)
        print(
            "Your browser is opening the OAuth authorization for this client session.")
    approved = input(
        'Have you authorized this program to connect on your behalf to {}? (y/n)'.format(server))
    if approved.lower() != 'y':
        exit(
            'Abandoning OAuth dance. Your partner faceplants. The audience boos. You feel shame.')

    # step 3: get access tokens for validated user
    oauth = OAuth1(consumer_key,
                   signature_method=SIGNATURE_RSA,
                   rsa_key=key_cert_data,
                   resource_owner_key=request_token,
                   resource_owner_secret=request_token_secret
                   )
    r = requests.post(
        server + '/plugins/servlet/oauth/access-token', verify=verify, auth=oauth)
    access = dict(parse_qsl(r.text))
    if print_tokens:
        print("Access tokens received.")
        print(" Access token: {}".format(access['oauth_token']))
        print(" Access token secret: {}".format(
            access['oauth_token_secret']))
    return {
        'access_token': access['oauth_token'],
        'access_token_secret': access['oauth_token_secret'],
        'consumer_key': consumer_key,
        'key_cert': key_cert_data,
    }
def equals(self, rhs):
    """Return True when ``rhs`` denotes the same URL as ``self.lhs``,
    treating query parameters as an unordered multiset."""
    left = urlparse.urlparse(self.lhs)
    right = urlparse.urlparse(rhs)
    left_qs = urlparse.parse_qsl(left.query)
    right_qs = urlparse.parse_qsl(right.query)
    if left.scheme != right.scheme:
        return False
    if left.netloc != right.netloc:
        return False
    if left.path != right.path:
        return False
    # Compare parameter pairs as sets, but also require identical counts
    # so that duplicated pairs are not collapsed away by the set.
    return len(left_qs) == len(right_qs) and set(left_qs) == set(right_qs)
def parameters(self, request, context):
    """Mock handler for a job's ``/parameters`` resource.

    GET is a no-op; POST updates the stored job's 'query' parameter from
    the posted QUERY field and merges any posted UPLOAD declarations
    into the job's 'upload' parameter.
    """
    # The job id is the trailing path component matched by job_re_path.
    jobid = int(job_re_path.match(request.path).group(1))
    job = self._jobs[jobid]
    if request.method == 'GET':
        pass
    elif request.method == 'POST':
        data = dict(parse_qsl(request.body))
        if 'QUERY' in data:
            assert data['QUERY'] == 'SELECT TOP 42 * FROM ivoa.obsCore'
            for param in job.parameters:
                if param.id_ == 'query':
                    param.content = data['QUERY']
        if 'UPLOAD' in data:
            for param in job.parameters:
                if param.id_ == 'upload':
                    # NOTE(review): `data` is reused as the comprehension
                    # variable here, shadowing the outer dict; entries are
                    # parsed as comma-separated pairs joined by ';'.
                    uploads1 = {data[0]: data[1] for data in [
                        data.split(',') for data in data['UPLOAD'].split(';')
                    ]}
                    uploads2 = {data[0]: data[1] for data in [
                        data.split(',') for data in param.content.split(';')
                    ]}
                    # Existing entries win over newly posted ones.
                    uploads1.update(uploads2)
                    # NOTE(review): merged content is re-joined with '='
                    # while it was split on ',' above -- confirm intended.
                    param.content = ';'.join([
                        '{}={}'.format(key, value)
                        for key, value in uploads1.items()
                    ])
def url(self, path, **query_params):
    """Build an absolute URL for ``path`` on the local test server.

    Query parameters already present in ``path`` are preserved, with any
    ``query_params`` keyword arguments overriding or extending them.

    :param path: path (optionally with a query string), e.g. ``/p?a=1``.
    :param query_params: extra query parameters to merge in.
    :returns: full ``http://127.0.0.1:<port>...`` URL string.
    """
    # Parse once instead of calling urlparse(path) twice.
    parsed = urlparse(path)
    params = dict(parse_qsl(parsed.query))
    params.update(query_params)
    url = 'http://127.0.0.1:{}{}'.format(self.port, parsed.path)
    if params:
        url += '?' + urlencode(params)
    return url
def callback(request, context):
    """Fake HTTP callback: verify the optional 'band' query parameter and
    reply with an empty body."""
    query = dict(parse_qsl(request.query))
    if 'band' in query:
        assert query['band'] == (
            '6.000000000000001e-07 +Inf')
    return b''
def prepare_request_mock(self, data, referer='http://localhost/user_with_workspaces/Public Workspace', user=None, extra_headers=None, GET=''):
    """Build a mocked Django-style POST request for tests.

    :param data: raw request body returned by ``request.read()``.
    :param referer: value for the HTTP Referer header.
    :param user: request user; defaults to ``self.admin_mock``.
    :param extra_headers: optional extra entries merged into ``request.META``.
    :param GET: raw query string exposed through ``request.GET``.
    :returns: a ``Mock`` configured like an incoming POST request.
    """
    request = Mock()
    request.method = 'POST'
    request.get_host.return_value = 'localhost'
    # BUG FIX: parse_qsl returns a list of (key, value) pairs, but the
    # __getitem__ side_effect below looks items up by parameter name.
    # Indexing the list with a string key raised TypeError; a dict gives
    # the intended QueryDict-like behavior.
    GET_PARAMETERS = dict(parse_qsl(GET))
    request.GET = MagicMock()
    request.GET.__len__.side_effect = lambda: len(GET_PARAMETERS)
    request.GET.__getitem__.side_effect = lambda key: GET_PARAMETERS[key]
    request.GET.urlencode.side_effect = lambda: GET
    request.COOKIES = {
        settings.SESSION_COOKIE_NAME: 'test',
    }
    request.META = {
        'HTTP_ACCEPT': 'application/json',
        'SERVER_PROTOCOL': 'http',
        'REMOTE_ADDR': '127.0.0.1',
        'content_type': 'application/json',
        'content_length': len(data),
        'HTTP_HOST': 'localhost',
        'HTTP_REFERER': referer,
        'HTTP_X_FI_WARE_OAUTH_TOKEN': 'true',
    }
    # extra_headers default changed from a mutable {} to None to avoid the
    # shared-mutable-default pitfall; behavior for callers is unchanged.
    request.META.update(extra_headers or {})
    request.read.return_value = data
    if user is None:
        request.user = self.admin_mock
    else:
        request.user = user
    return request
def test_upload_from_file_resumable(self):
    """Uploading a file above the resumable threshold must issue one POST
    to open the session followed by one PUT per 5-byte chunk."""
    from six.moves.http_client import OK
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    from tempfile import NamedTemporaryFile
    from gcloud._testing import _Monkey
    from apitools.base.py import http_wrapper
    from apitools.base.py import transfer
    BLOB_NAME = 'blob-name'
    UPLOAD_URL = 'http://example.com/upload/name/key'
    DATA = b'ABCDEF'
    loc_response = {'status': OK, 'location': UPLOAD_URL}
    chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
                       'range': 'bytes 0-4'}
    chunk2_response = {'status': OK}
    # Need valid JSON on last response, since resumable.
    connection = _Connection(
        (loc_response, b''),
        (chunk1_response, b''),
        (chunk2_response, b'{}'),
    )
    bucket = _Bucket()
    blob = self._makeOne(BLOB_NAME, bucket=bucket)
    blob._CHUNK_SIZE_MULTIPLE = 1
    blob.chunk_size = 5
    # Set the threshold low enough that we force a resumable upload.
    with _Monkey(transfer, _RESUMABLE_UPLOAD_THRESHOLD=5):
        with NamedTemporaryFile() as fh:
            fh.write(DATA)
            fh.flush()
            blob.upload_from_file(fh, rewind=True, connection=connection)
    rq = connection.http._requested
    self.assertEqual(len(rq), 3)
    # Request 0: initiate the resumable session.
    self.assertEqual(rq[0]['method'], 'POST')
    uri = rq[0]['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual(scheme, 'http')
    self.assertEqual(netloc, 'example.com')
    self.assertEqual(path, '/b/name/o')
    self.assertEqual(dict(parse_qsl(qs)),
                     {'uploadType': 'resumable', 'name': BLOB_NAME})
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[0]['headers'].items()])
    self.assertEqual(headers['X-Upload-Content-Length'], '6')
    self.assertEqual(headers['X-Upload-Content-Type'],
                     'application/octet-stream')
    # Request 1: first chunk (bytes 0-4).
    self.assertEqual(rq[1]['method'], 'PUT')
    self.assertEqual(rq[1]['uri'], UPLOAD_URL)
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[1]['headers'].items()])
    self.assertEqual(rq[1]['body'], DATA[:5])
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[1]['headers'].items()])
    self.assertEqual(headers['Content-Range'], 'bytes 0-4/6')
    # Request 2: final chunk (byte 5).
    self.assertEqual(rq[2]['method'], 'PUT')
    self.assertEqual(rq[2]['uri'], UPLOAD_URL)
    self.assertEqual(rq[2]['body'], DATA[5:])
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[2]['headers'].items()])
    self.assertEqual(headers['Content-Range'], 'bytes 5-5/6')
def request(self, url, method, **kwargs):
    """Fake HTTP entry point used in tests.

    Dispatches the call to a method named ``<verb>_<munged_url>`` on this
    fake, records the call on ``self.callstack`` and wraps the canned
    result in a ``requests`` Response.
    """
    # Check that certain things are called correctly
    if method in ['GET', 'DELETE']:
        assert 'body' not in kwargs
    elif method == 'PUT':
        assert 'body' in kwargs
    # Call the method
    args = urlparse.parse_qsl(urlparse.urlparse(url)[4])
    kwargs.update(args)
    # Turn the URL path into a legal Python method-name suffix.
    munged_url = url.rsplit('?', 1)[0]
    munged_url = munged_url.strip('/').replace('/', '_').replace(
        '.', '_').replace(' ', '_')
    munged_url = munged_url.replace('-', '_')
    callback = "%s_%s" % (method.lower(), munged_url)
    if not hasattr(self, callback):
        raise AssertionError('Called unknown API method: %s %s, '
                             'expected fakes method name: %s' %
                             (method, url, callback))
    # Note the call
    self.callstack.append((method, url, kwargs.get('body')))
    status, body = getattr(self, callback)(**kwargs)
    response = requests.models.Response()
    if isinstance(status, dict):
        # A dict status carries the headers plus a 'status' entry.
        response.status_code = status.pop("status")
        response.headers = status
    else:
        response.status_code = status
    return response, body
def _get_credentials(self, key):
    """Exchange the stored Twitter OAuth request token for an access token.

    :param key: the PIN/verifier the user obtained from Twitter.
    :returns: True on success (tokens persisted in sickbeard settings),
        False if the access-token request failed.
    """
    request_token = {
        'oauth_token': sickbeard.TWITTER_USERNAME,
        'oauth_token_secret': sickbeard.TWITTER_PASSWORD,
        'oauth_callback_confirmed': 'true'
    }
    token = oauth.Token(request_token['oauth_token'],
                        request_token['oauth_token_secret'])
    token.set_verifier(key)
    logger.log(u'Generating and signing request for an access token using key ' + key, logger.DEBUG)
    signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()  # @UnusedVariable
    oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
    logger.log(u'oauth_consumer: ' + str(oauth_consumer), logger.DEBUG)
    oauth_client = oauth.Client(oauth_consumer, token)
    logger.log(u'oauth_client: ' + str(oauth_client), logger.DEBUG)
    resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST',
                                         body='oauth_verifier=%s' % key)
    logger.log(u'resp, content: ' + str(resp) + ',' + str(content), logger.DEBUG)
    # The provider answers with a form-encoded body.
    access_token = dict(parse_qsl(content))
    logger.log(u'access_token: ' + str(access_token), logger.DEBUG)
    logger.log(u'resp[status] = ' + str(resp['status']), logger.DEBUG)
    if resp['status'] != '200':
        logger.log(u'The request for a token with did not succeed: ' + str(resp['status']), logger.ERROR)
        return False
    else:
        logger.log(u'Your Twitter Access Token key: %s' % access_token['oauth_token'], logger.DEBUG)
        logger.log(u'Access Token secret: %s' % access_token['oauth_token_secret'], logger.DEBUG)
        sickbeard.TWITTER_USERNAME = access_token['oauth_token']
        sickbeard.TWITTER_PASSWORD = access_token['oauth_token_secret']
        return True
def add_or_replace_parameter(url, name, new_value):
    """Add or remove a parameter to a given url

    >>> import w3lib.url
    >>> w3lib.url.add_or_replace_parameter('http://www.example.com/index.php', 'arg', 'v')
    'http://www.example.com/index.php?arg=v'
    >>> w3lib.url.add_or_replace_parameter('http://www.example.com/index.php?arg1=v1&arg2=v2&arg3=v3', 'arg4', 'v4')
    'http://www.example.com/index.php?arg1=v1&arg2=v2&arg3=v3&arg4=v4'
    >>> w3lib.url.add_or_replace_parameter('http://www.example.com/index.php?arg1=v1&arg2=v2&arg3=v3', 'arg3', 'v3new')
    'http://www.example.com/index.php?arg1=v1&arg2=v2&arg3=v3new'
    >>>
    """
    split_result = urlsplit(url)
    pairs = parse_qsl(split_result.query, keep_blank_values=True)
    rebuilt = []
    replaced = False
    for key, value in pairs:
        if key == name:
            # Replace in place so the parameter keeps its position.
            rebuilt.append((key, new_value))
            replaced = True
        else:
            rebuilt.append((key, value))
    if not replaced:
        rebuilt.append((name, new_value))
    return urlunsplit(split_result._replace(query=urlencode(rebuilt)))
def canonicalize_url(url, keep_blank_values=True, keep_fragments=False,
                     encoding=None):
    """Canonicalize the given url by applying the following procedures:

    - sort query arguments, first by key, then by value
    - percent encode paths and query arguments. non-ASCII characters are
      percent-encoded using UTF-8 (RFC-3986)
    - normalize all spaces (in query arguments) '+' (plus symbol)
    - normalize percent encodings case (%2f -> %2F)
    - remove query arguments with blank values (unless keep_blank_values is True)
    - remove fragments (unless keep_fragments is True)

    The url passed can be a str or unicode, while the url returned is
    always a str.

    For examples see the tests in tests/test_utils_url.py
    """
    scheme, netloc, path, params, query, fragment = parse_url(url)
    keyvals = parse_qsl(query, keep_blank_values)
    # Sorting the pairs gives a deterministic key-then-value ordering.
    keyvals.sort()
    query = urlencode(keyvals)
    # XXX: copied from w3lib.url.safe_url_string to add encoding argument
    # path = to_native_str(path, encoding)
    # path = moves.urllib.parse.quote(path, _safe_chars, encoding='latin1') or '/'
    path = safe_url_string(_unquotepath(path)) or '/'
    fragment = '' if not keep_fragments else fragment
    # Host names are case-insensitive, so lower-case the netloc.
    return urlunparse((scheme, netloc.lower(), path, params, query, fragment))
def parse_dict_querystring(environ):
    """Parses a query string like parse_querystring, but returns a MultiDict

    Caches this value in case parse_dict_querystring is called again
    for the same request.

    Example::

        >>> environ = {'QUERY_STRING': 'day=Monday&user=fred&user=jane'}
        >>> parsed = parse_dict_querystring(environ)
        >>> parsed['day']
        'Monday'
        >>> parsed['user']
        'fred'
        >>> parsed.getall('user')
        ['fred', 'jane']
    """
    source = environ.get('QUERY_STRING', '')
    if not source:
        return MultiDict()
    # Reuse the cached parse only when the query string is unchanged.
    if 'paste.parsed_dict_querystring' in environ:
        parsed, check_source = environ['paste.parsed_dict_querystring']
        if check_source == source:
            return parsed
    parsed = parse_qsl(source, keep_blank_values=True,
                       strict_parsing=False)
    multi = MultiDict(parsed)
    environ['paste.parsed_dict_querystring'] = (multi, source)
    return multi
def GET(self):
    """Lazily parse and cache the URL query string as a MultiDict of GET
    parameters (blank values are preserved)."""
    if self._GET is None:
        params = parse_qsl(self.url_parts.query, keep_blank_values=True)
        self._GET = MultiDict()
        for key, value in params:
            self._GET.add(key, value)
    return self._GET
def methodNext(self, previous_request, previous_response): """Retrieves the next page of results. Args: previous_request: The request for the previous page. (required) previous_response: The response from the request for the previous page. (required) Returns: A request object that you can call 'execute()' on to request the next page. Returns None if there are no more items in the collection. """ # Retrieve nextPageToken from previous_response # Use as pageToken in previous_request to create new request. if "nextPageToken" not in previous_response or not previous_response["nextPageToken"]: return None request = copy.copy(previous_request) pageToken = previous_response["nextPageToken"] parsed = list(urlparse(request.uri)) q = parse_qsl(parsed[4]) # Find and remove old 'pageToken' value from URI newq = [(key, value) for (key, value) in q if key != "pageToken"] newq.append(("pageToken", pageToken)) parsed[4] = urlencode(newq) uri = urlunparse(parsed) request.uri = uri logger.info("URL being requested: {0!s} {1!s}".format(methodName, uri)) return request
def get_cache(url='memory://'):
    """Loads the cache backend

    This function loads the cache backend named by the scheme of the
    given URL; the URL's query string supplies backend options.

    :param url: backend URL, e.g. ``memory://?key=value``
    """
    parsed = parse.urlparse(url)
    backend = parsed.scheme
    query = parsed.query
    # NOTE(flaper87): We need the following hack
    # for python versions < 2.7.5. Previous versions
    # of python parsed query params just for 'known'
    # schemes. This was changed in this patch:
    # http://hg.python.org/cpython/rev/79e6ff3d9afd
    if not query and '?' in parsed.path:
        query = parsed.path.split('?', 1)[-1]
    parameters = parse.parse_qsl(query)
    kwargs = {'options': dict(parameters)}
    mgr = driver.DriverManager('neutron_fwaas.openstack.common.cache.backends',
                               backend,
                               invoke_on_load=True,
                               invoke_args=[parsed],
                               invoke_kwds=kwargs)
    return mgr.driver
def build_url(base, additional_params=None):
    """Construct a URL based off of base containing all parameters in
    the query portion of base plus any additional parameters.

    :param base: Base URL
    :type base: str
    :param additional_params: Additional query parameters to include.
    :type additional_params: dict
    :rtype: str
    """
    parsed = urlparse(base)
    merged = {}
    merged.update(parse_qsl(parsed.query, True))
    if additional_params is not None:
        merged.update(additional_params)
        # A value of None means "remove this parameter entirely".
        for key, value in additional_params.items():
            if value is None:
                merged.pop(key)
    return urlunparse((parsed.scheme, parsed.netloc, parsed.path,
                       parsed.params, urlencode(merged), parsed.fragment))
def dispatch_wsgi(self):
    """ WSGI handler: read the request (POST body or GET query string),
    populate ``self.request``/``self.kvp`` and dispatch it. """
    if hasattr(self, 'response'):
        return self._write_response()
    LOGGER.debug('WSGI mode detected')
    if self.environ['REQUEST_METHOD'] == 'POST':
        try:
            request_body_size = int(self.environ.get('CONTENT_LENGTH', 0))
        except (ValueError):
            request_body_size = 0
        self.requesttype = 'POST'
        self.request = self.environ['wsgi.input'].read(request_body_size)
        LOGGER.debug('Request type: POST. Request:\n%s\n', self.request)
    else:
        # it's a GET request
        self.requesttype = 'GET'
        self.request = wsgiref.util.request_uri(self.environ)
        try:
            # Key/value pairs come from the query-string part of the URI.
            query_part = splitquery(self.request)[-1]
            self.kvp = dict(parse_qsl(query_part, keep_blank_values=True))
        except AttributeError as err:
            LOGGER.exception('Could not parse query string')
            self.kvp = {}
        LOGGER.debug('Request type: GET. Request:\n%s\n', self.request)
    return self.dispatch()
def _cs_request(self, url, method, **kwargs):
    """Fake HTTP transport: dispatch to ``<verb>_<munged_url>`` on this
    object and record the call on ``self.callstack``."""
    # Check that certain things are called correctly
    if method in ["GET", "DELETE"]:
        assert "body" not in kwargs
    elif method == "PUT":
        assert "body" in kwargs
    # Call the method
    args = urlparse.parse_qsl(urlparse.urlparse(url)[4])
    kwargs.update(args)
    # Turn the URL path into a legal Python method-name suffix.
    munged_url = url.rsplit("?", 1)[0]
    munged_url = munged_url.strip("/").replace("/", "_").replace(".", "_")
    munged_url = munged_url.replace("-", "_")
    callback = "%s_%s" % (method.lower(), munged_url)
    if not hasattr(self, callback):
        raise AssertionError(
            "Called unknown API method: %s %s, "
            "expected fakes method name: %s" % (method, url, callback)
        )
    # Note the call
    self.callstack.append((method, url, kwargs.get("body", None)))
    status, body = getattr(self, callback)(**kwargs)
    if hasattr(status, "items"):
        # A dict-like status already carries headers alongside the code.
        return status, body
    else:
        return {"status": status}, body
def _upload_from_file_simple_test_helper(self, properties=None,
                                         content_type_arg=None,
                                         expected_content_type=None):
    """Shared helper: upload a small file through the simple (media)
    upload path and verify the single POST request it generates."""
    from six.moves.http_client import OK
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    from tempfile import NamedTemporaryFile
    BLOB_NAME = 'blob-name'
    DATA = b'ABCDEF'
    response = {'status': OK}
    connection = _Connection(
        (response, b''),
    )
    bucket = _Bucket(connection)
    blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties)
    blob.CHUNK_SIZE = 5
    with NamedTemporaryFile() as fh:
        fh.write(DATA)
        fh.flush()
        blob.upload_from_file(fh, rewind=True,
                              content_type=content_type_arg)
    rq = connection.http._requested
    self.assertEqual(len(rq), 1)
    self.assertEqual(rq[0]['method'], 'POST')
    uri = rq[0]['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual(scheme, 'http')
    self.assertEqual(netloc, 'example.com')
    self.assertEqual(path, '/b/name/o')
    # Simple uploads use uploadType=media.
    self.assertEqual(dict(parse_qsl(qs)),
                     {'uploadType': 'media', 'name': BLOB_NAME})
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[0]['headers'].items()])
    self.assertEqual(headers['Content-Length'], '6')
    self.assertEqual(headers['Content-Type'], expected_content_type)
def client_request(self, client, method, url, **kwargs):
    """Fake session entry point: serve canned fixtures when registered
    for (url, method), otherwise dispatch to ``<verb>_<munged_url>``."""
    # Check that certain things are called correctly
    if method in ["GET", "DELETE"]:
        assert "json" not in kwargs
    # Note the call
    self.callstack.append(
        (method,
         url,
         kwargs.get("headers") or {},
         kwargs.get("json") or kwargs.get("data")))
    try:
        fixture = self.fixtures[url][method]
    except KeyError:
        pass
    else:
        # Fixture found: short-circuit with the canned (headers, text).
        return TestResponse({"headers": fixture[0],
                             "text": fixture[1]})
    # Call the method
    args = parse.parse_qsl(parse.urlparse(url)[4])
    kwargs.update(args)
    # Turn the URL path into a legal Python method-name suffix.
    munged_url = url.rsplit("?", 1)[0]
    munged_url = munged_url.strip("/").replace("/", "_").replace(".", "_")
    munged_url = munged_url.replace("-", "_")
    callback = "%s_%s" % (method.lower(), munged_url)
    if not hasattr(self, callback):
        raise AssertionError(
            "Called unknown API method: %s %s, "
            "expected fakes method name: %s" % (method, url, callback)
        )
    resp = getattr(self, callback)(**kwargs)
    # Callbacks may return (status, headers, body) or just (status, body).
    if len(resp) == 3:
        status, headers, body = resp
    else:
        status, body = resp
        headers = {}
    return TestResponse({
        "status_code": status,
        "text": body,
        "headers": headers,
    })
def parse_request(self, environ):
    """Translate a WSGI environ into a plain request dict containing
    host, path, query, method, headers and (when present) body."""
    def rebuild_header_name(environ_key):
        """Construct the HTTP header name from a WSGI environ variable.
        """
        raw = environ_key[5:]  # drop the 'HTTP_' prefix
        return "-".join(piece.capitalize() for piece in raw.split("_"))

    request = {
        "host": environ.get("HTTP_HOST", None),
        "path": environ.get("PATH_INFO", None),
        "query": dict(parse_qsl(environ.get("QUERY_STRING", None))),
        "method": environ.get("REQUEST_METHOD", None),
        "headers": {},
    }
    # Recover HTTP headers from their HTTP_* environ entries.
    for key in environ:
        if key.startswith("HTTP_"):
            request["headers"][rebuild_header_name(key)] = environ[key]
    # Content type/length live in dedicated (non-HTTP_) environ keys.
    if "CONTENT_TYPE" in environ:
        request["headers"]["Content-Type"] = environ["CONTENT_TYPE"]
    if "CONTENT_LENGTH" in environ:
        request["headers"]["Content-Length"] = environ["CONTENT_LENGTH"]
        if environ["CONTENT_LENGTH"]:
            request["body"] = get_post(environ)
    return request
def test_api_request_w_query_params(self):
    """api_request must append the supplied query parameters to the
    request URI and send an empty GET body."""
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    conn = self._makeMockOne()
    http = conn._http = _Http(
        {'status': '200', 'content-type': 'application/json'},
        b'{}',
    )
    self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {})
    self.assertEqual(http._called_with['method'], 'GET')
    uri = http._called_with['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL)
    # Intended to emulate self.mock_template
    PATH = '/'.join([
        '',
        'mock',
        conn.API_VERSION,
        '',
    ])
    self.assertEqual(path, PATH)
    parms = dict(parse_qsl(qs))
    self.assertEqual(parms['foo'], 'bar')
    self.assertIsNone(http._called_with['body'])
    expected_headers = {
        'Accept-Encoding': 'gzip',
        'Content-Length': '0',
        'User-Agent': conn.USER_AGENT,
    }
    self.assertEqual(http._called_with['headers'], expected_headers)
def update_url_query(*args, **kwargs):
    """
    Return a new URL with the query parameters of the URL updated based
    on the keyword arguments of the function call.

    Existing parameters matching a keyword are replaced and new keywords
    are appended; a value of None removes any parameters with that key
    without replacement. The URL must be passed as the first positional
    argument of the function; it cannot be passed as a keyword argument.
    """
    if not args:
        raise TypeError("URL must be passed as the first positional argument")
    scheme, netloc, path, query, fragment = urlsplit(args[0])
    pairs = parse_qsl(query)
    for key, value in kwargs.items():
        # Drop every existing pair with this key, then append the new
        # value (unless the caller asked for removal with None).
        pairs = [(k, v) for k, v in pairs if k != key]
        if value is not None:
            pairs.append((key, value))
    return urlunsplit((scheme, netloc, path, urlencode(pairs), fragment))
def exchange_token(self, request, pipeline, code):
    """Exchange an OAuth2 authorization ``code`` for an access token.

    :returns: the parsed token payload (dict) on success, or a dict with
        'error'/'error_description' keys on SSL or JSON decode failures.
    """
    # TODO: this needs the auth yet
    data = self.get_token_params(
        code=code,
        redirect_uri=absolute_uri(pipeline.redirect_url()),
    )
    verify_ssl = pipeline.config.get('verify_ssl', True)
    try:
        req = safe_urlopen(self.access_token_url, data=data, verify_ssl=verify_ssl)
        body = safe_urlread(req)
        # Some providers answer form-encoded instead of JSON.
        if req.headers.get('Content-Type', '').startswith('application/x-www-form-urlencoded'):
            return dict(parse_qsl(body))
        return json.loads(body)
    except SSLError:
        logger.info('identity.oauth2.ssl-error', extra={
            'url': self.access_token_url,
            'verify_ssl': verify_ssl,
        })
        url = self.access_token_url
        return {
            'error': 'Could not verify SSL certificate',
            'error_description': u'Ensure that {} has a valid SSL certificate'.format(url)
        }
    except JSONDecodeError:
        logger.info('identity.oauth2.json-error', extra={
            'url': self.access_token_url,
        })
        return {
            'error': 'Could not decode a JSON Response',
            'error_description': u'We were not able to parse a JSON response, please try again.'
        }
def test_api_request_w_query_params(self):
    """api_request must merge the caller's query parameters with the
    implicit 'project' parameter on the request URI."""
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    PROJECT = 'project'
    conn = self._makeOne(PROJECT)
    http = conn._http = Http(
        {'status': '200', 'content-type': 'application/json'},
        '{}',
    )
    self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {})
    self.assertEqual(http._called_with['method'], 'GET')
    uri = http._called_with['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL)
    self.assertEqual(path,
                     '/'.join(['', 'storage', conn.API_VERSION, '']))
    parms = dict(parse_qsl(qs))
    self.assertEqual(parms['project'], PROJECT)
    self.assertEqual(parms['foo'], 'bar')
    self.assertEqual(http._called_with['body'], None)
    expected_headers = {
        'Accept-Encoding': 'gzip',
        'Content-Length': 0,
        'User-Agent': conn.USER_AGENT,
    }
    self.assertEqual(http._called_with['headers'], expected_headers)
def __init__(self, columns):
    """Populate attributes from a result row.

    :param columns: sequence aligned with ``self.COLUMNS``; each value is
        assigned to the attribute named by the matching column entry.
    """
    for key, value in map2x(None, self.COLUMNS, columns):
        if key:
            setattr(self, key, value)
    # special handling for 'args' - parse it into a dict if it is a string
    # NOTE(review): on Python 3, parse_qsl over the utf-8-encoded bytes
    # yields bytes keys/values -- confirm that is intended.
    if isinstance(self.args, string_types):
        self.args = dict(x for x in parse_qsl(self.args.encode('utf-8')))
def _load_backend(self, backend_uri, context):
    """
    Return the instantiated backend object identified by the given
    `backend_uri`. The entry point that is used to create the backend
    object is determined by the protocol part of the given URI.
    """
    parsed = parse.urlparse(backend_uri)
    # Backend-specific options ride in the URI's query string.
    options = dict(parse.parse_qsl(parsed.query))
    try:
        backend = self._entry_points[self.BACKENDS_ENTRY_POINT][parsed.scheme].load()
    except KeyError:
        raise BackendNotFoundError(
            "The requested backend `%s` could not be found in the "
            "registered entry points. Perhaps you forgot to install the "
            "corresponding backend package?" % parsed.scheme
        )
    # Passwords may be percent-encoded inside the URI.
    password = (parse.unquote(parsed.password)
                if parsed.password
                else parsed.password)
    return backend(
        username=parsed.username,
        password=password,
        hostname=parsed.hostname,
        port=parsed.port,
        path=parsed.path,
        options=options,
        context=context,
    )
def _cs_request(self, url, method, **kwargs):
    """Fake HTTP transport: dispatch to ``<verb>_<munged_url>`` on this
    object and wrap the canned (status, headers, body) result in a
    TestResponse."""
    # Check that certain things are called correctly
    if method in ['GET', 'DELETE']:
        assert 'body' not in kwargs
    elif method == 'PUT':
        assert 'body' in kwargs
    # Call the method
    args = urlparse.parse_qsl(urlparse.urlparse(url)[4])
    kwargs.update(args)
    # Turn the URL path into a legal Python method-name suffix.
    munged_url = url.rsplit('?', 1)[0]
    munged_url = munged_url.strip('/').replace('/', '_').replace('.', '_')
    munged_url = munged_url.replace('-', '_')
    callback = "%s_%s" % (method.lower(), munged_url)
    if not hasattr(self, callback):
        raise AssertionError('Called unknown API method: %s %s, '
                             'expected fakes method name: %s' %
                             (method, url, callback))
    # Note the call
    self.callstack.append((method, url, kwargs.get('body', None)))
    status, headers, body = getattr(self, callback)(**kwargs)
    r = utils.TestResponse({
        "status_code": status,
        "text": body,
        "headers": headers,
    })
    return r, body
def _get_credentials(self, key):
    """Exchange the stored Twitter OAuth request token for an access
    token using verifier ``key``; persist the tokens in sickbeard
    settings.

    :returns: True on success, False if the provider rejected the request.
    """
    request_token = {
        'oauth_token': sickbeard.TWITTER_USERNAME,
        'oauth_token_secret': sickbeard.TWITTER_PASSWORD,
        'oauth_callback_confirmed': 'true'
    }
    token = oauth.Token(request_token['oauth_token'],
                        request_token['oauth_token_secret'])
    token.set_verifier(key)
    logger.log(
        'Generating and signing request for an access token using key ' + key,
        logger.DEBUG)
    signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1(
    )  # @UnusedVariable
    oauth_consumer = oauth.Consumer(key=self.consumer_key,
                                    secret=self.consumer_hash)
    logger.log('oauth_consumer: ' + str(oauth_consumer), logger.DEBUG)
    oauth_client = oauth.Client(oauth_consumer, token)
    logger.log('oauth_client: ' + str(oauth_client), logger.DEBUG)
    resp, content = oauth_client.request(
        self.ACCESS_TOKEN_URL, method='POST',
        body='oauth_verifier={0}'.format(key))
    logger.log('resp, content: ' + str(resp) + ',' + str(content),
               logger.DEBUG)
    # The provider answers with a form-encoded body.
    access_token = dict(parse_qsl(content))
    logger.log('access_token: ' + str(access_token), logger.DEBUG)
    logger.log('resp[status] = ' + str(resp['status']), logger.DEBUG)
    if resp['status'] != '200':
        logger.log(
            'The request for a token with did not succeed: ' + str(resp['status']),
            logger.ERROR)
        return False
    else:
        logger.log(
            'Your Twitter Access Token key: {0}'.format(
                access_token['oauth_token']), logger.DEBUG)
        logger.log(
            'Access Token secret: {0}'.format(
                access_token['oauth_token_secret']), logger.DEBUG)
        sickbeard.TWITTER_USERNAME = access_token['oauth_token']
        sickbeard.TWITTER_PASSWORD = access_token['oauth_token_secret']
        return True
def lro_get_next_page(lro_status_callback, first_page, continuation_token, show_stats=False):
    """Fetch the next page of a long-running operation.

    Returns ``first_page`` unchanged when there is no continuation token;
    otherwise extracts the job id and query parameters from the token URL
    and delegates to ``lro_status_callback``.
    """
    if continuation_token is None:
        return first_page
    try:
        # Tokens may arrive as bytes; normalise to text first.
        continuation_token = continuation_token.decode("utf-8")
    except AttributeError:
        pass
    token_url = urlparse(continuation_token)
    # OData-style '$' prefixes are stripped before parsing the query.
    query_params = dict(parse_qsl(token_url.query.replace("$", "")))
    query_params["show_stats"] = show_stats
    job_id = token_url.path.split("/")[-1]
    return lro_status_callback(job_id, **query_params)
def testRetainRedirectQueryComponent(self):
    """Query parameters already present on the registered redirect URI
    must survive the authorization redirect."""
    uri = 'https://otherhost.com/and/path?some=value'
    with transaction.manager:
        redirect_uri = Oauth2RedirectUri(
            self.client, uri)
        DBSession.add(redirect_uri)
    self.request.params['redirect_uri'] = uri
    self.redirect_uri = uri
    response = self._process_view()
    self._validate_authcode_response(response)
    # The original query component must still be on the redirect target.
    parts = urlparse(response.location)
    params = dict(parse_qsl(parts.query))
    self.assertTrue('some' in params)
    self.assertEqual(params['some'], 'value')
def on_request(request):
    """Mock paginated endpoint: validate the 'limit' query parameter and
    serve the fixture for the requested page with pagination headers."""
    url = urlparse(request.url)
    parameters = dict(parse_qsl(url.query))
    # Default to the first page when 'page' is absent or unparsable.
    page = try_convert(parameters.get('page'), int) or 1
    limit = try_convert(parameters.get('limit'), int)
    if limit is not None and limit != 2:
        # Invalid limit provided
        return 400, {}, ''
    return 200, {
        'X-Pagination-Limit': '2',
        'X-Pagination-Item-Count': '6',
        'X-Pagination-Page-Count': '3'
    }, read('fixtures/users/me/lists_p%d.json' % page)
def test_first_link_preserves_list_like_querystring_params(self):
    """Repeated query-string parameters must survive in the batch's
    'first' link."""
    items = list(range(1, 26))
    self.request.form["b_size"] = 10
    self.request["QUERY_STRING"] = "foolist=1&foolist=2"
    batch = HypermediaBatch(self.request, items)
    # Argument lists (same query string parameter repeated multiple
    # times) should be preserved.
    batch_params = set([("b_start", "0"), ("b_size", "10")])
    self.assertEqual(
        set([("foolist", "1"), ("foolist", "2")]),
        set(parse_qsl(urlparse(batch.links["first"]).query)) - batch_params,
    )
def test_build_api_url_w_custom_endpoint(self):
    """build_api_url must honour a custom api_endpoint and emit no
    unexpected query parameters."""
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    custom_endpoint = "https://foo-translation.googleapis.com"
    conn = self._make_one(object(), api_endpoint=custom_endpoint)
    uri = conn.build_api_url("/foo")
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual("%s://%s" % (scheme, netloc), custom_endpoint)
    self.assertEqual(
        path,
        "/".join(["", "language", "translate", conn.API_VERSION, "foo"]))
    parms = dict(parse_qsl(qs))
    # prettyPrint defaults to "false"; nothing else should remain.
    pretty_print = parms.pop("prettyPrint", "false")
    self.assertEqual(pretty_print, "false")
    self.assertEqual(parms, {})
def test_authorization_url_has_expected_query_params_with_domain(self):
    """The SSO authorization URL built from a customer domain must carry
    exactly the expected query parameters."""
    authorization_url = self.sso.get_authorization_url(
        domain=self.customer_domain,
        redirect_uri=self.redirect_uri,
        state=self.state,
    )
    parsed_url = urlparse(authorization_url)
    assert dict(parse_qsl(parsed_url.query)) == {
        "domain": self.customer_domain,
        "client_id": workos.project_id,
        "redirect_uri": self.redirect_uri,
        "response_type": RESPONSE_TYPE_CODE,
        "state": self.state,
    }
def handle_authcode(request, client, redirection_uri, state=None):
    """Issue an OAuth2 authorization code and redirect back to the client."""
    parts = urlparse(redirection_uri.uri)
    params = dict(parse_qsl(parts.query))

    user_id = authenticated_userid(request)
    auth_code = Oauth2Code(client, user_id)
    db.add(auth_code)
    db.flush()  # ensure auth_code.authcode is generated before use

    params['code'] = auth_code.authcode
    if state:
        params['state'] = state

    # Rebuild the redirection URI with the augmented query string; the
    # fragment is deliberately dropped.
    location = ParseResult(
        parts.scheme, parts.netloc, parts.path, parts.params,
        urlencode(params), '').geturl()
    return HTTPFound(location=location)
def test_authorization_url_has_expected_query_params_with_provider(
        self, setup_with_client_id):
    """The authorization URL must carry the expected provider parameters."""
    authorization_url = self.sso.get_authorization_url(
        provider=self.provider,
        redirect_uri=self.redirect_uri,
        state=self.state)

    actual_params = dict(parse_qsl(urlparse(authorization_url).query))
    assert actual_params == {
        "provider": str(self.provider.value),
        "client_id": workos.client_id,
        "redirect_uri": self.redirect_uri,
        "response_type": RESPONSE_TYPE_CODE,
        "state": self.state,
    }
def redirect_response(self, response_type, redirect_uri, params):
    """Redirect the client, carrying params in the URI fragment for the
    implicit ('token') grant and in the query string otherwise."""
    # None-valued parameters are never transmitted.
    filtered = [(k, v) for k, v in six.iteritems(params) if v is not None]

    if response_type == 'token':
        # Implicit grant: parameters travel in the fragment.
        return self.redirect(
            u'{}#{}'.format(redirect_uri, urlencode(filtered)))

    parts = list(urlparse(redirect_uri))
    query = parse_qsl(parts[4])
    query.extend(filtered)
    parts[4] = urlencode(query)
    return self.redirect(urlunparse(parts))
def slow_loading_handler(request, response):
    """Serve a tiny page after a test-configurable delay (seconds)."""
    # Tests may override the delay via a ?delay=<seconds> query parameter.
    query = dict(urlparse.parse_qsl(request.url_parts.query))
    delay = int(query.get("delay", 5))
    time.sleep(delay)

    # Forbid caching so the browser's bfcache cannot short-circuit the
    # slow load on later navigations.
    response.headers.set("Cache-Control", "no-cache, no-store")
    response.content = """<!doctype html>
<meta charset="UTF-8">
<title>Slow page loading</title>
<p>Delay: <span id="delay">{}</span></p>
""".format(delay)
def build_absolute_path(self, path=None, repo=None, params=None):
    """Build an absolute path, including the path prefix if required.

    Django's HttpRequest objects have a similar function, but we
    implement our own so that we can handle path prefixes correctly
    when they're in use.

    Args:
        path (str, optional): A path beginning with a slash (may include
            a query string), e.g., '/abc?x=y'. Defaults to the current
            request's path.
        repo (str, optional): A repo ID. When given, path is treated as
            relative to the repo's route and must also be given.
        params (list, optional): Tuples of query param keys and values
            to add to the path; a None value removes the key.

    Returns:
        str: An absolute path, including the sitewide
        OPTIONAL_PATH_PREFIX if it was used with the original request
        (e.g., '/personfinder/abc?x=y'). Does not preserve query
        parameters from the original request.
    """
    if path is None:
        assert not repo
        # request.path already includes the path prefix when in use.
        return self.request.path
    assert path[0] == '/'

    if repo:
        path = '/%s%s' % (repo, path)

    if self._request_is_for_prefixed_path():
        result = '/%s%s' % (site_settings.OPTIONAL_PATH_PREFIX, path)
    else:
        result = path

    if params:
        url_parts = list(urlparse.urlparse(result))
        url_params = dict(urlparse.parse_qsl(url_parts[4]))
        for key, value in params:
            if value is None:
                # A None value removes the key if present.
                url_params.pop(key, None)
            else:
                url_params[key] = value
        url_parts[4] = utils.urlencode(url_params)
        result = urlparse.urlunparse(url_parts)
    return result
def proxy_with_warning_page_mock(url, *args, **kwargs):
    """Simulate a proxy that interposes an HTML warning page until the
    client presents the proxy cookie plus the approval query parameter."""
    cookies = kwargs.get('cookies') or {}
    proxy_cookie = cookies.get('proxy_cookie')
    parts = list(urlparse(url))
    query = dict(parse_qsl(parts[4]))

    if proxy_cookie and query.get('proxyapproved') == 'true':
        # Approved: strip the marker and forward the real request.
        del query['proxyapproved']
        parts[4] = urlencode(query)
        return standalone_requests_get_mock(
            urlunparse(parts), *args[1:], **kwargs)

    # Not yet approved: serve the warning page pointing at the approval
    # URL and hand the proxy cookie to the client.
    query['proxyapproved'] = 'true'
    parts[4] = urlencode(query)
    with open(os.path.join(FIXTURE_DIR, 'html_warning_page'), 'r') as f:
        body = f.read().replace('$REDIRECT_URL$', urlunparse(parts))
    cookies['proxy_cookie'] = 'foo'
    return MockedResponse(body, 200, cookies)
def __init__(self, url, method='GET', query_args=None):
    """Initialize the HTTP request.

    Args:
        url: The URL to request.
        method: The HTTP method to send to the server.
        query_args (dict, optional): Query arguments to add to the URL.
            Underscores in each key are replaced with dashes.
    """
    self.method = method
    self.headers = {}
    self._fields = {}
    self._files = {}

    # FIX: replaced the mutable default argument (query_args={}) with
    # None so a single dict is not shared across all calls.
    if query_args is None:
        query_args = {}

    # Replace all underscores in each query argument
    # key with dashes.
    query_args = dict([(key.replace('_', '-'), value)
                       for key, value in six.iteritems(query_args)])

    # Add the query arguments to the url
    url_parts = list(urlparse(url))
    query = dict(parse_qsl(url_parts[4]))
    query.update(query_args)
    url_parts[4] = urlencode(query)
    self.url = urlunparse(url_parts)
def _upload_from_filename_test_helper(self, properties=None,
                                      content_type_arg=None,
                                      expected_content_type=None):
    """Shared driver: upload a temp file and verify the single POST.

    Asserts that upload_from_filename issues exactly one media-upload
    POST whose query string, Content-Length and Content-Type match the
    expectations passed in by the calling test.
    """
    from six.moves.http_client import OK
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    from tempfile import NamedTemporaryFile
    from _gcloud_vendor.apitools.base.py import http_wrapper
    BLOB_NAME = 'blob-name'
    UPLOAD_URL = 'http://example.com/upload/name/key'
    DATA = b'ABCDEF'
    # Canned responses the fake connection returns in order:
    # upload-location handshake, one incomplete chunk, then completion.
    loc_response = {'status': OK, 'location': UPLOAD_URL}
    chunk1_response = {
        'status': http_wrapper.RESUME_INCOMPLETE,
        'range': 'bytes 0-4'
    }
    chunk2_response = {'status': OK}
    connection = _Connection(
        (loc_response, ''),
        (chunk1_response, ''),
        (chunk2_response, ''),
    )
    bucket = _Bucket(connection)
    blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties)
    # Force chunked (resumable) upload for the 6-byte payload.
    blob.CHUNK_SIZE = 5
    with NamedTemporaryFile(suffix='.jpeg') as fh:
        fh.write(DATA)
        fh.flush()
        blob.upload_from_filename(fh.name, content_type=content_type_arg)
    rq = connection.http._requested
    self.assertEqual(len(rq), 1)
    self.assertEqual(rq[0]['method'], 'POST')
    uri = rq[0]['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual(scheme, 'http')
    self.assertEqual(netloc, 'example.com')
    self.assertEqual(path, '/b/name/o')
    # parse_qsl keeps the comparison independent of parameter order.
    self.assertEqual(dict(parse_qsl(qs)), {
        'uploadType': 'media',
        'name': BLOB_NAME
    })
    headers = dict([(x.title(), str(y))
                    for x, y in rq[0]['headers'].items()])
    self.assertEqual(headers['Content-Length'], '6')
    self.assertEqual(headers['Content-Type'], expected_content_type)
def __init__(self, client, response):
    """Wrap a paginated API response, extracting pagination metadata."""
    self.client = client
    self.response = response

    # Pagination metadata arrives in the response headers.
    headers = response.headers
    self.per_page = try_convert(headers.get('x-pagination-limit'), int)
    self.total_items = try_convert(
        headers.get('x-pagination-item-count'), int)
    self.total_pages = try_convert(
        headers.get('x-pagination-page-count'), int)

    # Split the request URL into a query-less base and its parameters.
    scheme, netloc, path, query = urlsplit(self.response.request.url)[:4]
    self.url = urlunsplit([scheme, netloc, path, '', ''])
    self.query = dict(parse_qsl(query))
def test_authorization_url_has_expected_query_params_with_connection(
        self, setup_with_client_id):
    """The authorization URL must carry the expected connection parameters."""
    authorization_url = self.sso.get_authorization_url(
        connection=self.connection,
        redirect_uri=self.redirect_uri,
        state=self.state,
    )

    actual_params = dict(parse_qsl(urlparse(authorization_url).query))
    assert actual_params == {
        "connection": self.connection,
        "client_id": workos.client_id,
        "redirect_uri": self.redirect_uri,
        "response_type": RESPONSE_TYPE_CODE,
        "state": self.state,
    }
def parse_header_tags():
    """Parse tags specified in the HTTP request header.

    The configured header's value is interpreted as a query string of
    key=value tag pairs. Returns a dict of tags, or None when empty.
    """
    header_name = current_app.config['FILES_REST_FILE_TAGS_HEADER']
    qs = request.headers.get(header_name, '')

    tags = {}
    for key, value in parse_qsl(qs):
        if key in tags:
            # The same tag key may not be given twice.
            raise DuplicateTagError()
        if not validate_tag(key, value):
            # Reject keys/values outside the allowed length bounds.
            raise InvalidTagError()
        tags[key] = value
    return tags or None
def test_build_api_url_w_extra_query_params_tuple(self):
    """Repeated query params passed as tuples must all appear in the URL."""
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit

    conn = self._make_one(object())
    uri = conn.build_api_url(
        "/foo", query_params=[("q", "val1"), ("q", "val2")])

    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual("%s://%s" % (scheme, netloc), conn.API_BASE_URL)
    expected_path = "/".join(
        ["", "language", "translate", conn.API_VERSION, "foo"])
    self.assertEqual(path, expected_path)

    # Ignore the always-present prettyPrint parameter; both "q" values
    # must survive.
    params = sorted(
        param for param in parse_qsl(qs) if param[0] != "prettyPrint")
    self.assertEqual(params, [("q", "val1"), ("q", "val2")])
def client_request(self, client, method, url, **kwargs):
    """Fake HTTP transport: record the call, then answer from fixtures
    or from a fake method named after the request.

    The fake method name is derived from the method and URL, e.g.
    ``GET /servers/detail`` dispatches to ``get_servers_detail``.
    Raises AssertionError when no such fake method exists.
    """
    # Check that certain things are called correctly
    if method in ["GET", "DELETE"]:
        assert "json" not in kwargs

    # Note the call
    self.callstack.append(
        (method, url, kwargs.get("headers") or {},
         kwargs.get("json") or kwargs.get("data")))

    # Prefer a canned fixture for this exact URL/method when registered.
    try:
        fixture = self.fixtures[url][method]
    except KeyError:
        pass
    else:
        return TestResponse({"headers": fixture[0], "text": fixture[1]})

    # Call the method
    # Query-string parameters become keyword arguments of the fake method.
    args = parse.parse_qsl(parse.urlparse(url)[4])
    kwargs.update(args)
    # Munge the URL path into a valid Python identifier suffix.
    munged_url = url.rsplit('?', 1)[0]
    munged_url = munged_url.strip('/').replace('/', '_').replace('.', '_')
    munged_url = munged_url.replace('-', '_')
    callback = "%s_%s" % (method.lower(), munged_url)
    if not hasattr(self, callback):
        raise AssertionError('Called unknown API method: %s %s, '
                             'expected fakes method name: %s' %
                             (method, url, callback))
    resp = getattr(self, callback)(**kwargs)
    # Fake methods may return (status, headers, body) or (status, body).
    if len(resp) == 3:
        status, headers, body = resp
    else:
        status, body = resp
        headers = {}
    self.last_request_id = headers.get('x-openstack-request-id',
                                       'req-test')
    return TestResponse({
        "status_code": status,
        "text": body,
        "headers": headers,
    })
def load_service_context(cls, store_uri):
    """Return (and cache) the AzureML service context for *store_uri*.

    Resolution order: an already-cached context, mlflow environment
    variables, the current remote Run's workspace (when the URI's query
    marks it as remote), then interactive/default auth for local runs.

    :param store_uri: mlflow tracking URI; the query string may carry
        the remote marker, the cache key is the URI without the query.
    :raises MlflowException: when remote loading is required but fails,
        or when running remotely without a loadable context.
    """
    from mlflow.exceptions import MlflowException
    parsed_url = parse.urlparse(store_uri)
    queries = dict(parse.parse_qsl(parsed_url.query))
    # Cache key deliberately excludes the query string.
    cache_key = store_uri.split("?")[0]
    if cls.has_service_context(cache_key):
        return cls.get_service_context(cache_key)
    elif _mlflow_env_vars_set():
        service_context = get_service_context_from_tracking_url_mlflow_env_vars(
            parsed_url)
        logger.debug(
            "Created a new service context from mlflow env vars: {}".
            format(service_context))
        cls.add_service_context(cache_key, service_context)
    elif _IS_REMOTE in queries and queries[_IS_REMOTE] == _TRUE_QUERY_VALUE:
        # Remote run: the workspace context comes from the Run itself.
        try:
            run = Run.get_context()
        except RunEnvironmentException:
            raise MlflowException(
                "AzureMlflow tracking URI was set to remote but there "
                "was a failure in loading the run.")
        else:
            service_context = run.experiment.workspace.service_context
            cls.add_service_context(cache_key, service_context)
            logger.debug(
                "Found Run's service context: {}".format(service_context))
    else:
        if _is_remote():
            # Interactive login cannot be performed remotely.
            raise MlflowException(
                "In remote environment but could not load a service "
                "context. InteractiveLoginAuthentication is not supported in "
                "the remote environment.")
        else:
            service_context = get_service_context_from_tracking_url_default_auth(
                parsed_url)
            logger.debug("Creating a new {} for a local run".format(
                service_context))
            cls.add_service_context(cache_key, service_context)
    return cls.get_service_context(cache_key)
def reply_test(url, request):
    """Assert the callback URL and signature, then return a rendered reply.

    Closes over the expected values (scheme, netloc, timestamp, ...) from
    the enclosing test.
    """
    # The request must target the configured endpoint.
    self.assertEqual(url.scheme, scheme)
    self.assertEqual(url.netloc, netloc)
    self.assertEqual(url.path, path)

    query = dict(parse_qsl(url.query))
    self.assertEqual(query["timestamp"], timestamp)
    self.assertEqual(query["nonce"], nonce)
    self.assertEqual(query["signature"], signature)
    check_signature(self.app.token, query["signature"], timestamp, nonce)

    # The body should parse as the text message that was sent.
    msg = parse_message(request.body)
    self.assertIsInstance(msg, messages.TextMessage)
    self.assertEqual(msg.source, sender)
    self.assertEqual(msg.content, content)

    reply = replies.create_reply(reply_text, msg)
    return response(content=reply.render())
def sort_url_by_qs_keys(url):
    """Return *url* with its query-string parameters sorted by key.

    e.g. '/v2/tasks?sort_key=id&sort_dir=asc&limit=10' becomes
    '/v2/tasks?limit=10&sort_dir=asc&sort_key=id'. Only the ordering of
    the query string changes; this keeps URLs deterministic so unit
    tests can compare them directly. (Originally noted by kragniz.)
    """
    parsed = urlparse.urlparse(url)
    # keep_blank_values=True so 'a=&b=1' round-trips unchanged.
    pairs = urlparse.parse_qsl(parsed.query, True)
    encoded_query = urllib.urlencode(sorted(pairs, key=lambda p: p[0]), True)
    return urlparse.urlunparse((parsed.scheme, parsed.netloc, parsed.path,
                                parsed.params, encoded_query,
                                parsed.fragment))
def do_GET(s):
    """Handle a GET request.

    Parses the query parameters and prints a message if the flow has
    completed. Note that we can't detect if an error occurred.
    """
    s.send_response(200)
    s.send_header("Content-type", "text/html")
    s.end_headers()

    # Everything after the first '?' is the callback's query string.
    query_string = s.path.split('?', 1)[-1]
    s.server.query_params = dict(parse_qsl(query_string))

    s.wfile.write(
        "<html><head><title>Authentication Status</title></head>")
    s.wfile.write("<body><p>The authentication flow has completed.</p>")
    s.wfile.write("</body></html>")
def test_add_params_sign(self):
    """add_params_sign must keep query args already in the URL ('page')."""
    endpoint_url = self.wcapi.requester.endpoint_url('products?page=2')

    params = OrderedDict()
    params["oauth_consumer_key"] = self.consumer_key
    params["oauth_timestamp"] = "1477041328"
    params["oauth_nonce"] = "166182658461433445531477041328"
    params["oauth_signature_method"] = self.signature_method
    params["oauth_version"] = "1.0"
    params["oauth_callback"] = 'localhost:8888/wordpress'

    signed_url = self.wcapi.auth.add_params_sign(
        "GET", endpoint_url, params)
    signed_url_params = parse_qsl(urlparse(signed_url).query)
    self.assertIn('page', dict(signed_url_params))
def authorize(self, consumer, scopes, redirect=False):
    """Authorize a Consumer for certain scopes, getting an authorization
    code.

    The way the provider (Keystone) will return the code is in the
    header, as an HTTP redirection:

        'Location': 'https://foo.com/welcome_back?code=somerandomstring&state=xyz'

    Utilizes the Identity API operation POST /OS-OAUTH2/authorize/.

    :param consumer: the client that will be authorized, and will
        exchange the authorization code for an access token.
    :param scopes: a list of scopes. They are provided by the consumer
        in the authorization request.
    :param redirect: The Keystone OAuth2 extension returns an HTTP 302
        to comply with RFC 6749, but in general we don't want the
        redirect to happen if we are using the keystoneclient.
    """
    endpoint = self.base_url + '/authorize'
    body = {
        'user_auth': {
            'client_id':base.getid(consumer),
            'scopes':scopes
        }
    }
    response, body = self.client.post(endpoint, body=body,
                                      redirect=redirect)
    # Keystone delivers the authorization code via an HTTP redirect.
    redirect_uri = response.headers.get('Location')
    parsed = urlparse.urlparse(redirect_uri)
    query = dict(urlparse.parse_qsl(parsed.query))
    authorization_code = {
        'redirect_uri':redirect_uri,
        'state': query['state']
    }
    # 'code' and 'token' are only present in the query on success.
    code = query.get('code', None)
    if code:
        authorization_code['code'] = code
    token = query.get('token', None)
    if token:
        authorization_code['token'] = token
    return self.resource_class(self, authorization_code)
def _get_credentials(self, key):
    """Exchange the OAuth verifier *key* for Twitter access tokens.

    On success the token/secret pair is stored in app.TWITTER_USERNAME
    and app.TWITTER_PASSWORD. Returns True on success, False otherwise.
    """
    # The request token was previously stashed in the app settings.
    request_token = {
        'oauth_token': app.TWITTER_USERNAME,
        'oauth_token_secret': app.TWITTER_PASSWORD,
        'oauth_callback_confirmed': 'true'
    }
    token = oauth.Token(request_token['oauth_token'],
                        request_token['oauth_token_secret'])
    token.set_verifier(key)
    logger.log(
        u'Generating and signing request for an access token using key {0}'
        .format(key), logger.DEBUG)
    oauth_consumer = oauth.Consumer(key=self.consumer_key,
                                    secret=self.consumer_secret)
    logger.log(u'oauth_consumer: {0}'.format(oauth_consumer), logger.DEBUG)
    oauth_client = oauth.Client(oauth_consumer, token)
    logger.log(u'oauth_client: {0}'.format(oauth_client), logger.DEBUG)
    resp, content = oauth_client.request(self.ACCESS_TOKEN_URL,
                                         method='POST',
                                         body='oauth_verifier=%s' % key)
    logger.log(u'resp, content: {0}, {1}'.format(resp, content),
               logger.DEBUG)
    # The access token comes back urlencoded in the response body.
    access_token = dict(parse_qsl(content))
    logger.log(u'access_token: {0}'.format(access_token), logger.DEBUG)
    logger.log(u'resp[status] = {0}'.format(resp['status']), logger.DEBUG)
    if resp['status'] != '200':
        logger.log(
            u'The request for a token with did not succeed: {0}'.format(
                resp['status']), logger.ERROR)
        return False
    else:
        logger.log(
            u'Your Twitter Access Token key: {0}'.format(
                access_token['oauth_token']), logger.DEBUG)
        logger.log(
            u'Access Token secret: {0}'.format(
                access_token['oauth_token_secret']), logger.DEBUG)
        # Persist the credentials for subsequent notifier calls.
        app.TWITTER_USERNAME = access_token['oauth_token']
        app.TWITTER_PASSWORD = access_token['oauth_token_secret']
        return True
def __init__(self, raw_request=None, encoded_request=None,
             protocol='http', dest_addr='localhost', port=80,
             method='GET', uri='/', version='HTTP/1.1', headers=None,
             data='', save_cookie=False, stop_magic=False):
    """Describe an HTTP request to be sent.

    Args:
        raw_request: A complete raw request, used verbatim when given.
        encoded_request: An encoded request, used when given.
        protocol/dest_addr/port: Destination of the request.
        method/uri/version: Components of the request line.
        headers (dict, optional): HTTP headers. May be augmented in
            place with Content-Type/Content-Length unless stop_magic.
        data (str or list): Request body; list entries are joined with
            CRLF.
        save_cookie (bool): Whether cookies should be persisted.
        stop_magic (bool): Disable the automatic header/body fix-ups.
    """
    # FIX: the previous signature used a shared mutable default
    # (headers={}); the Content-Type/Content-Length entries added below
    # mutated that shared dict and leaked into every later request that
    # relied on the default.
    if headers is None:
        headers = {}
    self.raw_request = raw_request
    self.encoded_request = encoded_request
    self.protocol = protocol
    self.dest_addr = dest_addr
    self.port = port
    self.method = method
    self.uri = uri
    self.version = version
    self.headers = headers
    self.data = data
    # Support data in list format and join on CRLF
    if isinstance(self.data, list):
        self.data = '\r\n'.join(self.data)
    self.save_cookie = save_cookie
    self.stop_magic = stop_magic
    # Check if there is any data and do defaults
    if self.data != '':
        # Default values for content length and header
        if 'Content-Type' not in headers and stop_magic is False:
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
        # check if encoded and encode if it should be
        if 'Content-Type' in headers:
            if headers['Content-Type'] == \
                    'application/x-www-form-urlencoded' and \
                    stop_magic is False:
                # Only re-encode when the body is not already encoded
                # (unquoting a plain body leaves it unchanged).
                if ensure_str(unquote(self.data)) == self.data:
                    query_string = parse_qsl(self.data)
                    if len(query_string) != 0:
                        encoded_args = urlencode(query_string)
                        self.data = encoded_args
        if 'Content-Length' not in headers and stop_magic is False:
            # The two is for the trailing CRLF and the one after
            headers['Content-Length'] = len(self.data)
def __init__(self, url, method='GET', query_args=None, headers=None):
    """Initialize the HTTP request.

    Args:
        url (bytes or unicode):
            The URL to request.

        method (bytes or unicode, optional):
            The HTTP method to send to the server.

        query_args (dict, optional):
            Any query arguments to add to the URL. All keys and values
            are expected to be strings (either byte strings or unicode
            strings). Underscores in keys are replaced with dashes.

        headers (dict, optional):
            Any HTTP headers to provide in the request. All keys and
            values are expected to be strings (either byte strings or
            unicode strings).
    """
    self.method = method
    self._fields = OrderedDict()
    self._files = OrderedDict()

    # FIX: replaced the mutable default arguments ({}) with None so a
    # single shared dict is not reused across all calls.
    if query_args is None:
        query_args = {}
    if headers is None:
        headers = {}

    # Replace all underscores in each query argument
    # key with dashes.
    query_args = {
        force_unicode(key).replace('_', '-'): force_unicode(value)
        for key, value in six.iteritems(query_args)
    }

    # Make sure headers are always in the native string type.
    self.headers = {
        str(key): str(value)
        for key, value in six.iteritems(headers)
    }

    # Add the query arguments to the url
    url_parts = list(urlparse(str(url)))
    query = dict(parse_qsl(url_parts[4]))
    query.update(query_args)
    url_parts[4] = urlencode(query)
    self.url = urlunparse(url_parts)
def test_upload_from_file_w_slash_in_name(self):
    """Verify a blob name containing '/' uploads correctly: the 'name'
    query parameter keeps the literal slash while the upload URL has it
    percent-encoded."""
    from six.moves.http_client import OK
    from six.moves.urllib.parse import parse_qsl
    from six.moves.urllib.parse import urlsplit
    from tempfile import NamedTemporaryFile
    from apitools.base.py import http_wrapper
    BLOB_NAME = 'parent/child'
    UPLOAD_URL = 'http://example.com/upload/name/parent%2Fchild'
    DATA = b'ABCDEF'
    # Canned responses the fake connection returns in order:
    # upload-location handshake, one incomplete chunk, then completion.
    loc_response = {'status': OK, 'location': UPLOAD_URL}
    chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
                       'range': 'bytes 0-4'}
    chunk2_response = {'status': OK}
    connection = _Connection(
        (loc_response, '{}'),
        (chunk1_response, ''),
        (chunk2_response, ''),
    )
    client = _Client(connection)
    bucket = _Bucket(client)
    blob = self._makeOne(BLOB_NAME, bucket=bucket)
    # Force chunked (resumable) upload for the 6-byte payload.
    blob._CHUNK_SIZE_MULTIPLE = 1
    blob.chunk_size = 5
    with NamedTemporaryFile() as fh:
        fh.write(DATA)
        fh.flush()
        blob.upload_from_file(fh, rewind=True)
        # rewind=True should leave the file pointer at EOF afterwards.
        self.assertEqual(fh.tell(), len(DATA))
    rq = connection.http._requested
    self.assertEqual(len(rq), 1)
    self.assertEqual(rq[0]['redirections'], 5)
    self.assertEqual(rq[0]['body'], DATA)
    self.assertEqual(rq[0]['connection_type'], None)
    self.assertEqual(rq[0]['method'], 'POST')
    uri = rq[0]['uri']
    scheme, netloc, path, qs, _ = urlsplit(uri)
    self.assertEqual(scheme, 'http')
    self.assertEqual(netloc, 'example.com')
    self.assertEqual(path, '/b/name/o')
    # parse_qsl keeps the comparison independent of parameter order.
    self.assertEqual(dict(parse_qsl(qs)),
                     {'uploadType': 'media', 'name': 'parent/child'})
    headers = dict(
        [(x.title(), str(y)) for x, y in rq[0]['headers'].items()])
    self.assertEqual(headers['Content-Length'], '6')
    self.assertEqual(headers['Content-Type'], 'application/octet-stream')