def add_preserved_filters(context, url, popup=False, to_field=None):
    opts = context.get('opts')
    preserved_filters = context.get('preserved_filters')

    parsed_url = list(urlparse(url))
    parsed_qs = dict(parse_qsl(parsed_url[4]))
    merged_qs = dict()

    if opts and preserved_filters:
        preserved_filters = dict(parse_qsl(preserved_filters))

        match_url = '/%s' % url.partition(get_script_prefix())[2]
        try:
            match = resolve(match_url)
        except Resolver404:
            pass
        else:
            current_url = '%s:%s' % (match.app_name, match.url_name)
            changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
            if changelist_url == current_url and '_changelist_filters' in preserved_filters:
                preserved_filters = dict(parse_qsl(preserved_filters['_changelist_filters']))

        merged_qs.update(preserved_filters)

        if popup:
            from myrobogals.admin.options import IS_POPUP_VAR
            merged_qs[IS_POPUP_VAR] = 1
        if to_field:
            from myrobogals.admin.options import TO_FIELD_VAR
            merged_qs[TO_FIELD_VAR] = to_field

    merged_qs.update(parsed_qs)

    parsed_url[4] = urlencode(merged_qs)
    return urlunparse(parsed_url)

def __init__(self, query_string=None, mutable=False, encoding=None):
    super(QueryDict, self).__init__()
    if not encoding:
        encoding = settings.DEFAULT_CHARSET
    # ``encoding`` is a property, so it can already be used during __init__.
    self.encoding = encoding
    if six.PY3:
        # The query string may be bytes (ISO-8859-1 encoded by default), None,
        # '' or unicode; the code below normalises it to unicode.
        if isinstance(query_string, bytes):
            # query_string normally contains URL-encoded data, a subset of ASCII.
            try:
                query_string = query_string.decode(encoding)
            except UnicodeDecodeError:
                # ... but some user agents are misbehaving :-(
                query_string = query_string.decode('iso-8859-1')
        # Pull the key/value pairs out of the query string (why not parse_qs?).
        # Note the ``query_string or ''`` idiom, which guards against None.
        for key, value in parse_qsl(query_string or '',
                                    keep_blank_values=True,
                                    encoding=encoding):
            self.appendlist(key, value)
    else:
        for key, value in parse_qsl(query_string or '',
                                    keep_blank_values=True):
            try:
                value = value.decode(encoding)
            except UnicodeDecodeError:
                value = value.decode('iso-8859-1')
            self.appendlist(force_text(key, encoding, errors='replace'), value)
    self._mutable = mutable

def test_owner_or_mod_required_passes_url_parameters(self):
    @owner_or_moderator_required
    def mock_view(request, user, context):
        return None

    request = Mock(spec=('path', 'REQUEST', 'user'))
    request.user = AnonymousUser()
    request.REQUEST = {'abra': 'cadabra', 'foo': 'bar'}
    request.path = '/some/path/'

    user = self.create_user('user')

    response = mock_view(request, user, {})
    self.assertEqual(isinstance(response, HttpResponseRedirect), True)

    url = response['location']
    parsed_url = urlparse(url)
    self.assertEqual(parsed_url.path, reverse('user_signin'))

    next = dict(parse_qsl(parsed_url.query))['next']
    next_url = unquote(next)
    parsed_url = urlparse(next_url)
    self.assertEqual(parsed_url.path, request.path)

    query = dict(parse_qsl(parsed_url.query))
    self.assertEqual(set(query.keys()), set(['foo', 'abra']))
    self.assertEqual(set(query.values()), set(['bar', 'cadabra']))
    self.assertEqual(query['abra'], 'cadabra')

def add_preserved_filters(context, url, popup=False, to_field=None):
    opts = context.get('opts')
    preserved_filters = context.get('preserved_filters')

    parsed_url = list(urlparse(url))
    parsed_qs = dict(parse_qsl(parsed_url[4]))
    merged_qs = dict()

    if opts and preserved_filters:
        preserved_filters = dict(parse_qsl(preserved_filters))

        match_url = '/%s' % url.partition(get_script_prefix())[2]
        try:
            match = resolve(match_url)
        except Resolver404:
            pass
        else:
            current_url = '%s:%s' % (match.app_name, match.url_name)
            changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
            if changelist_url == current_url and '_changelist_filters' in preserved_filters:
                preserved_filters = dict(parse_qsl(preserved_filters['_changelist_filters']))

        merged_qs.update(preserved_filters)

        if popup:
            from django.contrib.admin.options import IS_POPUP_VAR
            merged_qs[IS_POPUP_VAR] = 1
        if to_field:
            from django.contrib.admin.options import TO_FIELD_VAR
            merged_qs[TO_FIELD_VAR] = to_field

    merged_qs.update(parsed_qs)

    parsed_url[4] = urlencode(merged_qs)
    return urlunparse(parsed_url)

def methodNext(self, previous_request, previous_response):
    """Retrieves the next page of results.

    Args:
      previous_request: The request for the previous page. (required)
      previous_response: The response from the request for the previous page. (required)

    Returns:
      A request object that you can call 'execute()' on to request the next
      page. Returns None if there are no more items in the collection.
    """
    # Retrieve nextPageToken from previous_response.
    # Use as pageToken in previous_request to create new request.
    if 'nextPageToken' not in previous_response:
        return None

    request = copy.copy(previous_request)

    pageToken = previous_response['nextPageToken']
    parsed = list(urlparse(request.uri))
    q = parse_qsl(parsed[4])

    # Find and remove old 'pageToken' value from URI
    newq = [(key, value) for (key, value) in q if key != 'pageToken']
    newq.append(('pageToken', pageToken))
    parsed[4] = urlencode(newq)
    uri = urlunparse(parsed)

    request.uri = uri

    # NOTE: `methodName` is not defined in this snippet; in the original
    # library it is captured from the enclosing factory function.
    logger.info('URL being requested: %s %s' % (methodName, uri))

    return request

def get_client_by_state(request):
    """ Return OAuth client by state stored in session """
    server_error_codes = {
        'redirect_uri_mismatch': u'Redirect URI mismatch',
        'access_denied': u'Access denied',
    }
    if 'error' in request.GET:
        error_code = request.GET.get('error')
        error_message = server_error_codes.get(error_code, u'Unknown error')
        raise PowerAppError(error_message)

    if 'state' not in request.GET:
        raise PowerAppError('Request does not have a "state" parameter')

    state = request.GET['state']
    state_dict = dict(parse.parse_qsl(state or ''))
    if 'name' not in state_dict or 'secret' not in state_dict:
        raise PowerAppError("Invalid state")

    session_key = get_oauth_state_session_name(state_dict['name'])
    session_value = request.session.pop(session_key, None)
    if not session_value:
        raise PowerAppError("Session value not found")

    if state != session_value:
        raise PowerAppError("Invalid state")

    return get_client_by_name(state_dict['name'])

def extend_qs(base_url, **kwargs):
    """
    Extend the querystring of the URL with kwargs, taking care of Python types.

    - True is converted to "1"
    - When a value is equal to False or None, the corresponding key is removed
      from the querystring altogether. Note that empty strings and numeric
      zeroes are not treated as False here.
    - Unicode is converted to a UTF-8 string
    - Everything else is converted to a string using str(obj)

    For instance:

    >>> extend_qs('/foo/?a=b', c='d', e=True, f=False)
    '/foo/?a=b&c=d&e=1'
    """
    parsed = parse.urlparse(base_url)
    query = dict(parse.parse_qsl(parsed.query))
    for key, value in kwargs.items():
        value = convert_to_string(value)
        if value is None:
            query.pop(key, None)
        else:
            query[key] = value
    query_str = parse.urlencode(query)
    parsed_as_list = list(parsed)
    parsed_as_list[4] = query_str
    return parse.urlunparse(parsed_as_list)

def test_spam_submission_tags(self, mock_requests):
    admin = User.objects.get(username='******')
    flag, created = Flag.objects.get_or_create(name=SPAM_SUBMISSIONS_FLAG)
    flag.users.add(admin)
    revision = admin.created_revisions.all()[0]
    revision.tags = '"Banana" "Orange" "Apple"'
    revision.save()
    url = reverse('admin:wiki_revisionakismetsubmission_add')

    mock_requests.post(VERIFY_URL, content='valid')
    mock_requests.post(SPAM_URL, content=Akismet.submission_success)

    data = {
        'revision': revision.id,
        'type': 'spam',
    }
    self.client.login(username='******', password='******')
    url = reverse('admin:wiki_revisionakismetsubmission_add')
    response = self.client.post(url, data)
    self.assertEqual(response.status_code, 302)

    request_body = mock_requests.request_history[1].body
    submitted_data = dict(parse_qsl(request_body))
    expected_content = (
        'Seventh revision of the article.\n'
        'article-with-revisions\n'
        'Seventh revision of the article.\n'
        'Seventh revision of the article.\n'
        'Apple\n'
        'Banana\n'
        'Orange'
    )
    self.assertEqual(submitted_data['comment_content'], expected_content)

def remove_affiliate_code(url):
    parsed = urlparse(str(url))
    query = dict(parse_qsl(parsed.query))
    query.pop(app_settings.PARAM_NAME, None)
    url_parts = list(parsed)
    url_parts[4] = urlencode(query)
    return urlunparse(url_parts)

def parse_backend_uri(backend_uri):
    """
    Converts the "backend_uri" into a host and any extra params that are
    required for the backend. Returns a (host, params) tuple.
    """
    backend_uri_sliced = backend_uri.split('://')
    if len(backend_uri_sliced) > 2:
        raise InvalidCacheBackendError(
            "Backend URI can't have more than one scheme://")
    elif len(backend_uri_sliced) == 2:
        rest = backend_uri_sliced[1]
    else:
        rest = backend_uri_sliced[0]

    host = rest[2:]
    qpos = rest.find('?')
    if qpos != -1:
        params = dict(parse_qsl(rest[qpos + 1:]))
        host = rest[2:qpos]
    else:
        params = {}
    if host.endswith('/'):
        host = host[:-1]

    return host, params

def _strip_signing_parameters(self, url):
    # Boto3 does not currently support generating URLs that are unsigned. Instead we
    # take the signed URLs and strip any querystring params related to signing and expiration.
    # Note that this may end up with URLs that are still invalid, especially if params are
    # passed in that only work with signed URLs, e.g. response header params.
    # The code attempts to strip all query parameters that match names of known parameters
    # from v2 and v4 signatures, regardless of the actual signature version used.
    split_url = urlparse.urlsplit(url)
    qs = urlparse.parse_qsl(split_url.query, keep_blank_values=True)
    blacklist = set([
        "x-amz-algorithm", "x-amz-credential", "x-amz-date",
        "x-amz-expires", "x-amz-signedheaders", "x-amz-signature",
        "x-amz-security-token", "awsaccesskeyid", "expires", "signature",
    ])
    filtered_qs = ((key, val) for key, val in qs if key.lower() not in blacklist)
    # Note: Parameters that did not have a value in the original query string will have
    # an '=' sign appended to it, e.g ?foo&bar becomes ?foo=&bar=
    joined_qs = ("=".join(keyval) for keyval in filtered_qs)
    split_url = split_url._replace(query="&".join(joined_qs))
    return split_url.geturl()

def smart_urlquote(url):
    "Quotes a URL if it isn't already quoted."
    def unquote_quote(segment):
        segment = unquote(force_str(segment))
        # Tilde is part of RFC 3986 Unreserved Characters
        # http://tools.ietf.org/html/rfc3986#section-2.3
        # See also http://bugs.python.org/issue16285
        segment = quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + str('~'))
        return force_text(segment)

    # Handle IDN before quoting.
    try:
        scheme, netloc, path, query, fragment = urlsplit(url)
    except ValueError:
        # invalid IPv6 URL (normally square brackets in hostname part).
        return unquote_quote(url)

    try:
        netloc = netloc.encode('idna').decode('ascii')  # IDN -> ACE
    except UnicodeError:  # invalid domain part
        return unquote_quote(url)

    if query:
        # Separately unquoting key/value, so as to not mix querystring separators
        # included in query values. See #22267.
        query_parts = [(unquote(force_str(q[0])), unquote(force_str(q[1])))
                       for q in parse_qsl(query, keep_blank_values=True)]
        # urlencode will take care of quoting
        query = urlencode(query_parts)

    path = unquote_quote(path)
    fragment = unquote_quote(fragment)

    return urlunsplit((scheme, netloc, path, query, fragment))

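# A minimal usage sketch for smart_urlquote() above (an illustration added
# here, not part of the original module): the path is re-quoted segment by
# segment and the query is rebuilt with urlencode, so spaces end up as %20 in
# the path and + in the query. The shown output is what the logic above
# produces, assuming the usual Django helpers.
# >>> smart_urlquote('http://example.com/some path/?q=a b')
# 'http://example.com/some%20path/?q=a+b'
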
def post(url, params):
    """
    Make a POST request to the URL using the key-value pairs.  Return
    a set of key-value pairs.

    :url: URL to post to
    :params: Dict of parameters to include in post payload
    """
    payload = urlencode(params)
    start_time = time.time()
    response = requests.post(
        url, payload,
        headers={'content-type': 'text/namevalue; charset=utf-8'})
    if response.status_code != requests.codes.ok:
        raise exceptions.PayPalError("Unable to communicate with PayPal")

    # Convert response into a simple key-value format
    pairs = {}
    for key, value in parse_qsl(response.text):
        if isinstance(key, six.binary_type):
            key = key.decode('utf8')
        if isinstance(value, six.binary_type):
            value = value.decode('utf8')
        pairs[key] = value

    # Add audit information
    pairs['_raw_request'] = payload
    pairs['_raw_response'] = response.text
    pairs['_response_time'] = (time.time() - start_time) * 1000.0

    return pairs

def parse_backend_uri(backend_uri):
    """
    Converts the "backend_uri" into a host and any extra params that are
    required for the backend. Returns a (host, params) tuple.
    """
    backend_uri_sliced = backend_uri.split('://')
    if len(backend_uri_sliced) > 2:
        raise InvalidCacheBackendError(
            "Backend URI can't have more than one scheme://")
    elif len(backend_uri_sliced) == 2:
        rest = backend_uri_sliced[1]
    else:
        rest = backend_uri_sliced[0]

    host = rest
    qpos = rest.find('?')
    if qpos != -1:
        params = dict(parse_qsl(rest[qpos + 1:]))
        host = rest[:qpos]
    else:
        params = {}
    if host.endswith('/'):
        host = host[:-1]

    return host, params

def post(url, params, encode=True):
    """
    Make a POST request to the URL using the key-value pairs.  Return
    a set of key-value pairs.

    :url: URL to post to
    :params: Dict of parameters to include in post payload
    """
    if encode:
        payload = urlencode(params)
    else:
        payload = params
    start_time = time.time()
    response = requests.post(
        url, payload,
        headers={'content-type': 'text/namevalue; charset=utf-8'})
    if response.status_code != requests.codes.ok:
        raise exceptions.PayPalError("Unable to communicate with PayPal")

    # Convert response into a simple key-value format
    pairs = {}
    for key, value in parse_qsl(response.text):
        if isinstance(key, six.binary_type):
            key = key.decode('utf8')
        if isinstance(value, six.binary_type):
            value = value.decode('utf8')
        pairs[key] = value

    # Add audit information
    pairs['_raw_request'] = payload
    pairs['_raw_response'] = response.text
    pairs['_response_time'] = (time.time() - start_time) * 1000.0

    return pairs

def parse_backend_uri(backend_uri):
    """
    Converts the "backend_uri" into a cache scheme ('db', 'memcached', etc), a
    host and any extra params that are required for the backend. Returns a
    (scheme, host, params) tuple.
    """
    if backend_uri.find(":") == -1:
        raise InvalidCacheBackendError(
            "Backend URI must start with scheme://. URI: {}".format(backend_uri)
        )
    scheme, rest = backend_uri.split(":", 1)
    if not rest.startswith("//"):
        raise InvalidCacheBackendError(
            "Backend URI must start with scheme://. Split URI: {}".format(backend_uri)
        )

    host = rest[2:]
    qpos = rest.find("?")
    if qpos != -1:
        params = dict(parse_qsl(rest[qpos + 1:]))
        host = rest[2:qpos]
    else:
        params = {}
    if host.endswith("/"):
        host = host[:-1]

    return scheme, host, params

def add_affiliate_code(url, aid_code):
    parsed = urlparse(str(url))
    query = dict(parse_qsl(parsed.query))
    query.update({app_settings.PARAM_NAME: str(aid_code)})
    url_parts = list(parsed)
    url_parts[4] = urlencode(query)
    return urlunparse(url_parts)

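# A hypothetical round trip with the two affiliate helpers above, assuming
# app_settings.PARAM_NAME == 'aid' (illustration only; the actual parameter
# name and ordering of the rebuilt query string may differ):
# >>> add_affiliate_code('https://example.com/?page=2', 'abc123')
# 'https://example.com/?page=2&aid=abc123'
# >>> remove_affiliate_code('https://example.com/?page=2&aid=abc123')
# 'https://example.com/?page=2'
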
def parse_backend_uri(backend_uri):
    """
    Converts the "backend_uri" into a cache scheme ('db', 'memcached', etc), a
    host and any extra params that are required for the backend. Returns a
    (scheme, host, params) tuple.
    """
    if backend_uri.find(':') == -1:
        raise InvalidCacheBackendError("Backend URI must start with scheme://")
    scheme, rest = backend_uri.split(':', 1)
    if not rest.startswith('//'):
        raise InvalidCacheBackendError("Backend URI must start with scheme://")

    host = rest[2:]
    qpos = rest.find('?')
    if qpos != -1:
        params = dict(parse_qsl(rest[qpos + 1:]))
        host = rest[2:qpos]
    else:
        params = {}
    if host.endswith('/'):
        host = host[:-1]

    return scheme, host, params

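# A small usage sketch for the parser above (example values assumed, not from
# the original source): everything after '?' is parsed with parse_qsl and
# returned as the params dict, the trailing '/' is stripped from the host.
# >>> parse_backend_uri('memcached://127.0.0.1:11211/?timeout=30')
# ('memcached', '127.0.0.1:11211', {'timeout': '30'})
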
def __init__(self, url):
    if isinstance(url, string_types):
        parsed = urlparse(url)
        self.parsed = parsed._replace(query=frozenset(parse_qsl(parsed.query)),
                                      path=unquote_plus(parsed.path))
    else:
        # It's already a URL.
        self.parsed = url.parsed

def get(self, request, *args, **kwargs):
    if not request.GET.get('start_ts'):
        url = parse.urlparse(request.path)
        query_string = dict(parse.parse_qsl(request.META['QUERY_STRING']))
        start_ts = datetime.now().replace(microsecond=0) - timedelta(minutes=5)
        query_string['start_ts'] = start_ts.isoformat(sep=' ')
        query = parse.urlencode(query_string).replace('+', '%20')
        url = url._replace(query=query)
        return HttpResponseRedirect(parse.urlunparse(url))
    return super(GrepView, self).get(request, *args, **kwargs)

def __init__(self, query_string, mutable=False, encoding=None):
    super(QueryDict, self).__init__()
    if not encoding:
        encoding = settings.DEFAULT_CHARSET
    self.encoding = encoding
    if six.PY3:
        if isinstance(query_string, bytes):
            # query_string contains URL-encoded data, a subset of ASCII.
            query_string = query_string.decode()
        for key, value in parse_qsl(query_string or '',
                                    keep_blank_values=True,
                                    encoding=encoding):
            self.appendlist(key, value)
    else:
        for key, value in parse_qsl(query_string or '',
                                    keep_blank_values=True):
            self.appendlist(force_text(key, encoding, errors='replace'),
                            force_text(value, encoding, errors='replace'))
    self._mutable = mutable

def validate_webhook_token(string):
    """
    Verifies the webhook token. Raises an "Invalid webhook token" exception on
    failure, or returns a {u: <user_id>, i: <integration_id>} dict.
    """
    token, qs = parse.splitquery(string)
    expected_token = salted_hmac(WEBHOOK_HMAC_SALT, qs or '').hexdigest()
    if not constant_time_compare(token, expected_token):
        raise PowerAppError('Invalid Webhook token')
    return dict(parse.parse_qsl(qs))

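# A hedged sketch of a token string this validator would accept (it assumes
# the same WEBHOOK_HMAC_SALT constant and django.utils.crypto.salted_hmac used
# above; the user and integration ids are made up for illustration):
# qs = 'u=42&i=7'
# token = salted_hmac(WEBHOOK_HMAC_SALT, qs).hexdigest()
# validate_webhook_token('%s?%s' % (token, qs))  # -> {'u': '42', 'i': '7'}
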
def test_oauth2_authorization_redirect(self):
    client = Client()
    response = client.get("/oauth/google/")
    self.assertEqual(response.status_code, 302)
    url = urlparse(response["Location"])
    params = dict(parse_qsl(url.query))
    self.assertEqual(params["response_type"], "code")
    self.assertEqual(params["redirect_uri"], "http://testserver/oauth/google/")
    self.assertEqual(params["scope"], "openid email profile")

def context(self):
    ctx = {}
    if six.PY2 and isinstance(self.raw_response, six.text_type):
        self.raw_response = self.raw_response.encode('utf8')
    for key, val in parse_qsl(self.raw_response.decode('utf8')):
        if isinstance(key, six.binary_type):
            key = key.decode('utf8')
        if isinstance(val, six.binary_type):
            val = val.decode('utf8')
        ctx[key] = [val]
    return ctx

def test_spam_submission_submitted(self, mock_requests):
    admin = User.objects.get(username='******')
    flag, created = Flag.objects.get_or_create(name=SPAM_SUBMISSIONS_FLAG)
    flag.users.add(admin)
    revision = admin.created_revisions.all()[0]
    url = reverse('admin:wiki_revisionakismetsubmission_add')

    mock_requests.post(VERIFY_URL, content='valid')
    mock_requests.post(SPAM_URL, content=Akismet.submission_success)

    data = {
        'revision': revision.id,
        'type': 'spam',
    }
    self.client.login(username='******', password='******')
    url = reverse('admin:wiki_revisionakismetsubmission_add')
    response = self.client.post(url, data)
    self.assertEqual(response.status_code, 302)

    # successfully created the submission record
    submission = RevisionAkismetSubmission.objects.first()
    self.assertTrue(submission is not None)
    self.assertEqual(submission.sender, admin)
    self.assertTrue(submission.sent)
    self.assertEqual(submission.revision, revision)
    self.assertEqual(submission.type, 'spam')

    self.assertEqual(mock_requests.call_count, 2)
    request_body = mock_requests.request_history[1].body
    self.assertIn('user_ip=0.0.0.0', request_body)
    self.assertIn('user_agent=', request_body)
    self.assertIn(revision.slug, request_body)

    query_pairs = parse_qsl(request_body)
    expected_content = (
        'Seventh revision of the article.\n'
        'article-with-revisions\n'
        'Seventh revision of the article.\n'
        'Seventh revision of the article.'
    )
    expected = [
        ('blog', 'http://testserver/'),
        ('blog_charset', 'UTF-8'),
        ('blog_lang', 'en_us'),
        ('comment_author', 'admin'),
        ('comment_content', expected_content),
        ('comment_type', 'wiki-revision'),
        ('permalink', 'http://testserver/en-US/docs/article-with-revisions'),
        ('user_ip', '0.0.0.0'),
    ]
    self.assertEqual(sorted(query_pairs), expected)

    assert mock_requests.called
    assert mock_requests.call_count == 2

def qs_to_ordered_params(query_string):
    params_list = parse_qsl(unicode(query_string))
    params = OrderedDict()
    for field, value in params_list:
        if isinstance(value, (list, tuple)):
            value = value[0]
        if isinstance(value, six.binary_type):
            value = value.decode('utf8')
        if isinstance(field, six.binary_type):
            field = field.decode('utf8')
        params[field] = value
    return params

def context(self):
    ctx = {}
    content = self.raw_response
    if isinstance(content, six.binary_type):
        content = content.decode('utf8')
    for key, val in parse_qsl(content):
        if isinstance(key, six.binary_type):
            key = key.decode('utf8')
        if isinstance(val, six.binary_type):
            val = val.decode('utf8')
        ctx[key] = [val]
    return ctx

def assertEqualUrl(self, lhs, rhs):
    """
    Asserts whether the URLs are canonically equal.
    """
    if six.PY2:
        # See those Chinese characters above? Those are quite difficult
        # to match against the generated URLs in portable code. True,
        # this solution is not the nicest, but it works. And it's test
        # code after all.
        lhs = lhs.encode('utf8')

    lhs = urlparse(lhs)
    rhs = urlparse(rhs)
    self.assertEqual(lhs.scheme, rhs.scheme)
    self.assertEqual(lhs.netloc, rhs.netloc)
    self.assertEqual(lhs.path, rhs.path)
    self.assertEqual(lhs.fragment, rhs.fragment)

    # We used parse_qs before, but as query parameter order became
    # significant with Microsoft Authenticator and possibly other
    # authenticator apps, we've switched to parse_qsl.
    self.assertEqual(parse_qsl(lhs.query), parse_qsl(rhs.query))

def _get_operation_language(self, request):
    # Unfortunately the ?language GET query has a special meaning on the CMS.
    # It allows users to see another language while maintaining the same url.
    # This complicates language detection.
    site = get_current_site(request)
    parsed_url = urlparse(request.GET['cms_path'])
    queries = dict(parse_qsl(parsed_url.query))
    language = queries.get('language')

    if not language:
        language = translation.get_language_from_path(parsed_url.path)
    return get_language_code(language, site_id=site.pk)

def add_preserved_filters(context, url, popup=False, to_field=None):
    opts = context.get('opts')
    preserved_filters = context.get('preserved_filters')

    parsed_url = list(urlparse(url))
    parsed_qs = dict(parse_qsl(parsed_url[4]))
    merged_qs = dict()

    if opts and preserved_filters:
        preserved_filters = dict(parse_qsl(preserved_filters))

        match_url = '/{0}'.format(url.partition(get_script_prefix())[2])
        try:
            match = resolve(match_url)
        except Resolver404:
            pass
        else:
            current_url = '{0}:{1}'.format(match.app_name, match.url_name)
            changelist_url = 'desk:{0}_{1}_changelist'.format(
                opts.app_label, opts.model_name)
            if (current_url == changelist_url and
                    '_changelist_filters' in preserved_filters):
                preserved_filters = dict(
                    parse_qsl(preserved_filters['_changelist_filters']))

        merged_qs.update(preserved_filters)

        if popup:
            merged_qs[IS_POPUP_VAR] = 1
        if to_field:
            merged_qs[TO_FIELD_VAR] = to_field

    merged_qs.update(parsed_qs)

    parsed_url[4] = urlencode(merged_qs)
    return urlunparse(parsed_url)

def test_emailed_message_url_works_for_post_recipient(self):
    root = self.create_thread_for_user(self.sender, self.recipient)

    from django.core.mail import outbox
    html_message = get_html_message(outbox[0])
    link = BeautifulSoup(html_message).find('a', attrs={'class': 'thread-link'})
    # The href comes from an HTML email, so unescape the ampersands.
    url = link['href'].replace('&amp;', '&')
    parsed_url = urlparse(url)
    url_data = parse_qsl(parsed_url.query)

    self.client.login(user_id=self.recipient.id, method='force')
    response = self.client.get(parsed_url.path, url_data)
    dom = BeautifulSoup(response.content)

    threads = dom.find_all('ul', attrs={'class': 'js-thread'})
    self.assertEquals(len(threads), 1)

    thread_lists = dom.find_all('table', attrs={'class': 'js-thread-list'})
    self.assertEquals(len(thread_lists), 0)

def get_redirect_url(self, *args, **kwargs):
    obj = self.get_object()
    url = parse.urlparse(obj.url)
    query_params = copy(self.request.GET)
    lookup = self.kwargs.get('default_value')
    if lookup and obj.default_field:
        query_params[obj.default_field.name] = lookup
    query_params.update(**dict(parse.parse_qsl(url.query)))
    url = reverse('grep_view', kwargs={'name': obj.index.name})
    url = parse.urlparse(url)
    parts = list(url)
    parts[4] = parse.urlencode(query_params)
    return parse.urlunparse(parts)

def authenticate(self, username=None, password=None, token=None):
    user = None
    try:
        token = token or dict(parse_qsl(password.lstrip('? '))).get('sid')
    except:
        pass
    if not token:
        return user

    data = None
    try:
        data = json.loads(
            urllib2.urlopen("%s?sid=%s" % (settings.LOGIN_URL, token),
                            timeout=8, context=no_check_cert()).read())
    except:
        data = None

    if data is not None and data.has_key('user'):
        UserModel = get_user_model()
        if self.create_unknown_user:
            user, created = UserModel._default_manager.get_or_create(
                defaults={
                    'email': data.get('mail', ''),
                    'first_name': data.get('display', ''),
                    'is_staff': True,
                    'last_login': datetime.datetime.now(),
                    #'member_of' : data.get('memberOf', ''),
                },
                **{
                    UserModel.USERNAME_FIELD: data['user'],
                })
            if created:
                user = self.configure_user(user)
        else:
            try:
                user = UserModel._default_manager.get_by_natural_key(
                    data['username'])
            except UserModel.DoesNotExist:
                pass

    logger.debug('authenticate user {0}'.format(user))
    return user

def test_get_gateway_url(self):
    payproc = getpaid.backends.epaydk.PaymentProcessor(self.test_payment)
    fake_req = mock.MagicMock(spec=HttpRequest)
    fake_req.scheme = 'https'
    fake_req.COOKIES = {}
    fake_req.META = {}

    actual = payproc.get_gateway_url(fake_req)
    self.assertEqual(actual[1], "GET")
    self.assertEqual(actual[2], {})

    actual = list(urlparse(actual[0]))
    self.assertEqual(actual[0], 'https')
    self.assertEqual(actual[1], 'ssl.ditonlinebetalingssystem.dk')
    self.assertEqual(actual[2], '/integration/ewindow/Default.aspx')
    self.assertEqual(actual[3], '')

    domain = getpaid.utils.get_domain()
    accepturl = u'https://' + domain + '/getpaid.backends.epaydk/success/'
    callbackurl = u'https://' + domain + '/getpaid.backends.epaydk/online/'
    cancelurl = u'https://' + domain + '/getpaid.backends.epaydk/failure/'

    expected = [
        (u'merchantnumber', u'xxxxxxxx'),
        (u'orderid', u'1'),
        (u'currency', u'208'),
        (u'amount', u'12345'),
        (u'windowstate', u'3'),
        (u'mobile', u'1'),
        (u'timeout', u'3'),
        (u'instantcallback', u'0'),
        (u'language', u'2'),
        (u'accepturl', accepturl),
        (u'callbackurl', callbackurl),
        (u'cancelurl', cancelurl),
    ]
    md5hash = payproc.compute_hash(OrderedDict(expected))
    expected.append(('hash', md5hash))
    self.assertListEqual(expected, parse_qsl(actual[4]))
    self.assertEqual(actual[5], '')

def get_account_from_preserve_filters(self, request):
    preserved_filters = self.get_preserved_filters(request)
    preserved_filters = dict(parse_qsl(preserved_filters))
    cl_filters = preserved_filters.get('_changelist_filters')
    if cl_filters:
        return dict(parse_qsl(cl_filters)).get('account')
