def reset_view(request):
    """Password-reset view.

    GET: validate the emailed one-time token and show the reset form.
    POST: set the new password and rotate (invalidate) the reset token.
    """
    if request.method == 'GET':
        try:
            match = models.user.objects.filter(
                email=http.urlunquote_plus(request.GET['id']))
        except Exception:
            # Missing/malformed query parameters -> behave as "not found".
            return render(request, 'not_found.html')
        if len(match) == 0:
            return render(request, 'not_found.html')
        curr_user = match[0]
        if check_password(http.urlunquote_plus(request.GET['token']),
                          curr_user.user_token):
            form = forms.UserReset()
            return render(request, 'accounts/reset.html',
                          {'form': form,
                           'email': http.urlquote_plus(request.GET['id'])})
        return render(request, 'not_found.html')
    else:
        form = forms.UserReset(request.POST)
        if form.is_valid():
            # NOTE(review): the account id still comes from the query string
            # on POST — confirm the form action preserves ?id=...
            match = models.user.objects.filter(email=request.GET['id'])
            curr_user = match[0]
            curr_user.password = make_password(form.cleaned_data['password1'])
            # BUGFIX: rotate the token *before* save() so the one-time reset
            # link is actually invalidated. The original assigned the new
            # token after save(), so it was never persisted.
            curr_user.user_token = make_password(get_random_string(length=8))
            curr_user.save()
            return render(request, 'accounts/reset.html',
                          {'success': 'Password reset successful'})
        # Invalid form: surface the non-field errors.
        err = form.errors['__all__']
        return render(request, 'accounts/reset.html',
                      {'form': form,
                       'email': http.urlquote_plus(request.GET['id']),
                       'err': err})
def test_urlquote(self):
    """Quote/unquote round-trips, including the '+'-for-space variants."""
    raw = 'Paris & Orl\xe9ans'
    # Percent-encoding (%20 for space).
    self.assertEqual(http.urlquote(raw), 'Paris%20%26%20Orl%C3%A9ans')
    self.assertEqual(http.urlquote(raw, safe="&"), 'Paris%20&%20Orl%C3%A9ans')
    self.assertEqual(http.urlunquote('Paris%20%26%20Orl%C3%A9ans'), raw)
    self.assertEqual(http.urlunquote('Paris%20&%20Orl%C3%A9ans'), raw)
    # '+'-encoding (the *_plus variants).
    self.assertEqual(http.urlquote_plus(raw), 'Paris+%26+Orl%C3%A9ans')
    self.assertEqual(http.urlquote_plus(raw, safe="&"), 'Paris+&+Orl%C3%A9ans')
    self.assertEqual(http.urlunquote_plus('Paris+%26+Orl%C3%A9ans'), raw)
    self.assertEqual(http.urlunquote_plus('Paris+&+Orl%C3%A9ans'), raw)
def do_redirect(self, request, response=None):
    """Find and apply a Redirect matching the current request path.

    Candidate paths are the quoted path, its unquoted form, and (when
    APPEND_SLASH is off) slash-appended variants. Lookup results are
    cached per (path, site).
    """
    if (getattr(settings, 'DJANGOCMS_REDIRECT_404_ONLY', True)
            and response and response.status_code != 404):
        # Only rewrite 404 responses when configured to do so.
        return response
    full_path_quoted, part, querystring = request.get_full_path().partition('?')
    possible_paths = [full_path_quoted]
    full_path_unquoted = urlunquote_plus(full_path_quoted)
    if full_path_unquoted != full_path_quoted:
        # BUGFIX: append the unquoted path itself. The original appended
        # urlunquote_plus(full_path_unquoted) — a *double* unquote — so a
        # redirect stored with the singly-unquoted path of doubly-escaped
        # content could never match.
        possible_paths.append(full_path_unquoted)
    if not settings.APPEND_SLASH and not request.path.endswith('/'):
        full_path_slash, __, __ = request.get_full_path(
            force_append_slash=True).partition('?')
        possible_paths.append(full_path_slash)
        full_path_slash_unquoted = urlunquote_plus(full_path_slash)
        if full_path_slash_unquoted != full_path_slash:
            possible_paths.append(full_path_slash_unquoted)
    querystring = '%s%s' % (part, querystring)
    current_site = get_current_site(request)
    r = None
    key = get_key_from_path_and_site(full_path_quoted, settings.SITE_ID)
    cached_redirect = cache.get(key)
    if not cached_redirect:
        for path in possible_paths:
            filters = dict(site=current_site, old_path=path)
            try:
                r = Redirect.objects.get(**filters)
                break
            except Redirect.DoesNotExist:
                # Fall back to substring matching before trying the next path.
                r = self._match_substring(path)
                if r:
                    break
        cached_redirect = {
            'site': settings.SITE_ID,
            'redirect': r.new_path if r else None,
            'status_code': r.response_code if r else None,
        }
        cache.set(key, cached_redirect,
                  timeout=getattr(settings, 'DJANGOCMS_REDIRECT_CACHE_TIMEOUT', 3600))
    if cached_redirect['redirect'] == '':
        # An empty new_path means "gone".
        return self.response_gone_class()
    if cached_redirect['status_code'] == '302':
        return self.response_redirect_class(
            '%s%s' % (cached_redirect['redirect'], querystring))
    elif cached_redirect['status_code'] == '301':
        return self.response_permanent_redirect_class(
            '%s%s' % (cached_redirect['redirect'], querystring))
    elif cached_redirect['status_code'] == '410':
        return self.response_gone_class()
def query(request, keyword):
    """Top-10 existing poolrooms (by rating) whose name contains every
    whitespace-separated word of the unquoted *keyword*."""
    words = urlunquote_plus(keyword).strip().split(' ')
    name_q = None
    for word in words:
        clause = Q(name__contains=word)
        name_q = clause if name_q is None else name_q & clause
    matches = (Poolroom.objects
               .filter(name_q & Q(exist=1))
               .order_by('-rating')[:10])
    return HttpResponse(toJson(matches, poolroom_fields))
def get_params(request):
    """Build a dictionary with the URL parameters.

    Keys are upper-cased and values URL-unquoted; the SHASIGN signature
    parameter itself is excluded.
    """
    # Idiom: dict comprehension instead of dict([...]) over a list.
    return {key.upper(): urlunquote_plus(val)
            for key, val in request.GET.items()
            if key.upper() != 'SHASIGN'}
def make_search_breadcumbs(attrs_values):
    """
    Build the chain of search-filter breadcrumbs.

    :param attrs_values: iterable of objects with `attr`, `value` and
        optionally `title` attributes.
    :return: list of breadcrumb dicts (attr, title, href, value), where each
        href accumulates all previously seen attr/value pairs.
    """
    search_breadcumbs = []
    search_url = urlresolvers.reverse('search:frontend:index')
    # Accumulators of the attr=/value= parts seen so far; each breadcrumb's
    # href includes the previous filters plus its own pair.
    attrs_prepare = []
    values_prepare = []
    for item in attrs_values:
        attr_url_part = u'attr=' + getattr(item, 'attr')
        value_url_part = u'value=' + urlunquote_plus(getattr(item, 'value'))
        # NOTE(review): on the first item the joins are empty, producing
        # '?&attr=...&&value=...' — looks unintended, confirm.
        search_breadcumbs.append({
            'attr': getattr(item, 'attr'),
            'title': getattr(item, 'title', getattr(item, 'attr')),
            'href': search_url + u'?' + u'&'.join(attrs_prepare) + u'&' + attr_url_part + u'&' + u'&'.join(values_prepare) + u'&' + value_url_part,
            'value': titles.get_attr_value_title(getattr(item, 'attr'), getattr(item, 'value')),
        })
        # Only append after the href is built, so the current pair is not
        # duplicated inside its own accumulated prefix.
        attrs_prepare.append(attr_url_part)
        values_prepare.append(value_url_part)
    return search_breadcumbs
def parse_into_dict(response):
    """Map each concept's URL-unquoted, UTF-8-encoded, lower-cased name
    (bytes) to its frequency. Later duplicates overwrite earlier ones."""
    return {
        urlunquote_plus(entry['name']).encode('utf-8').lower(): entry['frequency']
        for entry in response
    }
def user_tributary_twitter_add(request,owner_name):
    """Handle bookmarlet and form-based addition of a twitter feed as a source. The bookmarlet is formatted in the following way: .../add?{0} """.format('="..."&'.join(UserTwitterSourceAdditionForm.base_fields.keys()))
    # NOTE(review): the .format() call above makes the string an expression
    # statement, not the function's __doc__.
    if settings.READ_ONLY:
        return HttpResponseForbidden("Source addition is not possible in READ_ONLY mode.")
    if request.method == 'POST':
        src_info = request.POST
    elif request.GET: # GET bookmarklet: values arrive URL-encoded
        # NOTE(review): v.encode("utf-8") before unquoting is Python-2 style;
        # on Python 3 urlunquote_plus would receive bytes — confirm runtime.
        src_info = dict( (k,urlunquote_plus(v.encode("utf-8"))) for k,v in request.GET.items())
    else:
        src_info = None
    form = UserTwitterSourceAdditionForm( request.user, src_info,
        initial={"title": "Home timeline", "username": owner_name},
        error_class=CustomErrorList)
    if src_info and form.is_valid():
        form.save()
        return HttpResponseRedirect(reverse('user_tributary_twitter', args=(request.user.username,)))
    # Invalid or absent input: re-render the addition form.
    d = add_base_template_context_data(
        {'form': form,
         'REST_PARAMS': ','.join(UserTwitterSourceAdditionForm.base_fields.keys())},
        request.user.username, request.user.username)
    return render(request, 'twitter_source_addition.html', d)
def render(self, name, value, attrs=None, **kwargs):
    """Render the s3direct upload widget (aliyun backend) as safe HTML."""
    file_name = os.path.basename(urlunquote_plus(value)) if value else ''
    if attrs:
        built = self.build_attrs(attrs)
        element_id = built.get('id', '')
        style = built.get('style', '')
    else:
        element_id = ''
        style = ''
    context = {
        'policy_url': reverse('upload-params', args=['aliyun']),
        'signing_url': reverse('s3direct-signing'),
        'element_id': element_id,
        'file_name': file_name,
        'dest': self.dest,
        'file_url': value or '',
        'name': name,
        'style': style,
        'csrf_cookie_name': getattr(settings, 'CSRF_COOKIE_NAME', 'csrftoken'),
    }
    template = join('s3direct', 's3direct-widget.tpl')
    return mark_safe(render_to_string(template, context))
def __init__(self, path, upload_to=None, preview_w=None, preview_h=None):
    """Resolve *path* (media-relative, MEDIA_URL-prefixed, or a remote URL
    that gets downloaded) and initialize the image file.

    On failure self.name is set to None and initialization stops early.
    """
    self.upload_to = upload_to
    self.preview_width = preview_w
    self.preview_height = preview_h
    self.metadata = {}
    # BUGFIX: always bind _path. Previously a local path that did not exist
    # left it unset, so the `not self._path` check below raised
    # AttributeError instead of cleanly giving name = None.
    self._path = None
    if not path:
        self.name = None
        return
    if '%' in path:
        path = urlunquote_plus(path)
    if path.startswith(settings.MEDIA_URL):
        # Strips leading MEDIA_URL, if starts with
        self._path = get_relative_media_url(path, clean_slashes=False)
    elif re.search(r'^(?:http(?:s)?:)?//', path):
        # url on other server? download it.
        self._path = self.download_image_url(path)
    else:
        abs_path = get_media_path(path)
        if os.path.exists(abs_path):
            self._path = get_relative_media_url(abs_path)
    if not self._path or not os.path.exists(os.path.join(settings.MEDIA_ROOT, self._path)):
        self.name = None
        return
    super(ImageFile, self).__init__(self._path)
    if self:
        self.preview_image = self.get_for_size('preview')
def test_quoted_path_redirect(self):
    """A redirect must match an escaped request path whether old_path is
    stored partially escaped or fully unescaped."""
    pages = self.get_pages()
    escaped_path = "/path%20%28escaped%29/"
    redirect = Redirect.objects.create(
        site=self.site_1,
        old_path=escaped_path,
        new_path=pages[0].get_absolute_url(),
        response_code="302",
    )
    response = self.client.get(escaped_path)
    # Fully-escaped old_path does not match.
    # NOTE(review): presumably the request path is normalized (%28/%29 ->
    # literal parentheses) before lookup, hence the 404 — confirm against
    # the middleware's path handling.
    self.assertEqual(response.status_code, 404)
    redirect.old_path = "/path%20(escaped)/"
    redirect.save()
    # Partially-escaped old_path (only the space still encoded) matches.
    response = self.client.get(escaped_path)
    self.assertEqual(response.status_code, 302)
    self.assertRedirects(response, redirect.new_path, status_code=302)
    # Fully-unescaped old_path also matches, via the unquoted candidate path.
    unescaped_path = urlunquote_plus(escaped_path)
    redirect.old_path = unescaped_path
    redirect.save()
    response = self.client.get(escaped_path)
    self.assertEqual(response.status_code, 302)
    self.assertRedirects(response, redirect.new_path, status_code=302)
def user_river_source_add(request,owner_name):
    """Handle bookmarlet and form-based addition of a syndication of source. The bookmarlet is formatted in the following way: .../source/add/?{0} """.format('="..."&'.join(UserSourceAdditionForm.base_fields.keys()))
    # NOTE(review): the .format() call above makes the string an expression
    # statement, not the function's __doc__.
    if settings.DEMO:
        return HttpResponseForbidden("Source addition is not possible in DEMO mode.")
    if request.method == 'POST':
        src_info = request.POST
    elif request.GET: # GET bookmarklet: values arrive URL-encoded
        # NOTE(review): v.encode("utf-8") before unquoting is Python-2 style;
        # on Python 3 urlunquote_plus would receive bytes — confirm runtime.
        src_info = dict( (k,urlunquote_plus(v.encode("utf-8"))) for k,v in request.GET.items())
    else:
        src_info = None
    form = UserSourceAdditionForm(request.user, src_info, error_class=CustomErrorList)
    if src_info and form.is_valid():
        form.save()
        return HttpResponseRedirect(reverse('wom_user.views.user_river_sources', args=(request.user.username,)))
    # Invalid or absent input: re-render the addition form.
    d = add_base_template_context_data(
        {'form': form,
         'REST_PARAMS': ','.join(UserSourceAdditionForm.base_fields.keys())},
        request.user.username, request.user.username)
    return render_to_response('source_addition.html', d,
        context_instance=RequestContext(request))
def __init__(self, path, upload_to=None, preview_w=None, preview_h=None):
    """Resolve *path* (storage-relative, MEDIA_URL-prefixed, or a remote URL
    that gets downloaded) and initialize the image file.

    On failure self.name is set to None and initialization stops early.
    """
    self.upload_to = upload_to
    self.preview_width = preview_w
    self.preview_height = preview_h
    self.metadata = {}
    # BUGFIX: always bind _path. Previously a path missing from
    # default_storage left it unset, so the `not self._path` check below
    # raised AttributeError instead of cleanly giving name = None.
    self._path = None
    if not path:
        self.name = None
        return
    if '%' in path:
        path = urlunquote_plus(path)
    if path.startswith(settings.MEDIA_URL):
        # Strips leading MEDIA_URL, if starts with
        self._path = get_relative_media_url(path, clean_slashes=False)
    elif re.search(r'^(?:http(?:s)?:)?//', path):
        # url on other server? download it.
        self._path = self.download_image_url(path)
    else:
        if default_storage.exists(path):
            self._path = path
    if not self._path:
        self.name = None
        return
    super(ImageFile, self).__init__(self._path)
    if self:
        self.preview_image = self.get_for_size('preview')
def get_context_data(self, **kwargs):
    """Add `next_url` to the context: the reversed view name from the URL
    kwargs, or the URL-unquoted literal value when reversing fails."""
    context = super(SuccessView, self).get_context_data(**kwargs)
    raw_next = self.kwargs.get("next", "")
    try:
        resolved = reverse(raw_next)
    except Exception:
        resolved = urlunquote_plus(raw_next)
    context["next_url"] = resolved
    return context
def activate_view(request):
    """Account-activation view: validate the emailed token against the
    (not-yet-activated) account identified by ?id=... and activate it.

    NOTE(review): non-GET requests fall through and return None, as in the
    original — confirm only GET is routed here.
    """
    if request.method == 'GET':
        try:
            match = models.user.objects.filter(
                email=http.urlunquote_plus(request.GET['id']),
                activated=False)
        except Exception:
            # Missing/malformed query parameters -> behave as "not found".
            return render(request, 'not_found.html')
        if len(match) == 0:
            return render(request, 'not_found.html')
        curr_user = match[0]
        # BUGFIX: removed debug print() calls that leaked the activation
        # token (and a password check result) to stdout/logs.
        if check_password(http.urlunquote_plus(request.GET['token']),
                          curr_user.user_token):
            curr_user.activated = True
            curr_user.save()
            return render(request, 'accounts/activate.html')
        return render(request, 'not_found.html')
def get_context_data(self, **kwargs):
    """Add `next_url` (reversed view name, or unquoted literal URL on
    failure) and `msgs` from the URL kwargs to the context."""
    context = super(ErrorView, self).get_context_data(**kwargs)
    raw_next = self.kwargs.get("next", "")
    messages = self.kwargs.get("msgs", "")
    try:
        resolved = reverse(raw_next)
    except Exception:
        resolved = urlunquote_plus(raw_next)
    context["next_url"] = resolved
    context["msgs"] = messages
    return context
def parse_get_params(get_params_string):
    """Parse a raw query string ('?a=1&b=2&a=3') into {key: [values, ...]}.

    Values are URL-unquoted; repeated keys accumulate in order; parts
    without '=' (or with an empty value part) are skipped.
    """
    params_dict = {}
    params = get_params_string.replace(u'?', '')
    if params:
        for param_part in params.split('&'):
            # BUGFIX: split on the first '=' only, so values that themselves
            # contain '=' (e.g. base64, nested params) are not truncated.
            key_value_pair = param_part.split('=', 1)
            if len(key_value_pair) > 1:
                values = params_dict.get(key_value_pair[0], [])
                if not values:
                    params_dict[key_value_pair[0]] = values
                values.append(urlunquote_plus(key_value_pair[1]))
    return params_dict
def tag_page(request, tagname, modeltype="concert"):
    """List entities of *modeltype* annotated with *tagname*, with usage
    counts, and render the social tag page."""
    tagname = urlunquote_plus(tagname)
    annotations = (
        Annotation.objects
        .filter(tag__name=tagname, entity_type=modeltype)
        .values('entity_type', 'entity_id', 'tag')
        .annotate(freq=Count('entity_type'))
    )
    objects = [
        [__get_entity(modeltype, ann['entity_id']), ann['freq']]
        for ann in annotations
    ]
    context = {
        'objects': objects,
        'tag_name': tagname,
        'modeltype': modeltype,
    }
    return render(request, 'social/tag_page.html', context)
def render(self, name, value, attrs=None, **kwargs):
    """Render the s3upload widget as safe HTML."""
    path = get_s3_path_from_url(value) if value else ''
    file_name = os.path.basename(urlunquote_plus(path))
    tpl = os.path.join('s3upload', 's3upload-widget.tpl')
    output = render_to_string(tpl, {
        'policy_url': reverse('s3upload'),
        'element_id': self.get_attr(attrs, 'id'),
        'file_name': file_name,
        'dest': self.dest,
        'file_url': self.get_file_url(path),
        'name': name,
        # BUGFIX: this key was a duplicate 'element_id', which silently
        # overwrote the real element id with the style attribute and left
        # the template without a 'style' value.
        'style': self.get_attr(attrs, 'style'),
    })
    return mark_safe(output)
def tag_page(request, tagname, modeltype="concert"):
    """Render the tag page for *tagname*; responds 404 when the tag does
    not exist."""
    tagname = urlunquote_plus(tagname)
    get_object_or_404(Tag, name=tagname)  # existence check only
    annotations = (
        Annotation.objects
        .filter(tag__name=tagname, entity_type=modeltype)
        .values('entity_type', 'entity_id', 'tag')
        .annotate(freq=Count('entity_type'))
    )
    objects = [
        [__get_entity(modeltype, ann['entity_id']), ann['freq']]
        for ann in annotations
    ]
    variables = RequestContext(request, {
        'objects': objects,
        'tag_name': tagname,
        'modeltype': modeltype,
    })
    return render_to_response('tag_page.html', variables)
def _document_raw(request, doc, doc_html, rendering_params):
    """
    Display a raw Document.
    """
    response = HttpResponse(doc_html)
    response['X-Frame-Options'] = 'Allow'
    response['X-Robots-Tag'] = 'noindex'
    absolute_url = urlunquote_plus(doc.get_absolute_url())
    # BUGFIX: `in (x)` without a trailing comma performed a *substring* test
    # against the configured path string; the one-element tuple makes this
    # the intended exact-match membership test.
    if absolute_url in (config.KUMA_CUSTOM_SAMPLE_CSS_PATH,):
        response['Content-Type'] = 'text/css; charset=utf-8'
    elif doc.is_template:
        # Treat raw, un-bleached template source as plain text, not HTML.
        response['Content-Type'] = 'text/plain; charset=utf-8'
    return _set_common_headers(doc, rendering_params['section'], response)
def render(self, name, value, attrs=None):
    """Render the s3direct widget by formatting the class-level HTML
    template string."""
    file_name = os.path.basename(urlunquote_plus(value)) if value else ''
    built = self.build_attrs(attrs)
    output = self.html.format(
        policy_url=reverse('s3direct'),
        element_id=built.get('id'),
        file_name=file_name,
        dest=self.dest,
        file_url=value or '',
        name=name,
        style=built.get('style'))
    return mark_safe(output)
def render(self, name, value, **kwargs):
    """Render the s3direct widget template to safe HTML."""
    url = value or ''
    context = {
        'policy_url': reverse('s3direct'),
        'signing_url': reverse('s3direct-signing'),
        'dest': self.dest,
        'name': name,
        'csrf_cookie_name': getattr(settings, 'CSRF_COOKIE_NAME', 'csrftoken'),
        'file_url': url,
        'file_name': os.path.basename(urlunquote_plus(url)),
    }
    template_path = os.path.join('s3direct', 's3direct-widget.tpl')
    return mark_safe(render_to_string(template_path, context))
def render(self, name, value, **kwargs):
    """Render the s3direct widget template for this form field."""
    current_url = value if value else ''
    template = os.path.join('s3direct', 's3direct-widget.tpl')
    rendered = render_to_string(template, {
        'policy_url': reverse('s3direct'),
        'signing_url': reverse('s3direct-signing'),
        'dest': self.dest,
        'name': name,
        'csrf_cookie_name': getattr(settings, 'CSRF_COOKIE_NAME', 'csrftoken'),
        'file_url': current_url,
        'file_name': os.path.basename(urlunquote_plus(current_url)),
    })
    return mark_safe(rendered)
def render(self, name, value, attrs=None, **kwargs):
    """Render the s3direct upload widget as safe HTML."""
    if value:
        display_name = os.path.basename(urlunquote_plus(value))
    else:
        display_name = ''
    built = self.build_attrs(attrs) if attrs else {}
    context = {
        'policy_url': reverse('s3direct'),
        'element_id': built.get('id', ''),
        'file_name': display_name,
        'dest': self.dest,
        'file_url': value or '',
        'name': name,
        'style': built.get('style', ''),
    }
    template = os.path.join('s3direct', 's3direct-widget.tpl')
    return mark_safe(render_to_string(template, context))
def get_download_file_response(self, from_share=False):
    """Build an attachment download response served via nginx
    X-Accel-Redirect.

    When *from_share* is True the file comes from the share's target object
    instead of the object itself.
    """
    obj = self.get_object()
    if not from_share:
        fileobject = obj.file
        filename = obj.name
    else:
        fileobject = obj.target.file
        filename = obj.target.name
    # Internal redirect target for nginx, encoded to bytes.
    # NOTE(review): HTTP header values are normally str on Python 3 —
    # confirm the bytes value is intended here.
    redirect = urlunquote_plus(fileobject.url).encode('utf-8')
    response = HttpResponse(charset='utf-8')
    response['Content-Length'] = fileobject.size
    response['Content-Disposition'] = "attachment; filename=\"%s\"" % urlquote_plus(
        filename, safe="()/,&=@#%*")
    response['Content-Type'] = mimetypes.guess_type(fileobject.path)[0]
    # NOTE(review): Content-Length is set twice; this os.path.getsize()
    # value overwrites fileobject.size above — confirm which is intended.
    response['Content-Length'] = os.path.getsize(fileobject.path)
    response['X-Accel-Redirect'] = redirect
    return response
def tag_page(request, tagname, modeltype="concert"):
    """Show entities of *modeltype* carrying tag *tagname*, with annotation
    frequencies; 404s when the tag is unknown."""
    tagname = urlunquote_plus(tagname)
    get_object_or_404(Tag, name=tagname)  # 404 early if the tag is unknown
    counted = (
        Annotation.objects
        .filter(tag__name=tagname, entity_type=modeltype)
        .values('entity_type', 'entity_id', 'tag')
        .annotate(freq=Count('entity_type'))
    )
    objects = []
    for row in counted:
        objects.append([__get_entity(modeltype, row['entity_id']), row['freq']])
    return render(request, 'social/tag_page.html', {
        'objects': objects,
        'tag_name': tagname,
        'modeltype': modeltype,
    })
def render(self, name, value, attrs=None):
    """Render the s3direct widget template with the current file and
    widget attributes."""
    file_name = os.path.basename(urlunquote_plus(value)) if value else ''
    built = self.build_attrs(attrs)
    tpl_context = {
        'policy_url': reverse('s3direct'),
        'element_id': built.get('id', ''),
        'file_name': file_name,
        'dest': self.dest,
        'file_url': value or '',
        'name': name,
        'style': built.get('style', ''),
    }
    tpl = os.path.join('s3direct', 's3direct-widget.tpl')
    return mark_safe(render_to_string(tpl, tpl_context))
def get_entity(self, identifier, is_path=False):
    """ looks up an entity

    Returns the Entity for the (URL-unquoted) identifier, using the
    in-memory self.entities mapping first, or False when nothing resolves.
    """
    output = False
    identifier = http.urlunquote_plus(identifier)
    if identifier in self.entities:
        # best case scenario, the entity is already looked up
        output = self.entities[identifier]
    else:
        found = False
        entity = Entity()
        if is_path:
            found = entity.context_dereference(identifier)
        else:
            found = entity.dereference(identifier)
            if found is False:
                # case of linked data slugs
                # NOTE(review): nesting reconstructed — the slug retry is
                # assumed to apply only to non-path lookups; confirm.
                found = entity.dereference(identifier, identifier)
        if found:
            output = entity
    return output
def deserialize(self, request, data, format=None):
    """Deserialize the request, additionally wiring uploaded files into the
    JSON payload for multipart/form-data requests.

    Raises ValueError when two uploads share the same file name.
    """
    if not format:
        format = request.META.get('CONTENT_TYPE', 'application/json')
    attached_objects = {}
    for upload in request.FILES.getlist('attached_object'):
        # BUGFIX: explicit check instead of `assert`, which is stripped
        # under `python -O` (also avoids shadowing the builtin `file`).
        if upload.name in attached_objects:
            raise ValueError("Uploaded 2 files with the same name.")
        attached_objects[upload.name] = upload
    if format.lower().startswith('multipart/form-data'):
        data = urlunquote_plus(request.POST['resource_data'])
        data = json.loads(data, strict=False)
        for obj in data['objects']:
            for key, value in obj.items():
                # NOTE(review): assumes __attach_object only replaces values
                # (no key insertion) while items() is being iterated — confirm.
                self.__attach_object(attached_objects, obj, key, value)
        return data
    return super(MultipartFormDataModelResource, self).deserialize(
        request, data, format)
def replace_tokens(search_string, token_vals):
    """URL-unquote *search_string* and substitute its ``{token}``
    placeholders from *token_vals*.

    Tokens missing from *token_vals* are left in place as literal
    ``{token}`` text. Side effect (preserved from the original): missing
    tokens are added to the caller's *token_vals* dict as their own
    literal placeholder.
    """
    search_string = urlunquote_plus(search_string)
    final_string = search_string
    missing_keys = True
    while missing_keys:
        try:
            final_string = search_string.format(**token_vals)
            missing_keys = False
        except KeyError as e:
            # BUGFIX: KeyError has no `.message` attribute on Python 3; the
            # missing key name is the first exception argument.
            missing = e.args[0]
            # Re-insert the token literally so the next format() pass
            # leaves it untouched.
            token_vals[missing] = '{' + missing + '}'
    return final_string
def user_collection_add(request,owner_name):
    """Handle bookmarlet and form-based addition of a bookmark. The bookmarlet is formatted in the following way: .../collection/add/?{0} """.format('="..."&'.join(UserBookmarkAdditionForm.base_fields.keys()))
    # NOTE(review): the .format() call above makes the string an expression
    # statement, not the function's __doc__.
    if settings.READ_ONLY:
        return HttpResponseForbidden("Bookmark addition is not possible in READ_ONLY mode.")
    if request.method == 'POST':
        bmk_info = request.POST
    elif request.GET: # GET bookmarklet: values arrive URL-encoded
        # NOTE(review): v.encode("utf-8") before unquoting is Python-2 style;
        # on Python 3 urlunquote_plus would receive bytes — confirm runtime.
        bmk_info = dict( (k,urlunquote_plus(v.encode("utf-8"))) for k,v in request.GET.items())
    else:
        bmk_info = None
    form = UserBookmarkAdditionForm(request.user, bmk_info, error_class=CustomErrorList)
    if bmk_info and form.is_valid():
        form.save()
        return HttpResponseRedirect(reverse('user_collection', args=(request.user.username,)))
    # Invalid or absent input: re-render the addition form.
    d = add_base_template_context_data(
        {'form': form,
         'REST_PARAMS': ','.join(UserBookmarkAdditionForm.base_fields.keys())},
        request.user.username, request.user.username)
    return render(request, 'bookmark_addition.html', d)
def get_entity(self, identifier, is_path=False):
    """ returns an entity object, if it exists looks up an entity

    Lookup order: in-memory dict (plain id, then context id), then the
    cache backend (plain key, then path key), then the database; DB hits
    are written back to the cache (by id or path key, plus slug key).
    Returns False when nothing resolves.
    """
    output = False
    identifier = http.urlunquote_plus(identifier)
    context_id = 'mem-cache-context---' + identifier
    if identifier in self.entities:
        # best case scenario, the entity is already looked up
        output = self.entities[identifier]
    elif context_id in self.entities and is_path:
        output = self.entities[context_id]
    else:
        context_cache_id = self.make_memory_cache_key('entities-path', identifier)
        cache_id = self.make_memory_cache_key('entities', identifier)
        item = self.get_cache_object(cache_id)
        if item is not None:
            output = item
        else:
            item = self.get_cache_object(context_cache_id)
            if item is not None:
                output = item
        if output is False:
            # Cache miss on both keys: hit the database.
            output = self.get_entity_db(identifier, context_id, is_path)
            if output is not False:
                entity = output
                if is_path:
                    self.save_cache_object(context_cache_id, entity)
                else:
                    self.save_cache_object(cache_id, entity)
                slug_id = self.make_memory_cache_key('entities', entity.slug)
                # BUGFIX: compare key strings by value, not identity —
                # `is not` on two dynamically built strings is almost
                # always True, defeating the duplicate-save guard.
                if slug_id != cache_id:
                    self.save_cache_object(slug_id, entity)
    return output
def render(self, name, value, **kwargs):
    """Render the s3direct widget; includes the JSON-encoded key_args in
    the template context when set."""
    file_url = value or ''
    ctx = {
        'policy_url': reverse('s3direct'),
        'signing_url': reverse('s3direct-signing'),
        'dest': self.dest,
        'name': name,
        'csrf_cookie_name': getattr(settings, 'CSRF_COOKIE_NAME', 'csrftoken'),
        'file_url': file_url,
        'file_name': os.path.basename(urlunquote_plus(file_url)),
    }
    if self.key_args:
        try:
            serialized = json.dumps(self.key_args)
        except Exception:
            raise RuntimeError(
                'widget argument key_args is not json-serializable')
        ctx['key_args'] = serialized
    template = os.path.join('s3direct', 's3direct-widget.tpl')
    return mark_safe(render_to_string(template, ctx))
def get_con_entity(self, identifier):
    """ returns an entity object, if it exists looks up an entity

    Context-entity variant: in-memory dict first, then the cache backend,
    then the database; DB hits are written back under both the identifier
    key and the entity's slug key. Returns False when nothing resolves.
    """
    output = False
    identifier = http.urlunquote_plus(identifier)
    if identifier in self.context_entities:
        # best case scenario, the entity is already looked up
        output = self.context_entities[identifier]
    else:
        cache_id = self.make_memory_cache_key('context-entities', identifier)
        item = self.get_cache_object(cache_id)
        if item is not None:
            output = item
        else:
            output = self.get_context_entity_db(identifier)
            if output is not False:
                entity = output
                self.save_cache_object(cache_id, entity)
                slug_id = self.make_memory_cache_key('context-entities', entity.slug)
                # BUGFIX: compare key strings by value, not identity —
                # `is not` on two dynamically built strings is almost
                # always True, defeating the duplicate-save guard.
                if slug_id != cache_id:
                    self.save_cache_object(slug_id, entity)
    return output
def upload_image(request):
    # AJAX image-upload endpoint: streams the raw request body to
    # MEDIA_ROOT/images/upload and returns a JSON success/error payload.
    upload_directory = os.path.join(settings.MEDIA_ROOT, 'images', 'upload')
    allowed_extensions = ('.jpg', '.jpeg', '.png', '.gif')
    size_limit = 375767  # bytes; NOTE(review): magic number — confirm intended limit
    # Bound method kept uncalled; request metadata is reached through it below.
    uploaded = request.read
    # NOTE(review): `im_self` is a Python-2-only bound-method attribute
    # (Python 3 uses `__self__`) — confirm the target runtime.
    file_size = int(uploaded.im_self.META['CONTENT_LENGTH'])
    file_name = uploaded.im_self.META['HTTP_X_FILE_NAME']
    file_name = urlunquote_plus(file_name)
    # Read exactly CONTENT_LENGTH bytes of the request body.
    file_content = uploaded(file_size)
    file_name, extension = os.path.splitext(file_name)
    extension = extension.lower()
    if extension in allowed_extensions:
        file_name = slugify(file_name) + extension
        if file_size <= size_limit:
            file_path = os.path.join(upload_directory, file_name)
            # NOTE(review): exists()-then-open is racy under concurrent
            # uploads; an existing file is silently kept, yet success is
            # still reported — confirm that is intended.
            if not os.path.exists(file_path):
                with open(file_path, 'wb+') as f:
                    f.write(file_content)
            content = {'success': True}
        else:
            content = {'error': 'File is too large.'}
    else:
        content = {'error': 'File has an invalid extension.'}
    return HttpResponse(json.dumps(content), content_type='application/json')
def add_filters_json(self, request_dict):
    """Adds JSON describing active search filters.

    :param dict request_dict: Dictionary object of the GET request from
        the client.
    """
    # NOTE: This function creates a list of the filters that the client
    # requested in their search / query. Each listed filter has some
    # metadata and links to remove the filter or broaden it, in case a
    # given filter has multiple levels of hierarchy.
    filters = []
    string_fields = []  # so we have an interface for string searches
    for param_key, param_vals in request_dict.items():
        if param_vals is None:
            continue
        if param_key in configs.FILTER_IGNORE_PARAMS:
            continue
        # Normalize the values of this parameter into a list to make
        # processing easier.
        if not isinstance(param_vals, list):
            param_vals = [param_vals]
        # Get the configuration for this specific request parameter.
        param_config = configs.FILTER_PARAM_CONFIGS.get(param_key)
        if not param_config:
            # No configuration for this request parameter
            continue
        # Get the hierarchy delimiter configured for values used by this
        # param_key.
        hierarchy_delim = param_config.get('hierarchy_delim')
        for param_val in param_vals:
            if (hierarchy_delim and hierarchy_delim in param_val):
                # NOTE: Sometimes we may get a param_val that has a trailing
                # hierarchy delim, which would split into empty segments, so
                # trim delimiters off both ends before splitting.
                splitting_param_val = param_val.lstrip(hierarchy_delim)
                splitting_param_val = splitting_param_val.rstrip(hierarchy_delim)
                hierarchy_vals = splitting_param_val.split(hierarchy_delim)
            else:
                hierarchy_vals = [param_val]
            parent_path_vals = []
            # text_template_value gets set when we encounter a property with
            # a string data type; it is used to make a search template for
            # that string.
            text_template_value = None
            for act_val in hierarchy_vals:
                act_val = urlunquote_plus(act_val)
                parent_path_vals.append(act_val)
                if hierarchy_delim:
                    act_full_path = hierarchy_delim.join(parent_path_vals)
                else:
                    act_full_path = act_val
                # Count the existing filters to make the index of the next one.
                i = len(filters) + 1
                act_filter = LastUpdatedOrderedDict()
                act_filter['id'] = '#filter-{}'.format(i)
                act_filter['oc-api:filter'] = param_config['oc-api:filter']
                if hierarchy_delim is None:
                    # Do one of the many special case non-hierarchic filter
                    # parameters.
                    act_filter = self.add_non_hierarchy_filter_json(
                        param_key,
                        act_val,
                        act_filter,
                        param_config,
                        request_dict,
                    )
                    filters.append(act_filter)
                    # Skip everything in this loop below, because below we're
                    # doing things related to entities that may be in a
                    # hierarchy.
                    continue
                # The filter-group helps to group together all of the levels
                # of the hierarchy_vals.
                act_filter['oc-api:filter-group'] = param_val
                if param_key == "path":
                    # Look up item entity for spatial context path items by
                    # the current path, which will include the hierarchy of
                    # parent items.
                    item_lookup_val = act_full_path
                else:
                    # Look up the item entity simply by using the current
                    # act_val.
                    item_lookup_val = act_val
                if text_template_value is None:
                    # Do not do this for the string value of a string type
                    # property.
                    act_filter, item = self.add_entity_item_to_act_filter(
                        item_lookup_val,
                        act_filter,
                        is_spatial_context=param_config.get(
                            'is_spatial_context', False),
                        look_up_mapping_dict=param_config.get(
                            'look_up_mapping_dict'),
                    )
                    if item and getattr(item, 'data_type') == 'xsd:string':
                        act_search_term = None
                        text_template_value = (
                            act_full_path
                            + hierarchy_delim
                            + configs.URL_TEXT_QUERY_TEMPLATE)
                else:
                    # This is the case of a search term, which is the child
                    # of a descriptive property of data_type xsd:string.
                    act_search_term = item_lookup_val
                    act_filter['label'] = item_lookup_val
                    if param_config.get('label-prop-template'):
                        # Use a configured template for the label.
                        act_filter['label'] = param_config[
                            'label-prop-template'].format(
                                act_val=item_lookup_val)
                # Add text field templates
                if text_template_value is not None:
                    # Add some special keys and values relating to text
                    # searches.
                    act_filter["oc-api:search-term"] = act_search_term
                    act_filter = self.add_links_to_act_filter(
                        param_key,
                        match_old_value=param_val,
                        new_value=text_template_value,
                        act_filter=act_filter,
                        request_dict=request_dict,
                        make_text_template=True,
                    )
                # Add the totally remove filter links
                act_filter = self.add_links_to_act_filter(
                    param_key,
                    match_old_value=param_val,
                    new_value=None,
                    act_filter=act_filter,
                    request_dict=request_dict,
                )
                if len(parent_path_vals) < len(hierarchy_vals):
                    # We can add links to broaden this current filter to a
                    # higher level in the hierarchy.
                    act_filter = self.add_links_to_act_filter(
                        param_key,
                        match_old_value=param_val,
                        new_value=act_full_path,
                        act_filter=act_filter,
                        request_dict=request_dict,
                    )
                filters.append(act_filter)
    return filters
def add_filters_json(self, request_dict):
    """Describe the active search filters as a list of JSON-LD dicts.

    :param request_dict: mapping of request parameter keys to values.
        ``'path'`` maps to a single (possibly ``||``-delimited) value;
        every other key maps to a list of values.
    :return: list of ``LastUpdatedOrderedDict`` objects, one per
        recognized filter, each with a human-readable label and links
        to remove the filter from the current search.
    """

    def _year_label(raw_year):
        # Render a year value as 'N BCE' / 'N CE'. int(float(...))
        # tolerates inputs such as '1950.0'. Narrowed from a bare
        # `except:` — only conversion failures are expected here.
        try:
            val_date = int(float(raw_year))
        except (ValueError, TypeError):
            return '[Invalid year]'
        if val_date < 0:
            return str(val_date * -1) + ' BCE'
        return str(val_date) + ' CE'

    fl = FilterLinks()
    fl.base_search_link = self.base_search_link
    filters = []
    i = 0
    for param_key, param_vals in request_dict.items():
        if param_key == 'path':
            # Spatial context path: a single value, not a list of values.
            if param_vals:
                i += 1
                f_entity = self.m_cache.get_entity(param_vals)
                label = http.urlunquote_plus(param_vals)
                act_filter = LastUpdatedOrderedDict()
                act_filter['id'] = '#filter-' + str(i)
                act_filter['oc-api:filter'] = 'Context'
                act_filter['label'] = label.replace('||', ' OR ')
                if f_entity:
                    act_filter['rdfs:isDefinedBy'] = f_entity.uri
                # generate a request dict without the context filter
                rem_request = fl.make_request_sub(
                    request_dict, param_key, param_vals)
                act_filter['oc-api:remove'] = fl.make_request_url(rem_request)
                act_filter['oc-api:remove-json'] = fl.make_request_url(
                    rem_request, '.json')
                filters.append(act_filter)
        else:
            for param_val in param_vals:
                i += 1
                remove_geodeep = False
                act_filter = LastUpdatedOrderedDict()
                act_filter['id'] = '#filter-' + str(i)
                # Hierarchic parameter values are delimited strings.
                if self.hierarchy_delim in param_val:
                    all_vals = param_val.split(self.hierarchy_delim)
                else:
                    all_vals = [param_val]
                if param_key == 'proj':
                    # projects, only care about the last item in the
                    # parameter value
                    act_filter['oc-api:filter'] = 'Project'
                    label_dict = self.make_filter_label_dict(all_vals[-1])
                    act_filter['label'] = label_dict['label']
                    if len(label_dict['entities']) == 1:
                        act_filter['rdfs:isDefinedBy'] = \
                            label_dict['entities'][0].uri
                elif param_key == 'prop':
                    # prop, the first item is the filter-label
                    # the last is the filter
                    act_filter['label'] = False
                    if len(all_vals) < 2:
                        act_filter['oc-api:filter'] = 'Description'
                        act_filter['oc-api:filter-slug'] = all_vals[0]
                    else:
                        filt_dict = self.make_filter_label_dict(all_vals[0])
                        act_filter['oc-api:filter'] = filt_dict['label']
                        if 'slug' in filt_dict:
                            act_filter['oc-api:filter-slug'] = filt_dict['slug']
                        if filt_dict['data-type'] == 'string':
                            act_filter['label'] = \
                                'Search Term: \'' + all_vals[-1] + '\''
                    if act_filter['label'] is False:
                        # No string search term; look up the label of the
                        # last (deepest) value in the hierarchy.
                        label_dict = self.make_filter_label_dict(all_vals[-1])
                        act_filter['label'] = label_dict['label']
                elif param_key == 'type':
                    act_filter['oc-api:filter'] = 'Open Context Type'
                    if all_vals[0] in QueryMaker.TYPE_MAPPINGS:
                        type_uri = QueryMaker.TYPE_MAPPINGS[all_vals[0]]
                        label_dict = self.make_filter_label_dict(type_uri)
                        act_filter['label'] = label_dict['label']
                    else:
                        act_filter['label'] = all_vals[0]
                elif param_key == 'q':
                    act_filter['oc-api:filter'] = self.TEXT_SEARCH_TITLE
                    act_filter['label'] = \
                        'Search Term: \'' + all_vals[0] + '\''
                elif param_key == 'id':
                    act_filter['oc-api:filter'] = 'Identifier Lookup'
                    act_filter['label'] = \
                        'Identifier: \'' + all_vals[0] + '\''
                elif param_key == 'form-chronotile':
                    act_filter['oc-api:filter'] = \
                        'Time of formation, use, or life'
                    chrono = ChronoTile()
                    dates = chrono.decode_path_dates(all_vals[0])
                    if isinstance(dates, dict):
                        act_filter['label'] = \
                            'Time range: ' + str(dates['earliest_bce'])
                        act_filter['label'] += \
                            ' to ' + str(dates['latest_bce'])
                elif param_key == 'form-start':
                    act_filter['oc-api:filter'] = \
                        'Earliest formation, use, or life date'
                    act_filter['label'] = _year_label(all_vals[0])
                elif param_key == 'form-stop':
                    act_filter['oc-api:filter'] = \
                        'Latest formation, use, or life date'
                    act_filter['label'] = _year_label(all_vals[0])
                elif param_key == 'disc-geotile':
                    act_filter['oc-api:filter'] = \
                        'Location of discovery or observation'
                    act_filter['label'] = \
                        self.make_geotile_filter_label(all_vals[0])
                    remove_geodeep = True
                elif param_key == 'disc-bbox':
                    act_filter['oc-api:filter'] = \
                        'Location of discovery or observation'
                    act_filter['label'] = \
                        self.make_bbox_filter_label(all_vals[0])
                    remove_geodeep = True
                elif param_key == 'images':
                    act_filter['oc-api:filter'] = 'Has related media'
                    act_filter['label'] = 'Linked to images'
                elif param_key == 'other-media':
                    act_filter['oc-api:filter'] = 'Has related media'
                    act_filter['label'] = \
                        'Linked to media (other than images)'
                elif param_key == 'documents':
                    act_filter['oc-api:filter'] = 'Has related media'
                    act_filter['label'] = 'Linked to documents'
                elif param_key == 'dc-subject':
                    act_filter['oc-api:filter'] = 'Has subject metadata'
                    label_dict = self.make_filter_label_dict(all_vals[-1])
                    if len(label_dict['label']) > 0:
                        act_filter['label'] = label_dict['label']
                    if 'tdar' == all_vals[-1] or 'tdar*' == all_vals[-1]:
                        act_filter['label'] = \
                            'tDAR defined metadata record(s)'
                    if len(label_dict['entities']) == 1:
                        act_filter['rdfs:isDefinedBy'] = \
                            label_dict['entities'][0].uri
                        if label_dict['entities'][0].vocabulary is not False:
                            act_filter['label'] += \
                                ' in ' + label_dict['entities'][0].vocabulary
                elif param_key == 'dc-spatial':
                    act_filter['oc-api:filter'] = 'Has spatial metadata'
                    label_dict = self.make_filter_label_dict(all_vals[-1])
                    if len(label_dict['label']) > 0:
                        act_filter['label'] = label_dict['label']
                    if len(label_dict['entities']) == 1:
                        act_filter['rdfs:isDefinedBy'] = \
                            label_dict['entities'][0].uri
                        if label_dict['entities'][0].vocabulary is not False:
                            act_filter['label'] += \
                                ' in ' + label_dict['entities'][0].vocabulary
                elif param_key == 'dc-coverage':
                    act_filter['oc-api:filter'] = \
                        'Has coverage / period metadata'
                    label_dict = self.make_filter_label_dict(all_vals[-1])
                    if len(label_dict['label']) > 0:
                        act_filter['label'] = label_dict['label']
                    if len(label_dict['entities']) == 1:
                        act_filter['rdfs:isDefinedBy'] = \
                            label_dict['entities'][0].uri
                        if label_dict['entities'][0].vocabulary is not False:
                            act_filter['label'] += \
                                ' in ' + label_dict['entities'][0].vocabulary
                elif param_key == 'dc-temporal':
                    act_filter['oc-api:filter'] = 'Has temporal coverage'
                    label_dict = self.make_filter_label_dict(all_vals[-1])
                    if len(label_dict['label']) > 0:
                        act_filter['label'] = label_dict['label']
                        if len(label_dict['entities']) == 1:
                            if label_dict['entities'][0].entity_type == \
                                    'vocabulary':
                                act_filter['label'] = \
                                    'Concepts defined by: ' + \
                                    label_dict['label']
                    elif 'periodo' in all_vals[-1]:
                        act_filter['label'] = 'PeriodO defined concepts'
                    if len(label_dict['entities']) == 1:
                        act_filter['rdfs:isDefinedBy'] = \
                            label_dict['entities'][0].uri
                        # Avoid repeating the vocabulary name in the label.
                        if label_dict['entities'][0].vocabulary is not False\
                           and label_dict['entities'][0].vocabulary != \
                                label_dict['label']:
                            act_filter['label'] += \
                                ' in ' + label_dict['entities'][0].vocabulary
                elif param_key == 'obj':
                    act_filter['oc-api:filter'] = \
                        'Links (in some manner) to object'
                    label_dict = self.make_filter_label_dict(all_vals[-1])
                    if len(label_dict['label']) > 0:
                        act_filter['label'] = label_dict['label']
                    if len(label_dict['entities']) == 1:
                        act_filter['rdfs:isDefinedBy'] = \
                            label_dict['entities'][0].uri
                        if label_dict['entities'][0].vocabulary is not False:
                            act_filter['label'] += \
                                ' in ' + label_dict['entities'][0].vocabulary
                elif param_key == 'dc-isReferencedBy':
                    act_filter['oc-api:filter'] = 'Is referenced by'
                    label_dict = self.make_filter_label_dict(all_vals[-1])
                    if len(label_dict['label']) > 0:
                        act_filter['label'] = label_dict['label']
                    if len(label_dict['entities']) == 1:
                        act_filter['rdfs:isDefinedBy'] = \
                            label_dict['entities'][0].uri
                        if label_dict['entities'][0].vocabulary is not False\
                           and label_dict['entities'][0].vocab_uri != \
                                label_dict['entities'][0].uri:
                            act_filter['label'] += \
                                ' in ' + label_dict['entities'][0].vocabulary
                elif param_key == 'linked' and \
                        all_vals[-1] == 'dinaa-cross-ref':
                    act_filter['oc-api:filter'] = 'Has cross references'
                    act_filter['label'] = \
                        'Links to, or with, DINAA curated site files'
                else:
                    # Unrecognized parameter: emit no filter for it.
                    act_filter = False
                if act_filter is not False:
                    rem_request = fl.make_request_sub(
                        request_dict, param_key, param_val)
                    # Dropping a geographic filter also drops the
                    # geographic-depth parameter that qualified it.
                    if 'geodeep' in rem_request and remove_geodeep:
                        rem_request.pop('geodeep', None)
                    act_filter['oc-api:remove'] = \
                        fl.make_request_url(rem_request)
                    act_filter['oc-api:remove-json'] = \
                        fl.make_request_url(rem_request, '.json')
                    filters.append(act_filter)
    return filters
def clean_service(self): """ Remove any HTML percent encoding in the service URL. """ service = self.cleaned_data.get('service') return urlunquote_plus(service)
def test_unquote_plus(self): self.assertEqual(urlunquote_plus('Paris+%26+Orl%C3%A9ans'), 'Paris & Orl\xe9ans') self.assertEqual(urlunquote_plus('Paris+&+Orl%C3%A9ans'), 'Paris & Orl\xe9ans')
def __init__(self, tags_query, **kwargs): self.uquery = urlunquote_plus(tags_query)
def decode_cookie_value(s): # cookie value is urlencoded, decode hr chars return urlunquote_plus(str(s))
class UserLoginTestCase(GenericViewTestCase):
    """
    Test that users can login via the supported authentication methods
    """
    # URL that requires authentication; login tests try to reach it.
    authenticated_url = reverse(viewname='common:home')
    # Unquote directly until furl 2.1.0 is released which will include
    # the tostr() argument query_dont_quote=True
    # TODO: Remove after release and update to furl 2.1.0
    authentication_url = urlunquote_plus(
        furl(
            path=reverse(settings.LOGIN_URL), args={
                'next': authenticated_url
            }
        ).tostr()
    )
    # Start each test logged out; a superuser fixture is created.
    auto_login_user = False
    create_test_case_superuser = True

    def setUp(self):
        """Reset cached namespaces so setting overrides take effect."""
        super(UserLoginTestCase, self).setUp()
        Namespace.invalidate_cache_all()

    def _request_authenticated_view(self):
        # GET a view that requires login; used to probe auth state.
        return self.get(path=self.authenticated_url)

    def _request_password_reset_get_view(self):
        # GET the password reset view for the test superuser's email.
        return self.get(
            viewname='authentication:password_reset_view', data={
                'email': self._test_case_superuser.email,
            }
        )

    def _request_password_reset_post_view(self):
        # POST the password reset request for the test superuser's email.
        return self.post(
            viewname='authentication:password_reset_view', data={
                'email': self._test_case_superuser.email,
            }
        )

    @override_settings(AUTHENTICATION_LOGIN_METHOD='username')
    def test_non_authenticated_request(self):
        """Anonymous requests are redirected to the login URL."""
        response = self._request_authenticated_view()
        self.assertRedirects(
            response=response, expected_url=self.authentication_url
        )

    @override_settings(AUTHENTICATION_LOGIN_METHOD='username')
    def test_username_login(self):
        """Login with username and password succeeds."""
        logged_in = self.login(
            username=self._test_case_superuser.username,
            password=self._test_case_superuser.cleartext_password
        )
        self.assertTrue(logged_in)
        response = self._request_authenticated_view()
        # We didn't get redirected to the login URL
        self.assertEqual(response.status_code, 200)

    @override_settings(AUTHENTICATION_LOGIN_METHOD='email')
    def test_email_login(self):
        """Login with email works only under the email auth backend."""
        with self.settings(
            AUTHENTICATION_BACKENDS=(TEST_EMAIL_AUTHENTICATION_BACKEND,)
        ):
            # Username login must fail under the email-only backend.
            logged_in = self.login(
                username=self._test_case_superuser.username,
                password=self._test_case_superuser.cleartext_password
            )
            self.assertFalse(logged_in)
            logged_in = self.login(
                email=self._test_case_superuser.email,
                password=self._test_case_superuser.cleartext_password
            )
            self.assertTrue(logged_in)
            response = self._request_authenticated_view()
            # We didn't get redirected to the login URL
            self.assertEqual(response.status_code, 200)

    @override_settings(AUTHENTICATION_LOGIN_METHOD='username')
    def test_username_login_via_views(self):
        """Login through the login view using a username."""
        response = self._request_authenticated_view()
        self.assertRedirects(
            response=response, expected_url=self.authentication_url
        )
        response = self.post(
            viewname=settings.LOGIN_URL, data={
                'username': self._test_case_superuser.username,
                'password': self._test_case_superuser.cleartext_password
            }
        )
        response = self._request_authenticated_view()
        # We didn't get redirected to the login URL
        self.assertEqual(response.status_code, 200)

    @override_settings(AUTHENTICATION_LOGIN_METHOD='email')
    def test_email_login_via_views(self):
        """Login through the login view using an email address."""
        with self.settings(
            AUTHENTICATION_BACKENDS=(TEST_EMAIL_AUTHENTICATION_BACKEND,)
        ):
            response = self._request_authenticated_view()
            self.assertRedirects(
                response=response, expected_url=self.authentication_url
            )
            response = self.post(
                viewname=settings.LOGIN_URL, data={
                    'email': self._test_case_superuser.email,
                    'password': self._test_case_superuser.cleartext_password
                }, follow=True
            )
            self.assertEqual(response.status_code, 200)
            response = self._request_authenticated_view()
            # We didn't get redirected to the login URL
            self.assertEqual(response.status_code, 200)

    @override_settings(AUTHENTICATION_LOGIN_METHOD='username')
    def test_username_remember_me(self):
        """'remember me' extends the session to the configured maximum."""
        response = self.post(
            viewname=settings.LOGIN_URL, data={
                'username': self._test_case_superuser.username,
                'password': self._test_case_superuser.cleartext_password,
                'remember_me': True
            }, follow=True
        )
        response = self._request_authenticated_view()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            self.client.session.get_expiry_age(),
            setting_maximum_session_length.value
        )
        self.assertFalse(self.client.session.get_expire_at_browser_close())

    @override_settings(AUTHENTICATION_LOGIN_METHOD='username')
    def test_username_dont_remember_me(self):
        """Without 'remember me' the session ends when the browser closes."""
        response = self.post(
            viewname=settings.LOGIN_URL, data={
                'username': self._test_case_superuser.username,
                'password': self._test_case_superuser.cleartext_password,
                'remember_me': False
            }, follow=True
        )
        response = self._request_authenticated_view()
        self.assertEqual(response.status_code, 200)
        self.assertTrue(self.client.session.get_expire_at_browser_close())

    @override_settings(AUTHENTICATION_LOGIN_METHOD='email')
    def test_email_remember_me(self):
        """'remember me' works with email-based login too."""
        with self.settings(
            AUTHENTICATION_BACKENDS=(TEST_EMAIL_AUTHENTICATION_BACKEND,)
        ):
            response = self.post(
                viewname=settings.LOGIN_URL, data={
                    'email': self._test_case_superuser.email,
                    'password': self._test_case_superuser.cleartext_password,
                    'remember_me': True
                }, follow=True
            )
            response = self._request_authenticated_view()
            self.assertEqual(response.status_code, 200)
            self.assertEqual(
                self.client.session.get_expiry_age(),
                setting_maximum_session_length.value
            )
            self.assertFalse(
                self.client.session.get_expire_at_browser_close()
            )

    @override_settings(AUTHENTICATION_LOGIN_METHOD='email')
    def test_email_dont_remember_me(self):
        """Email login without 'remember me' expires at browser close."""
        with self.settings(
            AUTHENTICATION_BACKENDS=(TEST_EMAIL_AUTHENTICATION_BACKEND,)
        ):
            response = self.post(
                viewname=settings.LOGIN_URL, data={
                    'email': self._test_case_superuser.email,
                    'password': self._test_case_superuser.cleartext_password,
                    'remember_me': False
                }
            )
            response = self._request_authenticated_view()
            self.assertEqual(response.status_code, 200)
            self.assertTrue(self.client.session.get_expire_at_browser_close())

    @override_settings(AUTHENTICATION_LOGIN_METHOD='username')
    def test_password_reset(self):
        """Full password reset flow: request email, confirm, set password."""
        self.logout()
        response = self._request_password_reset_post_view()
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        # The reset link's uidb64 and token are path components of the
        # URL embedded in the email body.
        email_parts = mail.outbox[0].body.replace('\n', '').split('/')
        uidb64 = email_parts[-3]
        token = email_parts[-2]
        # Add the token to the session
        session = self.client.session
        session[INTERNAL_RESET_SESSION_TOKEN] = token
        session.save()
        new_password = '******'
        response = self.post(
            viewname='authentication:password_reset_confirm_view', kwargs={
                'uidb64': uidb64, 'token': INTERNAL_RESET_URL_TOKEN
            }, data={
                'new_password1': new_password,
                'new_password2': new_password
            }
        )
        # The one-time session token must be consumed by the view.
        self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
        self._test_case_superuser.refresh_from_db()
        self.assertTrue(
            self._test_case_superuser.check_password(new_password)
        )

    def test_username_login_redirect(self):
        """The ?next= parameter is honored after a successful login."""
        TEST_REDIRECT_URL = reverse(viewname='common:about_view')
        response = self.post(
            path='{}?next={}'.format(
                reverse(settings.LOGIN_URL), TEST_REDIRECT_URL
            ), data={
                'username': self._test_case_superuser.username,
                'password': self._test_case_superuser.cleartext_password,
                'remember_me': False
            }, follow=True
        )
        self.assertEqual(
            response.redirect_chain, [(TEST_REDIRECT_URL, 302)]
        )

    @override_settings(AUTHENTICATION_DISABLE_PASSWORD_RESET=False)
    def test_password_reset_disable_false_get_view(self):
        """Reset form is reachable when password reset is enabled."""
        self.logout()
        response = self._request_password_reset_get_view()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(mail.outbox), 0)

    @override_settings(AUTHENTICATION_DISABLE_PASSWORD_RESET=True)
    def test_password_reset_disable_true_get_view(self):
        """GET redirects home and sends no mail when reset is disabled."""
        self.logout()
        response = self._request_password_reset_get_view()
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response.url, reverse(viewname=setting_home_view.value)
        )
        self.assertEqual(len(mail.outbox), 0)

    @override_settings(AUTHENTICATION_DISABLE_PASSWORD_RESET=False)
    def test_password_reset_disable_false_post_view(self):
        """POST sends the reset email when reset is enabled."""
        self.logout()
        response = self._request_password_reset_post_view()
        self.assertEqual(response.status_code, 302)
        self.assertNotEqual(response.url, setting_home_view.value)
        self.assertEqual(len(mail.outbox), 1)

    @override_settings(AUTHENTICATION_DISABLE_PASSWORD_RESET=True)
    def test_password_reset_disable_true_post_view(self):
        """POST redirects home and sends no mail when reset is disabled."""
        self.logout()
        response = self._request_password_reset_post_view()
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response.url, reverse(viewname=setting_home_view.value)
        )
        self.assertEqual(len(mail.outbox), 0)