def second_pass_render(request, content):
    """
    Split on the secret delimiter and generate the token list by passing
    through text outside of phased blocks as single text tokens and
    tokenizing text inside the phased blocks. This ensures that nothing
    outside of the phased blocks is tokenized, thus eliminating the
    possibility of a template code injection vulnerability.
    """
    result = []
    for index, bit in enumerate(content.split(settings.SECRET_DELIMITER)):
        if index % 2:
            # Odd chunks sit between delimiters: they are phased blocks and
            # get tokenized for real template rendering.
            tokens = Lexer(bit, None).tokenize()
        else:
            # Even chunks are the surrounding (untrusted) text: wrap them in
            # a single TEXT token so nothing in them is parsed as template
            # syntax. NOTE: rebinding here is essential — the previous
            # version appended to a list aliased with `result`, leaking
            # Token objects into the joined output and reusing stale tokens.
            tokens = [Token(TOKEN_TEXT, bit)]

        # restore the previous context including the CSRF token
        context = RequestContext(
            request, restore_csrf_token(request, unpickle_context(bit)))

        # restore the loaded components (tags and filters)
        parser = Parser(tokens)
        for component in unpickle_components(bit) or []:
            parser.add_library(import_library(component))

        # render the piece with the restored context
        rendered = parser.parse().render(context)

        # nested phased blocks may still be embedded; render them recursively
        if settings.SECRET_DELIMITER in rendered:
            rendered = second_pass_render(request, rendered)

        result.append(rendered)
    return "".join(result)
def _render_html(self, template_string, context=None):
    """Lex, parse and render *template_string* with the test tag library.

    Args:
        template_string: raw template source to render.
        context: optional dict of template variables; a fresh empty dict
            is used when omitted. (Replaces the original mutable default
            ``context={}`` — flagged by its author with ``# :(`` — which
            shares one dict across all calls.)

    Returns:
        The rendered template output.
    """
    if context is None:
        context = {}
    # The tag-library loader moved between Django versions.
    if DJANGO_VERSION > (1, 2):
        from django.template import import_library
        tag_lib = import_library('beproud.django.commons.tests.test_tags')
    else:
        from django.template import get_library
        tag_lib = get_library('beproud.django.commons.tests.test_tags')
    lexer = Lexer(template_string, self._make_origin())
    parser = Parser(lexer.tokenize())
    parser.add_library(tag_lib)
    nodelist = parser.parse()
    return nodelist.render(Context(context))
def _render_html(self, template_string, context=None):
    """Lex, parse and render *template_string* with the testapp tag library.

    Args:
        template_string: raw template source to render.
        context: optional dict of template variables; a fresh empty dict
            is used when omitted. (Replaces the original mutable default
            ``context={}`` — flagged by its author with ``# :(`` — which
            shares one dict across all calls.)

    Returns:
        The rendered template output.
    """
    if context is None:
        context = {}
    # import_library moved from django.template.base to
    # django.template.library in Django 1.9+.
    if DJANGO_VERSION > (1, 9):
        from django.template.library import import_library
        tag_lib = import_library('testapp.tags')
    else:  # DJANGO_VERSION > (1,7):
        from django.template.base import import_library
        tag_lib = import_library('testapp.tags')
    # Lexer dropped its origin argument in Django 1.9+.
    if DJANGO_VERSION > (1, 9):
        lexer = Lexer(template_string)
    else:
        lexer = Lexer(template_string, self._make_origin())
    parser = Parser(lexer.tokenize())
    parser.add_library(tag_lib)
    nodelist = parser.parse()
    return nodelist.render(Context(context))
def input_node_generator(prompt='>>> ', leading_tokens=None, input_source=raw_input, parser=None): if parser is None: parser = Parser([]) input = False while not input: input = input_source(prompt) input = input + '\n' tokens = Lexer(input, None).tokenize() if leading_tokens: tokens = leading_tokens + tokens initial_tokens = deepcopy(tokens) try: parser.tokens = tokens for node in parser.parse(): yield node except TemplateSyntaxError, e: if e.args[0].startswith('Unclosed tags'): for node in input_node_generator('... ', initial_tokens, input_source, parser): yield node else: raise