def second_pass_render(request, content):
    """
    Render the phased blocks embedded in ``content``.

    Split on the secret delimiter and generate the token list by passing
    through text outside of phased blocks as single text tokens and
    tokenizing text inside the phased blocks. This ensures that nothing
    outside of the phased blocks is tokenized, thus eliminating the
    possibility of a template code injection vulnerability.

    :param request: the current ``HttpRequest``, used to rebuild the
        rendering context (including the restored CSRF token).
    :param content: the first-pass rendered template output containing
        delimiter-wrapped phased sections.
    :returns: the fully rendered string with all phased blocks expanded.
    """
    # BUG FIX: the original ``result = tokens = []`` aliased both names to
    # one list, so appending a Token for an even chunk also pushed a Token
    # object into ``result`` (breaking ``"".join``), and after an odd chunk
    # the even branch mutated the Lexer's stale token list. Keep them
    # independent and build a fresh one-token list per plain-text chunk.
    result = []
    for index, bit in enumerate(
            content.split(settings.PHASED_SECRET_DELIMITER)):
        if index % 2:
            # Odd chunks lie between delimiters: phased template code that
            # must be tokenized for real rendering.
            tokens = Lexer(bit, None).tokenize()
        else:
            # Even chunks are ordinary output; wrap each in a single text
            # token so it can never be parsed as template code.
            tokens = [Token(TOKEN_TEXT, bit)]
        context = RequestContext(
            request, restore_csrf_token(request, unpickle_context(bit)))
        rendered = Parser(tokens).parse().render(context)
        # A phased block may itself emit phased blocks; recurse until no
        # delimiter remains in the rendered output.
        if settings.PHASED_SECRET_DELIMITER in rendered:
            rendered = second_pass_render(request, rendered)
        result.append(rendered)
    return "".join(result)
def second_pass_render(request, content):
    """
    Render the phased blocks embedded in ``content``.

    Split on the secret delimiter and generate the token list by passing
    through text outside of phased blocks as single text tokens and
    tokenizing text inside the phased blocks. This ensures that nothing
    outside of the phased blocks is tokenized, thus eliminating the
    possibility of a template code injection vulnerability.

    :param request: the current ``HttpRequest``, used to rebuild the
        rendering context (including the restored CSRF token).
    :param content: the first-pass rendered template output containing
        delimiter-wrapped phased sections.
    :returns: the fully rendered string with all phased blocks expanded.
    """
    # BUG FIX: the original ``result = tokens = []`` aliased both names to
    # one list, so appending a Token for an even chunk also pushed a Token
    # object into ``result`` (breaking ``"".join``), and after an odd chunk
    # the even branch mutated the Lexer's stale token list. Keep them
    # independent and build a fresh one-token list per plain-text chunk.
    result = []
    for index, bit in enumerate(
            content.split(settings.PHASED_SECRET_DELIMITER)):
        if index % 2:
            # Odd chunks lie between delimiters: phased template code that
            # must be tokenized for real rendering.
            tokens = Lexer(bit, None).tokenize()
        else:
            # Even chunks are ordinary output; wrap each in a single text
            # token so it can never be parsed as template code.
            tokens = [Token(TOKEN_TEXT, bit)]
        context = RequestContext(
            request, restore_csrf_token(request, unpickle_context(bit)))
        rendered = Parser(tokens).parse().render(context)
        # A phased block may itself emit phased blocks; recurse until no
        # delimiter remains in the rendered output.
        if settings.PHASED_SECRET_DELIMITER in rendered:
            rendered = second_pass_render(request, rendered)
        result.append(rendered)
    return "".join(result)