Example #1
def second_pass_render(request, content):
    """
    Split on the secret delimiter and generate the token list by passing
    through text outside of phased blocks as single text tokens and tokenizing
    text inside the phased blocks. This ensures that nothing outside of the
    phased blocks is tokenized, thus eliminating the possibility of a template
    code injection vulnerability.
    """
    result = tokens = []
    for index, bit in enumerate(content.split(settings.SECRET_DELIMITER)):
        if index % 2:
            tokens = Lexer(bit, None).tokenize()
        else:
            tokens.append(Token(TOKEN_TEXT, bit))
        # restore the previous context including the CSRF token
        context = RequestContext(request,
            restore_csrf_token(request, unpickle_context(bit)))
        # restore the loaded components (tags and filters)
        parser = Parser(tokens)
        unpickled_components = unpickle_components(bit) or []
        for component in unpickled_components:
            lib = import_library(component)
            parser.add_library(lib)
        # render the piece with the restored context
        rendered = parser.parse().render(context)
        if settings.SECRET_DELIMITER in rendered:
            rendered = second_pass_render(request, rendered)
        result.append(rendered)

    return "".join(result)
Example #2
def second_pass_render(request, content):
    """
    Split on the secret delimiter and generate the token list by passing
    through text outside of phased blocks as single text tokens and tokenizing
    text inside the phased blocks. This ensures that nothing outside of the
    phased blocks is tokenized, thus eliminating the possibility of a template
    code injection vulnerability.
    """
    result = tokens = []
    for index, bit in enumerate(content.split(
            settings.PHASED_SECRET_DELIMITER)):
        if index % 2:
            tokens = Lexer(bit, None).tokenize()
        else:
            tokens.append(Token(TOKEN_TEXT, bit))

        context = RequestContext(
            request, restore_csrf_token(request, unpickle_context(bit)))
        rendered = Parser(tokens).parse().render(context)

        if settings.PHASED_SECRET_DELIMITER in rendered:
            rendered = second_pass_render(request, rendered)
        result.append(rendered)

    return "".join(result)
Example #3
def second_pass_render(request, content):
    """
    Split on the secret delimiter and generate the token list by passing
    through text outside of phased blocks as single text tokens and tokenizing
    text inside the phased blocks. This ensures that nothing outside of the
    phased blocks is tokenized, thus eliminating the possibility of a template
    code injection vulnerability.
    """
    result = tokens = []
    for index, bit in enumerate(content.split(settings.PHASED_SECRET_DELIMITER)):
        if index % 2:
            tokens = Lexer(bit, None).tokenize()
        else:
            tokens.append(Token(TOKEN_TEXT, bit))

        context = RequestContext(request,
            restore_csrf_token(request, unpickle_context(bit)))
        rendered = Parser(tokens).parse().render(context)

        if settings.PHASED_SECRET_DELIMITER in rendered:
            rendered = second_pass_render(request, rendered)
        result.append(rendered)

    return "".join(result)