Exemplo n.º 1
0
    def parse(self, stream: TokenStream) -> EchoNode:
        """Parse an `echo` tag from *stream* and return an `EchoNode`."""
        expect(stream, TOKEN_TAG, value=TAG_ECHO)
        echo_tok = stream.current
        stream.next_token()

        # The tag's expression is re-tokenized as a filtered `if` expression.
        expect(stream, TOKEN_EXPRESSION)
        tokens = TokenStream(tokenize_if_expression(stream.current.value))
        return EchoNode(
            echo_tok,
            expression=parser.parse_filtered_if_expression(tokens),
        )
Exemplo n.º 2
0
    def parse_prefix_expression(self,
                                stream: TokenStream) -> NotPrefixExpression:
        """Parse a logical `not` prefix expression from *stream*."""
        operator_tok = stream.current
        stream.next_token()  # Step past the prefix operator.

        # Bind the right-hand side at LOGICALRIGHT precedence.
        right = self.parse_expression(stream,
                                      precedence=Precedence.LOGICALRIGHT)
        return NotPrefixExpression(operator_tok.value, right=right)
Exemplo n.º 3
0
def parse_call_argument(
        stream: TokenStream) -> Tuple[Optional[str], Expression]:
    """Return the next argument from the given token stream."""
    name: Optional[str] = None

    if stream.peek.type == TOKEN_COLON:
        # Keyword argument: read the name, then step over it and the colon.
        name = str(parse_unchained_identifier(stream))
        stream.next_token()
        stream.next_token()  # Eat colon

    value = parse_expression(stream)
    stream.next_token()
    return name, value
Exemplo n.º 4
0
    def parse(self, stream: TokenStream) -> StatementNode:
        """Parse an output statement and return a `StatementNode`."""
        expect(stream, TOKEN_STATEMENT)
        stmt_tok = stream.current

        # The statement's text is re-tokenized as a filtered `if` expression.
        tokens = TokenStream(tokenize_if_expression(stmt_tok.value))
        return StatementNode(
            stmt_tok, parser.parse_filtered_if_expression(tokens))
Exemplo n.º 5
0
 def test_if_expression_string(self):
     """Test the string representation of an inline `if` expression."""
     stream = TokenStream(
         tokenize_if_expression(
             "'hello' | downcase if true else 'goodbye' | upcase"))
     result = parser.parse_filtered_if_expression(stream)
     self.assertEqual(
         str(result), "'hello' | downcase if (True) else 'goodbye' | upcase"
     )
Exemplo n.º 6
0
    def parse(self, stream: TokenStream) -> AssignNode:
        """Parse an `assign` tag and return an `AssignNode`.

        Raises `LiquidSyntaxError` if the expression does not match the
        `<name> = <expression>` form described by RE_ASSIGNMENT.
        """
        expect(stream, TOKEN_TAG, value=TAG_ASSIGN)
        assign_tok = stream.current
        stream.next_token()

        expect(stream, TOKEN_EXPRESSION)

        match = RE_ASSIGNMENT.match(stream.current.value)
        if not match:
            raise LiquidSyntaxError(
                f'invalid assignment expression "{stream.current.value}"',
                linenum=stream.current.linenum,
            )
        name, expression = match.groups()

        # The right-hand side is itself a filtered `if` expression.
        tokens = TokenStream(tokenize_if_expression(expression))
        expr = parser.parse_filtered_if_expression(tokens)
        return AssignNode(assign_tok, AssignmentExpression(name, expr))
Exemplo n.º 7
0
def parse_range_literal(stream: TokenStream) -> expression.RangeLiteral:
    """Read a range literal from the token stream."""
    # A range expression looks like (<int or id>..<int or id>).
    expect(stream, TOKEN_RANGELPAREN)
    stream.next_token()  # Eat left parenthesis.
    start = parse_range_argument(stream)

    expect_peek(stream, TOKEN_RANGE)
    stream.next_token()
    stream.next_token()  # Eat TOKEN_RANGE

    stop = parse_range_argument(stream)
    expect_peek(stream, TOKEN_RPAREN)

    # Both ends must be identifiers or numeric literals.
    valid_types = (
        expression.Identifier,
        expression.IntegerLiteral,
        expression.FloatLiteral,
    )
    assert isinstance(start, valid_types)
    assert isinstance(stop, valid_types)

    result = expression.RangeLiteral(start, stop)
    stream.next_token()
    return result
Exemplo n.º 8
0
    def parse_filtered_if_expression(
            self, stream: TokenStream) -> FilteredIfExpression:
        """Parse a filtered expression with an optional inline ``if`` and ``else``."""
        # Primary expression, e.g. the leading `'hello'` in
        # `'hello' | downcase if true else 'goodbye' | upcase`.
        expr = self.parse_expression(stream)
        stream.next_token()

        # Filters applied to the primary expression, before any `if`.
        if stream.current.type == TOKEN_PIPE:
            filters = self.parse_filters(stream)
        else:
            filters = []

        if stream.current.type == TOKEN_IF:
            stream.next_token()  # Eat `if` token
            condition: Optional[Expression] = self.parse_boolean_expression(
                stream)
            stream.next_token()

            if stream.current.type == TOKEN_ELSE:
                stream.next_token()  # Eat `else` token
                alternative: Optional[Expression] = self.parse_expression(
                    stream)
                stream.next_token()
            else:
                # Inline `if` without an `else` branch.
                alternative = None
        else:
            # No inline `if` at all. NOTE(review): this `assert` is a state
            # check that is stripped under `python -O`; confirm whether a
            # syntax error should be raised here instead.
            assert stream.current.type in (TOKEN_EOF, TOKEN_PIPE)
            condition = None
            alternative = None

        # Any remaining filters are collected as "tail" filters.
        tail_filters = self.parse_filters(stream)

        return FilteredIfExpression(
            expression=expr,
            filters=filters,
            condition=condition,
            alternative=alternative,
            tail_filters=tail_filters,
        )
Exemplo n.º 9
0
    def parse(self, stream: TokenStream) -> Node:
        """Parse a `macro` tag, its argument list and block."""
        expect(stream, TOKEN_TAG, value=TAG_MACRO)
        macro_tok = stream.current
        stream.next_token()

        expect(stream, TOKEN_EXPRESSION)
        expr_stream = TokenStream(
            tokenize_macro_expression(stream.current.value))

        # The macro's name must be a string literal.
        expect(expr_stream, TOKEN_STRING)
        name = parse_string_literal(expr_stream).value
        expr_stream.next_token()

        # Args can be positional (no default), or keyword (with default).
        args = []

        # The argument list might not start with a comma.
        if expr_stream.current.type == TOKEN_IDENTIFIER:
            args.append(parse_macro_argument(expr_stream))

        while expr_stream.current.type != TOKEN_EOF:
            if expr_stream.current.type != TOKEN_COMMA:
                typ = expr_stream.current.type
                raise LiquidSyntaxError(
                    f"expected a comma separated list of arguments, found {typ}",
                    linenum=macro_tok.linenum,
                )
            expr_stream.next_token()  # Eat comma
            args.append(parse_macro_argument(expr_stream))

        stream.next_token()
        block = self.parser.parse_block(stream, (TAG_ENDMACRO, TOKEN_EOF))
        expect(stream, TOKEN_TAG, value=TAG_ENDMACRO)

        return MacroNode(tok=macro_tok, name=name, args=args, block=block)
Exemplo n.º 10
0
    def parse_argument(self, stream: TokenStream) -> WithKeywordArg:
        """Parse a keyword argument from a stream of tokens."""
        # A keyword argument is `<identifier>: <expression>`.
        key = str(parse_unchained_identifier(stream))
        stream.next_token()

        expect(stream, TOKEN_COLON)
        stream.next_token()  # Step over the colon.

        value = parse_expression(stream)
        stream.next_token()
        return WithKeywordArg(key, value)
Exemplo n.º 11
0
    def parse(self, stream: TokenStream) -> CallNode:
        """Parse a `call` tag and return a `CallNode`."""
        expect(stream, TOKEN_TAG, value=TAG_CALL)
        call_tok = stream.current

        stream.next_token()
        expect(stream, TOKEN_EXPRESSION)
        expr_stream = TokenStream(
            tokenize_macro_expression(stream.current.value))

        # The target macro's name must be a string literal.
        expect(expr_stream, TOKEN_STRING)
        name = parse_string_literal(expr_stream).value
        expr_stream.next_token()

        # Args can be positional (no default), or keyword (with default).
        args = []
        kwargs = []

        def read_argument() -> None:
            # Route the next argument to `args` or `kwargs` depending on
            # whether it was given a name.
            arg_name, expr = parse_call_argument(expr_stream)
            if arg_name is None:
                args.append(expr)
            else:
                kwargs.append(CallKeywordArg(arg_name, expr))

        # The argument list might not start with a comma.
        if expr_stream.current.type not in (TOKEN_COMMA, TOKEN_EOF):
            read_argument()

        while expr_stream.current.type != TOKEN_EOF:
            if expr_stream.current.type != TOKEN_COMMA:
                typ = expr_stream.current.type
                raise LiquidSyntaxError(
                    f"expected a comma separated list of arguments, found {typ}",
                    linenum=call_tok.linenum,
                )
            expr_stream.next_token()  # Eat comma
            read_argument()

        return CallNode(tok=call_tok, name=name, args=args, kwargs=kwargs)
Exemplo n.º 12
0
    def parse_grouped_expression(self, stream: TokenStream) -> Expression:
        """Parse a possibly grouped expression from a stream of tokens."""
        stream.next_token()  # Step over the opening parenthesis.
        result = self.parse_expression(stream)

        # Consume any run of closing parentheses.
        stream.next_token()
        while stream.current.type == TOKEN_RPAREN:
            stream.next_token()

        if stream.current.type == TOKEN_EOF:
            return result
        # More tokens follow the group; treat it as the left side of an
        # infix expression.
        return self.parse_infix_expression(stream, left=result)
Exemplo n.º 13
0
def parse_macro_argument(stream: TokenStream) -> MacroArg:
    """Return the next argument from the given token stream."""
    name = str(parse_unchained_identifier(stream))
    stream.next_token()

    # A trailing colon means the argument carries a default expression;
    # otherwise it is positional and defaults to NIL.
    default = NIL
    if stream.current.type == TOKEN_COLON:
        stream.next_token()  # Eat colon
        default = parse_expression(stream)
        stream.next_token()

    return MacroArg(name, default)
Exemplo n.º 14
0
    def parse(self, stream: TokenStream) -> Node:
        """Parse a `with` tag, its keyword arguments and block."""
        expect(stream, TOKEN_TAG, value=TAG_WITH)
        with_tok = stream.current

        stream.next_token()
        expect(stream, TOKEN_EXPRESSION)
        expr_stream = TokenStream(
            tokenize_with_expression(stream.current.value))

        # Keyed by name, so a later duplicate keyword replaces an earlier one.
        args = {}

        while expr_stream.current.type != TOKEN_EOF:
            key, expr = self.parse_argument(expr_stream)
            args[key] = expr
            if expr_stream.current.type == TOKEN_COMMA:
                expr_stream.next_token()  # Eat comma

        stream.next_token()
        block = self.parser.parse_block(stream, (TAG_ENDWITH, TOKEN_EOF))
        expect(stream, TOKEN_TAG, value=TAG_ENDWITH)

        return WithNode(tok=with_tok, args=args, block=block)
Exemplo n.º 15
0
 def parse_expression(self, stream: TokenStream) -> Expression:
     """Parse a boolean expression from the current expression token."""
     expect(stream, TOKEN_EXPRESSION)
     tokens = TokenStream(
         tokenize_boolean_not_expression(stream.current.value))
     return self.expression_parser.parse_boolean_expression(tokens)