Example #1
def parse_range_literal(stream: TokenStream) -> expression.RangeLiteral:
    """Read a range literal from the token stream.

    A range expression looks like ``(<start>..<stop>)``, where start and
    stop are each an identifier, an integer literal or a float literal.

    Raises:
        LiquidSyntaxError: If either range argument is not an identifier
            or a numeric literal.
    """
    # Start of a range expression (<int or id>..<int or id>)
    expect(stream, TOKEN_RANGELPAREN)
    stream.next_token()  # Eat left parenthesis.
    start = parse_range_argument(stream)

    expect_peek(stream, TOKEN_RANGE)
    stream.next_token()
    stream.next_token()  # Eat TOKEN_RANGE

    stop = parse_range_argument(stream)
    expect_peek(stream, TOKEN_RPAREN)

    # A bare ``assert`` would be stripped when Python runs with ``-O``,
    # silently skipping validation. Raise a syntax error instead. The
    # per-variable ``isinstance`` guards also narrow types for mypy.
    if not isinstance(
        start,
        (expression.Identifier, expression.IntegerLiteral,
         expression.FloatLiteral),
    ):
        raise LiquidSyntaxError(
            f"unexpected range start {start!r}",
            linenum=stream.current.linenum,
        )
    if not isinstance(
        stop,
        (expression.Identifier, expression.IntegerLiteral,
         expression.FloatLiteral),
    ):
        raise LiquidSyntaxError(
            f"unexpected range stop {stop!r}",
            linenum=stream.current.linenum,
        )

    expr = expression.RangeLiteral(start, stop)
    stream.next_token()
    return expr
Example #2
    def parse(self, stream: TokenStream) -> StatementNode:
        """Parse an output statement from *stream*."""
        statement_token = stream.current
        expect(stream, TOKEN_STATEMENT)

        # Re-tokenize the statement's value as a filtered "if" expression.
        tokens = tokenize_if_expression(statement_token.value)
        filtered = parser.parse_filtered_if_expression(TokenStream(tokens))
        return StatementNode(statement_token, filtered)
Example #3
    def parse(self, stream: TokenStream) -> EchoNode:
        """Parse an ``echo`` tag from *stream*."""
        expect(stream, TOKEN_TAG, value=TAG_ECHO)
        echo_token = stream.current
        stream.next_token()

        expect(stream, TOKEN_EXPRESSION)
        # Re-tokenize the tag's expression and hand it to the parser.
        tokens = TokenStream(tokenize_if_expression(stream.current.value))
        filtered = parser.parse_filtered_if_expression(tokens)
        return EchoNode(echo_token, expression=filtered)
Example #4
    def parse_argument(self, stream: TokenStream) -> WithKeywordArg:
        """Parse a single ``name: value`` keyword argument from *stream*."""
        name = str(parse_unchained_identifier(stream))
        stream.next_token()

        expect(stream, TOKEN_COLON)
        stream.next_token()  # Step over the colon.

        value = parse_expression(stream)
        stream.next_token()
        return WithKeywordArg(name, value)
Example #5
    def parse(self, stream: TokenStream) -> AssignNode:
        """Parse an ``assign`` tag from *stream*.

        An assignment expression looks like ``<name> = <filtered expression>``,
        as matched by ``RE_ASSIGNMENT``.

        Raises:
            LiquidSyntaxError: If the tag's expression does not match
                ``RE_ASSIGNMENT``.
        """
        expect(stream, TOKEN_TAG, value=TAG_ASSIGN)
        tok = stream.current
        stream.next_token()

        expect(stream, TOKEN_EXPRESSION)

        match = RE_ASSIGNMENT.match(stream.current.value)
        if not match:
            raise LiquidSyntaxError(
                f'invalid assignment expression "{stream.current.value}"',
                linenum=stream.current.linenum,
            )

        # NOTE: Don't name this local ``expression`` — that shadows the
        # ``expression`` module used elsewhere in this file.
        name, right_hand_side = match.groups()

        expr_iter = tokenize_if_expression(right_hand_side)
        expr = parser.parse_filtered_if_expression(TokenStream(expr_iter))
        return AssignNode(tok, AssignmentExpression(name, expr))
Example #6
    def parse(self, stream: TokenStream) -> Node:
        """Parse a ``macro`` tag and its block from *stream*."""
        expect(stream, TOKEN_TAG, value=TAG_MACRO)
        tok = stream.current
        stream.next_token()

        expect(stream, TOKEN_EXPRESSION)
        expr_stream = TokenStream(
            tokenize_macro_expression(stream.current.value))

        # The macro's name must be given as a string literal.
        expect(expr_stream, TOKEN_STRING)
        name = parse_string_literal(expr_stream).value
        expr_stream.next_token()

        # Arguments are positional (no default) or keyword (with default),
        # separated by commas. A leading argument may appear without a comma.
        args = []
        if expr_stream.current.type == TOKEN_IDENTIFIER:
            args.append(parse_macro_argument(expr_stream))

        while expr_stream.current.type != TOKEN_EOF:
            if expr_stream.current.type != TOKEN_COMMA:
                typ = expr_stream.current.type
                raise LiquidSyntaxError(
                    f"expected a comma separated list of arguments, found {typ}",
                    linenum=tok.linenum,
                )
            expr_stream.next_token()  # Eat comma
            args.append(parse_macro_argument(expr_stream))

        # Consume the macro's block up to the closing tag.
        stream.next_token()
        block = self.parser.parse_block(stream, (TAG_ENDMACRO, TOKEN_EOF))
        expect(stream, TOKEN_TAG, value=TAG_ENDMACRO)

        return MacroNode(tok=tok, name=name, args=args, block=block)
Example #7
    def parse(self, stream: TokenStream) -> CallNode:
        """Parse a ``call`` tag from *stream*."""
        expect(stream, TOKEN_TAG, value=TAG_CALL)
        tok = stream.current

        stream.next_token()
        expect(stream, TOKEN_EXPRESSION)
        expr_stream = TokenStream(
            tokenize_macro_expression(stream.current.value))

        # The target macro's name must be given as a string literal.
        expect(expr_stream, TOKEN_STRING)
        name = parse_string_literal(expr_stream).value
        expr_stream.next_token()

        # Positional arguments carry no name; keyword arguments do.
        args = []
        kwargs = []

        def read_argument() -> None:
            # Route the next argument into ``args`` or ``kwargs``.
            arg_name, expr = parse_call_argument(expr_stream)
            if arg_name is None:
                args.append(expr)
            else:
                kwargs.append(CallKeywordArg(arg_name, expr))

        # A first argument may appear without a leading comma.
        if expr_stream.current.type not in (TOKEN_COMMA, TOKEN_EOF):
            read_argument()

        while expr_stream.current.type != TOKEN_EOF:
            if expr_stream.current.type != TOKEN_COMMA:
                typ = expr_stream.current.type
                raise LiquidSyntaxError(
                    f"expected a comma separated list of arguments, found {typ}",
                    linenum=tok.linenum,
                )
            expr_stream.next_token()  # Eat comma
            read_argument()

        return CallNode(tok=tok, name=name, args=args, kwargs=kwargs)
Example #8
    def parse(self, stream: TokenStream) -> Node:
        """Parse a ``with`` tag and its block from *stream*."""
        expect(stream, TOKEN_TAG, value=TAG_WITH)
        tok = stream.current

        stream.next_token()
        expect(stream, TOKEN_EXPRESSION)
        expr_stream = TokenStream(
            tokenize_with_expression(stream.current.value))

        # Keyed by argument name, so a later duplicate keyword silently
        # overwrites an earlier one.
        args = {}
        while expr_stream.current.type != TOKEN_EOF:
            key, expr = self.parse_argument(expr_stream)
            args[key] = expr
            if expr_stream.current.type == TOKEN_COMMA:
                expr_stream.next_token()  # Eat comma

        # Consume the tag's block up to the closing tag.
        stream.next_token()
        block = self.parser.parse_block(stream, (TAG_ENDWITH, TOKEN_EOF))
        expect(stream, TOKEN_TAG, value=TAG_ENDWITH)

        return WithNode(tok=tok, args=args, block=block)
Example #9
 def parse_expression(self, stream: TokenStream) -> Expression:
     """Parse a boolean expression from the current expression token."""
     expect(stream, TOKEN_EXPRESSION)
     # Re-tokenize the token's value as a boolean ("not") expression.
     tokens = TokenStream(
         tokenize_boolean_not_expression(stream.current.value))
     return self.expression_parser.parse_boolean_expression(tokens)