Example #1
            interpolate = False
        elif arg.type == "word" and arg.value == "\\":
            escaped = True  # "\" protects the next element from rewriting
        elif arg.type == "word" and arg.value == "&":
            interpolate = True  # "&" marks the next element for interpolation
        elif arg.type == "tuple":
            new_args.append(tuple_mutator(scope, arg))  # recurse into nested tuples
        else:
            new_args.append(arg)
    return ASTTuple(new_args)
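
The two flags drive a quasiquote-style rewrite: a backslash word protects the next element, while "&" asks for the next element to be resolved against the scope. The flag-handling branches at the top of the loop fall outside this excerpt, so here is a minimal standalone sketch of that marker convention; the True-branches are assumptions about what the elided code does, not the post's actual implementation:

def mutate(scope, items):
    out, escaped, interpolate = [], False, False
    for item in items:
        if escaped:            # previous item was "\": keep this one as-is
            out.append(item)
            escaped = False
        elif interpolate:      # previous item was "&": substitute from the scope
            out.append(scope[item])
            interpolate = False
        elif item == "\\":
            escaped = True
        elif item == "&":
            interpolate = True
        else:
            out.append(item)
    return out

print(mutate({"name": "world"}, ["hello", "&", "name"]))  # ['hello', 'world']
print(mutate({}, ["\\", "&"]))                            # ['&']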


def template_ast(scope, args):
    assert len(args) == 1, f"ast expects 1 arg, {len(args)} given"

    ast = args[0]
    assert ast.type == "tuple", f"ast expects first arg to be a tuple, got {ast.type}"

    return ASTTemplate(tuple_mutator(scope, ast))


template_scope = Scope()
template_scope.add_natives(NATIVES)
template_scope.define("ast", template_ast)


def parse_template(template):
    # Run the template in a fresh child of template_scope, so "ast" and
    # the natives are visible but template-time definitions don't leak
    # into the shared scope.
    sub_scope = Scope(parent=template_scope)

    return sub_scope.execute_statement(template)
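
parse_template leans on Scope's parent chaining: the sub-scope sees "ast" and the natives from template_scope, but anything defined while the template runs stays local. The real Scope class appears earlier in the post; this is only a minimal sketch of the lookup chain it implies, with every detail assumed for illustration:

class MiniScope:
    def __init__(self, parent=None):
        self.parent = parent
        self.names = {}

    def define(self, name, value):
        self.names[name] = value

    def lookup(self, name):
        if name in self.names:
            return self.names[name]
        if self.parent is not None:
            return self.parent.lookup(name)
        raise NameError(name)

root = MiniScope()
root.define("ast", "<template handler>")
child = MiniScope(parent=root)
child.define("local", 1)
print(child.lookup("ast"))     # found via the parent chain
print("local" in root.names)   # False: child definitions don't leak upward
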
Example #2
tokenizer.add_consumer("open", word_consumer("("))
tokenizer.add_consumer("close", word_consumer(")"))
tokenizer.add_consumer("true", word_consumer("true"))
tokenizer.add_consumer("false", word_consumer("false"))
tokenizer.add_consumer("null", word_consumer("null"))
tokenizer.add_consumer(
    "word",
    regex_consumer(
        # "-" must be escaped here, otherwise the class reads "*-+" as a
        # range and "-" itself is never matched as a word.
        r"^(?:[a-zA-Z$_][a-zA-Z$_0-9]*|[!£%^&*\-+=\[\]{}@'#~,<.>/?\\])"))
tokenizer.add_consumer("int", regex_consumer(r"^[0-9]+"))
tokenizer.add_consumer("float", regex_consumer(r"^[0-9]+\.[0-9]+"))
tokenizer.add_consumer("string", regex_consumer(r"^\"(?:[^\"\\]|\\.)*\""))
tokenizer.add_consumer("whitespace", regex_consumer(r"^\s+"))

global_scope = Scope()
global_scope.add_natives(NATIVES)


def filter_tokens(tokens):
    new_tokens = []
    for token in tokens:
        if token.name == "whitespace":
            continue  # whitespace only separates tokens; drop it
        elif token.name == "escaped_char":
            # strip the backslash and keep the escaped character as a plain word
            token = token.copy(name="word", content=token.content[1:])
        new_tokens.append(token)
    return new_tokens
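
filter_tokens assumes each token exposes a name, its matched content, and a copy method that returns a modified clone. The actual Token type lives with the tokenizer earlier in the post; this dataclass is only a sketch of that assumed shape:

from dataclasses import dataclass, replace

@dataclass(frozen=True)
class Token:
    name: str
    content: str

    def copy(self, **overrides):
        return replace(self, **overrides)

tok = Token(name="escaped_char", content="\\&")
print(tok.copy(name="word", content=tok.content[1:]))
# Token(name='word', content='&')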


def compile(content):
    tokens = filter_tokens(tokenizer.consume_all(content))