Example #1
    def compile(self, functions, source, code):
        # Pull tokens from the lexer until the end-of-input marker,
        # handing each one to the parser; stop on the first failed parse.
        lexer = lex.Lexer(source)
        token = lexer.nextToken()
        while token.type != lex.TOKEN_END:
            if not self.parse(functions, lexer, token, code):
                return
            token = lexer.nextToken()
Example #2
        pydf.image(os.path.join(image_path, image_dict['name1']),
                   x=image_dict['x1'],
                   y=image_dict['y1'],
                   w=396,
                   h=484)
        pydf.image(os.path.join(image_path, image_dict['name2']),
                   x=image_dict['x2'],
                   y=image_dict['y2'],
                   w=112,
                   h=112)
    return pydf


# Token table: one regex per token type; the SPACE callback returns None,
# so whitespace produces no token.
table = {
    "NUM": r"[0-9]+",
    "SPACE": (r'[ \t\n]+', lambda t: None),
    "ID": r"[a-z.]+"
}
lexer = lex.Lexer(table)

rules = [[
    "expr",
    ("NUM NUM ID NUM NUM ID NUM NUM", lambda p: {
        'start': int(p[0]),
        'end': int(p[1]),
        'name1': p[2],
        'x1': int(p[3]),
        'y1': int(p[4]),
        'name2': p[5],
        'x2': int(p[6]),
        'y2': int(p[7])
    })
]]
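
# Sketch: each production handler is a plain function, so it can be called
# directly (assuming the parser passes the matched token texts in order).
# The sample values below are invented for illustration.
_pattern, handler = rules[0][1]
sample = ["0", "10", "cover.png", "30", "40", "logo.png", "50", "60"]
print(handler(sample))
# -> {'start': 0, 'end': 10, 'name1': 'cover.png', 'x1': 30, 'y1': 40,
#     'name2': 'logo.png', 'x2': 50, 'y2': 60}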
Example #3
    return items[0]



rule_tokens = {
    'IDENTIFIER': r'[a-zA-Z_]+',
    'LBRACKET':   r'\[',
    'LPAREN':     r'\(',
    'PIPE':       r'\|',
    'RBRACKET':   r'\]',
    'RPAREN':     r'\)',
    'STAR':       r'\*',
    'PLUS':       r'\+',
    'WHITESPACE': (r' ', lambda t: None),
}
rule_lexer = lex.Lexer(rule_tokens)


def rule_fn(rule_table, name, rule):
    '''
    Decorator factory: register an explicitly defined handler in the rule
    table, rather than going through the PLY package.
    '''
    def wrapper(fn):
        rule_table.append((name, (rule, fn)))
        return fn
    return wrapper
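
# Hypothetical usage sketch (the rule table, rule name, and rule string
# below are invented for illustration): rule_fn registers the decorated
# handler in the given rule table.
demo_rules = []

@rule_fn(demo_rules, "group", "LPAREN IDENTIFIER RPAREN")
def group(p):
    return p[1]

# demo_rules is now [("group", ("LPAREN IDENTIFIER RPAREN", group))]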

class Parser:
    '''
    Add user-given rules to the rule table.
    '''
Example #4
    def C_PAR(self):
        # Consume the next token; succeed if it is a closing parenthesis.
        t = self.next()
        if t.token.type == "C_PAR":
            return True

    def EOF(self):
        # Consume the next token; succeed if it marks end of input.
        t = self.next()
        if t.token.type == "EOF":
            return True


"""
    5*3+4:

         4
        /
       + 
     /  \
    *    3
     \
       5
"""

if __name__ == '__main__':
    text = "((1)+(3*2))"
    l = lex.Lexer(text)
    l.scan()

    p = RDParser(l.tokens)
    p.parse()
Example #5
    lex.TokenType("}", r'^\}'),
    lex.TokenType("[", r'^\['),
    lex.TokenType("]", r'^\]'),
    lex.TokenType("(", r'^\('),
    lex.TokenType(")", r'^\)'),
    lex.TokenType(",", r'^\,'),
    lex.TokenType(";", r'^\;'),
    lex.TokenType(">", r'^\>'),
    lex.TokenType("<", r'^\<'),
    lex.TokenType("<=", r'^\<\='),
    lex.TokenType(">=", r'^\>\='),
]

with open(sys.argv[1]) as code:

    lexer = lex.Lexer(types)

    tokens = lexer.tokenize(code.read())

    parser = parse.Parser(tokens)
    ast = parser.parse()

    sc = scope.Scope()
    sc.bind("print",
            value.ParamsBuiltinFunctionObject(lambda text: print(str(text))))
    sc.bind(
        "input",
        value.NoParamsBuiltinFunctionObject(
            lambda: value.StringObject(input())))
    sc.bind(
        "int",
Example #6
def main():
    # JavaScript
    with open("../test/test1.js", "rb") as f:
        s = f.read().decode("utf-8")

    lexer = lex.Lexer(lexjs, s)

    while True:
        t = lexer.get_token()
        if t is None:
            break
        sys.stdout.write("{0:s}\n".format(lexer.repr_token(t)))

    # Sep
    sys.stdout.write("\n\n\n")

    # Python
    with open("../test/test1.py", "rb") as f:
        s = f.read().decode("utf-8")

    lexer = lex.Lexer(lexpy, s)

    while True:
        t = lexer.get_token()
        if t is None:
            break
        sys.stdout.write("{0:s}\n".format(lexer.repr_token(t)))

    # Sep
    sys.stdout.write("\n\n\n")

    # CSS
    with open("../test/test1.css", "rb") as f:
        s = f.read().decode("utf-8")

    lexer = lex.Lexer(lexcss, s)

    while True:
        t = lexer.get_token()
        if t is None:
            break
        sys.stdout.write("{0:s}\n".format(lexer.repr_token(t)))

    # Sep
    sys.stdout.write("\n\n\n")

    # XML
    with open("../test/test1.xml", "rb") as f:
        s = f.read().decode("utf-8")

    lexer = lex.Lexer(lexxml, s)

    while True:
        t = lexer.get_token()
        if t is None:
            break
        sys.stdout.write("{0:s}\n".format(lexer.repr_token(t)))

    # Sep
    sys.stdout.write("\n\n\n")

    # HTML
    with open("../test/test1.html", "rb") as f:
        s = f.read().decode("utf-8")

    lexer = lex.Lexer(lexxml, s)
    lexer.html = True

    while True:
        t = lexer.get_token()
        if t is None:
            break
        sys.stdout.write("{0:s}\n".format(lexer.repr_token(t)))

    # Done
    return 0
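
The five blocks above differ only in the token table, the input path, and
the HTML flag. A possible consolidation, sketched using only the lex.Lexer
calls already shown in this example:

def dump_tokens(table, path, html=False):
    # Read and decode the source file.
    with open(path, "rb") as f:
        s = f.read().decode("utf-8")

    lexer = lex.Lexer(table, s)
    if html:
        lexer.html = True

    # Drain the lexer, one printed token per line.
    while True:
        t = lexer.get_token()
        if t is None:
            break
        sys.stdout.write("{0:s}\n".format(lexer.repr_token(t)))

    # Separator between files.
    sys.stdout.write("\n\n\n")

# e.g. dump_tokens(lexjs, "../test/test1.js")
#      dump_tokens(lexxml, "../test/test1.html", html=True)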