Esempio n. 1
0
def test_tokenize():
    """Tokenizing a define form and an arithmetic form yields flat token lists."""
    cases = [
        (['(define a 3)'], ['(', 'define', 'a', '3', ')']),
        (['(+ 10 25 0)'], ['(', '+', '10', '25', '0', ')']),
    ]
    for src, expected in cases:
        assert list(tokenize(src)) == expected
Esempio n. 2
0
def test_or_shortcircuit():
    """`or` must stop evaluating once an operand is true.

    If short-circuiting were broken, evaluating `never-reached` would
    raise an unknown-variable error; any other exception is tolerated.
    """
    exp = parse_tokens(tokenize(['(or #t never-reached)']))[0]
    try:
        eval_in_env(exp, Environment([]))
    except Exception as err:
        assert str(err) != 'unknown variable "never-reached"'
Esempio n. 3
0
def parse(text, display_tokens=False):
    """Tokenize *text* and hand the token stream to the Vyper parser.

    When *display_tokens* is true the stream is duplicated with
    itertools.tee so it can be printed without consuming the copy
    that the parser reads.
    """
    token_stream = tokenize(text)
    if display_tokens:
        shown, token_stream = tee(token_stream)
        print(list(shown))
    return _VyperParser(text).parse(token_stream)
Esempio n. 4
0
def main():
    """Run a small hard-coded program through the interpreter (Python 2 syntax)."""
    # The leading `;` line is a Scheme comment; the (include ...) and
    # (import ...) forms exercise loading of prelude.scm.
    inp = """
; (define (list . x) x)
"----- START -----"
(include "prelude.scm")
(define prelude (import "prelude.scm"))

prelude
(prelude p1 str)
(env)
"""

    # Fresh environment chained onto the interpreter's built-ins.
    env = Environment([], [], basic_environment)

    # Evaluate the canned input above and echo each result.
    if True:
        for result in repl(env,parse(tokenize([inp]))):
            print "$", result

    # Dead branch: interactive reader, kept for manual experimentation.
    if False:
        for result in repl(env,parse(tokenize(reader_raw()))):
            print result
Esempio n. 5
0
def repl(p='\n> ', i=sys.stdin, o=sys.stdout):
    '''
    Given a stream of parse trees, evaluate each,
    and return the result.

    p -- prompt printed before each read ('' or None disables it)
    i -- input stream handed to the tokenizer
    o -- output stream (currently unused; kept for interface compatibility)
    '''
    sys.stderr.write('(…) Tali (α) ')

    ts = parse(tokenize(i))
    while True:
        if p: print(p, end='', flush=True)
        # Stop cleanly at end of input: the original let StopIteration
        # escape from next(), crashing the loop at EOF.
        try:
            t = next(ts)
        except StopIteration:
            return
        r = eval(t)
        print(t, flush=True)
        print(r, flush=True)
Esempio n. 6
0
def test():
    """Table-driven checks for the parser (Python 2 syntax).

    Each (input, expected) pair is tokenized and parsed; str() of the
    single resulting parse tree must match the expected rendering.
    Mismatches are printed rather than raised.
    """
    print "testing: parse"
    from lex import tokenize

    # (source text, expected str() of the parse tree)
    to_process = [ 
        ("()"        , "nil"),
        ("'()"       , "( quote nil )"),
        ("'a"        , "( quote a )"),
        ("'( a b)"   , "( quote ( a b ) )"),
        ("()"        , "nil"),
        ("(a)"       , "( a )"),
        ("('a)"      , "( ( quote a ) )"),
        ("(a b)"     , "( a b )"),
        ("(a b c)"   , "( a b c )"),
        ("(a b c d e f (g) h (i j) k (l m n) o (p q (r s) t) u (v (w (x (y (z))))))", 
         "( a b c d e f ( g ) h ( i j ) k ( l m n ) o ( p q ( r s ) t ) u ( v ( w ( x ( y ( z ) ) ) ) ) )"),
        ("(a . b)"   , "( a . b )"),
        ("(a b . c)" , "( a b . c )"),
        ('"asdf"'    , '"asdf"'), 
        ('("asdf")'    , '( "asdf" )'), 
        ('(4"asdf")'   , '( 4 "asdf" )'), 
        ('(4."asdf")'   , '( 4 . "asdf" )'), 
        ('"as df"'    , '"as df"'), 
        ('as ;'    , 'as'), 
        ('as ;abcdef12345'    , 'as'), 
        ('(as ";")'    , '( as ";" )'), 
        ('(as "; hello")'    , '( as "; hello" )'), 
        ('as ;;;;;;'    , 'as'), 
        ('as ; !" %$ 86842 "$^P~>?:@:~'    , 'as'), 
        ("(() (()()))", "( nil ( nil nil ) )")]
    
    for test, expected in to_process:

        tokens = tokenize([test])
        result = list(parse(tokens))
        # Exactly one toplevel form is expected per input.
        assert len(result) == 1
        res = str(result[0])
        if res != expected:
            print "Mismatch"
            print "test    :", test
            print "result  :", res
            print "expected:", expected
Esempio n. 7
0
def read_file(stream, env):
    """Tokenize, parse and evaluate an entire stream for its side effects.

    The individual evaluation results are discarded: only the (possibly
    freshly created) environment matters, and it is returned.
    """
    created_env = env is None
    if created_env:
        # Extend the default environment rather than using it directly,
        # so the imported module does not needlessly carry every default
        # function in its own namespace.
        env = Environment([], [], basic_environment)

    for sexp in parse(tokenize(iter(stream))):
        sexp.scm_eval(env)

    # Sever the parent link we created ourselves, so things like `quote`
    # are not kept alive through this environment.
    if created_env:
        del env.variables["__parent__"]

    # The environment is a plain object, so it can simply be returned.
    return env
Esempio n. 8
0
def repl():
    """Interactive read-eval-print loop for lis.py; exits on EOF (Ctrl-D)."""
    banner = '****************************************'
    for line in (banner,
                 'lis.py - a simple Lisp written in Python',
                 '(c) Nick Collins, 2013',
                 banner):
        color_print(line, 'blue')
    env = Environment([])
    while True:
        try:
            user_input = raw_input('lis.py> ')
            exp = parse_tokens(tokenize([user_input]))[0]
            # Evaluate in a child scope and commit it only on success, so
            # a failing expression cannot corrupt the live environment.
            tmp_env = Environment([], env)
            color_print(eval_in_env(exp, tmp_env), 'green')
            env = tmp_env
        except EOFError:
            color_print('\nLeaving lis.py.', 'blue')
            break
        except (LisNameError, LisSyntaxError) as e:
            color_print(e, 'red')
        except Exception as e:
            color_print(e, 'red')
            print('*** Invalid input ***')
Esempio n. 9
0
def test_parse_define_lambda():
    """A define wrapping a lambda parses into the expected nested lists."""
    program = '(define add (lambda (x y) (+ x y)))'
    parsed = parse_tokens(tokenize([program]))
    assert parsed == [['define', 'add', ['lambda', ['x', 'y'], ['+', 'x', 'y']]]]
Esempio n. 10
0
            curr_statement.line_number = t.line_number
            statement_list.append(curr_statement)
            if not curr_statement.op:
                warnings_list.append(_create_parse_warning(
                    t.line_number, "empty statement"
                ))
            curr_statement = Statement()
            next_is_semicolon = False
            is_in_args = False

    if not curr_statement.is_new():
        warnings_list.append(_create_parse_warning(
            -1, "last statement not empty"
        ))
    return (statement_list,warnings_list)

if __name__ == '__main__':
    # Three-line demo program: a labelled loop whose trailing `;;` yields
    # an empty statement, which should surface as a parse warning.
    text = ("LOOP: ADD 12 3 R0;\n"
            "      ADD 5 R0 R1;\n"
            "      GOTO LOOP;;\n")
    print("Program:")
    print(text)
    statements, warnings = parse_tokenlist(tokenize(text, SymbolTable()))
    print("Parsed:")
    for statement in statements:
        print(statement)
    print("Warnings:")
    for warning in warnings:
        print(warning)
Esempio n. 11
0
def test_parse_define():
    """A simple define parses with its numeric literal converted to int."""
    parsed = parse_tokens(tokenize(['(define a 3)']))
    assert parsed == [['define', 'a', 3]]
Esempio n. 12
0
from lex import tokenize
import tree
from code_gen import compile_all

# Compile source.txt to C: lex -> parse tree -> code generation.
# The original leaked both file handles (`open(...).read()` /
# `open(...).write(...)`) and shadowed the builtin `file`; context
# managers guarantee the output is flushed and closed.
SOURCE_PATH = "source.txt"
with open(SOURCE_PATH) as src:
    code = src.read()
tokens = tokenize(code)
ast = tree.tree(tokens)
# ast.display()
c_code = compile_all(ast)
with open('output.c', 'w') as out:
    out.write(c_code)
# print(tree.name_types)
Esempio n. 13
0
def test_or_false():
    """`or` over exclusively falsy operands evaluates to #f."""
    program = '(or (> 2 10) (= 1 2) #f)'
    exp = parse_tokens(tokenize([program]))[0]
    assert eval_in_env(exp, Environment([])) == False
Esempio n. 14
0
def test_add():
    """Variadic `+` sums all of its operands."""
    exp = parse_tokens(tokenize(['(+ 1 3 5)']))[0]
    assert eval_in_env(exp, Environment([])) == 9
Esempio n. 15
0
def test_nullcheck():
    """`null?` applied to null itself is true."""
    exp = parse_tokens(tokenize(['(null? null)']))[0]
    assert eval_in_env(exp, []) == True
Esempio n. 16
0
def test_list():
    """`list` evaluates each argument before collecting them."""
    exp = parse_tokens(tokenize(['(list 1 2 (+ 1 2) 4)']))[0]
    assert eval_in_env(exp, []) == [1, 2, 3, 4]
Esempio n. 17
0
def test_cdr():
    """The cdr of a single-element cons onto null is the empty list."""
    exp = parse_tokens(tokenize(['(cdr (cons 1 null))']))[0]
    assert eval_in_env(exp, []) == []
Esempio n. 18
0
def test_car():
    """The car of a cons cell is its first element."""
    exp = parse_tokens(tokenize(['(car (cons 1 null))']))[0]
    assert eval_in_env(exp, Environment([])) == 1
Esempio n. 19
0
'''
This is a very thin, executable wrapper around lexical
analysis.
'''
import sys
import lex

if __name__ == '__main__':
    # One token per line; the join is equivalent to print(*tokens, sep='\n').
    print('\n'.join(str(tok) for tok in lex.tokenize(sys.stdin)))

'''
This is a very thin, executable wrapper around syntactic
analysis.
'''
import sys
from lex import tokenize
from parse import parse

if __name__ == '__main__':
    parse_trees = list(parse(tokenize(sys.stdin)))
    print(parse_trees)

Esempio n. 21
0
File: main.py Project: gcali/drisc
#! /usr/bin/env python3

from lex import tokenize 
from parser import parse_tokenlist
from table import SymbolTable
from sys import argv

if __name__ == '__main__':
    sym = SymbolTable()
    if len(argv) <= 1:
        text="LOOP: ADD 12 3 R0;\n" +\
             "      ADD 5 R0 R1;\n" +\
             "      GOTO LOOP;;\n"
        print("Program:")
        print(text)
        token_list = tokenize(text, sym)
        s,w = parse_tokenlist(token_list)
        print("Parsed:")
        for l in s:
            print(l)
        print("Warnings:")
        for l in w:
            print(l)
    else:
        for fn in argv[1:]:
            print("File name: {}".format(fn))
            with open(fn) as f:
                token_list = tokenize(f.read(), sym)
                s,w = parse_tokenlist(token_list)
                print("Parsed:")
                for l in s:
Esempio n. 22
0
def test_divide():
    """Division truncates to an integer result (10 / 3 -> 3)."""
    exp = parse_tokens(tokenize(['(/ 10 3)']))[0]
    assert eval_in_env(exp, Environment([])) == 3
Esempio n. 23
0
def test_gt():
    """(>) is strict: equal operands give #f, a greater one gives #t."""
    exp_equal = parse_tokens(tokenize(['(> 2 2)']))[0]
    exp_greater = parse_tokens(tokenize(['(> 3 2)']))[0]
    assert eval_in_env(exp_equal, Environment([])) == False
    assert eval_in_env(exp_greater, Environment([])) == True
Esempio n. 24
0
def test_nullcheck_2():
    """A non-empty cons cell is not null."""
    exp = parse_tokens(tokenize(['(null? (cons 1 null))']))[0]
    assert eval_in_env(exp, []) == False
Esempio n. 25
0
def test_and_false():
    """`and` is false as soon as any operand is false."""
    program = '(and (> 2 1) (= 1 2) #t)'
    exp = parse_tokens(tokenize([program]))[0]
    assert eval_in_env(exp, Environment([])) == False
Esempio n. 26
0
def test_nullcheck_3():
    """The cdr of a one-element list is null."""
    exp = parse_tokens(tokenize(['(null? (cdr (list 1)))']))[0]
    assert eval_in_env(exp, []) == True
Esempio n. 27
0
def test_parse_add():
    """A define with a nested arithmetic form parses into nested lists."""
    parsed = parse_tokens(tokenize(['(define a (+ 3 3))']))
    assert parsed == [['define', 'a', ['+', 3, 3]]]
Esempio n. 28
0
def test_cons():
    """`cons` evaluates its head before pairing it with null."""
    exp = parse_tokens(tokenize(['(cons (+ 1 3 5) null)']))[0]
    assert eval_in_env(exp, Environment([])) == [9]
Esempio n. 29
0
def testall():
    """Run the table-driven top-level interpreter tests (Python 2 syntax).

    Each entry is (expected printable result, input program text). Every
    program is tokenized, parsed, and fed through repl() against a single
    shared environment, so later entries may rely on definitions made by
    earlier ones (e.g. `x`, `factorial`, the `Point` class).
    """
    to_test = [
        # -------------------------------------- Special Symbols
        ("nil"   , "()" ),
        ("nil"   , "nil" ),
        ("true" , "true" ),
        ("false" , "false" ),
        # -------------------------------------- Self Evaluating
        ("4"     , "4" ),
        ("-455"  , "-455" ),
        # -------------------------------------- Use of Special Forms
        ("hi"        , "(quote hi)"),
        ("( + 3 4 )" , "(quote (+ 3 4))"),
        ("hello"     , "'hello"),
        ("( hello )" , "'(hello)"),
        ("nil"       , "(define x 20)" ),
        ("20"        , "x" ),
        ("nil"       , "(set! x 44)" ),
        ("44"        , "x" ),
        ("nil"       , "(define m 2)" ),
        ("nil"       , "(define (add8 y) (+ 8 y) )" ),
        ("nil"       , "(define (getspoon) 'spoon )" ),
        ("nil"       , "(define (rac x y z)  (+ x (* y z) ))" ),
        ("10"        , "(add8 m)" ),
        ("1001"      , "(rac 1 10 100)" ),
        ("y"         , "(if true 'y 'n)" ),
        ("y"         , "(if true 'y)" ),
        ("nil"       , "(if false 'y)" ),
        ("5"         , "(if (= 2 3) (- 3) (+ 2 3) )" ),
        # ("#PROC"     , "(lambda (z) (+ 3 z))" ),
        ("13"        , "((lambda (z) (+ 3 z)) 10)" ),
        # ("#PROC"     , "(lambda () 5)" ),
        ("222"       , "((lambda (x) (+ 111 x) 222) 333)"),
        ("5"         , "((lambda () 5))" ),
        ("5"         , "(begin 2 3 4 5)" ),
        ("nil"       , "(begin (+ x 3) nil)"),
        ("4"         , "(begin (set! x -99) 4)"),
        ("-99"       , "x" ),
        # -------------------------------------- Pairs
        ("( 4 . 5 )" , "(cons 4 5)"),
        ("( nil . 6 )" , "(cons () 6)"),
        ("4"         , "(car (cons 4 5))"),
        ("5"         , "(cdr (cons 4 5))"),
        # -------------------------------------- Nesting
        ("10"        , "(if true (if true 10 20) 30 )"),
        ("nil" , "(define (add13 y) (+ ((lambda (z) (+ 3 z)) 10) y) )" ),
        ("15"        , "(add13 (- 0(- 0 2)) )" ),
        # -------------------------------------- Shorthand quote
        ("hi"        , "'hi"),
        ("( + 3 4 )" , "'(+ 3 4)"),
        # -------------------------------------- Random
        # ("#PROC"       , "(lambda () (+ 3 -4))" ),
        ("nil"         , "(define meep (lambda () (+ 2 -6)))" ),
        ("-4"          , "(meep)" ),
        ("( 45 . 32 )" , "'(45 . 32)" ),
        ("( as . nd )" , "'(as.nd)" ),
        ("nil"         , "(display 'hello)"),
        ("6"           , "((lambda (x) (+ x 1)) 5)"),
        ("nil"         , "(define aax 2)"),
        ("154"         , "((lambda (moose) (set! aax moose) 154) -73)"),
        ("-73"         , "aax"),
        # -------------------------------------- Maths
        ("true"         , "(< 4 5)"),
        ("false"        , "(< 5 4)"),
        ("false"        , "(< 5 5)"),
        ("false"        , "(> 4 5)"),
        ("true"         , "(> 5 4)"),
        ("false"        , "(> 5 5)"),
        ("false"        , "(= 5 4)"),
        ("false"        , "(= 4 5)"),
        ("true"         , "(= 5 5)"),
        # -------------------------------------- Recurse (works but spams)
        ("nil"         , "(define (xxx x) (display 'in) (if (< x 10) (xxx (+ x 1))))"),
        # ("nil"         , "(xxx 0)"),
        # ("nil"         , "(xxx 1)"),
        # ("nil"         , "(xxx 9)"),
        # -------------------------------------- Factorial 
        ("nil"       , """(define (factorial n)
                              (if (= n 0)
                                1
                                (if (= n 1)
                                  1
                                  (* n (factorial (- n 1))))))"""),
        ("1"         ,"(factorial 0)"),
        ("1"         ,"(factorial 1)"),
        ("2"         ,"(factorial 2)"),
        ("6"         ,"(factorial 3)"),
        ("40320"     ,"(factorial 8)"),
        # -------------------------------------- Nested Define
        ("nil"   , """(define (outer w x)
                              (define (inner y z)
                                (+ w (+ y z)))
                              (inner x 1))"""),
        ("111"         ,"(outer 10 100)"),
        # -------------------------------------- Fibonacci James
        ("nil"   , """(define (fibonacci-james n)
                                (define (fib n1 n2 aaaaaa)
                                    (if (= aaaaaa n)
                                        n1
                                        (fib n2 (+ n1 n2) (+ aaaaaa 1))))
                                (fib 0 1 0))"""),
        ("55"         ,"(fibonacci-james 10)"),
        # -------------------------------------- Fibonacci
        ("nil"   , """(define fibonacci
                              (lambda (n)
                                (define fib 
                                  (lambda (n1 n2 cnt)
                                    (if (= cnt n)
                                        n1
                                        (fib n2 (+ n1 n2) (+ cnt 1)))))
                                (fib 0 1 0)))"""),
        ("55"         ,"(fibonacci 10)"),
        # -------------------------------------- String
        ('"jam"'      , '"jam"'),
        ('"jam"'      , '(car (cons "jam" "spoon"))'),
        ('"jam"'      , '\'"jam"'),
        # -------------------------------------- Basic Functions
        ("nil"       , "(define (test0) 0)"),
        ("0"         , "(test0)"),
        ("nil"       , "(define (test1 x) x)"),
        ("-5"        , "(test1 -5)"),
        ("nil"       , "(define (test2 x y) (cons x y))"),
        ("( 6 . hi )", "(test2 6 'hi)"),
        ("nil"       , "(define (test3 x y z) (set! x y) z)"),
        ("hi"        , "(test3 x 6 'hi)"),
        ("nil"       , "(define (test4 x y z) (+ x y) z)"),
        ("56"        , "(test4 12 34 56)"),
        ("nil"       , "(define (test5 x y z) (+ x y) z (+ 2 4) (+ 7 z))"),
        ("9"         , "(test5 4 3 2)"),
        # -------------------------------------- Dotted Function Calling
        ("nil"           , "(define (test6 . all) all)"),
        ("( 1 )"         , "(test6 1)"),
        ("( 1 2 )"       , "(test6 1 2)"),
        ("( ( a . b ) )" , "(test6 '(a . b))"),
        ("nil"           , "(define (list . x) x)"),
        ("nil"           , "(define (test7 a . all) (list all a))"),
        ("( nil 1 )"     , "(test7 1)"),
        ("( ( 2 ) 1 )"   , "(test7 1 2)"),
        ("( ( 2 3 4 5 6 ) 1 )"   , "(test7 1 2 3 4 5 6)"),
        # -------------------------------------- Dotted Lambda Calling
        ("( 1 )"         , "((lambda all all) 1)"),
        ("( 1 2 )"       , "((lambda all all) 1 2)"),
        ("( ( a . b ) )" , "((lambda all all) '(a . b))"),
        ("( nil 1 )"     , "((lambda (a . all) (list all a)) 1)"),
        ("( ( 2 ) 1 )"   , "((lambda (a . all) (list all a)) 1 2)"),
        ("( ( 2 3 4 5 6 ) 1 )"   , "((lambda (a . all) (list all a)) 1 2 3 4 5 6)"),
        # -------------------------------------- Quine
("( ( lambda ( x ) ( list x ( list ( quote quote ) x ) ) ) ( quote ( lambda ( x ) ( list x ( list ( quote quote ) x ) ) ) ) )",
 "( ( lambda ( x ) ( list x ( list ( quote quote ) x ) ) ) ( quote ( lambda ( x ) ( list x ( list ( quote quote ) x ) ) ) ) )"),
        # -------------------------------------- Class
        ("<#class-1#>"       , "BaseClass" ),
        ("nil"               , "(define Point (class BaseClass))"),
        ("nil"               , "(class-define! Point '_x)"),
        ("nil"               , "(class-define! Point '_y)"),
        ("nil"               , "(class-define! Point 'length)"),
        ("nil"               , "(class-define! Point 'total)"),
        ("nil"               , "(class-define! Point 'thing)"),
        ("nil"               , "(class-set! Point 'length 2)"),
        ("nil"               , "(class-set! Point 'total (lambda () (+ (self _x) (self _y))))"),
        ("nil"               , "(class-set! Point 'thing (lambda (mm) (+ mm (self total))))"),
        ("2"                 , "(Point length)"),
        ("nil"               , "(define p1 (class Point))"),
        ("nil"               , "(class-set! p1 '_x 4)"),
        ("nil"               , "(class-set! p1 '_y 6)"),
        ("4"                 , "(p1 _x)"),
        ("6"                 , "(p1 _y)"),
        ("10"                , "(p1 total)"),
        ("110"               , "(p1 thing 100)"),
        # -------------------------------------- Macros
        ("<#procedure#>" , "(mac (yyx) (+ 3 yyx))" ),
        ("3"             , "((mac (x) x) (+ 1 2))" ),
        ("nil"           , "(define ggg 835)" ),
        ("835"           , "((mac (ggg) 'ggg) (+ 1 2))" ),
        ("nil"           , "(define when (mac (test . body) (list 'if test (cons 'begin body))))" ),
        ("jam"           , "(when (= 4 4) 'jam)" ),
        ("nil"           , "(when (= 3 4) 'jam)" ),
        # -------------------------------------- Quasiquote
        ("a"                 , "(quasiquote a)" ),
        ("( a )"             , "(quasiquote (a))" ),
        ("( a b )"           , "(quasiquote (a b))" ),
        ("( a b c )"         , "(quasiquote (a b c))" ),
        ("( a . b )"         , "(quasiquote (a . b))" ),
        ("( a b c )"         , "(quasiquote (a b c))" ),
        ("( a b . c )"       , "(quasiquote (a b . c))" ),
        ("( a b a b )"       , "(quasiquote (a b . (a b)))" ),
        ("( quote a )"       , "(quasiquote 'a)" ),
        ("a"                 , "(quasiquote (unquote 'a))" ),
        ("( ( quote a ) b )" , "(quasiquote ( 'a (unquote 'b)))" ),
        ("( list 3 4 )"      , "(quasiquote (list (unquote (+ 1 2)) 4))" ),
        ("( a b 3 4 )"       , "(quasiquote (a . (b (unquote (+ 1 2)) 4)))" ),
        ("( a ( quasiquote ( b ( unquote c ) d ) ) e )" , 
         "(quasiquote ( a (quasiquote ( b (unquote c) d)) e ))" ),
        # -------------------------------------- Quasiquote (Sugar)
        ("a"                 , "`a" ),
        ("( a )"             , "`(a)" ),
        ("( a b )"           , "`(a b)" ),
        ("( a b c )"         , "`(a b c)" ),
        ("( a . b )"         , "`(a . b)" ),
        ("( a b c )"         , "`(a b c)" ),
        ("( a b . c )"       , "`(a b . c)" ),
        ("( a b a b )"       , "`(a b . (a b))" ), # tested on plt-scheme
        ("( quote a )"       , "`'a" ),
        ("a"                 , "`,'a" ),
        ("( ( quote a ) b )" , "`( 'a ,'b)" ),
        ("( list 3 4 )"      , "`(list ,(+ 1 2) 4)" ),
        ("( a b 3 4 )"       , "`(a . (b ,(+ 1 2) 4))" ),
        ("( a ( quasiquote ( b ( unquote c ) d ) ) e )" , "`(a`(b,c d)e)" ),
        # -------------------------------------- Done
        ("nil"         , "()" )]

    # Single environment shared by every entry above.
    env = Environment([], [], basic_environment)

    print "testing: toplevel"
    for n, (expected, inp) in enumerate(to_test):
        if DEBUG: print "----"
        if DEBUG: print "input    ", inp
        if DEBUG: print "expected ", expected
        tok = tokenize([inp])
        tok = list(tok)
        exp = parse(tok)
        exp = list(exp)

        # Each entry must evaluate to exactly one toplevel result.
        results = list(repl(env,exp))
        assert len(results) == 1, str(results)
        res = results[0]

        if DEBUG: print "result   ", res
        
        # Mismatches are reported, not raised.
        if str(res) != expected:
            print "Mismatch Error!"
            print "  > input    ", inp
            print "  > expected ", expected
            # print "  > sexp     ", exp
            print "  > result   ", res   
            print "-------------"
Esempio n. 30
0
import io

import lex as l
import parse as p
import interpret as i

# Nested key/value test input for the lexer and parser.
test1 = '' \
    '(f: +,' \
    ' a: (a: 1,' \
         'b: 2))'

# list() applied directly to the token iterator; the original wrapped it
# in a redundant `list([x for x in ...])` comprehension.
print(list(l.tokenize(io.StringIO(test1))))
print(p.parse(l.tokenize(io.StringIO(test1))))
#print('\n\n')
#
#tree = p.parse(l.tokenize(test1))
#print('\n\n')
#
#print(tree)
Esempio n. 31
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json

# Project modules
import lex
import ocr
import frequency
import mysql
import archive

# Extract the document text once; the original called archive.get_text
# with identical arguments twice (once for tokenizing, once for saving).
document_text = archive.get_text("exemplo1.docx", "docs/docx/")
documents = lex.tokenize(document_text)
connection = mysql.connect()
mysql.saveTokens(connection, 'documents.txt',
                 document_text,
                 json.dumps({"exe01.txt": documents}))
print(documents)

# archive = open("documents.txt", "w", encoding='utf-8')
# archive.write(json.dumps({"exe01.txt":documents}, ensure_ascii=False))
# archive.close()

# archive = open("documents.txt", "r", encoding='utf-8')
# text = json.loads(archive.read())
# archive.close()

# print('\n\ntext',text['exe01.txt'])

# connection = mysql.connect()
# mysql.saveTokens(connection, 'documents.txt', json.dumps({"exe01.txt":documents}))
# tokens = mysql.getTokens(connection, 'documents.txt')
Esempio n. 32
0
def test_multiply():
    """Variadic `*` multiplies all of its operands."""
    exp = parse_tokens(tokenize(['(* 1 3 5)']))[0]
    assert eval_in_env(exp, Environment([])) == 15
Esempio n. 33
0
            env = tmp_env
        except EOFError:
            color_print('\nLeaving lis.py.', 'blue')
            break
        except LisNameError as e:
            color_print(e, 'red')
        except LisSyntaxError as e:
            color_print(e, 'red')
        except Exception as e:
            color_print(e, 'red')
            print('*** Invalid input ***')

# RUN INTERPRETER ======================

if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('source', nargs = '?', default=None, help='source file')
    args = arg_parser.parse_args()

    if not args.source:
        # No source file given: drop into the interactive REPL.
        repl()
    else:
        try:
            with open(args.source, 'r') as source:
                program = parse_tokens(tokenize(source))
            eval_loop(program)
        except IOError as e:
            color_print(e, 'red')
            color_print('Invalid source file', 'red')
Esempio n. 34
0
def test_subtract():
    """`-` yields a negative result when the subtrahend is larger."""
    exp = parse_tokens(tokenize(['(- 10 14)']))[0]
    assert eval_in_env(exp, Environment([])) == -4
def get_tokens(text, language='portuguese'):
    """Tokenize *text* with the tokenizer for *language*.

    Returns the token list, or — legacy behaviour kept for backwards
    compatibility — an error string when *text* is not a string.
    """
    # isinstance instead of `type(text) != str`: also accepts str subclasses.
    if not isinstance(text, str):
        return "Erro, argument text != string"
    return tokenize(text, language)
Esempio n. 36
0
def test_parse_unexpected_closing_paren():
    """A stray trailing ')' must raise LisSyntaxError during parsing."""
    tokens = tokenize(['(define add 3))'])
    with pytest.raises(LisSyntaxError):
        parse_tokens(tokens)
Esempio n. 37
0
def t_error(t):
    """PLY error handler: report the offending character and skip past it."""
    bad_char = t.value[0]
    print("Illegal character '%s'" % bad_char)
    t.lexer.skip(1)


class Ex351Lexer:
    """Thin wrapper around a PLY lexer that collects tokens eagerly."""

    data = None   # last input string handed to setData()
    lexer = None  # underlying PLY lexer instance

    def __init__(self):
        self.lexer = lex.lex()

    def setData(self, data):
        """Remember *data* and feed it to the underlying lexer."""
        self.data = data
        self.lexer.input(data)

    def tokenize(self):
        """Drain the lexer and return every token as a list."""
        collected = []
        while True:
            token = self.lexer.token()
            if not token:
                break  # no more input
            collected.append(token)
        return collected


if __name__ == '__main__':
    # Use a distinct name: the original rebound `lex`, shadowing the PLY
    # module that Ex351Lexer.__init__ needs to build any further lexer.
    demo_lexer = Ex351Lexer()
    demo_lexer.setData("if x then 3 <= 4 else 20 >= 1")
    print(demo_lexer.tokenize())
Esempio n. 38
0
 def parse(self, stream): return self.exprs(lex.tokenize(stream))
 def exprs(self, ts):