Example no. 1
def main():
    if len(sys.argv) == 2:
        # Read the whole source file, then run it through the pipeline once.
        with open(sys.argv[1], 'r') as file:
            source = file.read()
        lexer = Lexer(source)
        parser = Parser(lexer)
        interpreter = Interpreter(parser)
        result = interpreter.interpret()
        print(result)
    else:
        print(
            "SHELL MODE: you still need to type the whole program on a single line!"
        )
        while True:
            try:
                text = input('>>> ')
            except EOFError:
                break
            if not text:
                continue

            lexer = Lexer(text)
            parser = Parser(lexer)
            interpreter = Interpreter(parser)
            result = interpreter.interpret()
            print(result)
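The driver above only shows the wiring; Lexer, Parser, and Interpreter are defined elsewhere. As a rough illustration of the contract it relies on, here is a minimal, hypothetical trio for plus/minus arithmetic - the names and token shapes are assumptions, not the original classes.

import re

class Lexer:
    def __init__(self, text):
        # Tokens are just strings here; the toy grammar is "1 + 2 - 3".
        self.tokens = re.findall(r'\d+|[+-]', text)

class Parser:
    def __init__(self, lexer):
        self.tokens = lexer.tokens

    def parse(self):
        # A flat, left-associative token list stands in for a real AST.
        return self.tokens

class Interpreter:
    def __init__(self, parser):
        self.parser = parser

    def interpret(self):
        tokens = self.parser.parse()
        result = int(tokens[0])
        for op, num in zip(tokens[1::2], tokens[2::2]):
            result = result + int(num) if op == '+' else result - int(num)
        return result

print(Interpreter(Parser(Lexer("1 + 2 - 3"))).interpret())  # prints 0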
Example no. 2
    def test_token(self):
        # Test 1
        tw = io.StringIO("1234")
        lex = Lexer.Lexer(tw)
        if lex.advance():
            self.assertEqual(lex.token(), Token.TokenType.INT)
        else:
            self.fail()

        # Test 2
        tw = io.StringIO("1234 5678")
        lex = Lexer.Lexer(tw)
        while lex.advance():
            self.assertEqual(lex.token(), Token.TokenType.INT)
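A sketch of the advance()/token() interface these tests exercise (and the value() method tested in Example no. 19), assuming a whitespace-separated stream of integers; this is hypothetical, not the original Lexer module:

import enum
import io

class TokenType(enum.Enum):
    INT = 1

class Lexer:
    def __init__(self, reader):
        self._words = iter(reader.read().split())
        self._current = None

    def advance(self):
        # True while another token is available; False at end of input.
        self._current = next(self._words, None)
        return self._current is not None

    def token(self):
        return TokenType.INT  # the toy grammar only has integers

    def value(self):
        return int(self._current)

lex = Lexer(io.StringIO("1234 5678"))
while lex.advance():
    print(lex.token(), lex.value())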
Example no. 3
    def __init__(self, filename):
        self.filename = filename
        self.lex_gen = Lexer.Lexer(filename).token_generator()

        # Get the ball rolling by looking at the first token.
        self.curr_tok = next(self.lex_gen)
Example no. 4
	def run(self):
		tst = Lexer("")
		prs = Parser(tst)
		
		while True:
			try:
				compound = 0
				tst.flush()
				descend = False
				text = input('mpr> ')
				if '{' in text:
					descend = True
					compound += 1
				# Keep reading continuation lines until every '{' has a matching
				# '}' and the statement ends in ';' or '}'.
				while compound > 0 or (text != "" and text[-1] != ';' and text[-1] != '}'):
					inpt = input('...  ')
					if '{' in inpt:
						compound += 1
					if '}' in inpt:
						compound -= 1
					text += inpt
			except EOFError:
				break
			tst.append(text)
			try:
				self.interpret(descend, prs.compound())
			except (ValueError, SyntaxError, TypeError) as err:
				print(err)
			except KeyError as err:
				print("Variable {var} not defined!".format(var=err))
Example no. 5
    def test_bug_with_quotation(self):
        lexer = Lexer('A = "word" B;')

        token = lexer.get_next_token()
        self.assertEqual(IDENTIFIER, token.type)
        self.assertEqual('A', token.value)

        token = lexer.get_next_token()
        self.assertEqual(EQUAL, token.type)
        self.assertEqual('EQUAL', token.value)

        token = lexer.get_next_token()
        self.assertEqual(QUOTATION_MARK, token.type)
        self.assertEqual('QUOTATION_MARK', token.value)

        token = lexer.get_next_token()
        self.assertEqual(IDENTIFIER, token.type)
        self.assertEqual('word', token.value)

        token = lexer.get_next_token()
        self.assertEqual(QUOTATION_MARK, token.type)
        self.assertEqual('QUOTATION_MARK', token.value)

        token = lexer.get_next_token()
        self.assertEqual(IDENTIFIER, token.type)
        self.assertEqual('B', token.value)

        token = lexer.get_next_token()
        self.assertEqual(SEMICOLON, token.type)
        self.assertEqual('SEMICOLON', token.value)

        token = lexer.get_next_token()
        self.assertEqual(EOF, token.type)
        self.assertEqual('EOF', token.value)
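For reference, a minimal, assumed set of supporting definitions that would satisfy this test (and the alternatives test in Example no. 29); the real Token and Lexer live elsewhere and may differ:

import collections
import re

Token = collections.namedtuple('Token', ['type', 'value'])
IDENTIFIER, EQUAL, QUOTATION_MARK, SEMICOLON, ALTERNATIVE, EOF = (
    'IDENTIFIER', 'EQUAL', 'QUOTATION_MARK', 'SEMICOLON', 'ALTERNATIVE', 'EOF')

class Lexer:
    SYMBOLS = {'=': EQUAL, '"': QUOTATION_MARK, ';': SEMICOLON, '|': ALTERNATIVE}

    def __init__(self, text):
        # Words become IDENTIFIER tokens; single symbols map via SYMBOLS.
        self._parts = iter(re.findall(r'\w+|\S', text))

    def get_next_token(self):
        part = next(self._parts, None)
        if part is None:
            return Token(EOF, 'EOF')
        if part in self.SYMBOLS:
            kind = self.SYMBOLS[part]
            return Token(kind, kind)
        return Token(IDENTIFIER, part)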
Example no. 6
	def __test_execute(self, source=''):
		if len(source) != 0:
			program = Assembler(Lexer(source).lex()).assemble()
			vObj    = VM(program)
			vObj.run()
			return vObj
		else:
			raise ValueError('SOURCE CANNOT BE EMPTY DURING TEST EXECUTE')
Example no. 7
    def start(self, input, output):
        print(PROMPT)
        for line in input:
            lex = Lexer.Lexer(line, '', 0, 0)
            tok = lex.next_token()
            while tok.type.name != constants.EOF:
                print('token is {} {}'.format(tok.literal, tok.type.name))
                tok = lex.next_token()
Example no. 8
def main():
    if len(sys.argv) == 2:
        with open(sys.argv[1], 'r') as f:
            text = f.read()
    else:
        with open('test.txt', 'r') as f:
            text = f.read()
    if text:
        lexer = Lexer(text)
        parser = Parser(lexer)
        interpreter = Interpreter(parser)
        print(interpreter.interpret())
Example no. 9
	def __execute(self, paths=()):
		if len(paths) != 0:
			for path in paths:
				print('Executing file: {}'.format(path))
				source = file_get_contents(path)
				program = Assembler(Lexer(source).lex()).assemble()
				vObj    = VM(program)
				vObj.run()
				print()
Example no. 10
    def __init__(self, path):
        self.path = path
        with open(path, 'r') as f:
            # '$$' marks the end of input for the lexer.
            self.code = f.read() + '$$'
        self.lexer = Lex.Lexer(self.code)
        self.operands = Stack()
        self.operators = Stack()
        # A lowest-precedence sentinel sits at the bottom of the operator
        # stack so precedence comparisons never hit an empty stack.
        self.operators.push(Lex.Token('sentinel', 'sentinel'))
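The two stacks plus the sentinel are the standard setup for operator-precedence ("shunting-yard" style) expression evaluation: the sentinel's precedence is lower than any real operator's, so pop loops stop at the stack bottom without an emptiness check. A hypothetical sketch of the loop this enables (the helper names are assumptions, not the original parser):

PRECEDENCE = {'sentinel': 0, '+': 1, '-': 1, '*': 2, '/': 2}

def apply_top(operands, operators):
    op = operators.pop()
    b, a = operands.pop(), operands.pop()
    operands.append({'+': a + b, '-': a - b, '*': a * b, '/': a / b}[op])

def eval_expr(tokens):
    operands, operators = [], ['sentinel']
    for tok in tokens:
        if tok.isdigit():
            operands.append(int(tok))
        else:
            # The sentinel's precedence of 0 guarantees this loop terminates.
            while PRECEDENCE[operators[-1]] >= PRECEDENCE[tok]:
                apply_top(operands, operators)
            operators.append(tok)
    while operators[-1] != 'sentinel':
        apply_top(operands, operators)
    return operands[-1]

print(eval_expr("3 + 4 * 2".split()))  # prints 11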
Example no. 11
    def __init__(self, file_ptr, argp):
        self.Lexer = Lexer.Lexer(file_ptr, argp)
        self.print_out: bool = argp.syntax
        self.filename: str = argp.input

        self.state_strs_pending_print = []
        self.state_strs_pending_file = []
        self.productions_pending_write = []
        self.new_production = []
Example no. 12
def main():
    # Read input
    lines = json.loads(readIn()[0])
    commands = []
    print("Submission Received! Going on an adventure...")
    sys.stdout.flush()
    name = lines[2]
    key = lines[3]
    tests = [lines[4]]
    owner = lines[5]
    # print(tests)

    users3key = "smop-file-dump/" + key + "/" + name + "/"
    owners3key = "smop-file-dump/" + key + "/" + owner + "/"

    ownerlist = "aws s3 ls s3://" + owners3key + " --recursive --human-readable > python/owner.txt"
    coderlist = "aws s3 ls s3://" + users3key + " --recursive --human-readable > python/coder.txt"
    os.system(ownerlist)
    os.system(coderlist)

    with open('python/coder.txt', 'r') as coder_fp, open('python/owner.txt', 'r') as owner_fp:
        coderfiles = coder_fp.read().split("\n")
        ownerfiles = owner_fp.read().split("\n")
    if len(coderfiles) != len(ownerfiles):
        for i in range(len(ownerfiles)):
            if ownerfiles[i] != '':
                filename = ownerfiles[i].split(key + "/" + owner + "/")[1]
                if filename not in coderfiles:
                    addfile = "aws s3 cp s3://smop-file-dump/" + key + "/" + owner + "/" + filename + " s3://smop-file-dump/" + key + "/" + name + "/" + filename + "\n"
                    commands.append(addfile)

    commands.append("aws s3 cp s3://" + users3key + " . --recursive\n") 
    commands.append("screen -d -m php -S localhost:8000")

    writeFileApp(commands, "python/jssetup.sh")

    # tests = ["click button mybutton\n-> span field text 1"]

    writeFileApp(tests, 'python/tests.parth')
    writeFileParth("python/sample.py", "python/seleniumTest.py")
    tester = Lexer.Lexer()
    tokens = tester.lex('python/tests.parth') #change this back to python/test2.parth and then finally to parth file path when working
    # print(tokens)
    sys.stdout.flush()
    parser = Parser.Parser()
    parser.parse(tokens, "python/seleniumTest.py") #change this back to python/seleniumTest.py
    
    everything = test('python/app.js', 'python/seleniumTest.py', 'python/tests.parth', name = name, key = key) #change back and make sure all these files have a python/ before them
    everything = parseout(everything)
    #everything['lines'] = readFile('python/foo.js')

    # Return Using Print
    print(json.dumps(everything))
    sys.stdout.flush()

    return #EOF
Example no. 13
def main():
  # source = "(sum + 47) / total"
  source = sys.argv[1]  # renamed from 'input' to avoid shadowing the builtin
  lexer = Lexer(source)
  print("Tokenizing", source)
  while True:
    t = lexer.lex()
    if t.get_token().value == TokenTypes.EOF.value:
      break
Example no. 14
def main():
    global next_token
    global l
    l = Lexer(sys.argv[1])
    next_token = l.lex()
    expr()
    if next_token.get_token().value == TokenTypes.EOF.value:
        print("PARSE SUCCEEDED")
    else:
        print("PARSE FAILED")
Example no. 15
def main():
    global next_token
    global l
    l = Lexer(sys.argv[1])
    # l = Lexer("(sum + 20)/30")
    next_token = l.lex()
    expr()
    if next_token.get_token().value == TokenTypes.EOF.value:
        print("PARSE SUCCESS")
    else:
        print("PARSE FAIL")
Example no. 16
def run(fn, text):
    lexer = Lexer(fn, text)
    tokens, error = lexer.make_tokens()
    if error: return None, error
    parser = Parser(tokens)
    ast = parser.parse()
    if ast.error: return None, ast.error
    interpreter = Interpreter()
    # Bug fix: 'context' was used without being defined; create it the way
    # the other run() variants in this collection do.
    context = Context('<program>')
    context.symbol_table = global_symbol_table
    result = interpreter.visit(ast.node, context)

    return result.value, result.error
Example no. 17
def run(fn, text):
    lexer = Lexer(fn, text)
    tokens, error = lexer.generate_tokens()
    if error: return None, error
    parser = Parser(tokens)
    ast = parser.parse()
    if ast.error: return None, ast.error
    interpreter = Interpreter()
    context = Context(fn)
    context.symbol_table = global_symbol_table
    result = interpreter.visit(ast.node, context)
    return result.value, result.error
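Both run() variants assume a Context object carrying a symbol table, plus a module-level global_symbol_table. A rough sketch of what those helpers usually look like in this style of tree-walking interpreter (assumed shapes, not taken from the original source):

class SymbolTable:
    def __init__(self, parent=None):
        self.symbols = {}
        self.parent = parent  # enclosing scope, if any

    def get(self, name):
        value = self.symbols.get(name)
        if value is None and self.parent:
            return self.parent.get(name)
        return value

    def set(self, name, value):
        self.symbols[name] = value

class Context:
    def __init__(self, display_name, parent=None):
        self.display_name = display_name  # e.g. '<program>' or a function name
        self.parent = parent
        self.symbol_table = None

global_symbol_table = SymbolTable()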
Example no. 18
def main():
    # Read input
    lines = json.loads(readIn()[0])
    commands = []
    print("Submission Received! Going on an adventure...")
    sys.stdout.flush()

    tests = [lines[2]]
    entrypoint = lines[0]
    etype = lines[1]
    repo = lines[3]
    sourceip = lines[4]
    if "." in sourceip:
        sourceip = sourceip.split(':')[-1]
    lip, ipv6 = handleip(sourceip)
    if ipv6:
        name = ''.join([lip[i] for i in [1, 0, 2, 5, 4, 6, 3, 7]])
    else:
        name = ''.join([lip[i] for i in [1, 0, 2]])

    commands.append("git clone " + repo + ".git repo \n")
    if etype.lower() == "php":
        commands.append("screen -d -m php -S localhost:8000")

    elif etype.lower() == "html":
        commands.append("screen -d -m php -S localhost:8000")

    writeFileApp(commands, "python/jssetup.sh")

    writeFileApp(tests, 'python/tests.parth')

    writeFileParth("python/sample.py", "python/seleniumTest.py")
    lexer = Lexer.Lexer()
    tokens = lexer.lex(
        'python/tests.parth'
    )  #change this back to python/test2.parth and then finally to parth file path when working

    parser = Parser.Parser()
    parser.parse(tokens, "python/seleniumTest.py", True, entrypoint,
                 etype)  #change this back to python/seleniumTest.py

    everything = test(
        'python/app.js', 'python/seleniumTest.py', 'python/tests.parth', True,
        name, sourceip
    )  #change back and make sure all these files have a python/ before them
    everything = parseout(everything)
    #everything['lines'] = readFile('python/foo.js')

    # Return Using Print
    print(json.dumps(everything))
    sys.stdout.flush()

    return  #EOF
Example no. 19
    def test_value(self):
        # Test 1
        tw = io.StringIO("1234")
        lex = Lexer.Lexer(tw)
        if lex.advance():
            self.assertEqual(lex.value(), 1234)
        else:
            self.fail()

        # Test 2
        tw = io.StringIO("1234 5678")
        lex = Lexer.Lexer(tw)
        if lex.advance():
            self.assertEqual(lex.value(), 1234)
        else:
            self.fail()

        if lex.advance():
            self.assertEqual(lex.value(), 5678)
        else:
            self.fail()
Example no. 20
def main():
    file_object = open('source_code.txt')
    try:
        text = file_object.read()
        lexer = Lexer(text)
        parser = Parser(lexer)
        interpreter = Interpreter(parser)
        interpreter.interpret()
        print(interpreter.GLOBAL_SCOPE)

    finally:
        file_object.close()
Example no. 21
    def __init__(self, file_ptr, argp):
        self.Lexer = Lexer.Lexer(file_ptr, argp)
        self.args = argp
        self.print_out: bool = argp.syntax
        self.filename: str = argp.input
        self.instruction_generator = InstructionGenerator()
        self.symbol_table = SymbolTable()

        self.state_strs_pending_print = []
        self.state_strs_pending_file = []
        self.productions_pending_write = []
        self.new_production = []
Example no. 22
def main():
    # ===============================================
    # print("=" * 50)
    # print(f"Step 1: read source file in and store in string 'lex'".upper())
    # print("=" * 50)
    # ===============================================

    content = openSourceFile('test.lang')
    # print('content = ', content)

    # ===============================================
    # print("=" * 50)
    # print(f"Step 2: create lexeme from the string 'lex'".upper())
    # print("=" * 50)
    # ===============================================

    # Create Object 'lex' which is Lexer type/class, initialize with source code
    lex = Lexer(content)

    # Process tokenize and return lexeme
    lexeme = lex.tokenize()

    # ===============================================
    # print("=" * 50)
    # print(f"Step 3: Generate output file.upper())
    # print("=" * 50)
    # ===============================================

    # Please delete the outputLexer.txt file before each compile
    output = open('outputLexer.txt', 'a')
    print('#' * 30)

    output.write('\n\n')
    output.write('===============================================\n')
    output.write('Output file start here\n')
    output.write(
        'This part is added to separate every time compile if output file is not deleted\n'
    )
    output.write('===============================================\n')
    output.write('\n\n')
    output.write('TOKEN \t\t\tLEXEME\n\n')
    i = 0
    while i < len(lexeme):

        # print(f'lexeme {i} = ', lexeme[i].keys(), ' = ', lexeme[i].values())
        # print(f'lexeme {i} = ', lexeme[i])
        output.write(
            f'{list(lexeme[i].keys())[0]:<12} = \t{list(lexeme[i].values())[0]}\n'
        )
        i += 1
    output.close()
    print('#' * 30)
Example no. 23
def main():
    print("ability to produce tokens")
    lexer = Lexer.Lexer("1*(2+3)/2")
    myparser = Parser.Parser(lexer)
    print(
        "Uses tokens to build an AST and evaluates the math expression using the tree"
    )
    interpreter = Interpreter.Interpreter(myparser)
    mathexprvalue = interpreter.interpret()
    print("calculated value = ", mathexprvalue)
    print(
        "--------------------------Evaluated the expression using the AST-------------------------------"
    )
    print("\n\n\n\n\n")

    print("Generating the tree visually")
    print("Copy the output into a .dot file and run the following command")
    print("My bin - C:\Program Files (x86)\Graphviz2.38\bin>")
    print("Provide full path for the .dot and .png files")
    print("dot -Tpng -o parsetree.png parsetree.dot")

    # Generate the objects again (the previous ones have been consumed)
    lexer = Lexer.Lexer("1*(2+3)/2")
    myparser = Parser.Parser(lexer)
    visualizer = Visualise.Visualise(myparser)
    print(
        "printing ast tree by traversing depth first using pre and post order traversals"
    )
    content = visualizer.gendotdfs()
    print(content)

    # Generate the objects again (the previous ones have been consumed)
    lexer = Lexer.Lexer("1*(2+3)/2")
    myparser = Parser.Parser(lexer)
    visualizer = Visualise.Visualise(myparser)
    print("printing ast tree by traversing breadth first without recursion")
    content = visualizer.gendotbfs()
    print(content)
Example no. 24
    def get_token_list(self):
        # initialise and clean up previous iterations
        self.token_list = None
        self.ITERABLE_token_list = None
        self.current_token = None
        self.previous_token = None
        self.next_token = None
        self.token_index = -1
        # create and store tokens
        self.lexer = Lexer(self.input_file_path)
        self.token_list = self.lexer.get_token_types()
        self.ITERABLE_token_list = iter(self.token_list)
        # NOTE: this is an alias, not an independent copy - advancing either
        # name advances the same underlying iterator, so "peeking" consumes.
        self.token_PEEKING_list = self.ITERABLE_token_list
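If independent look-ahead is actually wanted, itertools.tee is the usual tool; a small sketch (hypothetical, not part of the original class):

import itertools

tokens = ['INT', 'PLUS', 'INT']
walker, peeker = itertools.tee(iter(tokens))
next(peeker)         # peek ahead without consuming from walker
print(next(walker))  # 'INT' - walker still yields the first token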
Example no. 25
    def __init__(self, sourceFileName, outputFileName, verbose=False):
        self.sourceFileName = sourceFileName
        with open(sourceFileName, "r") as f:
            self.sourceText = f.read()

        if os.path.exists(outputFileName):
            os.remove(outputFileName)

        self.outputFile = open(outputFileName, "a")
        self.scanner = Scanner(self.sourceText, verbose)
        self.lexer = Lexer(self.sourceText, verbose)
        self.parser = Parser(self.sourceText, self.outputFile, verbose)
        self.verbose = verbose
Example no. 26
def main():
    if len(sys.argv) == 2:
        with open(sys.argv[1], 'r') as file:
            source = file.read()
        try:
            lexer = Lexer(source)
            parser = Parser(lexer)
            interpreter = Interpreter(parser)
            result = interpreter.interpret()
            print(result)
        except Exception as e:
            print(e)
    else:
        print("Usage: ki.py kotlinFilename")
        sys.exit()
Example no. 27
class Analizador:
    def __init__(self, document):
        # Document to analyse
        self.__programa = document
        # Tokens object that supplies the tokens of my language
        self.__token = Tokens()
        # Lexer object
        self.__lexer = Lexer()

    def analizar(self):
        if self.__programa == 'codigo.ap':
            with open(self.__programa, encoding='UTF-8') as f:
                codAnalizar = f.read()
            no_Validos = self.__lexer.validar(codAnalizar,
                                              self.__token.getTokens(), True)
            validos = self.__lexer.validar(codAnalizar,
                                           self.__token.getTokens(), False)
            printProgressBar(0,
                             len(validos),
                             prefix='Progress:',
                             suffix='Complete',
                             length=70)
            i = 0
            for valido in validos:
                if valido['token'] != '\n':
                    os.system('clear')
                    printProgressBar(i + 1,
                                     len(validos),
                                     prefix='Progress:',
                                     suffix='Complete',
                                     length=70)
                    print('\n')
                    i += 1
                    print('[', valido['token'], ']',
                          'is part of the language, it is a:', valido['tipo'])

            if no_Validos:
                for invalido in no_Validos:
                    print("Error on line", invalido['linea'], " [",
                          invalido['palabra'], "]")
            else:
                print("No lexical errors were found")
        else:
            print("Error opening the file")
Example no. 28
    def __init__(self, file_path):
        self.ERROR = 0
        self.RIGHT = 1
        self.path = file_path
        self.lexer = Lexer(file_path)
        self.token = Token(Token_Type.ERRTOKEN, "", 0.0, None)
        self.state = self.RIGHT
        self.count = 0
        self.iters = 0
        self.origin_x = 0.0
        self.origin_y = 0.0
        self.rot_ang = 0.0
        self.scale_x = 1.0
        self.scale_y = 1.0

        self.tree = Tree()
        self.root = Node()
Example no. 29
    def test_alternatives(self):
        lexer = Lexer('A | B')

        token = lexer.get_next_token()
        self.assertEqual(IDENTIFIER, token.type)
        self.assertEqual('A', token.value)

        token = lexer.get_next_token()
        self.assertEqual(ALTERNATIVE, token.type)
        self.assertEqual('ALTERNATIVE', token.value)

        token = lexer.get_next_token()
        self.assertEqual(IDENTIFIER, token.type)
        self.assertEqual('B', token.value)

        token = lexer.get_next_token()
        self.assertEqual(EOF, token.type)
        self.assertEqual('EOF', token.value)
Example no. 30
def run(fn, text):
    # Generate tokens
    lexer = Lexer(fn, text)
    tokens, error = lexer.make_tokens()
    if error: return None, error

    # Generate AST
    parser = Parser(tokens)
    ast = parser.parse()
    if ast.error: return None, ast.error

    # Run program
    interpreter = Interpreter()
    context = Context('<program>')
    context.symbol_table = global_symbol_table
    result = interpreter.visit(ast.node, context)

    return result.value, result.error
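A typical REPL driver for this run() function might look like the following; the prompt string and the error's printable form are assumptions:

while True:
    text = input('calc > ')
    result, error = run('<stdin>', text)
    if error:
        print(error)
    elif result is not None:
        print(result)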