import sys

import lexer
import malfilereader
import memory
import response
import run
import setup


def main():
    # Make sure setup succeeded before doing anything else.
    _response = setup.main()
    if _response["status"] != 200:
        raise Exception(_response["message"])

    # Read the source file named on the command line and tokenize it.
    _file = malfilereader.read(sys.argv[1])
    tokenFile = lexer.main(_file)

    # Execute the token stream against a fresh memory image.
    mem = memory.Memory()
    mem = run.run(mem, tokenFile)

    print(tokenFile.outString)
    print(mem.mem)
    print(tokenFile.labels)
    return response.create(200, "Run Successful")
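# Hypothetical entry point, not present in the original fragment: run the
# pipeline on the file named on the command line, e.g. `python main.py prog.mal`
# (the `.mal` extension is only a guess based on the `malfilereader` module).
if __name__ == '__main__':
    main()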
        length = len(sentence)
        self.count = 0
        while self.count < length - 2:
            self.making_leaf = False
            self.evalleaf(sentence, operators)
            # Keep extending the current leaf while evalleaf reports progress.
            while self.making_leaf and self.count < length - 2:
                self.evalleaf(sentence, operators)
                self.count += 2
            self.count += 1
        return sentences

    def generate_tree(self):
        tokens = self.token_list
        # First pass groups parenthesised expressions; the second pass turns
        # multiplication/division runs into leaves.
        first_iteration = self.pe(tokens)
        second_iteration = self.leafy(first_iteration, "multdiv")
        return second_iteration


ast = AST(
    lexer.main(
        argparse.Namespace(
            file_path='test.lr', printast=True, printlex=False,
            standalone=False)))
ast.generate_tree()
    'SCREEN': 16384,
    'KBD': 24576
}


# Return the 16-bit binary representation of x, zero-padded on the left.
def toBin(x):
    bn = bin(int(x))[2:]
    padding = 16 - len(bn)
    return '0' * padding + bn


script, file = argv

# Get the list of tokens.
lexed = lexer.main()

lineNum = 0
memory = 16
for line in lexed:
    if line[0] != 'L':
        # Add a debug line number to C- and A-instructions.
        line.append(lineNum)
        lineNum += 1
    else:
        # Add label pseudo-commands to the symbol table.
        table[line[2]] = lineNum

# Remove label commands from the stream.
for line in lexed:
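# A minimal sanity check for toBin (hypothetical, not in the original
# assembler): every value pads to exactly 16 bits, and predefined symbols
# such as SCREEN map to their documented addresses.
#
#   toBin('2')   -> '0000000000000010'
#   toBin(16384) -> '0100000000000000'   # SCREEN base address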
import lexer
import syntax
import interpreter

if __name__ == '__main__':
    lexer.main()
    syntax.main()
    interpreter.main()
#!venv/bin/python3
import os

import lexer
import parse

# Command-line parsing: get the source-code file.
#
# def getarg():
#     parser = argparse.ArgumentParser()
#     parser.add_argument("file", help="choose a my_c source file")
#     args = parser.parse_args()
#     return args.file

if __name__ == "__main__":
    # path = getarg()
    try:
        fo = open('myc.c', "r")
    except IOError:
        print("ERROR: FILE NOT FOUND!")
        os._exit(0)
    # Bind the token stream to a new name so the lexer module itself is
    # not shadowed.
    tokens = lexer.main(fo)
    fo.close()
    parse.main(tokens)
import lexer

# Token list produced by the most recent run; populated by main().
tokens = None


def main(filepath):
    global tokens
    tokens = lexer.main(filepath)
    print(tokens)
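# Hypothetical driver, not present in the original file: tokenize the source
# file named on the command line and print the resulting token list.
if __name__ == '__main__':
    import sys
    main(sys.argv[1])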