def isValid(rulesText, programText):
    """Return True if programText parses under the grammar in rulesText.

    Builds a fresh parse table and LR parser from rulesText, then attempts
    to parse programText. Any failure while parsing is reported as
    "invalid program" rather than propagated to the caller.
    """
    rules = parseRules(rulesText)
    table = ParseTable(rules)
    parser = LRParser(rules, table.actionTable(), table.gotoTable())
    try:
        parser.parse(programText)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not silently swallowed while probing validity.
        return False
    return True
def constructParser(rulesText, isVerbose=False):
    """Build and return an LRParser for the grammar described by rulesText.

    rulesText -- textual grammar rules understood by parseRules
    isVerbose -- when True, print the parsed rules and the generated
                 parse tables while constructing the parser
    """
    def log(msg=""):
        # Cheap conditional logging. Single-argument print-call form is
        # valid on both Python 2 and Python 3, unlike the former
        # `print msg` statement.
        if isVerbose:
            print(msg)

    log("Parsing rules...")
    rules = parseRules(rulesText)
    log("\n".join(map(str, rules)))
    log()
    log("Constructing parse tables...")
    table = ParseTable(rules)
    log(table)
    return LRParser(rules, table.actionTable(), table.gotoTable())
def slurp(filename):
    """Read the entire file at *filename* and return its contents as a string."""
    # Context manager guarantees the handle is closed even if read() fails;
    # also avoids shadowing the builtin name `file`.
    with open(filename) as handle:
        return handle.read()


def tokenizeProgram(data):
    """Strip comments and insignificant whitespace from program text.

    Drops everything after '#' on each line, removes newlines entirely,
    and removes spaces/tabs that occur outside double-quoted string
    literals (quote characters themselves are kept).

    NOTE(review): comment removal is line-based and not string-aware, so a
    '#' inside a quoted literal also truncates that line. Behavior
    preserved as-is -- confirm whether the language allows '#' in strings.
    """
    lines = data.split("\n")
    # Remove comments before the character scan (see NOTE above).
    lines = [line.partition("#")[0] for line in lines]
    characters = []
    inString = False
    for char in "".join(lines):
        if char == "\"":
            # Toggle string mode; the quote itself is still emitted below.
            inString = not inString
        if char == " " and not inString:
            continue
        if char == "\t" and not inString:
            continue
        characters.append(char)
    return "".join(characters)


def main():
    """Load the grammar and test program, parse it, and interpret the result."""
    rulesText = slurp("language.txt")
    rules = parseRules(rulesText)
    programText = tokenizeProgram(slurp("test.k"))
    parser = constructParser(rulesText)
    rightDerivation = parser.parse(programText)
    interpret(rightDerivation, rules)


if __name__ == "__main__":
    # Guard so importing this module for its helpers does not run the
    # interpreter; running it as a script behaves exactly as before.
    main()