import random
import sys
import threading
from operator import itemgetter

# Container, parser and evaluations are assumed to be defined/imported elsewhere in this module.


def main():
    # Holds all the config values
    container = Container()

    # Obtain the configs as a list of lines
    config = open(sys.argv[1]).read().splitlines()
    parser.setup(container, config)
    random.seed(container.seed)

    # Open the log file
    result_log = open(container.prob_log_file, 'w')

    # Initial formatting of the result log: "Result Log" at the top and the parameters underneath
    result_log.write("Result Log \n")
    result_log.write("Random Seed = %s \n" % container.seed)
    result_log.write(
        "Parameters used = {'k': %s, 'd': %s, 'l': %s, 'n': %s, 'mu': %s, 'lambda': %s, 'p': %s, "
        "'fitness evaluations': %s, 'number of runs': %s, 'problem solution location': '%s'}\n\n"
        % (container.k, container.d, container.l, container.n, container.mu, container.generations,
           container.p, container.fitness, container.runs, container.prob_solution_file))
    result_log.write(str(container.mu) + "\n")

    threads = []
    for run in range(1, container.runs + 1):
        # The run number doubles as the thread name so it can be used in the result log
        thread_name = run
        # Spin up one thread per requested run
        t = threading.Thread(name=thread_name, target=evaluations, args=(thread_name, container))
        threads.append(t)

    # Start all threads
    for x in threads:
        x.start()

    # Wait for all of them to finish
    for x in threads:
        x.join()

    container.results.sort(key=itemgetter(0))

    # Write the results into the result log
    for res in container.results:
        for i in range(len(res)):
            if i == 0:
                result_log.write("Run " + str(res[i]) + "\n")
            else:
                evalValue, averageValue, bestValue = res[i]
                result_log.write(str(evalValue) + " " + ("%.2f" % averageValue) + " "
                                 + str(bestValue) + "\n")
        result_log.write("\n")
    result_log.close()
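# A minimal sketch of the worker contract this driver assumes. The real
# `evaluations` and `Container` live elsewhere in the project; the lock and the
# sample numbers below are illustrative assumptions, not the project's code.
_results_lock = threading.Lock()  # hypothetical guard for the shared results list


def evaluations(thread_name, container):
    # ... perform the search for this run, collecting progress samples ...
    run_entries = [thread_name]            # element 0 identifies the run in the log
    run_entries.append((1000, 12.34, 15))  # each later element: (eval, average, best)
    with _results_lock:
        container.results.append(run_entries)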
    def __init__(self, debug=0, outputdir='.', printToStderr=True):
        # Build the parser and the lexer, and share the parser's error list with the lexer
        import parser as matrixparser
        self.parser = matrixparser.setup(debug=debug, outputdir=outputdir)
        self.lex = MatrixLexer()
        self.lex.build(printToStderr=printToStderr, optimize=1,
                       lextab="MatrixParser.lextab")
        self.errorlog = []
        self.debug = debug
        self.lex.errors = matrixparser.errors
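# Hypothetical usage of the class that owns the __init__ above; the class name
# `MatrixParser` is a guess based on the lextab filename, not confirmed by this snippet.
mp = MatrixParser(debug=1, outputdir='build', printToStderr=False)
print(mp.errorlog)   # starts empty; lexer errors accumulate in mp.lex.errors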
# Build the reserved-word symbol table from the .rwf file
lines = open('reservedWords.rwf', 'r').readlines()
rwTable = symbolTable()
for l in lines:
    words = l.split()
    lexeme = words[0].translate(None, '\"')   # strip the surrounding quotes
    tokenType = words[1]
    attribute = words[2]
    token = {'lexeme': lexeme, 'tokenType': tokenType, 'attribute': attribute}
    rwTable.insert(token)

# Get an array of source lines
if not os.path.isfile(sys.argv[1]):
    print 'The file you tried to compile does not exist'
    sys.exit()
lines = open(sys.argv[1], "r").readlines()
lines[-1] += '\x03'   # append an end-of-text sentinel to the last line

listingFile = open('lineListing.txt', 'w')
tokenFile = open('tokenFile.txt', 'w')
tokenFile.write("line No.".center(10) + "Lexeme".ljust(17)
                + "Token Type".ljust(15) + "attribute".ljust(40) + '\n')

# Run the lexer over every source line, then hand the token stream to the parser
lexer.setup(listingFile, tokenFile, lines, rwTable)
for l in lines:
    lexer.feedLexer(l)
print "lexical analysis is complete"

parser.setup(lexer, rwTable, listingFile)
parser.parse()
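# A rough sketch of the reservedWords.rwf line format this loader expects: one entry
# per line, with a quoted lexeme, a token type, and an attribute separated by whitespace.
# The entries below are illustrative assumptions, not the project's actual table, e.g.
#
#   "while"   keyword   WHILE
#   "if"      keyword   IF
#
# which the loop above turns into symbol-table entries such as
#   {'lexeme': 'while', 'tokenType': 'keyword', 'attribute': 'WHILE'}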