def main():
    """Script entry point: load the debug world geometry and run the route
    optimizer, reporting wall-clock compute time.

    NOTE: Python 2 syntax (print statement, raw_input in comments).
    Relies on module-level `world`, `optimizer` and `timer` objects.
    """
    # fname = raw_input('Please specify geometry file location: ')
    fname = './Data/array.msh'
    # world.preprocessWorld('./Data/waypoints.gpx', [20, 50])
    # world.preprocessDebugWorld('./Data/WSC.route')
    world.loadDebugData('./Data/WSC.debug')
    world.importWorld(fname, '')
    # Set the initial conditions
    world.setInitialConditions()
    # world.simulate({})
    # Time the optimization run (timer() presumably timeit.default_timer — confirm)
    start = timer()
    optimizer.optimize()
    end = timer()
    print 'Compute time: ' + str(end - start)
    # Finds average solar power of array across the whole day
    # jd1 = 0
    # stp = step.step(1, 35, [-12.462827, 130.841782], 500.0, 1500., 130., 0., 135., 0., [0.,0.], 0, 0.)
    # stp.timezone = 9.5
    # sc = car.car()
    # fname = './Data/array.msh'
    # sc.loadArray(fname)
    # for h in range(6,19):
    #     dt = datetime(2015, 10, 8, h, 00)
    #     stp.gTime = dt
    #     jd1 += sc.arrayOut(stp)
    # jd1 = jd1 / 7.5
    # print(jd1)
    return
def recursive_refactor(node, settings, depth=0):
    """
    Recursively refactors an AST tree. At each step, we determine
    the most expensive expression, and move it to the root as part
    of a branch. We then create a left and right AST in which the
    expression has been replaced with a constant.

    :param node: AST node to refactor.
    :param settings: RefactorSettings controlling depth/selectivity limits.
    :param depth: current recursion depth (internal).
    :returns: the refactored AST (possibly `node` unchanged).
    """
    # Stop descending once the configured depth budget is exhausted
    if depth == settings.max_depth:
        return node

    # Do the cost calculation
    count, names = count_expressions(node)

    # Iterate over expressions until we find a suitable expression.
    # FIX: the loop variable was named `max`, shadowing the builtin.
    expr = None
    name = None
    for occurrences, name in util.max_count(count):
        # Base case is that there are no further reductions possible
        if occurrences < settings.min_select:
            return node

        # Take the expression and do a simple path expansion: a "true"
        # (left) branch assuming the expression holds, and a "false"
        # (right) branch, each with the expression rewritten into a
        # constant, followed by an optimization pass.
        expr = select_rewrite_expression(settings, name, names[name])
        if expr is not None:
            break

    # If we don't find a suitable expression, finish
    if expr is None:
        return node

    # Deep copy the left side since we rewrite the tree; reuse the
    # original node for the right side
    left = dup(node)
    left = rewrite_ast(left, name, expr, True)
    left = optimize(left, settings.max_opt_pass, settings.min_change)
    left = recursive_refactor(left, settings, depth + 1)

    right = rewrite_ast(node, name, expr, False)
    right = optimize(right, settings.max_opt_pass, settings.min_change)
    right = recursive_refactor(right, settings, depth + 1)

    # Push the most common expression into a branch and conditionally
    # execute the sub-ASTs
    return ast.Branch(expr, left, right)
def solve(G):
    """
    Args:
        G: networkx.Graph
    Returns:
        T: networkx.Graph
    """
    # Build a solver around the input graph, get an initial tree via
    # Dijkstra, then let the optimizer refine it in place.
    solver = GraphSolver(G)
    tree = solver.dijkstra_solve_graph()
    optimize(solver, tree)
    return tree
def recursive_refactor(node, settings, depth=0):
    """
    Recursively refactors an AST tree. At each step, we determine
    the most expensive expression, and move it to the root as part
    of a branch. We then create a left and right AST in which the
    expression has been replaced with a constant.

    :param node: AST node to refactor.
    :param settings: RefactorSettings controlling depth/selectivity limits.
    :param depth: current recursion depth (internal).
    :returns: the refactored AST (possibly `node` unchanged).
    """
    # Stop descending once the configured depth budget is exhausted
    if depth == settings.max_depth:
        return node

    # Do the cost calculation
    count, names = count_expressions(node)

    # Iterate over expressions until we find a suitable expression.
    # FIX: the loop variable was named `max`, shadowing the builtin.
    expr = None
    name = None
    for occurrences, name in util.max_count(count):
        # Base case is that there are no further reductions possible
        if occurrences < settings.min_select:
            return node

        # Take the expression and do a simple path expansion: a "true"
        # (left) branch assuming the expression holds, and a "false"
        # (right) branch, each with the expression rewritten into a
        # constant, followed by an optimization pass.
        expr = select_rewrite_expression(settings, name, names[name])
        if expr is not None:
            break

    # If we don't find a suitable expression, finish
    if expr is None:
        return node

    # Deep copy the left side since we rewrite the tree; reuse the
    # original node for the right side
    left = dup(node)
    left = rewrite_ast(left, name, expr, True)
    left = optimize(left, settings.max_opt_pass, settings.min_change)
    left = recursive_refactor(left, settings, depth + 1)

    right = rewrite_ast(node, name, expr, False)
    right = optimize(right, settings.max_opt_pass, settings.min_change)
    right = recursive_refactor(right, settings, depth + 1)

    # Push the most common expression into a branch and conditionally
    # execute the sub-ASTs
    return ast.Branch(expr, left, right)
def simple_ite_optim(self):
    """Optimize the parameter `c` of a small if-then-else program and
    check it lands in the expected interval [0.0, 0.25].

    Statements are encoded as (matrix, offset) pairs over the variable
    vector indexed by `var_map` — assumed affine transforms of the
    state; confirm against AssignStatement's definition.
    """
    # Create random input and output data
    #xL = torch.ones((1, 2), dtype=torch.float16, requires_grad=True)
    #xH = torch.ones((1, 2), dtype=torch.float16, requires_grad=True)
    # Lower/upper bounds of the input interval for x
    xL = torch.tensor([-10.0])
    xH = torch.tensor([1.0])
    var_map = {'x': 0, 'c': 1}
    # x in [-10, 1.0]
    # c approaches 0 but c < 0
    program = StatementBlock([
        # if c <= 0
        IfThenElse(
            IntervalBool(torch.tensor([0.0, 1.0]), torch.tensor([0.0, 0.0])),
            # x = 2x + c
            AssignStatement(torch.tensor([[2.0, 1.0], [0.0, 1.0]]),
                            torch.tensor([0.0, 0.0])),
            # x = 700
            AssignStatement(torch.tensor([[0.0, 0.0], [0.0, 1.0]]),
                            torch.tensor([700.0, 0.0]))),
        # return x
        ReturnStatement(torch.tensor([1.0, 0.0]), torch.tensor([0.0, 0.0]))
    ])
    # Infer only `c`; optimize() returns one result per inferred parameter.
    result_c = optimize(xL, xH, [InferParameter(var_map['c'])], program)[0]
    self.assertTrue(result_c >= 0.0)
    self.assertTrue(result_c <= 0.25)
def refactor(pred_set, ast, settings=None):
    """
    Rewrites an AST to maximize selectivity and minimize wasted
    evaluations. Behavior is controlled by a RefactorSettings object;
    the `shallow` preset is used when none is given.
    """
    settings = settings if settings is not None else RefactorSettings.shallow()

    if settings.static_rewrite:
        # Statically resolve all literals up front
        static_resolution(ast, pred_set)

    if settings.canonicalize:
        ast = compare.canonicalize(ast)

    if settings.initial_optimize:
        # Cheap initial pass to grab the easy wins
        ast = optimize(ast, settings.max_opt_pass, settings.min_change)

    if settings.refactor:
        # Rebuild the tree recursively to drive down evaluation cost
        ast = recursive_refactor(ast, settings)

    if settings.compact:
        compact.compact(ast)

    return ast
def solve(G):
    """
    Args:
        G: networkx.Graph
    Returns:
        T: networkx.Graph
    """
    solver = GraphSolver(G)
    leaf_start = solver.find_leaf_path()
    T = solver.dijkstra_solve_graph(leaf_start, calculate_heuristic,
                                    first_heuristic)
    # A zero average pairwise distance is already optimal — skip refinement.
    if average_pairwise_distance(T) == 0:
        return T
    optimize(solver, T)
    return T
def compile(source):
    """Run `source` through the full pipeline (lex, parse, unsugar,
    simplify, optional optimize/flatten) and return generated assembly.

    Stage output is gated by module-level verbosity flags (lexemes,
    parseTree, ...). NOTE(review): translator.getresult() is returned
    even when `Generate` is false — confirm that is intended.
    """
    vprint("Lexing ... \n")
    tokens = lexer.lex(source)
    show(lexemes, "Token List : \n", tokens)
    vprint("Parsing ... \n")
    tree = parser.parse(tokens)
    showTree(parseTree, "Parse Tree : \n", tree)
    vprint("Unsugaring ... \n")
    tree = unsugarer.unsugar(tree)
    showTree(unsugTree, "Unsugared Tree : \n", tree)
    tree = simplifier.simplify(tree)
    if Optimize:
        vprint("Optimizing ... " + str(optimizer.level) + "\n")
        tree = optimizer.optimize(tree)
        showTree(optimTree, "Optimized Tree : \n", tree)
    if flattened:
        # `flattened` doubles as the display flag and the show() gate
        vprint("Flattening ... \n")
        show(flattened, "Flattened Tree : \n", flattener.flatten(tree))
    if Generate:
        vprint("Translating ... \n")
        translator.addfunc(tree)
        show(translation, "Generated Assembly ... \n", translator.getresult())
    return translator.getresult()
def fit(self, X, y, epochs, optimizer, regularizer=regularizer.Regularizer(0)):
    """Train the model by batch gradient descent.

    Parameters
    ----------
    X : shape (data_number, feature_number)
        Training data
    y : Target values (broadcastable against predict(X))
    epochs : Number of full-batch gradient steps
    optimizer : Object turning raw gradients into update steps via optimize()
    regularizer : Regularization term added to the weight gradient
    """
    data_number, feature_number = X.shape
    # Weights as a column vector, bias as a scalar — both start at zero.
    self.__W = np.zeros((feature_number, 1))
    self.__b = 0

    if self.__debug:
        loss = []

    for _ in range(epochs):
        h = self.predict(X)
        # MSE gradient plus the regularization contribution.
        g_W = X.T.dot(h - y) / data_number + regularizer.regularize(
            self.__W)
        g_b = np.mean(h - y)
        # The optimizer maps raw gradients to actual update steps.
        g_W, g_b = optimizer.optimize([g_W, g_b])
        self.__W -= g_W
        self.__b -= g_b

        if self.__debug:
            h = self.predict(X)
            loss.append(np.mean((h - y)**2))

    if self.__debug:
        plt.plot(loss)
        plt.show()
def main(program, debug=0):
    """Compile `program` through the lexical, syntactical, semantical and
    optimizer phases, cleaning up intermediate files along the way.

    `debug` is a 4-bit mask (bit 3 = optimizer, bit 2 = semantical,
    bit 1 = syntactical, bit 0 = lexical) enabling per-phase debug output
    and preservation of that phase's intermediate files.
    Exits the process with status 1 on any failure.
    """
    if not (os.path.isfile(program)):
        print("Invalid program file!")
        # BUG FIX: execution previously fell through and tried to compile
        # a nonexistent file; bail out like the other error paths do.
        sys.exit(1)

    # Decode the debug integer into one flag per compiler phase.
    debug = str(bin(debug))[2::]
    debug = (4 - len(debug)) * '0' + debug
    lexical_debug = int(debug[3])
    syntactical_debug = int(debug[2])
    semantical_debug = int(debug[1])
    optimizer_debug = int(debug[0])

    if not (lexical.lexical(lexical_grammar, program, lexical_debug)):
        print("Lexical error(s), leaving...")
        os.remove("out.lex")
        sys.exit(1)
    if not (syntactical.syntactical(syntax_grammar, program,
                                    syntactical_debug)):
        print("Syntactical error(s), leaving...")
        os.remove("out.lex")
        os.remove("out.sdt")
        os.remove("out.ic")
        sys.exit(1)
    if not (semantical.semantical("out.sdt", program, semantical_debug)):
        print("Semantical error(s), leaving...")
        os.remove("out.lex")
        os.remove("out.sdt")
        os.remove("out.ic")
        sys.exit(1)

    optimizer.optimize("out.ic", optimizer_debug)

    # Drop intermediate artifacts unless the matching debug flag keeps them.
    if not (syntactical_debug):
        os.remove("out.sdt")
        os.remove("out.ic")
    if not (lexical_debug):
        os.remove("out.lex")
    print("Ok")
def train_cifar(cnn, epochs, train_loader, test_loader, level=[50, 75, 100], rate=[0.1, 0.01, 0.001], test=True):
    """Train `cnn` on CUDA with a stepped SGD schedule and optionally
    evaluate accuracy after every epoch.

    `level` lists epoch boundaries and `rate` the matching learning
    rates — assumed paired; confirm against get_SGD_optimizers.
    NOTE(review): mutable default args `level`/`rate` are shared across
    calls; harmless only while never mutated.
    """
    optimizers = optimizer.get_SGD_optimizers(list(cnn.parameters()), rate)
    loss_function = nn.CrossEntropyLoss()
    for epoch in range(0, epochs):
        loss_value = 100.0  # sentinel; overwritten by the first batch
        cnn.train()
        for step, (x, y) in enumerate(train_loader):
            Xu = x.cuda()
            Yu = y.cuda()
            output = cnn(Xu)
            # Flatten trailing dims to (batch, classes) for the loss.
            output = output.view(output.size(0), -1)
            loss = loss_function(output, Yu)
            loss_value = loss.cpu().item()
            loss.backward()
            # Stepping (and picking this epoch's rate) is delegated to the
            # project helper. NOTE(review): no zero_grad() visible here —
            # presumably handled inside optimizer.optimize; confirm,
            # otherwise gradients accumulate across steps.
            optimizer.optimize(optimizers, level, epoch)
            if (step % 100 == 0):
                print("in epoch %d step %d" % (epoch, step))
        print("the loss in epoch %d is %.4f" % (epoch, loss))
        if (test):
            correct = 0.0
            for data, target in test_loader:
                cnn.eval()
                data, target = data.cuda(), target.cuda()
                output = cnn(data)
                output = output.view(output.size(0), -1)
                # argmax over class dimension
                pred = torch.max(output, 1)[1]
                curr = torch.sum((pred == target).float())
                correct += curr.cpu().data.item()
            accuracy = correct / len(test_loader.dataset)
            print("the test acc in epoch %d is %.4f" % (epoch, accuracy))
def compile(source, optimize=True):
    """Compile `source` to five-address code.

    When `optimize` is true, both the AST and the generated code go
    through their respective optimizer passes. Imports are kept local
    so the module stays cheap to import.
    """
    from parser import parse
    from ast import parse2ast
    from semantic import check_semantics
    from codegenerator import generate_code, code5str

    # Front end: parse text -> raw tree -> typed AST -> checked AST.
    raw_tree = parse(source)
    ast = check_semantics(parse2ast(raw_tree))

    if optimize:
        from astoptimizer import optimize_ast
        optimize_ast(ast)

    # Back end: emit code, then optionally run the peephole optimizer.
    code = generate_code(ast)
    if optimize:
        import optimizer
        code = optimizer.optimize(code)

    return code5str(code)
def main():
    """Read hourly power data from argv[1], choose a nuclear power target
    per hour, optimize the generation mix, and write results to argv[2].

    Optional argv[3..5] override the cost/co2/green value weights.
    NOTE(review): both file handles are opened without close() — fine for
    a short-lived script, but consider `with` blocks.
    """
    (init, hours) = parse.parse_csv(open(sys.argv[1], 'r'))
    writer = csv.writer(open(sys.argv[2], "w"))
    season = config.get_season(hours[0])
    # Start from the last observed nuclear draw as the control baseline.
    nuclear = init[-1].mw_drawn.nuclear

    def target_nuclear(inrow):
        # This is a control system to decide how much nuclear power we want.
        # Using the provided numbers, hydro is optimal.
        # But nuclear is second-best, and nuclear power supply is inelastic.
        # So we want to use lots of nuclear, but only after we use as much
        # hydro as possible.
        # If we're using 80% as much power this week, adjust estimates down.
        adjust_factor = max(1, inrow.mw_available.total / inrow.historical_drawn[0])
        predicted_drawn = [drawn * adjust_factor
                           for drawn in inrow.historical_drawn]
        avg_draw = sum(predicted_drawn) / len(predicted_drawn)
        # Hydro appears to be pretty stable.
        predicted_needed = max(0, avg_draw - inrow.mw_available.hydro)
        print('Aiming for {} nuclear power'.format(predicted_needed))
        return predicted_needed

    # Default value weights; overridable from the command line.
    value_func = {'cost': -1, 'co2': -2, 'green': 0}
    if len(sys.argv) > 3:
        value_func = {
            'cost': float(sys.argv[3]),
            'co2': float(sys.argv[4]),
            'green': float(sys.argv[5]),
        }

    outrows = []
    for hour in hours:
        rate = config.consumer_rate(season, hour.time)
        # Nuclear is inelastic: limit change to ±1% per hour.
        nuclear = clamp(target_nuclear(hour), nuclear * 0.99, nuclear * 1.01)
        power_row, sold = optimizer.optimize(hour, nuclear, value_func)
        outrow = gen_outrow(hour, power_row, sold, rate)
        writer.writerow(outrow.to_row())
        outrows.append(outrow)
    print_summary(outrows)
def refactor(pred_set, ast, settings=None):
    """
    Rewrites an AST for maximum selectivity and minimal wasted
    evaluation. Behavior is driven by a RefactorSettings object;
    the `shallow` preset is the default.
    """
    settings = settings if settings is not None else RefactorSettings.shallow()

    if settings.static_rewrite:
        # Statically resolve literals before any restructuring
        static_resolution(ast, pred_set)

    if settings.canonicalize:
        ast = compare.canonicalize(ast)

    if settings.initial_optimize:
        # Grab the easy wins with a cheap initial pass
        ast = optimize(ast, settings.max_opt_pass, settings.min_change)

    if settings.refactor:
        # Rebuild the tree recursively to drive down evaluation cost
        ast = recursive_refactor(ast, settings)

    if settings.static_rewrite:
        # Literal sets may have changed during refactoring; resolve again
        static_resolution(ast, pred_set)

    if settings.compact:
        compact.compact(ast)

    if settings.cache_expr:
        # Memoize any common sub-expressions
        cache.cache_expressions(ast)

    return ast
def optimize(self, fix_points=False, local_window=20, iterations=20):
    """Optimize the map, then prune stale map points.

    :param fix_points: when True, keep 3D points fixed (pose-only pass)
        and skip the pruning step.
    :param local_window: number of recent frames the optimizer considers.
    :param iterations: optimizer iteration count.
        FIX: this parameter was previously ignored — the call hard-coded
        iterations=20.
    :returns: reprojection error reported by the optimizer.
    """
    reproj_error = optimizer.optimize(self.frames,
                                      self.points,
                                      local_window,
                                      fix_points,
                                      verbose=False,
                                      iterations=iterations)
    if not fix_points:
        # Drop old points observed in fewer than 3 frames.
        # FIX: the original removed from self.points while iterating it,
        # which skips the element after each removal; rebuild instead.
        self.points[:] = [
            p for p in self.points
            if not (p.frames[-1].id < self.max_frame_id - 5
                    and len(p.frames) < 3)
        ]
    return reproj_error
def fit(self, X, y, epochs, optimizer, regularizer=regularizer.Regularizer(0)):
    '''
    Parameters
    ----------
    X : shape (n_samples, n_features)
        Training data
    y : shape (n_samples,)
        Target values
    epochs : The number of epochs
    optimizer : Optimize algorithm, see also optimizer.py
    regularizer : Regularize algorithm, see also regularizer.py
    '''
    # NOTE(review): the default regularizer is created once at function
    # definition time and shared by every call — safe only if
    # Regularizer(0) is stateless; confirm.
    n_samples, n_features = X.shape

    # Model parameters: weight vector and bias, both starting at zero.
    self.__W = np.zeros(n_features)
    self.__b = 0

    if self.__debug:
        loss = []

    for _ in range(epochs):
        h = self.predict(X)
        # MSE gradient plus the regularization contribution.
        g_W = X.T.dot(h - y) / n_samples + regularizer.regularize(self.__W)
        g_b = np.mean(h - y)
        # The optimizer maps raw gradients to actual update steps.
        g_W, g_b = optimizer.optimize([g_W, g_b])
        self.__W -= g_W
        self.__b -= g_b

        if self.__debug:
            h = self.predict(X)
            loss.append(np.mean((h - y)**2))

    if self.__debug:
        plt.plot(loss)
        plt.show()
from optimizer import optimize

# Tune the attitude controller's 'p', 'i', 'd' parameters.
# NOTE(review): positional argument meanings (0.1, 1000, [35, 36]) are
# inferred from sibling scripts — confirm against optimizer.optimize's
# signature before relying on them.
optimize(0.1, 1000, 'autopilot.controllers.attitude.', ['p', 'i', 'd'], [35, 36])
def po(x):
    """Parse `x` and return the optimized result."""
    return optimize(parse(x))
sys.argv.pop(i) for p in sys.argv.pop(i).split(":"): flags["includes"].insert( 0, p ) else: break test_args(2, " no file specified, only options") if len( sys.argv ) > 2: test_args(" too many arguments given") code = Compile( sys.argv[1] ) if flags["optimize"]: import optimizer code = optimizer.optimize( code ) os.chdir( homedir ) #print constant_table #openfile = open( "ref/header.v64" ) #headercode = openfile.read() #openfile.close() #code = code + headercode openfile = open( "tmp/code.v64", "w" ) openfile.write( header ) openfile.write( code ) openfile.close()
def test_optimize(self):
    # Regression test: optimizing army1 against army2 from test3.sim
    # must reproduce the known-best stack sizes for the top two results.
    army1, army2 = simfile.get_army('test3.sim')
    best = optimizer.optimize(army1, army2, show_pbar=False)
    self.assertEqual(best[0].stack, 119)
    self.assertEqual(best[1].stack, 5)
def interpret(ast):
    """Optimize `ast`, evaluate each element against a fresh global
    environment, and return the accumulated javascript output string."""
    optimized = optimizer.optimize(ast)
    # Environment is a (parent, bindings) pair; no parent at top level.
    global_env = (None, {"javascript output": ""})
    for element in optimized:
        eval_elt(element, global_env)
    return global_env[1]["javascript output"]
def optimize(path, powermod):
    # Load our army and the enemy army from the sim file, apply the power
    # modifier to ours, then report the best constellation found.
    # NOTE: Python 2 (print statement).
    we, them = simfile.get_army(path)
    we.apply_power_mod(powermod)
    print "Best constellation: %s" % optimizer.optimize(we, them)
def po(x):
    """Parse `x` and return the optimized result."""
    return optimize(parse(x))
key = {"+":"{}(({}))({()<({}[()]<<>({}<>)>)>}{})<>(({}%s)<<>{({}[()]<({}<>)<>>)}{}>)", "-":"{}(({}))({()<({}[()]<<>({}<>)>)>}{})<>(({}[%s])<<>{({}[()]<({}<>)<>>)}{}>)", ">":"({}<(({}%s))>)", "<":"(<>)<>({}<((({}))){(<{}{}(({}[()]))<>{}<>>)}{}>)", "[":"{}(({}))({()<({}[()]<<>({}<>)>)>}{})<>(({})<><{({}[()]<({}<>)<>>)}{}>){", "]":"{}(({}))({()<({}[()]<<>({}<>)>)>}{})<>(({})<><{({}[()]<({}<>)<>>)}{}>)}"} filler = lambda x: key[x] if x in "[]<" else key[x[0]]%("()"*len(x)) return "(({})(<>))" + "".join(map(filler, divideBrainFuck(brainfuck))) + "{}{}<>" if __name__ == "__main__": commandLineArgs = sys.argv if len(commandLineArgs) != 3: print "Please pass a input and output file." print "(Usage: python %s BrainFuck BrainFlak)" %commandLineArgs[0] exit() #Open first file compile and write to second file #Open files try: infile = open(commandLineArgs[1]) string = infile.read() infile.close() #Clean the string string = re.sub("[^+-<>\[\]]","",string) outfile = open(commandLineArgs[2],"w") outfile.write(optimizer.optimize(compile(string))) outfile.close() except IOError: print "File",commandLineArgs[1],"does not exist."
G_inp = T.LongTensor(1, 1).fill_(self.vocab.stoi[opt.start_token]) G_inp = get_cuda(G_inp) sentence = opt.start_token + " " num_words = 0 while G_inp[0][0].item() != self.vocab.stoi[opt.end_token]: with T.autograd.no_grad(): logit, G_hidden, _ = self.vae(None, G_inp, z, G_hidden) probs = F.softmax(logit[0] / TEMPERATURE, dim=1) G_inp = T.multinomial(probs, 1) sentence += (self.vocab.itos[G_inp[0][0].item()] + " ") num_words += 1 if num_words > 64: break sentence = sentence.replace('<unk>', '').replace('<sos>', '').replace( '<eos>', '').replace('<pad>', '') sentences.append(sentence) return sentences optimize( np.eye(2)[CLASS].astype(np.float32), BlackBox(), Generator(), population_size=256, encoding_size=128, elite_size=32, max_iter=500, )
from optimizer import optimize

# Tune the stabilizing yaw controller's 'kp', 'ki', 'kii', 'kd' parameters.
# NOTE(review): positional argument meanings (0.1, 1000, [40]) are inferred
# from sibling scripts — confirm against optimizer.optimize's signature.
optimize(0.1, 1000, 'autopilot.controllers.stabilizing.yaw_', ['kp', 'ki', 'kii', 'kd'], [40])
import optimizer
import nlopt
import utils
import plotter
import imagecomposer
from shutil import rmtree
from os import path

# All results (logs, plots, movie frames) go here; wiped at each run start.
OUTPUT_FOLDER = "C:/Temp/Femmopt"
rmtree(OUTPUT_FOLDER, True)

# Run a local and a global optimization for comparison.
print('Running local Nelder-Mead optimization...')
nl_evals = optimizer.optimize(nlopt.LN_NELDERMEAD, "NelderMead", OUTPUT_FOLDER)
print('Running global Direct-L optimization...')
dl_evals = optimizer.optimize(nlopt.GN_DIRECT_L, "DirectL", OUTPUT_FOLDER)

# Persist the per-evaluation logs of both runs.
print('Writing evaluation log files...')
utils.write_evallog(path.join(OUTPUT_FOLDER, 'NelderMead_eval.dat'), nl_evals)
utils.write_evallog(path.join(OUTPUT_FOLDER, 'DirectL_eval.dat'), dl_evals)

print('Creating plots...')
plotter.plot_searchpath(OUTPUT_FOLDER, "NelderMead_searchpath", nl_evals)
plotter.plot_searchpath(OUTPUT_FOLDER, "DirectL_searchpath", dl_evals)
plotter.plot_objectives(OUTPUT_FOLDER, "Objective", nl_evals, dl_evals)

print('Preparing composite images and the movie...')
imagecomposer.make_movie(OUTPUT_FOLDER, "NelderMead", "DirectL", "Objective", nl_evals, dl_evals, "OptimizationMovie")
from bf_parser import parse
from optimizer import optimize, optimize_pass
from datatypes import loop

# Demo brainfuck input for the optimizer.
string = '''++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]
>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.'''


def to_pprint_format(ir):
    # Convert the optimizer IR into plain nested lists so pprint can
    # display it; loop bodies are recursed into.
    out = []
    for elem in ir:
        if isinstance(elem, loop):
            out.append(to_pprint_format(elem.blocks))
        else:
            out.append(elem)
    return out


def po(x):
    # Parse then optimize in a single call.
    p = parse(x)
    return optimize(p)


if __name__ == '__main__':
    from pprint import pprint
    try:
        # NOTE(review): this handle is opened but never used or closed —
        # looks like leftover experiment code; confirm before removing.
        f = open('mandelbrot.bf')
        p = parse(string)
        x = optimize(p)
        pprint(to_pprint_format(x))
    except KeyboardInterrupt:
        pass
from optimizer import optimize

# Tune the north-east speed controller's 'p', 'i', 'd' parameters.
# NOTE(review): positional argument meanings (0.1, 1000, [28, 29]) are
# inferred from sibling scripts — confirm against optimizer.optimize's
# signature.
optimize(0.1, 1000, 'pilot.controllers.ne_speed.', ['p', 'i', 'd'], [28, 29])
def main(argv):
    ''' Entry point when executed from command line.
    You can use zxb.py as a module with import, and this
    function won't be executed.

    Returns a process exit code: 0 on success, 1 on compile errors,
    2 for a missing input file, 3/4 for bad option combinations.
    '''
    global FLAG_use_BASIC, FLAG_autorun
    global FILE_input, FILE_output, FILE_output_ext, OPTIONS_asm

    # Ensure every option we touch exists, with a sane default.
    OPTIONS.add_option_if_not_defined('memoryCheck', bool, False)
    OPTIONS.add_option_if_not_defined('strictBool', bool, False)
    OPTIONS.add_option_if_not_defined('arrayCheck', bool, False)
    OPTIONS.add_option_if_not_defined('array_base', int, 0)
    OPTIONS.add_option_if_not_defined('string_base', int, 0)
    OPTIONS.add_option_if_not_defined('enableBreak', bool, False)
    OPTIONS.add_option_if_not_defined('emmitBackend', bool, False)
    OPTIONS.add_option_if_not_defined('arch', str, 'zx48k')
    OPTIONS.add_option_if_not_defined('__DEFINES', dict, {})
    OPTIONS.add_option_if_not_defined('explicit', bool, False)

    # ------------------------------------------------------------
    # Command line parsing
    # ------------------------------------------------------------
    parser = OptionParser(usage='Usage: %prog <input file> [options]',
                          version='%prog ' + VERSION)
    parser.add_option("-d", "--debug", action="count", dest="debug",
                      default=OPTIONS.Debug.value,
                      help="Enable verbosity/debugging output. Additional -d increase verbosity/debug level.")
    parser.add_option("-O", "--optimize", type="int", dest="optimization_level",
                      help="Sets optimization level. 0 = None",
                      default=DEFAULT_OPTIMIZATION_LEVEL)
    parser.add_option("-o", "--output", type="string", dest="output_file",
                      help="Sets output file. Default is input filename with .bin extension",
                      default=None)
    parser.add_option("-T", "--tzx", action="store_true", dest="tzx",
                      default=False,
                      help="Sets output format to tzx (default is .bin)")
    parser.add_option("-t", "--tap", action="store_true", dest="tap",
                      default=False,
                      help="Sets output format to tap (default is .bin)")
    parser.add_option("-B", "--BASIC", action="store_true", dest="basic",
                      default=False,
                      help="Creates a BASIC loader which load the rest of the CODE. Requires -T ot -t")
    parser.add_option("-a", "--autorun", action="store_true", dest="autorun",
                      default=False,
                      help="Sets the program to be run once loaded")
    parser.add_option("-A", "--asm", action="store_true", dest="asm",
                      default=False, help="Sets output format to asm")
    parser.add_option("-S", "--org", type="int", dest="org",
                      help="Start of machine code. By default %i" % OPTIONS.org.value,
                      default=OPTIONS.org.value)
    parser.add_option("-e", "--errmsg", type="string", dest="stderr",
                      default=OPTIONS.StdErrFileName.value,
                      help="Error messages file (standard error console by default)")
    parser.add_option("--array-base", type="int", dest="array_base",
                      default=OPTIONS.array_base.value,
                      help="Default lower index for arrays (0 by default)")
    parser.add_option("--string-base", type="int", dest="string_base",
                      default=OPTIONS.string_base.value,
                      help="Default lower index for strings (0 by default)")
    parser.add_option("-Z", "--sinclair", action="store_true", dest="sinclair",
                      default=False,
                      help="Enable by default some more original ZX Spectrum Sinclair BASIC features: ATTR, SCREEN$, POINT")
    parser.add_option("-H", "--heap-size", type="int", dest="heap_size",
                      default=OPTIONS.heap_size.value,
                      help="Sets heap size in bytes (default %i bytes)" % OPTIONS.heap_size.value)
    parser.add_option("--debug-memory", action="store_true", dest="debug_memory",
                      default=False, help="Enables out-of-memory debug")
    parser.add_option("--debug-array", action="store_true", dest="debug_array",
                      default=False, help="Enables array boundary checking")
    parser.add_option("--strict-bool", action="store_true", dest="strict_bool",
                      default=False, help="Enforce boolean values to be 0 or 1")
    parser.add_option("--enable-break", action="store_true", dest="enable_break",
                      default=False,
                      help="Enables program execution BREAK detection")
    parser.add_option("-E", "--emmit-backend", action="store_true",
                      dest="emmit_backend", default=False,
                      help="Emmits backend code instead of ASM or binary")
    parser.add_option("--explicit", action="store_true", dest="explicit",
                      default=False,
                      help="Requires all variables and functions to be declared before used")
    parser.add_option("-D", "--define", type="str", dest="defines",
                      action="append",
                      help="Defines de given macro. Eg. -D MYDEBUG or -D NAME=Value")

    (options, args) = parser.parse_args()

    if len(args) != 1:
        parser.error("missing input file. (Try -h)")
        return 3

    # ------------------------------------------------------------
    # Setting of internal parameters according to command line
    # ------------------------------------------------------------
    OPTIONS.Debug.value = options.debug
    asmparse.FLAG_optimize = OPTIONS.optimization.value = options.optimization_level
    asmparse.FILE_output = OPTIONS.outputFileName.value = FILE_output = options.output_file
    asmparse.FILE_stderr = OPTIONS.StdErrFileName.value = options.stderr
    OPTIONS.array_base.value = options.array_base
    OPTIONS.string_base.value = options.string_base
    OPTIONS.Sinclair.value = options.sinclair
    OPTIONS.org.value = options.org
    OPTIONS.heap_size.value = options.heap_size
    OPTIONS.memoryCheck.value = options.debug_memory
    OPTIONS.strictBool.value = options.strict_bool or OPTIONS.Sinclair.value
    OPTIONS.arrayCheck.value = options.debug_array
    OPTIONS.emmitBackend.value = options.emmit_backend
    OPTIONS.enableBreak.value = options.enable_break
    OPTIONS.explicit.value = options.explicit

    # Register command-line macro definitions with the preprocessor.
    if options.defines:
        for i in options.defines:
            name, val = tuple(i.split('=', 1))
            OPTIONS.__DEFINES.value[name] = val
            zxbpp.ID_TABLE.define(name, lineno=0)

    # Sinclair compatibility implies 1-based arrays/strings and strict bools.
    if OPTIONS.Sinclair.value:
        OPTIONS.array_base.value = 1
        OPTIONS.string_base.value = 1
        OPTIONS.strictBool.value = True

    debug.ENABLED = OPTIONS.Debug.value

    # The output-format options are mutually exclusive.
    if int(options.tzx) + int(options.tap) + int(options.asm) + int(
            options.emmit_backend) > 1:
        parser.error(
            "Options --tap, --tzx, --emmit-backend and --asm are excluyent")
        return 3

    asmparse.FLAG_use_BASIC = options.basic
    backend.FLAG_autostart = asmparse.FLAG_autorun = options.autorun

    if asmparse.FLAG_use_BASIC and not options.tzx and not options.tap:
        parser.error(
            'Option --BASIC and --autorun requires --tzx or tap format')
        return 4

    if options.tzx:
        FILE_output_ext = 'tzx'
    elif options.tap:
        FILE_output_ext = 'tap'
    elif options.asm:
        FILE_output_ext = 'asm'
    elif options.emmit_backend:
        FILE_output_ext = 'ic'

    if not os.path.exists(args[0]):
        parser.error("No such file or directory: '%s'" % args[0])
        return 2

    # Propagate runtime-check macros to the preprocessor.
    if OPTIONS.memoryCheck.value:
        OPTIONS.__DEFINES.value['__MEMORY_CHECK__'] = ''
        zxbpp.ID_TABLE.define('__MEMORY_CHECK__', lineno=0)
    if OPTIONS.arrayCheck.value:
        OPTIONS.__DEFINES.value['__CHECK_ARRAY_BOUNDARY__'] = ''
        zxbpp.ID_TABLE.define('__CHECK_ARRAY_BOUNDARY__', lineno=0)

    # Preprocess, then parse the preprocessed output.
    zxbpp.main(args)
    asmparse.FILE_output_ext = FILE_output_ext
    input = zxbpp.OUTPUT
    asmparse.FILE_input = FILE_input = zxbparser.FILENAME = os.path.basename(
        args[0])
    if FILE_output is None:
        OPTIONS.outputFileName.value = FILE_output = os.path.splitext(
            os.path.basename(FILE_input))[0] + '.' + FILE_output_ext
        asmparse.FILE_output = FILE_output
    if OPTIONS.StdErrFileName.value is not None:
        FILE_stderr = asmparse.FILE_stderr = OPTIONS.StdErrFileName.value
        OPTIONS.stderr.value = open(FILE_stderr, 'wt')

    zxbparser.parser.parse(input, lexer=zxblex.lexer, tracking=True,
                           debug=(OPTIONS.Debug.value > 2))
    if gl.has_errors:
        return 1  # Exit with errors

    zxbtrad.traverse(zxbparser.ast)  # This will fill MEMORY with code
    zxbtrad.traverse(
        zxbtrad.FUNCTIONS)  # This will fill MEMORY with pending functions
    zxbtrad.emmit_strings()

    # Intermediate-code output mode: dump the quads and stop here.
    if OPTIONS.emmitBackend.value:
        output_file = open(FILE_output, 'wt')
        for quad in zxbtrad.dumpMemory(MEMORY):
            output_file.write(str(quad) + '\n')
        MEMORY[:] = []  # Empties memory
        zxbtrad.traverse(
            zxbparser.data_ast
        )  # This will fill MEMORY with global declared variables
        for quad in zxbtrad.dumpMemory(MEMORY):
            output_file.write(str(quad) + '\n')
        output_file.close()
        return 0

    # Join all lines into a single string and ensures an INTRO at end of file
    asm_output = backend.emmit(MEMORY)
    from optimizer import optimize
    asm_output = optimize(asm_output) + '\n'

    # Now put user asm blocks back
    from backend import ASMS
    asm_output = asm_output.split('\n')
    for i in range(len(asm_output)):
        tmp = ASMS.get(asm_output[i], None)
        if tmp is not None:
            asm_output[i] = '\n'.join(tmp)
    asm_output = '\n'.join(asm_output)

    # Now filter them against the preprocessor again
    zxbpp.setMode('asm')
    zxbpp.OUTPUT = ''
    zxbpp.filter(asm_output, args[0])

    # Now output the result
    asm_output = zxbpp.OUTPUT.split('\n')
    get_inits(asm_output)  # Find out remaining inits
    MEMORY[:] = []
    zxbtrad.traverse(zxbparser.data_ast
                     )  # This will fill MEMORY with global declared variables
    tmp = [x for x in backend.emmit(MEMORY) if x.strip()[0] != '#']
    asm_output += tmp
    asm_output = backend.emmit_start() + asm_output
    asm_output += backend.emmit_end(asm_output)

    if options.asm:  # Only output assembler file
        output_file = open(FILE_output, 'wt')
        output(asm_output, output_file)
        output_file.close()
    else:
        # Assemble in-memory and write the final binary/tape image.
        from StringIO import StringIO
        fout = StringIO()
        output(asm_output, fout)
        asmparse.assemble(fout.getvalue())
        fout.close()
        asmparse.generate_binary(FILE_output, FILE_output_ext)

    return 0  # Exit success
def main(argv):
    ''' Entry point when executed from command line.
    You can use zxb.py as a module with import, and this
    function won't be executed.

    Returns a process exit code: 0 on success, 1 on compile errors,
    2 for a missing input file, 3/4 for bad option combinations.
    '''
    global FLAG_use_BASIC, FLAG_autorun
    global FILE_input, FILE_output, FILE_output_ext, OPTIONS_asm

    # Ensure every option we touch exists, with a sane default.
    OPTIONS.add_option_if_not_defined('memoryCheck', bool, False)
    OPTIONS.add_option_if_not_defined('strictBool', bool, False)
    OPTIONS.add_option_if_not_defined('arrayCheck', bool, False)
    OPTIONS.add_option_if_not_defined('array_base', int, 0)
    OPTIONS.add_option_if_not_defined('string_base', int, 0)
    OPTIONS.add_option_if_not_defined('enableBreak', bool, False)
    OPTIONS.add_option_if_not_defined('emmitBackend', bool, False)
    OPTIONS.add_option_if_not_defined('arch', str, 'zx48k')
    OPTIONS.add_option_if_not_defined('__DEFINES', dict, {})
    OPTIONS.add_option_if_not_defined('explicit', bool, False)

    # ------------------------------------------------------------
    # Command line parsing
    # ------------------------------------------------------------
    parser = OptionParser(usage='Usage: %prog <input file> [options]',
                          version = '%prog ' + VERSION)
    parser.add_option("-d", "--debug", action="count", dest="debug",
                      default=OPTIONS.Debug.value,
                      help="Enable verbosity/debugging output. Additional -d increase verbosity/debug level.")
    parser.add_option("-O", "--optimize", type="int", dest="optimization_level",
                      help="Sets optimization level. 0 = None",
                      default=DEFAULT_OPTIMIZATION_LEVEL)
    parser.add_option("-o", "--output", type="string", dest="output_file",
                      help="Sets output file. Default is input filename with .bin extension",
                      default=None)
    parser.add_option("-T", "--tzx", action="store_true", dest="tzx",
                      default=False,
                      help="Sets output format to tzx (default is .bin)")
    parser.add_option("-t", "--tap", action="store_true", dest="tap",
                      default=False,
                      help="Sets output format to tap (default is .bin)")
    parser.add_option("-B", "--BASIC", action="store_true", dest="basic",
                      default=False,
                      help="Creates a BASIC loader which load the rest of the CODE. Requires -T ot -t")
    parser.add_option("-a", "--autorun", action="store_true", dest="autorun",
                      default=False,
                      help="Sets the program to be run once loaded")
    parser.add_option("-A", "--asm", action="store_true", dest="asm",
                      default=False, help="Sets output format to asm")
    parser.add_option("-S", "--org", type="int", dest="org",
                      help="Start of machine code. By default %i" % OPTIONS.org.value,
                      default=OPTIONS.org.value)
    parser.add_option("-e", "--errmsg", type="string", dest="stderr",
                      default=OPTIONS.StdErrFileName.value,
                      help="Error messages file (standard error console by default)")
    parser.add_option("--array-base", type="int", dest="array_base",
                      default=OPTIONS.array_base.value,
                      help="Default lower index for arrays (0 by default)")
    parser.add_option("--string-base", type="int", dest="string_base",
                      default=OPTIONS.string_base.value,
                      help="Default lower index for strings (0 by default)")
    parser.add_option("-Z", "--sinclair", action="store_true", dest="sinclair",
                      default=False,
                      help="Enable by default some more original ZX Spectrum Sinclair BASIC features: ATTR, SCREEN$, POINT")
    parser.add_option("-H", "--heap-size", type="int", dest="heap_size",
                      default=OPTIONS.heap_size.value,
                      help="Sets heap size in bytes (default %i bytes)" % OPTIONS.heap_size.value)
    parser.add_option("--debug-memory", action="store_true", dest="debug_memory",
                      default=False, help="Enables out-of-memory debug")
    parser.add_option("--debug-array", action="store_true", dest="debug_array",
                      default=False, help="Enables array boundary checking")
    parser.add_option("--strict-bool", action="store_true", dest="strict_bool",
                      default=False, help="Enforce boolean values to be 0 or 1")
    parser.add_option("--enable-break", action="store_true", dest="enable_break",
                      default=False,
                      help="Enables program execution BREAK detection")
    parser.add_option("-E", "--emmit-backend", action="store_true",
                      dest="emmit_backend", default=False,
                      help="Emmits backend code instead of ASM or binary")
    parser.add_option("--explicit", action="store_true", dest="explicit",
                      default=False,
                      help="Requires all variables and functions to be declared before used")
    parser.add_option("-D", "--define", type="str", dest="defines",
                      action="append",
                      help="Defines de given macro. Eg. -D MYDEBUG or -D NAME=Value")

    (options, args) = parser.parse_args()

    if len(args) != 1:
        parser.error("missing input file. (Try -h)")
        return 3

    # ------------------------------------------------------------
    # Setting of internal parameters according to command line
    # ------------------------------------------------------------
    OPTIONS.Debug.value = options.debug
    asmparse.FLAG_optimize = OPTIONS.optimization.value = options.optimization_level
    asmparse.FILE_output = OPTIONS.outputFileName.value = FILE_output = options.output_file
    asmparse.FILE_stderr = OPTIONS.StdErrFileName.value = options.stderr
    OPTIONS.array_base.value = options.array_base
    OPTIONS.string_base.value = options.string_base
    OPTIONS.Sinclair.value = options.sinclair
    OPTIONS.org.value = options.org
    OPTIONS.heap_size.value = options.heap_size
    OPTIONS.memoryCheck.value = options.debug_memory
    OPTIONS.strictBool.value = options.strict_bool or OPTIONS.Sinclair.value
    OPTIONS.arrayCheck.value = options.debug_array
    OPTIONS.emmitBackend.value = options.emmit_backend
    OPTIONS.enableBreak.value = options.enable_break
    OPTIONS.explicit.value = options.explicit

    # Register command-line macro definitions with the preprocessor.
    if options.defines:
        for i in options.defines:
            name, val = tuple(i.split('=', 1))
            OPTIONS.__DEFINES.value[name] = val
            zxbpp.ID_TABLE.define(name, lineno = 0)

    # Sinclair compatibility implies 1-based arrays/strings and strict bools.
    if OPTIONS.Sinclair.value:
        OPTIONS.array_base.value = 1
        OPTIONS.string_base.value = 1
        OPTIONS.strictBool.value = True

    debug.ENABLED = OPTIONS.Debug.value

    # The output-format options are mutually exclusive.
    if int(options.tzx) + int(options.tap) + int(options.asm) + int(options.emmit_backend) > 1:
        parser.error("Options --tap, --tzx, --emmit-backend and --asm are excluyent")
        return 3

    asmparse.FLAG_use_BASIC = options.basic
    backend.FLAG_autostart = asmparse.FLAG_autorun = options.autorun

    if asmparse.FLAG_use_BASIC and not options.tzx and not options.tap:
        parser.error('Option --BASIC and --autorun requires --tzx or tap format')
        return 4

    if options.tzx:
        FILE_output_ext = 'tzx'
    elif options.tap:
        FILE_output_ext = 'tap'
    elif options.asm:
        FILE_output_ext = 'asm'
    elif options.emmit_backend:
        FILE_output_ext = 'ic'

    if not os.path.exists(args[0]):
        parser.error("No such file or directory: '%s'" % args[0])
        return 2

    # Propagate runtime-check macros to the preprocessor.
    if OPTIONS.memoryCheck.value:
        OPTIONS.__DEFINES.value['__MEMORY_CHECK__'] = ''
        zxbpp.ID_TABLE.define('__MEMORY_CHECK__', lineno = 0)
    if OPTIONS.arrayCheck.value:
        OPTIONS.__DEFINES.value['__CHECK_ARRAY_BOUNDARY__'] = ''
        zxbpp.ID_TABLE.define('__CHECK_ARRAY_BOUNDARY__', lineno = 0)

    # Preprocess, then parse the preprocessed output.
    zxbpp.main(args)
    asmparse.FILE_output_ext = FILE_output_ext
    input = zxbpp.OUTPUT
    asmparse.FILE_input = FILE_input = zxbparser.FILENAME = os.path.basename(args[0])
    if FILE_output is None:
        OPTIONS.outputFileName.value = FILE_output = os.path.splitext(os.path.basename(FILE_input))[0] + '.' + FILE_output_ext
        asmparse.FILE_output = FILE_output
    if OPTIONS.StdErrFileName.value is not None:
        FILE_stderr = asmparse.FILE_stderr = OPTIONS.StdErrFileName.value
        OPTIONS.stderr.value = open(FILE_stderr, 'wt')

    zxbparser.parser.parse(input, lexer = zxblex.lexer, tracking = True,
                           debug = (OPTIONS.Debug.value > 2))
    if gl.has_errors:
        return 1  # Exit with errors

    zxbtrad.traverse(zxbparser.ast)  # This will fill MEMORY with code
    zxbtrad.traverse(zxbtrad.FUNCTIONS)  # This will fill MEMORY with pending functions
    zxbtrad.emmit_strings()

    # Intermediate-code output mode: dump the quads and stop here.
    if OPTIONS.emmitBackend.value:
        output_file = open(FILE_output, 'wt')
        for quad in zxbtrad.dumpMemory(MEMORY):
            output_file.write(str(quad) + '\n')
        MEMORY[:] = []  # Empties memory
        zxbtrad.traverse(zxbparser.data_ast)  # This will fill MEMORY with global declared variables
        for quad in zxbtrad.dumpMemory(MEMORY):
            output_file.write(str(quad) + '\n')
        output_file.close()
        return 0

    # Join all lines into a single string and ensures an INTRO at end of file
    asm_output = backend.emmit(MEMORY)
    from optimizer import optimize
    asm_output = optimize(asm_output) + '\n'

    # Now put user asm blocks back
    from backend import ASMS
    asm_output = asm_output.split('\n')
    for i in range(len(asm_output)):
        tmp = ASMS.get(asm_output[i], None)
        if tmp is not None:
            asm_output[i] = '\n'.join(tmp)
    asm_output = '\n'.join(asm_output)

    # Now filter them against the preprocessor again
    zxbpp.setMode('asm')
    zxbpp.OUTPUT = ''
    zxbpp.filter(asm_output, args[0])

    # Now output the result
    asm_output = zxbpp.OUTPUT.split('\n')
    get_inits(asm_output)  # Find out remaining inits
    MEMORY[:] = []
    zxbtrad.traverse(zxbparser.data_ast)  # This will fill MEMORY with global declared variables
    tmp = [x for x in backend.emmit(MEMORY) if x.strip()[0] != '#']
    asm_output += tmp
    asm_output = backend.emmit_start() + asm_output
    asm_output += backend.emmit_end(asm_output)

    if options.asm:  # Only output assembler file
        output_file = open(FILE_output, 'wt')
        output(asm_output, output_file)
        output_file.close()
    else:
        # Assemble in-memory and write the final binary/tape image.
        from StringIO import StringIO
        fout = StringIO()
        output(asm_output, fout)
        asmparse.assemble(fout.getvalue())
        fout.close()
        asmparse.generate_binary(FILE_output, FILE_output_ext)

    return 0  # Exit success
def main():
    """Construct a Predictor, then run the optimizer and print its result."""
    # The instance is not referenced afterwards; Predictor() is kept for its
    # construction side effects — presumably it initializes shared state that
    # optimize() reads. TODO confirm against the Predictor implementation.
    predictor = Predictor()
    result = optimize()
    print(result)
dest='verbose', action='store_true', help='show python source') parser.add_argument('-o', '--optimize', dest='optimize', action='store_true', help='source optimization') args = parser.parse_args() # parse code = args.file.read() if args.file else args.eval tokens = FourParser.tokenize(FourParser.strip(code)) if args.optimize: tokens = optimizer.optimize(tokens) root = FourParser.parse(tokens) # verbose if args.verbose: sys.stderr.write('\n<< python source >>\n') sys.stderr.write('\n'.join([ '{0:<10} | {1}'.format(f, p) for f, p in zip(str(root).split('\n'), root.python.split('\n')) ])) # run with NamedTemporaryFile(mode='w', delete=False) as _file: boilerplate = open(os.path.join(os.path.dirname(__file__), 'runner.py')).read() _file.write(boilerplate)
# Driver script: run a local (Nelder-Mead) and a global (DIRECT-L) NLopt
# optimization, log the evaluations, plot the results, and assemble a movie.
import optimizer
import nlopt
import utils
import plotter
import imagecomposer
import config
from shutil import rmtree
from os import path

OUTPUT_FOLDER = config.get_output_folder()
# Second argument is ignore_errors=True — it's fine if the folder is absent.
rmtree(OUTPUT_FOLDER, True)

print('Running local Nelder-Mead optimization...')
nl_evals = optimizer.optimize(nlopt.LN_NELDERMEAD, "NelderMead", OUTPUT_FOLDER)
print('Running global Direct-L optimization...')
dl_evals = optimizer.optimize(nlopt.GN_DIRECT_L, "DirectL", OUTPUT_FOLDER)

print('Writing evaluation log files...')
utils.write_evallog(path.join(OUTPUT_FOLDER, 'NelderMead_eval.dat'), nl_evals)
utils.write_evallog(path.join(OUTPUT_FOLDER, 'DirectL_eval.dat'), dl_evals)

print('Creating plots...')
plotter.plot_searchpath(OUTPUT_FOLDER, "NelderMead_searchpath", nl_evals)
plotter.plot_searchpath(OUTPUT_FOLDER, "DirectL_searchpath", dl_evals)
plotter.plot_objectives(OUTPUT_FOLDER, "Objective", nl_evals, dl_evals)

print('Preparing composite images and the movie...')
# NOTE(review): this call is truncated in the visible excerpt — the remaining
# arguments are not shown here.
imagecomposer.make_movie(OUTPUT_FOLDER, "NelderMead", "DirectL", "Objective",
def compile_file(ifilename, ofilename):
    """Compile *ifilename* into *ofilename*.

    Pipeline: preprocess -> tokenize -> parse -> symbol table -> code
    generation -> save intermediate assembly (<root>.al) -> optimize ->
    assemble into the final output file.
    """
    import os

    # BUG FIX: time.clock() was removed in Python 3.8. Prefer perf_counter()
    # when available; fall back to clock() on interpreters that still have it.
    _clock = getattr(time, 'perf_counter', None) or time.clock
    start = _clock()
    printLog("Compiling " + ifilename + " into " + ofilename + "...")

    # Preprocess file & handle includes/strings
    printLog("Preprocessing...")
    preprocessed = preprocess(ifilename)
    printLog("Done\n")

    # Split into tokens
    printLog("Tokenizing...")
    tokenizer = Tokenizer()
    tokens = tokenizer.tokenizeString('\n'.join(preprocessed))
    printLog("Token stream:")
    printLog(tokens)
    printLog("\n")

    # Create parse tree
    printLog("Parsing...")
    tree = parse(tokens)
    printLog("Generated parse tree:")
    printLog(tree)
    printLog("\n")

    # Generate symbol table
    printLog("Locating symbols...")
    symbols = generateSymbolTable(tree)
    printLog("Symbol table:")
    printLog(symbols)
    printLog("\n")

    # Generate code
    printLog("Generating code...")
    assembly = generateCode(tree, symbols)
    printLog("Constructed high level assembly:")
    for i, line in enumerate(assembly.split('\n')):
        # Left-pad the line number to a 5-character gutter.
        printLog(str(i) + max(5 - len(str(i)), 0) * " " + ": " + line)

    # Save intermediate assembly next to the input file.
    # BUG FIX: the original used ifilename.split('.')[0], which yields '' for
    # paths such as './prog' and truncates at the first dot of any directory
    # name; splitext only strips the final extension.
    root_name = os.path.splitext(ifilename)[0]
    printLog("\nSaving to " + root_name + ".al...")
    with open(root_name + ".al", 'w') as outputf:
        outputf.write(assembly)
    printLog("Saved")

    # Save log
    print("Saving log...")
    saveLogFile()
    print("Done!")

    # Optimize code (rewrites the .al file in place — TODO confirm)
    print("Switching to optimizer...")
    optimizer.optimize(root_name + ".al", optimizer.OPTIMIZATION_FULL,
                       optimizer.PRIORITY_BALANCED)

    # Run assembler to produce the final output
    print("Switching to assembler...")
    assembler.assemble(root_name + ".al", ofilename, True, True)
    print("Done assembling!")
    print("Finished compiling in " + str(_clock() - start) + "s")
from optimizer import optimize

# Tuning run for the stabilizing attitude controller gains.
# NOTE(review): the positional-argument meanings are defined by
# optimizer.optimize — presumably (step, iterations, parameter prefix,
# gain names, indices); confirm against the optimizer module.
_STEP = 0.1
_ITERATIONS = 1000
_PARAM_PREFIX = 'autopilot.controllers.stabilizing.att_'
_GAIN_NAMES = ['kp', 'ki', 'kii', 'kd']
_INDICES = [38, 39]

optimize(_STEP, _ITERATIONS, _PARAM_PREFIX, _GAIN_NAMES, _INDICES)
def compile_file(ifilename, ofilename):
    """Compile *ifilename* into *ofilename*.

    Pipeline: preprocess -> tokenize -> parse -> symbol table -> code
    generation -> save intermediate assembly (<root>.al) -> optimize ->
    assemble into the final output file.
    """
    import os

    # BUG FIX: time.clock() was removed in Python 3.8. Prefer perf_counter()
    # when available; fall back to clock() on interpreters that still have it.
    _clock = getattr(time, 'perf_counter', None) or time.clock
    start = _clock()
    printLog("Compiling " + ifilename + " into " + ofilename + "...")

    # Preprocess file & handle includes/strings
    printLog("Preprocessing...")
    preprocessed = preprocess(ifilename)
    printLog("Done\n")

    # Split into tokens
    printLog("Tokenizing...")
    tokenizer = Tokenizer()
    tokens = tokenizer.tokenizeString('\n'.join(preprocessed))
    printLog("Token stream:")
    printLog(tokens)
    printLog("\n")

    # Create parse tree
    printLog("Parsing...")
    tree = parse(tokens)
    printLog("Generated parse tree:")
    printLog(tree)
    printLog("\n")

    # Generate symbol table
    printLog("Locating symbols...")
    symbols = generateSymbolTable(tree)
    printLog("Symbol table:")
    printLog(symbols)
    printLog("\n")

    # Generate code
    printLog("Generating code...")
    assembly = generateCode(tree, symbols)
    printLog("Constructed high level assembly:")
    for i, line in enumerate(assembly.split('\n')):
        # Left-pad the line number to a 5-character gutter.
        printLog(str(i) + max(5 - len(str(i)), 0) * " " + ": " + line)

    # Save intermediate assembly next to the input file.
    # BUG FIX: the original used ifilename.split('.')[0], which yields '' for
    # paths such as './prog' and truncates at the first dot of any directory
    # name; splitext only strips the final extension.
    root_name = os.path.splitext(ifilename)[0]
    printLog("\nSaving to " + root_name + ".al...")
    with open(root_name + ".al", 'w') as outputf:
        outputf.write(assembly)
    printLog("Saved")

    # Save log
    print("Saving log...")
    saveLogFile()
    print("Done!")

    # Optimize code (rewrites the .al file in place — TODO confirm)
    print("Switching to optimizer...")
    optimizer.optimize(root_name + ".al", optimizer.OPTIMIZATION_FULL,
                       optimizer.PRIORITY_BALANCED)

    # Run assembler to produce the final output
    print("Switching to assembler...")
    assembler.assemble(root_name + ".al", ofilename, True, True)
    print("Done assembling!")
    print("Finished compiling in " + str(_clock() - start) + "s")
from optimizer import optimize, optimize_pass
from datatypes import loop

# Brainfuck test program used by the __main__ smoke run below.
string = '''++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]
>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.'''


def to_pprint_format(ir):
    """Convert an IR sequence into nested plain lists for pprint display.

    Each `loop` node is replaced by the recursively converted list of its
    `.blocks`; every other element is passed through unchanged.
    """
    out = []
    for elem in ir:
        if isinstance(elem, loop):
            out.append(to_pprint_format(elem.blocks))
        else:
            out.append(elem)
    return out


def po(x):
    """Parse source *x* and return its optimized IR."""
    p = parse(x)
    return optimize(p)


if __name__ == '__main__':
    from pprint import pprint
    try:
        # BUG FIX: the original opened 'mandelbrot.bf' without ever reading
        # or closing it, leaking the file handle. Close it immediately; the
        # open still raises if the file is missing, as before. It looks like
        # leftover scaffolding for processing mandelbrot.bf instead of
        # `string` — TODO confirm intent.
        f = open('mandelbrot.bf')
        f.close()
        p = parse(string)
        x = optimize(p)
        pprint(to_pprint_format(x))
    except KeyboardInterrupt:
        pass
def getCurrentDataFromFilings(worksheets, isXLSX=True):
    """Collapse a list of filing worksheets into one two-column DataFrame.

    Returns a tuple (df, period_label, filing_type) where filing_type is
    '10Q' or '10K', or (None, None, None) when the document type cannot be
    classified. `worksheets` is presumably a list of pandas DataFrames (or
    of (DataFrame, ...) tuples when not XLSX) — TODO confirm against caller.
    """
    allWorksheets = []
    periodEndDate = None
    print('beginning worksheets to single column transformation')
    for worksheetIndex in range(0, len(worksheets)):
        worksheet = None
        if isXLSX:
            worksheet = worksheets[worksheetIndex]
        else:
            # Non-XLSX sources wrap each sheet in a tuple; take the frame.
            worksheet = worksheets[worksheetIndex][0]
        # set period end date global
        # The first sheet is expected to hold a row labeled with one of the
        # period-end aliases; its second cell is parsed as the filing date.
        if worksheetIndex == 0:
            periodEndDate = dateutil.parser.parse(worksheet.loc[worksheet[(worksheet.columns)[0]].isin(['Period End Date', 'Document Period End Date', 'End Date'])].values[0][1])
        # Drop columns that are more than half NaN.
        worksheet = worksheet.dropna(thresh=int(len(worksheet)*.5), axis=1)
        if worksheet.shape[1] > 1:
            if isinstance(worksheet.columns, pd.MultiIndex):
                # Helper picks the label column and the current-period column
                # out of a MultiIndex header — semantics defined elsewhere.
                col1, col2, fullCols = getCurrentMultiIndexColumn(
                    worksheet.columns)
                worksheet.columns = fullCols
                columns = [col1[1], col2[1]]
                tempDF = pd.concat([worksheet[col1], worksheet[col2]], axis=1)
                tempDF.columns = columns
            else:
                col1 = list(worksheet.columns)[0]
                # Flat header: pick the column matching the period end date.
                col2, fullCols = getCurrentColumn(worksheet.columns, periodEndDate)
                worksheet.columns = fullCols
                tempDF = pd.concat([worksheet[col1], worksheet[col2]], axis=1)
                tempDF.columns = [col1, col2]
            # Scale values by the unit stated in the header, if any.
            multiplier = 0
            if 'In Thousands' in tempDF.columns[0]:
                multiplier = 1000
            if 'In Millions' in tempDF.columns[0]:
                multiplier = 1000000
            if 'In Billions' in tempDF.columns[0]:
                multiplier = 1000000000
            tempDF = tempDF.fillna(0)
            if multiplier > 0:
                # NOTE(review): this is pandas chained assignment
                # (df[col].loc[mask] = ...) — it may hit
                # SettingWithCopyWarning and silently not write back on some
                # pandas versions; verify, and consider a single .loc on
                # tempDF instead.
                tempDF[tempDF.columns[1]].loc[tempDF[tempDF.columns[1]].astype(str).str.isnumeric(
                )] = tempDF[tempDF.columns[1]].loc[tempDF[tempDF.columns[1]].astype(str).str.isnumeric()].astype(float) * multiplier
            # optimize() is a project helper applied to the transposed frame
            # — its semantics are not visible here.
            tempDF = optimize(tempDF.transpose()).transpose()
            print('completed report transformation: ' + col1)
            allWorksheets.append(tempDF.values)
    # Stack all per-sheet (metric, value) rows into one frame.
    x = pd.DataFrame(np.concatenate(allWorksheets))
    # Build a label like "<doc type> on <period end date>" from known rows.
    col2 = x.loc[x[0] == 'Document Type'].values[0] + ' on ' + \
        x.loc[x[0] == 'Document Period End Date'].values[0]
    x.columns = ['Metric', col2[1]]
    print('completed worksheets to single column transform for filing: ' + col2[1])
    # Classify the filing by its label: 10-Q vs 10-K.
    colStr = str(col2[1]).upper()
    if 'Q' in colStr and '10' in colStr:
        return x, col2[1], '10Q'
    elif 'K' in colStr and '10' in colStr:
        return x, col2[1], '10K'
    else:
        return None, None, None
from optimizer import optimize

# Tuning run for the north/east speed controller gains.
# NOTE(review): the positional-argument meanings are defined by
# optimizer.optimize — presumably (step, iterations, parameter prefix,
# gain names, indices); confirm against the optimizer module.
_STEP = 0.1
_ITERATIONS = 3000
_PARAM_PREFIX = 'autopilot.controllers.ne_speed.'
_GAIN_NAMES = ['p', 'i']
_INDICES = [28, 29]

optimize(_STEP, _ITERATIONS, _PARAM_PREFIX, _GAIN_NAMES, _INDICES)
def parse(tokens):
    """Parse *tokens* into a statement and return its optimized form."""
    return optimizer.optimize(parse_stmt(tokens))
from optimizer import optimize

# Tuning run for the vertical (u) position controller gains.
# NOTE(review): the positional-argument meanings are defined by
# optimizer.optimize — presumably (step, iterations, parameter prefix,
# gain names, indices); confirm against the optimizer module.
_STEP = 0.1
_ITERATIONS = 1000
_PARAM_PREFIX = 'autopilot.controllers.u_pos.'
_GAIN_NAMES = ['p', 'i', 'd']
_INDICES = [28]

optimize(_STEP, _ITERATIONS, _PARAM_PREFIX, _GAIN_NAMES, _INDICES)
# np.zeros(len(F.parameters)), # [F.best_errors[p] for p in F.parameters], # n_walkers) p0 = list(p0.reshape((-1, p0.shape[-1]))) def save(): subprocess.check_call( ['rsync', '-r', local_dbdir + "/", "nimrod:" + dbdir + "/"]) np.save(local_dbdir + "/parameters.npy", F.parameters) np.save(local_dbdir + "/best_parameters.npy", np.array([F.best_parameters[p] for p in F.parameters])) save() i = 0 for (best_pos, best_chi2, ps, chi2s) in optimizer.optimize(p0, chi2, pool=pool, explore=2, adapt_covariance=False): np.save(local_dbdir + "/best_pos.npy", best_pos) np.save(local_dbdir + "/best_chi2.npy", best_chi2) np.save(local_dbdir + "/%06d-ps.npy" % i, ps) np.save(local_dbdir + "/%06d-chi2s.npy" % i, chi2s) save() i += 1 finally: pool.close()