def main():
    """Initialize peripherals, dump memory contents, then loop forever
    printing the temperature read from the TMP36 sensor on channel 0."""
    sensor = TMP36(0)
    ee.write_list(data_ary, True, addr)
    ram.write_list(ram_lst, True, addr)
    # Configure bit 7 of port B as an output so the LED can be driven.
    avr.port_direction_or(avr.portb, 0x80)
    sys.run(print_eeprom_and_sram)
    while True:
        print_temperature(sensor)
def main():
    """Parse command-line arguments, run the biogeographical simulation, and
    print the resulting seed lineage as a Newick tree string.

    Arguments:
        ngens            (positional, optional) number of generations, default 1000
        -z/--random-seed seed for the random number generator engine
    """
    parser = argparse.ArgumentParser(description="Biogeographical simulator")
    # nargs="?" makes the positional optional so its default actually applies;
    # a plain positional with default=1000 is still *required* by argparse and
    # the default is silently dead.
    parser.add_argument("ngens",
                        type=int,
                        nargs="?",
                        default=1000,
                        help="Number of generations to run (default = %(default)s).")
    parser.add_argument("-z", "--random-seed",
                        default=None,
                        help="Seed for random number generator engine.")
    args = parser.parse_args()
    # Renamed from 'sys' to avoid shadowing the stdlib sys module.
    sim = System(random_seed=args.random_seed)
    sim.bootstrap()
    sim.run(args.ngens)
    print(sim.seed_lineage.as_newick_string())
def systemrun():
    """Run a 100000-step Wolff-algorithm Ising simulation (module-level N, T)
    and display the resulting plots."""
    model = ising.Wolff(N, T)
    start_config, energy_delta, flips = model.run(100000)
    viewer = plot.show(start_config, energy_delta, flips)
    # viewer.saveVideo()
    viewer.showPlot()
def metropolisrun(N=400, T=1.8, steps=1600000):
    """Run a Metropolis-algorithm Ising simulation and display the result.

    Generalized from the original hard-coded run; calling with no arguments
    behaves exactly as before.

    N     -- lattice size (default 400)
    T     -- temperature (default 1.8)
    steps -- number of Monte Carlo steps (default 1600000)
    """
    model = ising.Metropolis(N, T)
    initialConfig, delta = model.run(steps)
    # Metropolis run has no flip-count series, hence the empty third argument.
    show = plot.show(initialConfig, delta, [])
    show.showPlot()
def get_system_error(FCLfile, valData, Nmax=None, inDataMFs='tri',
                     outDataMFs='tri', errorType='crispSoS'):
    """
    Determine the system error as calculated by the validation data.

    FCLfile    - FCL file path/name used to build the FRBS
    valData    - validation data in format [quant_inputs, qual_inputs, outputData]
                 with each data item {('function', 'var'): [min, max]} (or [val])
    Nmax       - max number of points from the data to use (None = use all)
    inDataMFs / outDataMFs - type of MFs for inputs and outputs in the data
    errorType  - 'crispSoS' (RMS of crisp differences, Simon 2002) or
                 'fuzzy' (RMS of squared fuzzy error integrals)

    Raises ValueError for an unknown errorType or if no errors were collected.
    """
    if errorType not in ('crispSoS', 'fuzzy'):
        raise ValueError("unknown errorType: %r" % errorType)

    q = 0  # 0 for quant data, 1 for qual data

    # Load the fuzzy system from its FCL definition.
    (inputs, outputs, rulebase, AND_operator, OR_operator, aggregator,
     implication, defuzz) = build_fuzz_system(FCLfile)
    sys = Fuzzy_System(inputs, outputs, rulebase, AND_operator, OR_operator,
                       aggregator, implication, defuzz)

    allErrors = []  # one error value per validation data item
    used = 0        # number of data items consumed so far
    for data_item in valData:
        # Build an input MF for each input key of the selected data category.
        valIns = {}
        for inKey in data_item[q]:
            inXs, inYs = fuzzyOps.rangeToMF(data_item[q][inKey], inDataMFs)
            valIns['_'.join(inKey)] = [inXs, inYs]

        # dAvg is needed by the crisp error measure for EVERY output MF type;
        # the original computed it only in the 'sing' branch (NameError otherwise).
        dAvg = sum(data_item[2]) / len(data_item[2])
        if outDataMFs == 'sing':
            # Singleton output MF built from the averaged output range.
            # NOTE(review): assumes rangeToMF accepts a one-element range for
            # 'sing' — confirm against fuzzyOps.
            outXs, outYs = fuzzyOps.rangeToMF([dAvg], outDataMFs)
        else:
            outXs, outYs = fuzzyOps.rangeToMF(data_item[2], outDataMFs)

        # Run the system and compare its (single) output against the data output.
        sysOuts = sys.run(valIns)
        sysOut = next(iter(sysOuts.values()))
        if errorType == 'crispSoS':
            allErrors.append(dAvg - sysOut)  # squared later
        elif errorType == 'fuzzy':
            allErrors.append(fuzErrorInt([outXs, outYs], sysOut) ** 2)

        used += 1
        if Nmax is not None and used >= Nmax:
            # Stop after exactly Nmax items (the original's post-increment
            # '>' check consumed Nmax + 1 items).
            break

    if not allErrors:
        raise ValueError("no errors collected; check valData")

    # RMS error: sqrt(mean of squared errors).
    if errorType == 'crispSoS':
        return (sum(e ** 2 for e in allErrors) / len(allErrors)) ** 0.5
    return (sum(allErrors) / len(allErrors)) ** 0.5
def systemrun():
    """Run a 100000-step Wolff-cluster simulation (module-level N, T) and
    show the resulting plot."""
    simulation = lib.Wolff(N, T)
    start_config, energy_delta, flips = simulation.run(100000)
    display = plot.Show(start_config, energy_delta, flips)
    display.showPlot()
def main(experiment, experimentName, n=1):
    """Run every scheduler in *experiment* over all generated job
    configurations, cache results in a CSV, and plot one subplot per metric.

    experiment     - dict whose LAST TWO entries are 'schedulers' and 'x-axis';
                     the remaining values are generator parameter ranges.
    experimentName - basename of the CSV cache file (experimentName + '.csv').
    n              - number of repetitions per configuration.
    """
    print("Started Runs")
    byTarget = {"makespan": {}, "avgFlowTime": {}, "maximumLateness": {}}
    for target in byTarget:
        for sf in experiment["schedulers"]:
            byTarget[target][sf] = []

    # Reload previously computed results so repeated runs accumulate data.
    if Path(experimentName + ".csv").is_file():
        with open(experimentName + ".csv", newline="") as csvfile:
            reader = csv.reader(csvfile, delimiter=";")
            for row in reader:
                byTarget[row[0]][row[1]].append((float(row[2]), float(row[3])))

    # All generator configurations; the last two dict entries (schedulers,
    # x-axis) are not generator parameters.
    product = itertools.product(*(list(experiment.values()))[:-2])
    for conf in product:
        for _ in range(n):
            jobs: List[Simulation.Job] = Generator.generate(*conf)
            for sf in experiment["schedulers"]:
                system: Simulation.System = Simulation.System(
                    jobs.copy(), conf[1], schedulerConverter[sf])
                finished: List[Simulation.Job] = system.run()
                analysis = Analysis.standardAnalysis(finished)
                for target in byTarget:
                    byTarget[target][sf].append(
                        (conf[xValueConverter[experiment["x-axis"][0]]],
                         analysis[target]))

    # Persist everything (old + new) back to the CSV cache.
    with open(experimentName + ".csv", "w", newline="") as csvfile:
        writer = csv.writer(csvfile, delimiter=";")
        for target in byTarget:
            for sf in byTarget[target]:
                for valuePair in byTarget[target][sf]:
                    writer.writerow([target, sf] + list(valuePair))

    # One subplot per target metric; average the y-values per x-value.
    fig, axs = plt.subplots(3)
    for i, targetFunktion in enumerate(
            ["makespan", "avgFlowTime", "maximumLateness"]):
        for sf in byTarget[targetFunktion]:
            xs = []
            ys = []
            pairs = sorted(byTarget[targetFunktion][sf], key=lambda p: p[0])
            # groupby requires input pre-sorted by the same key as above.
            for key, group in itertools.groupby(pairs, lambda p: p[0]):
                vals = list(group)
                xs.append(key)
                ys.append(sum(v[1] for v in vals) / len(vals))
            axs[i].plot(xs, ys, label=sf)
        axs[i].set_ylabel(targetFunktion)
        # 'bottom=' replaces the 'ymin=' keyword removed in matplotlib 3.0.
        axs[i].set_ylim(bottom=0)
        axs[i].legend(bbox_to_anchor=(1.0, 1), loc="upper left",
                      fontsize="xx-small")
    plt.xlabel(experiment["x-axis"][0])
    plt.show()
def get_system_error(FCLfile, valData, Nmax=None, inDataMFs='tri',
                     outDataMFs='tri', errorType='crispSoS'):
    """
    Determine the system error as calculated by the validation data.

    FCLfile    - FCL file path/name used to build the FRBS
    valData    - validation data in format [quant_inputs, qual_inputs, outputData]
                 with each data item {('function', 'var'): [min, max]} (or [val])
    Nmax       - max number of points from the data to use (None = use all)
    inDataMFs / outDataMFs - type of MFs for inputs and outputs in the data
    errorType  - 'crispSoS' (RMS of crisp differences, Simon 2002) or
                 'fuzzy' (RMS of squared fuzzy error integrals)

    Raises ValueError for an unknown errorType or if no errors were collected.
    """
    if errorType not in ('crispSoS', 'fuzzy'):
        raise ValueError("unknown errorType: %r" % errorType)

    q = 0  # 0 for quant data, 1 for qual data

    # Load the fuzzy system from its FCL definition.
    (inputs, outputs, rulebase, AND_operator, OR_operator, aggregator,
     implication, defuzz) = build_fuzz_system(FCLfile)
    sys = Fuzzy_System(inputs, outputs, rulebase, AND_operator, OR_operator,
                       aggregator, implication, defuzz)

    allErrors = []  # one error value per validation data item
    used = 0        # number of data items consumed so far
    for data_item in valData:
        # Build an input MF for each input key of the selected data category.
        valIns = {}
        for inKey in data_item[q]:
            inXs, inYs = fuzzyOps.rangeToMF(data_item[q][inKey], inDataMFs)
            valIns['_'.join(inKey)] = [inXs, inYs]

        # dAvg is needed by the crisp error measure for EVERY output MF type;
        # the original computed it only in the 'sing' branch (NameError otherwise).
        dAvg = sum(data_item[2]) / len(data_item[2])
        if outDataMFs == 'sing':
            # Singleton output MF built from the averaged output range.
            # NOTE(review): assumes rangeToMF accepts a one-element range for
            # 'sing' — confirm against fuzzyOps.
            outXs, outYs = fuzzyOps.rangeToMF([dAvg], outDataMFs)
        else:
            outXs, outYs = fuzzyOps.rangeToMF(data_item[2], outDataMFs)

        # Run the system and compare its (single) output against the data output.
        sysOuts = sys.run(valIns)
        sysOut = next(iter(sysOuts.values()))
        if errorType == 'crispSoS':
            allErrors.append(dAvg - sysOut)  # squared later
        elif errorType == 'fuzzy':
            allErrors.append(fuzErrorInt([outXs, outYs], sysOut) ** 2)

        used += 1
        if Nmax is not None and used >= Nmax:
            # Stop after exactly Nmax items (the original's post-increment
            # '>' check consumed Nmax + 1 items).
            break

    if not allErrors:
        raise ValueError("no errors collected; check valData")

    # RMS error: sqrt(mean of squared errors).
    if errorType == 'crispSoS':
        return (sum(e ** 2 for e in allErrors) / len(allErrors)) ** 0.5
    return (sum(allErrors) / len(allErrors)) ** 0.5
def main():
    """Generate job sets for every configuration of the figure_3 experiment,
    run each enabled scheduler on them, and store the per-run analysis in the
    database. Prints a progress fraction in [0, 1] and the total wall time."""
    start: float = timeit.default_timer()
    # Schedulers enabled for this experiment; commented entries are alternative
    # strategies that can be toggled back on.
    schedulers = [
        #Simulation.System.fifo,
        #Simulation.System.fifo_fit,
        #Simulation.System.fifo_backfill,
        #Simulation.System.lpt,
        #Simulation.System.lpt_fit,
        #Simulation.System.lpt_backfill,
        #Simulation.System.spt,
        #Simulation.System.spt_fit,
        #Simulation.System.spt_backfill,
        #Simulation.System.fifo_optimistic,
        #Simulation.System.fifo_backfill_lpt,
        Simulation.System.fifo_optimistic_lpt,
        #Simulation.System.lpt_backfill_fifo,
        #Simulation.System.lpt_optimistic_fifo,
        Simulation.System.fifo_backfill_spt,
    ]
    numberOfIterations = list(range(1))
    dbConnector = DBConnector.DBConnector()
    print("DB connection open, start running")
    experiment = figure_3
    product = itertools.product(*experiment.values())
    # Total runs = #configurations * #iterations * #schedulers, so the printed
    # progress actually reaches 1.0 (the original divided by configurations
    # only and overshot with more than one scheduler/iteration).
    numberOfRuns = (functools.reduce(operator.mul,
                                     map(len, list(experiment.values())), 1)
                    * len(numberOfIterations) * len(schedulers))
    runCounter = 0
    for conf in product:
        for i in numberOfIterations:
            jobs: List[Simulation.Job] = Generator.generate(*conf)
            for sf in schedulers:
                system: Simulation.System = Simulation.System(
                    jobs.copy(), conf[1], sf)
                finishedJobs: List[Simulation.Job] = system.run()
                #print("unused PT:") (Analysis.idleTime(finishedJobs))
                dbConnector.add(*conf, Analysis.standardAnalysis(finishedJobs),
                                sf)
                runCounter += 1
                print(runCounter / numberOfRuns)
    del dbConnector  # drop the connector so its finalizer can close the DB
    stop = timeit.default_timer()
    print('Time: ', stop - start)
# NOTE(review): this chunk appears to be the tail of a copy-outputs helper;
# `dummy_target` and `args` are defined above the visible portion of the file.
if dummy_target:
    # Touch the dummy target so the build system considers it up to date.
    with open(dummy_target, 'a'):
        os.utime(dummy_target, None)
    sys.exit(0)

# Copy outputs
zipped_outputs = zip(args.outputs, args.original_outputs)
for expected, generated in zipped_outputs:
    do_copy = False
    if not os.path.exists(expected):
        if not os.path.exists(generated):
            # Neither side exists: warn, but do not attempt a copy.
            print(
                'Unable to find generated file. This can cause the build to fail:'
            )
            print(generated)
            do_copy = False
        else:
            do_copy = True
    elif os.path.exists(generated):
        # Both exist: copy only when the generated file is newer.
        if os.path.getmtime(generated) > os.path.getmtime(expected):
            do_copy = True
    if do_copy:
        # Remove the stale copy first so copyfile cannot fail on permissions.
        if os.path.exists(expected):
            os.remove(expected)
        shutil.copyfile(generated, expected)


if __name__ == '__main__':
    # NOTE(review): the stdlib `sys` module has no `run`; this was presumably
    # meant to be `sys.exit(main(sys.argv[1:]))` — confirm against the part of
    # the file above this chunk.
    sys.run(sys.argv[1:])
t2 = self.enginestatuscheck() print("Mode removed") if y.get("delete"): u = y["delete"] # will give hexcode string sqlhelper.deletecolorfromdb(u) c = sqlhelper.updatedicts() self.writestatus() t2 = self.enginestatuscheck() print("Color removed") time.sleep(.1) except KeyboardInterrupt: print("KeyboardInterrupt") break try: t1.do_run = False t1.join() t2.do_run = False t2.join() except: pass if __name__=="__main__": sys = System() #create object c = sqlhelper.updatedicts() # sys.turnon3separate([1,4,6]) # time.sleep(1) # sys.turnoff() #turn off # sys.writestatus() sys.run() #start main program # sys.turnoff() # print("Done")