def run_FCFS(rho, k, seed=None):
    """Runs the simulation for FCFS discipline with given parameters, returning its statistics"""
    results_w = []
    results_nq = []
    stat_w = Statistics()
    stat_nq = Statistics()
    simulator = SimulatorFCFS(rho, seed)
    simulator.transient_phase()
    for i in range(0, 3200):
        res = simulator.simulate_FCFS(k)
        mean_w = stat_w.calculate_incremental_mean(res[0])
        var_w = stat_w.calculate_incremental_variance(res[0])
        mean_nq = stat_nq.calculate_incremental_time_mean(res[1], res[2])
        var_nq = stat_nq.calculate_incremental_variance(res[1])
        center_ew, lower_ew, upper_ew, precision_ew = stat_w.confidence_interval_for_mean(
            mean_w, var_w, len(res[0]), 0.95)
        center_vw, lower_vw, upper_vw, precision_vw = stat_w.confidence_interval_for_variance(
            var_w, len(res[0]), 0.95)
        center_enq, lower_enq, upper_enq, precision_enq = stat_nq.confidence_interval_for_mean(
            mean_nq, var_nq, res[2], 0.95)
        center_vnq, lower_vnq, upper_vnq, precision_vnq = stat_nq.confidence_interval_for_variance(
            var_nq, res[2], 0.95)
        results_w.append(
            ((mean_w, center_ew, lower_ew, upper_ew, precision_ew),
             (var_w, center_vw, lower_vw, upper_vw, precision_vw)))
        results_nq.append(
            ((mean_nq, center_enq, lower_enq, upper_enq, precision_enq),
             (var_nq, center_vnq, lower_vnq, upper_vnq, precision_vnq)))
    return results_w, results_nq
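# The snippet above leans on incremental mean/variance updates plus a
# normal-approximation confidence interval. Below is a minimal, self-contained
# sketch of that idea (Welford's algorithm); it is an illustration, not the
# project's Statistics class, and the 1.96 z-value for 95% coverage is an assumption.
import math


class IncrementalStats:
    def __init__(self):
        self.n = 0
        self.mean = 0.0
        self.m2 = 0.0  # running sum of squared deviations

    def add(self, x):
        self.n += 1
        delta = x - self.mean
        self.mean += delta / self.n
        self.m2 += delta * (x - self.mean)

    def variance(self):
        return self.m2 / (self.n - 1) if self.n > 1 else 0.0

    def mean_confidence_interval(self, z=1.96):
        half_width = z * math.sqrt(self.variance() / self.n)
        return self.mean - half_width, self.mean + half_width


inc = IncrementalStats()
for sample in [3.1, 2.7, 3.4, 2.9, 3.0]:
    inc.add(sample)
print(inc.mean, inc.mean_confidence_interval())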
def DocumentsAnalysis(self, naturalText='', generatedText=''):
    natGraph = GraphBuilder()
    natTextSize = len(sent_tokenize(naturalText))
    genTextSize = len(sent_tokenize(generatedText))
    genGraph = GraphBuilder()
    natGraph.CreateGraph(naturalText)
    genGraph.CreateGraph(generatedText)
    self.__plot.StatisticsComp(Statistics(natGraph.Graph(), natTextSize),
                               Statistics(genGraph.Graph(), genTextSize))
    self.__textCount += 1
def run_train_batched(self, train_data, valid_data, vocabs):
    print(self.model.parameters)
    total_train = train_data.compute_batches(self.opt.batch_size, vocabs, self.opt.max_chars,
                                             0, 1, self.opt.decoder_type, trunc=self.opt.trunc)
    total_valid = valid_data.compute_batches(self.opt.batch_size, vocabs, self.opt.max_chars,
                                             0, 1, self.opt.decoder_type, randomize=False,
                                             trunc=self.opt.trunc)
    print('Computed Batches. Total train={}, Total valid={}'.format(total_train, total_valid))
    report_stats = Statistics()
    self.last_ppl = None
    for epoch in range(self.start_epoch, self.opt.epochs + 1):
        self.model.train()
        total_stats = Statistics()
        batch_number = -1
        for idx, batch in enumerate(train_data.batches):
            batch['gpu'] = self.opt.gpuid[0]
            loss, batch_stats = self.model.forward(batch)
            batch_size = batch['code'].size(0)
            loss.div(batch_size).backward()
            report_stats.update(batch_stats)
            total_stats.update(batch_stats)
            batch_number += 1
            clip_grad_norm_(self.model.parameters(), self.opt.max_grad_norm)
            self.optimizer.step()
            self.optimizer.zero_grad()
            if batch_number % self.opt.report_every == -1 % self.opt.report_every:
                report_stats.output(epoch, batch_number + 1, len(train_data.batches),
                                    total_stats.start_time)
                report_stats = Statistics()
        print('Train perplexity: %g' % total_stats.ppl())
        print('Train accuracy: %g' % total_stats.accuracy())
        self.model.eval()
        valid_stats = Statistics()
        for idx, batch in enumerate(valid_data.batches):
            batch['gpu'] = self.opt.gpuid[0]
            loss, batch_stats = self.model.forward(batch)
            valid_stats.update(batch_stats)
        print('Validation perplexity: %g' % valid_stats.ppl())
        print('Validation accuracy: %g' % valid_stats.accuracy())
        self.update_learning_rate(valid_stats)
        print('Saving model')
        self.save_checkpoint(epoch, valid_stats)
        print('Model saved')
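# run_train_batched above assumes a Statistics object that accumulates loss and
# token counts across batches and reports perplexity and accuracy. A hedged,
# self-contained sketch of what such an accumulator might look like follows;
# the field names (loss, n_words, n_correct) are assumptions, not the actual class.
import math
import time


class RunningStats:
    def __init__(self, loss=0.0, n_words=0, n_correct=0):
        self.loss = loss
        self.n_words = n_words
        self.n_correct = n_correct
        self.start_time = time.time()

    def update(self, other):
        # merge per-batch statistics into the running totals
        self.loss += other.loss
        self.n_words += other.n_words
        self.n_correct += other.n_correct

    def accuracy(self):
        return 100.0 * self.n_correct / self.n_words

    def ppl(self):
        # perplexity = exp(average per-token cross-entropy), capped for numeric safety
        return math.exp(min(self.loss / self.n_words, 100))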
def __init__(self, genome):
    """ The GPopulation Class creator """
    if isinstance(genome, GPopulation):
        self.oneSelfGenome = genome.oneSelfGenome
        self.internalPop = []
        self.internalPopRaw = []
        self.popSize = genome.popSize
        self.sortType = genome.sortType
        self.sorted = False
        self.minimax = genome.minimax
        self.scaleMethod = genome.scaleMethod
        self.allSlots = [self.scaleMethod]
        self.internalParams = genome.internalParams
        self.multiProcessing = genome.multiProcessing
        try:
            logging.debug("do I have a comm?")
            self.mpi_comm = genome.mpi_comm
            self.mpi_myeval = genome.mpi_myeval
            self.mpi_full_copy = genome.mpi_full_copy
            logging.debug("I do")
        except:
            logging.debug("I do not")
            pass
        self.statted = False
        self.stats = Statistics()
        return

    logging.debug("New population instance, %s class genomes.", genome.__class__.__name__)
    self.oneSelfGenome = genome
    self.internalPop = []
    self.internalPopRaw = []
    self.popSize = 0
    self.sortType = Consts.CDefPopSortType
    self.sorted = False
    self.minimax = Consts.CDefPopMinimax
    self.scaleMethod = FunctionSlot("Scale Method")
    self.scaleMethod.set(Consts.CDefPopScale)
    self.allSlots = [self.scaleMethod]
    self.internalParams = {}
    self.multiProcessing = (False, False)
    # Statistics
    self.statted = False
    self.stats = Statistics()
def __init__(self, seed=1, fixPart=0.30, trashPart=0.15):
    CoupledDEVS.__init__(self, "Factory")
    self.cylinder = self.addSubModel(ObjectSource("Cylinder", 3))
    self.cube = self.addSubModel(ObjectSource("Cube", 2))
    self.preassembler = self.addSubModel(Preassembler())
    self.assembler = self.addSubModel(Assembler(seed=seed + 1))
    self.inspector = self.addSubModel(Inspector(seed=seed, fixPart=fixPart, trashPart=trashPart))
    self.accept = self.addSubModel(Accept())
    self.fix = self.addSubModel(Fix())
    self.trash = self.addSubModel(Trash())
    self.stats = self.addSubModel(Statistics())
    self.connectPorts(self.cylinder.object_out, self.preassembler.in_object)
    self.connectPorts(self.cube.object_out, self.preassembler.in_object)
    self.connectPorts(self.preassembler.out_product, self.assembler.in_product)
    self.connectPorts(self.assembler.out_stats, self.stats.in_queueTimes)
    self.connectPorts(self.assembler.out_product, self.inspector.in_product)
    self.connectPorts(self.inspector.out_stats, self.stats.in_queueTimes)
    self.connectPorts(self.inspector.out_accept, self.accept.in_product)
    self.connectPorts(self.inspector.out_fix, self.fix.in_product)
    self.connectPorts(self.fix.out_product, self.assembler.in_product)
    self.connectPorts(self.inspector.out_trash, self.trash.in_product)
    self.connectPorts(self.accept.out_product, self.stats.in_product)
    self.connectPorts(self.trash.out_product, self.stats.in_product)
def setUp(self):
    self.config = {
        "PARENT_THREAD_SLEEP_TIME": 60,
        "TWITCH_THREAD_SLEEP_TIME": 0.75,
        "IRC_THREAD_SLEEP_TIME": 1,
        "TIMEOUT": 5,
        "MATCH_PHRASES": [
            "BabyRage NEVER LUCKY BabyRage",
        ],
        "LOGS_FOLDER": "/logs/",
        "STATS_FOLDER": "/stats/",
        "CSV_FOLDER": "/CSV/",
        "DATE_TIME_FORMAT": "%B %d %Y %H:%M:%S",
        "TIME_FORMAT": "%H:%M:%S",
        "GRAPH_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.csv",
        "JSON_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.json",
        "CHAT_LOG_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.log",
        "RECONNECT_TIME": 360,
        # enable bot that reads stream IRC?
        "IRC_BOT": True,
        # if true, this will cause the IRC Bot to never stop reading chat, even if the streamer goes offline
        "ALWAYS_ONLINE": False,
        # enable bot that grabs stream data using Twitch API?
        "TWITCH_BOT": True,
    }
    self.jsonFile = JsonEditor.JsonEditor(
        "./data/_TEST_/stats/D13_M05_Y2015_H20_m00_s45.json",
        "./data/_TEST_/logs/_TEST_.json")
    self.stats = Statistics(
        "_TEST_",
        "./data/_TEST_/CSV/D13_M05_Y2015_H20_m00_s45.csv",
        "./data/_TEST_/stats/D13_M05_Y2015_H20_m00_s45.json",
        "./data/_TEST_/logs/D13_M05_Y2015_H20_m00_s45.log",
        "./data/_TEST_/logs/_TEST_.json",
        self.config)
def forward(self, batch):
    # initial parent states for Prod Decoder
    batch_size = batch['seq2seq'].size(0)
    batch['parent_states'] = {}
    for j in range(0, batch_size):
        batch['parent_states'][j] = {}
        batch['parent_states'][j][0] = Variable(torch.zeros(
            1, 1, self.opt.decoder_rnn_size), requires_grad=False)
    context, context_lengths, enc_hidden = self.encoder(batch)
    dec_initial_state = DecoderState(
        enc_hidden,
        Variable(torch.zeros(batch_size, 1, self.opt.decoder_rnn_size), requires_grad=False))
    output, attn, copy_attn = self.decoder(batch, context, context_lengths, dec_initial_state)
    del batch['parent_states']
    src_map = torch.zeros(0, 0)
    # print(src_map)
    # print(batch['concode_src_map_vars'].shape)
    src_map = torch.cat((src_map, batch['concode_src_map_vars']), 1)
    src_map = torch.cat((src_map, batch['concode_src_map_methods']), 1)
    scores = self.generator(bottle(output), bottle(copy_attn), src_map, batch)
    loss, total, correct = self.generator.computeLoss(scores, batch)
    return loss, Statistics(loss.data[0], total, correct, self.encoder.n_src_words)
def __init__(self, name="Player", health=20, shield=10, dodge=0, parry=0,
             criticalHit=1, mana=10, damageMin=1, damageMax=2, armor=0, xp=0,
             inventory=Inventory()):
    Character.__init__(self, name, health, shield, dodge, parry, criticalHit,
                       mana, damageMin, damageMax, armor, xp, inventory)
    self.statistics = Statistics()
    self.success = {
        "monster_hunter": Success(name="Monster hunter"),
        "commercial": Success(name="Commercial"),
        "lucky": Success(name="Lucky"),
        "compulsive_buyer": Success(name="Compulsive buyer"),
        "vendor": Success(name="Vendor on the run"),
        "consumer": Success(name="Consumer"),
        "the_end": Success(name="The End")
    }
def selective_repeat_test(file_name: str, repetitions: int) -> None:
    for i in range(repetitions):
        # Print progress
        print(f"Run {i + 1}/{repetitions}")
        stats = Statistics()
        # Set up test-specific statistics
        CommunicationSettings.check_sum = CheckSum.CRC
        CommunicationSettings.window_size = 4
        # Create sender and receiver
        sender = SenderSR("Sender", stats)
        reciever = RecieverSR("Reciever", stats)
        # Set up the result image name
        reciever.set_recreated_image_name(f"Img/res_sr{i + 1}.png")
        # Bind them
        sender.bind(reciever)
        reciever.bind(sender)
        # Start them
        sender.start()
        reciever.start()
        # Start transmission
        sender.send_image(file_name)
        wait_for_simulation_end()
        print(stats.get_statistics())
        CommunicationSettings.reset_sumulation_state()
def compute_data(self):
    # 'end-1c' drops the trailing newline character that the Text widget appends
    dataset = self.txt_space.get("1.0", 'end-1c')
    # using Statistics class to generate ECDF, MEAN, VARIANCE and STD
    stats = Statistics(self.format_dataset(dataset))
    stats.plot_data("INFO")
    stats.cdf()
def __init__(self, list_of_articles_from_source):
    self.list_of_articles_from_source = list_of_articles_from_source
    self.statistics_names = Statistics().statistics_names
    self.list_of_sources = [
        arts_from_source.source_name
        for arts_from_source in list_of_articles_from_source
    ]
def __init__(self, parent=None):
    super().__init__(parent)
    self.statistics = Statistics(resourcePath(DB_PATH))
    self.setupUi(self)
    self.initUi()
def run_Stat(self, Distribution_By_Category, Distribution_By_Bins):
    S = Statistics(self.asset_pool)
    S.general_statistics_1()
    S.loop_Ds_ret_province_profession(Distribution_By_Category, Distribution_By_Bins)
    S.cal_income2debt_by_ID()
def showStatistics(self):
    """Shows general statistics of the analyzed data over all frames."""
    # check for None before calling len() so an empty analysis cannot raise a TypeError
    if self.contacts is None or len(self.contacts) == 0:
        box = ErrorBox(ErrorMessages.NOSCORES_PROMPTANALYSIS)
        box.exec_()
        return
    self.statisticsView = Statistics(self.contacts)
    self.statisticsView.showNormal()
def __init__(self):
    '''Welcomes the user and then creates Character_Collection, Player and
    Statistics. Also sets an enemy (Zombie). Then runs the menu.'''
    self.welcome()
    self.characters = Character_Collection()
    self.player = self.create_player()
    self.enemy = self.characters.get_enemy()
    self.statistic = Statistics(0, 0)
    self.menu()
def __init__(self):
    self.flightList = FlightListAdministration()  # Manages the flight dictionary list -> Fernan
    self.statistic = Statistics()  # Updates attributes to keep track of sales -> Fernan
    self.flightAdmin = FlightAdministration(self.flightList)
    self.Admin = Administrator("Lluvia", "Manilla", 10000.555, "1999/04/24")
    self.principalMenu()
def __init__(self):
    self.id = next(self.id)
    self.tempsArribada = 0
    self.tempsArribadaCua = 0
    self.tempsIniciServei = 0
    self.tempsSortida = 0
    self.cua = None
    Statistics().addEntity(self)
    print("Created a person with id " + str(self.id))
def LoadDataForVisualisation(self):
    print 'Loading data for visualisation...'
    Reader.read(self.filePath2)
    self.data = Reader.load
    self.dataStr = Reader.load2  # date and time
    self.data = Operations.CalculateToMetrics(self.data)
    stat = Statistics(self.data, self.dataStr)
    stat.makeStats()
    self.UpdateStatsGUI(stat)
    print 'All done.'
def recordStats(self):
    # create all of our stats for the stream; runs after the stream is over
    jsonFile = JsonEditor(self.JSONfp, self.globalPath)
    stats = Statistics(self.stream, self.CSVfp, self.JSONfp, self.LOGfp,
                       self.globalPath, self.config)
    dailyStats = stats.doDaily()
    jsonFile.toJSON(dailyStats)
    print self.stream + ": Tally Emotes started!"
    stats.tallyEmotes()
    print self.stream + ": End of stream tasks finished!"
def simulePercolation(self):
    trialsFraction = []
    for trial in range(self.trials):
        grid = Percolation(self.gridSize, WeightedQuickUnion())
        while not grid.percolates():
            row = random.randint(1, self.gridSize)
            col = random.randint(1, self.gridSize)
            grid.open(row, col)
        trialsFraction.append(grid.numberOfOpenSites() / (self.gridSize * self.gridSize))
    self.statistic = Statistics(trialsFraction)
def __init__(self, instanceName, timeLimit=1800, verbose=True):
    self._model = None
    self._modelVars_x = None
    self._modelVars_w = None
    self._modelVars_y = None
    self._modelSolution = None
    self._instance = Instance(instanceName)
    self._timeLimit = timeLimit
    self._verbose = verbose
    self._stats = Statistics(instanceName, "Integer Programming")
def main():
    """
    Main function: initializes the server and keeps starting games.
    :return:
    """
    statistics = Statistics()
    while True:
        server = Server(statistics)
        server.initiate_server()
        time.sleep(3)
def __init__(self, genome):
    """ The GPopulation Class creator """
    if isinstance(genome, GPopulation):  # Cloning a population?
        self.oneSelfGenome = genome.oneSelfGenome
        self.internalPop = []
        self.internalPopRaw = []
        self.popSize = genome.popSize
        self.sortType = genome.sortType
        self.sorted = False
        self.minimax = genome.minimax
        self.scaleMethod = genome.scaleMethod
        self.allSlots = [self.scaleMethod]
        self.internalParams = genome.internalParams
        self.multiProcessing = genome.multiProcessing
        self.statted = False
        self.stats = Statistics()
        self.proc_pool = genome.proc_pool
        return

    logging.debug("New population instance, %s class genomes.", genome.__class__.__name__)
    self.oneSelfGenome = genome
    self.internalPop = []
    self.internalPopRaw = []
    self.popSize = 0
    self.proc_pool = None
    self.sortType = Consts.CDefPopSortType
    self.sorted = False
    self.minimax = Consts.CDefPopMinimax
    self.scaleMethod = FunctionSlot("Scale Method")
    self.scaleMethod.set(Consts.CDefPopScale)
    self.allSlots = [self.scaleMethod]
    self.internalParams = {}
    self.multiProcessing = (False, False)
    # Statistics
    self.statted = False
    self.stats = Statistics()
def calcRegr(self):
    x_avg = Statistics(self.xdata).average()
    y_avg = Statistics(self.ydata).average()
    z = 0.0
    w = 0.0
    for i in range(len(self.xdata)):
        z += pow(self.xdata[i] - x_avg, 2)
        w += (self.xdata[i] - x_avg) * (self.ydata[i] - y_avg)
    self.b1 = w / z
    self.b0 = (sum(self.ydata) - self.b1 * sum(self.xdata)) / len(self.ydata)
    y_reg = [self.b1 * x + self.b0 for x in self.xdata]
    self.r2 = Statistics.pearson(self.ydata, np.array(y_reg))
    r = (self.b0, self.b1)
    return r
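# calcRegr above is an ordinary least-squares fit of y = b0 + b1 * x. A compact,
# self-contained equivalent using numpy directly (rather than the project's
# Statistics helpers) is sketched below; computing r2 as the squared Pearson
# correlation between observed and fitted values is an assumption.
import numpy as np


def fit_line(x, y):
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    b1 = np.sum((x - x.mean()) * (y - y.mean())) / np.sum((x - x.mean()) ** 2)
    b0 = y.mean() - b1 * x.mean()
    y_hat = b0 + b1 * x
    r2 = np.corrcoef(y, y_hat)[0, 1] ** 2
    return b0, b1, r2


print(fit_line([1, 2, 3, 4], [2.1, 3.9, 6.2, 7.8]))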
def question_1(data, data_features):
    print("Question 1:")
    knn_runner = AlgorithmRunner("KNN")
    rocchio_runner = AlgorithmRunner("Rocchio")
    stats = Statistics()

    # KNN calculations
    kfoldKNN = data.split_to_k_folds()
    sumPrecisionKNN = 0
    sumRecallKNN = 0
    sumAccuracyKNN = 0
    for trainKNN, testKNN in kfoldKNN:
        knn_runner.fit(
            data_features.loc[:, data_features.columns != 'imdb_score'].iloc[trainKNN],
            data_features['imdb_score'].iloc[trainKNN])
        pred = knn_runner.algorithm.predict(
            data_features.loc[:, data_features.columns != 'imdb_score'].iloc[testKNN])
        sumPrecisionKNN += stats.precision(
            labels=np.array(data_features['imdb_score'].iloc[testKNN]).T, predictions=pred)
        sumRecallKNN += stats.recall(
            labels=np.array(data_features['imdb_score'].iloc[testKNN]), predictions=pred)
        sumAccuracyKNN += stats.accuracy(
            labels=np.array(data_features['imdb_score'].iloc[testKNN]), predictions=pred)
    print("KNN classifier: ", sumPrecisionKNN / 5, ",", sumRecallKNN / 5, ",",
          sumAccuracyKNN / 5)

    # Rocchio calculations
    kfoldRocciho = data.split_to_k_folds()
    sumPrecisionRocchio = 0
    sumRecallRocchio = 0
    sumAccuracyRocchio = 0
    for trainRocciho, testRocciho in kfoldRocciho:
        rocchio_runner.fit(
            data_features.loc[:, data_features.columns != 'imdb_score'].iloc[trainRocciho],
            data_features['imdb_score'].iloc[trainRocciho])
        pred = rocchio_runner.algorithm.predict(
            data_features.loc[:, data_features.columns != 'imdb_score'].iloc[testRocciho])
        sumPrecisionRocchio += stats.precision(
            labels=np.array(data_features['imdb_score'].iloc[testRocciho]).T, predictions=pred)
        sumRecallRocchio += stats.recall(
            labels=np.array(data_features['imdb_score'].iloc[testRocciho]), predictions=pred)
        sumAccuracyRocchio += stats.accuracy(
            labels=np.array(data_features['imdb_score'].iloc[testRocciho]), predictions=pred)
    print("Rocchio classifier: ", sumPrecisionRocchio / 5, ",", sumRecallRocchio / 5, ",",
          sumAccuracyRocchio / 5)
    print(" ")
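# question_1 above averages precision/recall/accuracy over 5 folds through the
# project's AlgorithmRunner/Statistics wrappers. A self-contained sketch of the
# same pattern with plain scikit-learn on toy data is shown below; the toy data
# and the default binary averaging in the metrics are assumptions.
import numpy as np
from sklearn.model_selection import KFold
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import precision_score, recall_score, accuracy_score

rng = np.random.default_rng(0)
X = rng.random((100, 4))
y = rng.integers(0, 2, 100)

precisions, recalls, accuracies = [], [], []
for train_idx, test_idx in KFold(n_splits=5, shuffle=True, random_state=0).split(X):
    clf = KNeighborsClassifier().fit(X[train_idx], y[train_idx])
    pred = clf.predict(X[test_idx])
    precisions.append(precision_score(y[test_idx], pred, zero_division=0))
    recalls.append(recall_score(y[test_idx], pred, zero_division=0))
    accuracies.append(accuracy_score(y[test_idx], pred))

print(np.mean(precisions), np.mean(recalls), np.mean(accuracies))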
def printStatic():
    static = Statistics()
    print('actual')
    print(static.getStatistic(actual))
    print('hybrid')
    print(static.getStatistic(hybrid))
    print('syntatic')
    print(static.getStatistic(syntatic))
    print('sematic')
    print(static.getStatistic(sematic))
    print('dynamic')
    print(static.getStatistic(dynamic))
def calcRegr(self):
    x_avg = Statistics(self.xdata).average()
    y_avg = Statistics(self.ydata).average()
    z = 0.0
    w = 0.0
    for i in range(len(self.xdata)):
        z += pow(self.xdata[i] - x_avg, 2)
        w += (self.xdata[i] - x_avg) * (self.ydata[i] - y_avg)
    b1 = w / z
    b0 = (sum(self.ydata) - b1 * sum(self.xdata)) / len(self.ydata)
    y_reg = [b1 * x + math.exp(b0) for x in self.xdata]
    self.r2 = Statistics.pearson(self.ydata, np.array(y_reg))
    self.b1 = b1
    self.b0 = math.exp(b0)
    r = (math.exp(b0), b1)
    return r
def main():
    arrange_data = ArrangeData()
    create_plot = CreatePlot()
    statistics = Statistics()
    arrange_data.load_data()
    arrange_data.count_values()
    create_plot.connect_small_values()
    create_plot.bar_plot()
    statistics.percentage()
def __init__(self, instanceName, Mu, Lambda, Phi, Omega, verbose=True):
    self._instance = Instance(instanceName)
    self._mu = Mu
    self._lambda = Lambda
    self._phi = Phi
    self._omega = Omega
    self._verbose = verbose
    self._stats = Statistics(instanceName, "Genetic Algorithm")
    self._bestIndividual = None
    self._defaultPermutation = createPermutations(
        [i for i in range(self._instance.m)])
    self._defaultIntervals = createIntervals(
        [i for i in range(self._instance.n)], self._instance.m)
def simulate(episode, workers, model, optim, rewardQueue, batch_save, path):
    [w.start() for w in workers]
    stats = Statistics(episode)
    while True:
        episode += 1
        if episode % batch_save == 0:
            torch.save(model.state_dict(), path + "models/" + str(episode))
            torch.save(optim.state_dict(), path + "optims/" + str(episode))
        reward = rewardQueue.get()
        stats.update(reward)