def send_message(self, event):
    """Filter, normalise and emit a single event to Output/Stats.

    NOTE(review): the bare ``return`` after ``Stats.new_event`` makes the
    entire consolidation/connection branch below it unreachable (dead
    code) — confirm whether the early return or the trailing code should
    be removed.
    """
    # Drop events already seen by the OS cache or explicitly excluded.
    if self._event_os_cached(event):
        return
    if self._exclude_event(event):
        return
    # use default values for some empty attributes
    event = self._plugin_defaults(event)
    Output.event(event)
    Stats.new_event(event)
    return
    # check for consolidation  (unreachable — see docstring)
    if self.conn is not None:
        try:
            self.conn.send(str(event))
        except:
            # Send failed: rebuild the server connection once and retry.
            id = self._plugin.get("config", "plugin_id")
            c = ServerConnPro(self._conf, id)
            self.conn = c.connect(0, 10)
            try:
                self.conn.send(str(event))
            except:
                return
        logger.info(str(event).rstrip())
    elif not self.consolidation.insert(event):
        Output.event(event)
        Stats.new_event(event)
def test_lastn_stats(self):
    """get_lastn_stats(n) should yield one row per game with last-n score
    columns for both teams, using -1 where there is not enough history.

    Fix: ``DataFrame.ix`` was removed from pandas; use label-based
    ``DataFrame.loc`` instead.
    """
    games = pd.DataFrame({'Date': {1: '1/1/2015', 2: '1/2/2015', 3: '1/3/2015'},
                          'H_Team_ID': {1: 1, 2: 1, 3: 2},
                          'A_Team_ID': {1: 3, 2: 2, 3: 1},
                          'A_Score': {1: 1, 2: 2, 3: 3},
                          'H_Score': {1: 2, 2: 2, 3: 2},
                          'Outcome': {1: 1, 2: 0, 3: 1}})
    stats = Stats(games, 'test', 'Date', 'H_Team_ID', 'A_Team_ID', 'Outcome', ['test'])
    l1 = stats.get_lastn_stats(1)
    size = len(l1)
    exp_size = 3
    self.assertEqual(size, exp_size)
    cols = set(l1.columns.values)
    exp_cols = {'Outcome', 'H_Score_1', 'A_Score_1', 'H_O_Score_1', 'A_O_Score_1'}
    self.assertEqual(cols, exp_cols)
    t1 = l1.loc[3, 'H_Score_1']
    exp_t1 = 2
    self.assertEqual(t1, exp_t1)
    l2 = stats.get_lastn_stats(2)
    t2 = l2.loc[3, 'A_Score_2']
    exp_t2 = 2
    self.assertEqual(t2, exp_t2)
    # -1 marks "not enough prior games" for the first fixture.
    t21 = l2.loc[1, 'A_Score_2']
    exp_t21 = -1
    self.assertEqual(t21, exp_t21)
    t2o = l2.loc[3, 'H_O_Score_2']
    exp_t2o = 2
    self.assertEqual(t2o, exp_t2o)
def run(self):
    """Run one full SimPy simulation until ``self.simulation_end``,
    then print collected statistics."""
    defines.CountArriveTimes()
    env = simpy.Environment()
    # The generator registers its own processes on the environment.
    RequestGenerator(env)
    Stats.log(message="\n-- Simulation run started - Simulation time {} minutes... --".format(env.now))
    env.run(until=self.simulation_end)
    Stats.log(message="\n-- Simulation run finished - Simulation time {} minutes... --".format(env.now))
    self.printStats()
def __init__(self, name):
    """Create a player with default warrior avatar, empty inventory and
    no skills allocated yet."""
    self.name = name
    self.skill_tree = None   # assigned later — presumably on class selection
    self.skill_point = 0     # unspent skill points
    self.stats = Stats(self)
    self.avatar = WarriorAvatar()
    self.backpack = Backpack(self)
    self.equipments = Equipments(self)
def reconnect(self, attempts=0, waittime=10.0):
    """Reset the current connection by closing and reopening it.

    Retries indefinitely until ``connect`` returns a usable connection.
    """
    self.close()
    time.sleep(1)
    Stats.server_reconnect()
    # Keep calling connect() until it yields something other than None.
    while self.connect(attempts, waittime) is None:
        pass
def error(self, new_x, actual_y):
    """Return a Stats object accumulating prediction residuals.

    :param new_x: input samples, one per expected output
    :param actual_y: expected outputs, same length as ``new_x``
    :returns: recalculated ``Stats`` with one residual per sample

    Bug fix: each residual must subtract the *paired* scalar ``y``;
    the original subtracted the whole ``actual_y`` sequence.
    """
    assert len(new_x) == len(actual_y)
    stats = Stats()
    for x, y in zip(new_x, actual_y):
        stats.add(self.predict(x) - y)
    stats.recalc()
    assert stats.count == len(new_x)
    return stats
def index(season=2015):
    """Render the championship-belt page for *season* (Python 2 view).

    Replays every game of the season, tracking which team holds the
    "belt", then renders the template with the game log and upcoming
    championship games.
    """
    season = int(season)
    champ = season - 1
    # render current season
    if (not (champ in availableSeasons)):
        # render season not available
        print 'no data for ' + str(season)
        return redirect(url_for('index'))
    #data = season
    parser = HReferenceParser('app/static/data/' + str(season) + '.csv')
    games = parser.getGames()
    schedule = Schedule(games)
    gameLog = GameLog()
    stats = Stats()
    # The previous season's champion starts with the belt.
    beltHolder = availableSeasons[champ]
    defendingChamp = beltHolder
    beltGame = None
    for g in schedule.games:
        beltGame = stats.analyzeGame(g, beltHolder)
        if beltGame:
            gameLog.addGame(beltGame)
            beltHolder = beltGame.getBeltHolderAfterGame()
    upcomingChampGame = schedule.getUpcomingChampionshipGame(beltHolder)
    upcomingChampGameIfHomeTeamWins = None
    upcomingChampGameIfAwayTeamWins = None
    if upcomingChampGame:
        # Look one game ahead for either possible outcome.
        upcomingChampGameIfHomeTeamWins = schedule.getUpcomingChampionshipGame(
            upcomingChampGame.getHomeTeam(), upcomingChampGame.getAwayTeam())
        upcomingChampGameIfAwayTeamWins = schedule.getUpcomingChampionshipGame(
            upcomingChampGame.getAwayTeam(), upcomingChampGame.getHomeTeam())
    data = { 'id' : beltHolder.getID(), 'name' : beltHolder.getName() }
    return render_template('index.html',
        games = gameLog.getGames(),
        availableSeasons = availableSeasons,
        defendingChamp = defendingChamp,
        beltHolder = beltHolder,
        isOngoingSeason = season,
        stats = stats,
        gameLog = gameLog,
        upcomingChampGame = upcomingChampGame,
        upcomingChampGameIfHomeTeamWins = upcomingChampGameIfHomeTeamWins,
        upcomingChampGameIfAwayTeamWins = upcomingChampGameIfAwayTeamWins,
        sortedStats = stats.getSortedStats(),
        currentSeason = season,
    )
def __init__(self, ID_num, num_docs, doc_front_rate, doc_back_rate, patient_rate,
             department_size, waiting_size, admit_rate, labs_enabled=True,
             lab_rate=20, CT_enabled=True, num_CTs=1, CT_rate=15, verbose=True):
    """Set up an emergency-department simulation: doctors, optional CT
    scanners, optional laboratory, waiting room and stats collector.

    Rates are presumably in minutes unless noted otherwise — TODO confirm
    against the Doctor/CT/Laboratory implementations.
    """
    self.ID_num = ID_num
    # Priority queues for the electronic rack and radiology orders.
    self.erack = queue.PriorityQueue()
    self.rads_queue = queue.PriorityQueue()
    self.time = 0
    self.num_CTs = num_CTs
    self.CT_rate = CT_rate
    self.lab_rate = lab_rate
    self.num_docs = num_docs
    self.labs_enabled = labs_enabled
    self.CT_enabled = CT_enabled
    self.DoctorList = []
    self.CTList = []
    self.DispoList = []
    self.AdmitList = []
    self.patient_rate = patient_rate  ## patient rate in terms of new patients per hour
    self.department_size = department_size
    self.waiting_size = waiting_size
    # Waiting room is bounded: arrivals beyond capacity cannot queue.
    self.WR = queue.PriorityQueue(waiting_size)
    self.admit_rate = admit_rate  ## average time in minutes to admit a patient
    self.doc_front_rate = doc_front_rate
    self.doc_back_rate = doc_back_rate
    self.verbose = verbose  ## use debug / status messages
    for i in range(self.num_docs):
        self.DoctorList.append(
            Doctor(self, 1, self.doc_front_rate, self.doc_back_rate, 8))
    if self.CT_enabled:
        for j in range(self.num_CTs):
            self.CTList.append(CT(self, self.CT_rate))
    if self.labs_enabled:
        self.Laboratory = Laboratory(self, self.lab_rate)
    self.stats = Stats(num_docs, patient_rate, department_size, waiting_size)
def test_get_last_n_games(self):
    """get_last_n_games(n, team_id, date) returns at most *n* of the
    team's games played before *date* — team 1 has two prior games, so
    asking for 1 yields 1 and asking for 4 yields 2."""
    games = pd.DataFrame({'Date': {1: '1/1/2015', 2: '1/2/2015', 3: '1/3/2015'},
                          'H_Team_ID': {1: 1, 2: 1, 3: 2},
                          'A_Team_ID': {1: 3, 2: 2, 3: 1},
                          'Score': {1: 1, 2: 2, 3: 3}})
    stats = Stats(games, 'test', 'Date', 'H_Team_ID', 'A_Team_ID', None, [])
    last1 = len(stats.get_last_n_games(1, 1, '1/3/2015'))
    exp_last1 = 1
    self.assertEqual(last1, exp_last1)
    # More requested than available: capped at the two games played.
    last2 = len(stats.get_last_n_games(4, 1, '1/3/2015'))
    exp_last2 = 2
    self.assertEqual(last2, exp_last2)
def index(season=2015):
    """Render the championship-belt page for *season* (Python 2 view).

    Replays the season's schedule to determine the current belt holder,
    then renders the template with log, stats and upcoming title games.
    """
    season = int(season)
    champ = season - 1
    # render current season
    if (not (champ in availableSeasons)):
        # render season not available
        print 'no data for ' + str(season)
        return redirect(url_for('index'))
    #data = season
    parser = HReferenceParser('app/static/data/' + str(season) + '.csv')
    games = parser.getGames()
    schedule = Schedule(games)
    gameLog = GameLog()
    stats = Stats()
    # The previous season's champion defends the belt.
    beltHolder = availableSeasons[champ]
    defendingChamp = beltHolder
    beltGame = None
    for g in schedule.games:
        beltGame = stats.analyzeGame(g, beltHolder)
        if beltGame:
            gameLog.addGame(beltGame)
            beltHolder = beltGame.getBeltHolderAfterGame()
    upcomingChampGame = schedule.getUpcomingChampionshipGame(beltHolder)
    upcomingChampGameIfHomeTeamWins = None
    upcomingChampGameIfAwayTeamWins = None
    if upcomingChampGame:
        # Pre-compute the next title game for either possible winner.
        upcomingChampGameIfHomeTeamWins = schedule.getUpcomingChampionshipGame(
            upcomingChampGame.getHomeTeam(), upcomingChampGame.getAwayTeam())
        upcomingChampGameIfAwayTeamWins = schedule.getUpcomingChampionshipGame(
            upcomingChampGame.getAwayTeam(), upcomingChampGame.getHomeTeam())
    data = {'id': beltHolder.getID(), 'name': beltHolder.getName()}
    return render_template(
        'index.html',
        games=gameLog.getGames(),
        availableSeasons=availableSeasons,
        defendingChamp=defendingChamp,
        beltHolder=beltHolder,
        isOngoingSeason=season,
        stats=stats,
        gameLog=gameLog,
        upcomingChampGame=upcomingChampGame,
        upcomingChampGameIfHomeTeamWins=upcomingChampGameIfHomeTeamWins,
        upcomingChampGameIfAwayTeamWins=upcomingChampGameIfAwayTeamWins,
        sortedStats=stats.getSortedStats(),
        currentSeason=season,
    )
def send_message(self, event):
    """Filter, normalise and emit a single event.

    Events already cached by the OS plugin or matching the exclusion
    rules are dropped silently.
    """
    # Short-circuit keeps the original check order: cache first, then exclusion.
    if self._event_os_cached(event) or self._exclude_event(event):
        return
    # use default values for some empty attributes
    event = self._plugin_defaults(event)
    Output.event(event)
    Stats.new_event(event)
def test_get_avg(self):
    """get_avg averages the 'Score' columns for a team; the final flag
    selects own scores (0: (2+2+3)/3) vs opponent scores (1: (1+2+2)/3)."""
    games = pd.DataFrame({'Date': {1: '1/1/2015', 2: '1/2/2015', 3: '1/3/2015'},
                          'H_Team_ID': {1: 1, 2: 1, 3: 2},
                          'A_Team_ID': {1: 3, 2: 2, 3: 1},
                          'A_Score': {1: 1, 2: 2, 3: 3},
                          'H_Score': {1: 2, 2: 2, 3: 2}})
    stats = Stats(games, 'test', 'Date', 'H_Team_ID', 'A_Team_ID', None, [])
    av = stats.get_avg(games, 'Score', 1, 0)
    exp_av = 7.0 / 3
    self.assertEqual(av, exp_av)
    avo = stats.get_avg(games, 'Score', 1, 1)
    exp_avo = 5.0 / 3
    self.assertEqual(avo, exp_avo)
def reconnect(self, attempts=0, waittime=10.0):
    """Close the current connection and retry connecting up to
    MAX_TRIES times; mark the worker stopped if every attempt fails."""
    self.close()
    time.sleep(1)
    Stats.server_reconnect(self.server_ip)
    for _ in range(MAX_TRIES):
        if self.connect(attempts, waittime) is not None:
            break
    else:
        # Loop exhausted without a successful connect: shut down.
        self.__stopped = True
        self.__keep_working = False
def findfavoritesbeforeht():
    """Scan live first-half games and send Telegram alerts when a
    pre-match favorite is behind (or not ahead near half-time).

    Relies on module-level ``allgamesdf``, ``Stats``, ``summarytext``
    and ``Telegram``.  NOTE(review): the two loops duplicate the
    summary/stats lookup, and the bare ``except`` hides *all* stats
    errors, not just a missing file.
    """
    livefhgames = allgamesdf[(allgamesdf['progress'] == 'live')]
    print('\n{} live FH games found'.format(len(livefhgames)))
    # Pass 1: favorite currently losing.
    msg_title = '# Favorites fell behind #\n'
    for index, row in livefhgames.iterrows():
        summary = 'Reached {}\' in {} - {} at {}-{}'.format(row['timer'], row['team_home'], row['team_away'], row['hgoals'], row['ggoals']) + '\n' + \
                  'Check stats at https://777score.com/{}\n'.format(row['777url'])
        try:
            s = Stats(row['match_id']).flatjsonfile
            stats = summarytext(s)
        except:
            stats = 'No stats file found'
        if (row['fav_home'] == 'x' and row['hgoals'] < row['ggoals']) or (
                row['fav_away'] == 'x' and row['hgoals'] > row['ggoals']):
            msg_favbefht = msg_title + summary + '\nodds: {} - {}\n'.format(
                row['pmodd1'], row['pmodd2']) + stats
            Telegram().send_message(chat_id=-1001403993640,
                                    msg=(row['match_id'], "FHG", msg_favbefht))
    # Pass 2: favorite not ahead in minutes 31-45.
    msg_title = '# Favorites not ahead before HT #\n'
    for index, row in livefhgames.iterrows():
        summary = 'Reached {}\' in {} - {} at {}-{}'.format(row['timer'], row['team_home'], row['team_away'], row['hgoals'], row['ggoals']) + '\n' + \
                  'Check stats at https://777score.com/{}\n'.format(row['777url'])
        try:
            s = Stats(row['match_id']).flatjsonfile
            stats = summarytext(s)
        except:
            stats = 'No stats file found'
        if (30 < int(row['timer']) < 46) and (
                (row['fav_home'] == 'x' and row['hgoals'] <= row['ggoals']) or
                (row['fav_away'] == 'x' and row['hgoals'] >= row['ggoals'])):
            msg_favnotaheadbfht = msg_title + summary + '\nodds: {} - {}\n'.format(
                row['pmodd1'], row['pmodd2']) + stats
            Telegram().send_message(chat_id=-1001403993640,
                                    msg=(row['match_id'], "FHG", msg_favnotaheadbfht))
class TestStats(unittest.TestCase):
    """Unit tests for the Stats battle-stat container."""

    def setUp(self):
        # Baseline valid base-stat values shared by the tests.
        self.valid_hp = 34
        self.valid_attack = 49
        self.valid_defense = 49
        self.valid_speed = 45
        self.valid_special = 65
        self.valid_stat = Stats(self.valid_hp, self.valid_attack, \
                                self.valid_defense, self.valid_speed, self.valid_special)

    def test_attack_force(self):
        """attack_force(level) == (2*level + 10) * attack; bad input raises."""
        # Test for valid arguments
        level1 = 5
        level2 = 10
        self.assertEqual(self.valid_stat.attack_force(level1),
                         (2 * level1 + 10) * self.valid_attack)
        self.assertEqual(self.valid_stat.attack_force(level2),
                         (2 * level2 + 10) * self.valid_attack)
        # Test with absurd arguments
        self.assertRaises(ValueError, self.valid_stat.attack_force, -1)
        self.assertRaises(TypeError, self.valid_stat.attack_force, object())

    def test_defence_force(self):
        """defense_force() == defense * 250."""
        # Regular test
        self.assertEqual(self.valid_stat.defense_force(),
                         self.valid_defense * 250)

    def test_critical(self):
        """critical(n) is at least 1; negative or non-int input raises."""
        # Test with regular arguments
        self.assertTrue(self.valid_stat.critical(10) >= 1)
        # Test with absurd arguments
        self.assertRaises(ValueError, self.valid_stat.critical, -1)
        self.assertRaises(TypeError, self.valid_stat.critical, object())

    def test_decrease_life(self):
        """decrease_life(x) reduces _hp by x, clamped at zero."""
        # Test valid values
        for _ in range(100):
            x = randrange(self.valid_hp + 1)
            valid_stats = Stats(self.valid_hp, self.valid_attack,
                                self.valid_defense, self.valid_speed, self.valid_special)
            valid_stats.decrease_life(x)
            self.assertEqual(valid_stats._hp, self.valid_hp - x)
        # Test crazy values
        valid_stats = Stats(self.valid_hp, self.valid_attack, \
                            self.valid_defense, self.valid_speed, self.valid_special)
        valid_stats.decrease_life(self.valid_hp)
        self.assertEqual(0, valid_stats._hp)
        valid_stats = Stats(self.valid_hp, self.valid_attack, \
                            self.valid_defense, self.valid_speed, self.valid_special)
def __init__(self):
    """Set up the bot simulation: mutator, graphics HUD labels, map and stats."""
    # self.field = []
    self.bots = []
    self.mutator = Mutator()
    self.graphics = BotGraphics()
    self.generateMap()
    # HUD slots: label text shown at fixed positions on the overlay.
    self.graphics.setStat(0, "ITERATION")
    self.graphics.setStat(2, "MOVE")
    self.graphics.setStat(4, "LAST BEST")
    self.stat = Stats()
def state_save(self):
    """State 03: begin a new measurement cycle.

    Opens a fresh raw-trace file (when enabled), installs a new Stats
    object shared with the receiver, and returns the next state name:
    "ping" to start another cycle, or "idle" once MAX_CYCLE is reached
    (MAX_CYCLE == -1 means run forever).

    Fix: replaced the non-idiomatic ``== True`` comparison with a plain
    truth test.
    """
    log = logging.getLogger("agent.ping_manager.save")
    if self.MAX_CYCLE == -1 or self.cycle.countCycle < self.MAX_CYCLE:
        self.startCycle = time.time()
        self.cycle.countCycle += 1
        # CREATE NEW RAW FILE
        r_file = None
        if self.SAVE_RAW_TRACES:
            filename = str(self.hostname) + "_" + str(int(self.startCycle)) + ".rw"
            # Ownership of r_file passes to the receiver via set_stats below.
            r_file = open(os.path.join(self.RAW_DIR, filename), 'w')
            r_file.write("#ID\tTARGET_IP\tSEQ_N\tCYCLE\tRTT\tTIME_RECEIVED\tTIME_SENT\n")
        # CREATE STATS REFERENCES
        self.shared.stats = Stats(self.shared.destList, self.cycle.nDests,
                                  self.startCycle, self.cycle.countCycle,
                                  self.shared.parameters)
        self.shared.receiver.receive.set_stats(self.shared.stats, r_file)
        log.info("State 03 finished")
        return "ping"  # start new ping_cycle
    else:
        log.info("State 03 finished")
        return "idle"  # stop pinging
def processFolder(datasetPath, binaryRootPath):
    """Call your executable for all sequences in all categories.

    Walks <datasetPath>/<category>/<video>, compares each valid video
    folder against its binary results folder, and accumulates per-category
    and overall statistics.
    """
    stats = Stats(datasetPath)  #STATS
    for category in getDirectories(datasetPath):
        stats.addCategories(category)  #STATS
        categoryPath = os.path.join(datasetPath, category)
        for video in getDirectories(categoryPath):
            videoPath = os.path.join(categoryPath, video)
            binaryPath = os.path.join(binaryRootPath, category, video)
            if isValidVideoFolder(videoPath):
                confusionMatrix = compareWithGroungtruth(videoPath, binaryPath)
                stats.update(category, video, confusionMatrix)
        # Flush this category's results before moving on.
        stats.writeCategoryResult(category)
    stats.writeOverallResults()
class Watcher:
    """Watch the configured game directory and dispatch filesystem
    events to Handler via a watchdog Observer."""

    # NOTE(review): evaluated at import time — the config file must exist
    # on disk before this module is imported.
    Config = Stats.ConfigData()
    DIRECTORY_TO_WATCH = Config["App"]["Game"]["path"]

    def __init__(self):
        # Make sure the stats directory exists and counters are zeroed.
        Stats.CreateDirectory()
        Stats.ResetStats()
        print("Listening for games..")
        self.observer = Observer()

    def run(self):
        """Start the recursive observer and idle until an error stops it."""
        event_handler = Handler()
        self.observer.schedule(event_handler, self.DIRECTORY_TO_WATCH, recursive=True)
        self.observer.start()
        try:
            while True:
                time.sleep(5)
        except Exception as err:
            self.observer.stop()
            print(err)
        self.observer.join()
def findunderdogsearly():
    """Alert via Telegram when an underdog is level with the favorite but
    clearly more dangerous by attack stats.

    NOTE(review): the bare ``except`` wraps the whole evaluation, so any
    error in the comparison logic (not just a missing stats file) is
    silently swallowed.
    """
    livefhgames = allgamesdf[(allgamesdf['progress'] == 'live')]
    print('\n{} live FH games found'.format(len(livefhgames)))
    msg_title = '# Underdogs slightly better #\n'
    for index, row in livefhgames.iterrows():
        summary = 'Reached {}\' in {} - {} at {}-{}'.format(row['timer'], row['team_home'], row['team_away'], row['hgoals'], row['ggoals']) + '\n' + \
                  'Check stats at https://777score.com/{}\n'.format(row['777url'])
        stats = ''
        try:
            s = Stats(row['match_id'])
            stats = s.flatjsonfile
            stats_text = str(s)
            # Only consider drawn games where one side is the favorite.
            if (row['fav_home'] == 'x' and row['hgoals'] == row['ggoals']) or (
                    row['fav_away'] == 'x' and row['hgoals'] == row['ggoals']):
                # Thresholds on dangerous-attack ratio and absolute counts —
                # presumably tuned empirically; confirm units of 'reldiff'.
                if (stats['dangerous attacks reldiff'] > 140 and row['fav_away'] == 'x'
                        and int(stats['dangerous attacks home']) > 20) or (
                        stats['dangerous attacks reldiff'] < 80 and row['fav_home'] == 'x'
                        and int(stats['dangerous attacks away']) > 20):
                    msg_underdog = msg_title + summary + '\nodds: {} - {}\n'.format(
                        row['pmodd1'], row['pmodd2']) + stats_text
                    Telegram().send_message(chat_id=-1001403993640,
                                            msg=(row['match_id'], "FHG", msg_underdog))
        except:
            stats_text = 'No stats file found'
def start(self):
    """Generate data from the GUI spinboxes, fit a function iteratively
    until chi-squared is acceptable, then draw the plot (Python 2)."""
    print 'Generating data...'
    # pass spinbox values (from the UI) to the data generator
    self.gen.getData(self.spinbox_amp.value(),self.spinbox_freq.value(),self.spinbox_ilePkt.value(),self.spinbox_rozrzut.value(),self.spinbox_przesuniecie.value(),self.spinbox_zakresOd.value(),self.spinbox_zakresDo.value())
    # run the creator
    self.gen.creator()
    # pass data to the fitter
    self.fit.getData(self.gen.returnX(),self.gen.returnYn())
    chio = None
    oneMoreGuess = 0
    # iteratively search for a suitable frequency, chosen by comparing
    # against the chi-squared threshold
    print "Fitting..."
    while ((chio == None) or (chio > self.chi2Error)) and (oneMoreGuess < self.spinbox_freq.maximum()):
        # fit the function with the current frequency guess
        self.fit.fit(self.fit.guess(oneMoreGuess))
        chio = Stats.chi(self.gen.returnY(),self.fit.returnFittedData())
        oneMoreGuess += 0.1
    self.chi2 = chio
    if self.chi2 <= self.chi2Error:
        print 'Fitting status: OK'
    else:
        print 'Fitting status: Failure'
    # print the fitted values
    self.fitted_args = self.fit.printfitted_args()
    # create the plotter
    print 'Drawing plot...'
    self.DrawPlot()
    # refresh the GUI
    self.UpdateGui()
    print 'All done.'
    print '-----------------'
def __init__(self, name = 'sub', service = SUBSCRIBER_SERVICES, port_map = None,
             num = 1, channel_start = 0,
             tx_port = PORT_TX_DEFAULT, rx_port = PORT_RX_DEFAULT,
             iface = INTF_RX_DEFAULT, iface_mcast = INTF_TX_DEFAULT,
             mcast_cb = None, loginType = 'wireless'):
    """Create a test subscriber bound to tx/rx interfaces resolved from
    *port_map*, with its requested services parsed into a lookup map."""
    self.tx_port = tx_port
    self.rx_port = rx_port
    self.port_map = port_map or g_subscriber_port_map
    try:
        self.tx_intf = self.port_map[tx_port]
        self.rx_intf = self.port_map[rx_port]
    except:
        # Fall back to the class defaults when the ports are not mapped.
        self.tx_intf = self.port_map[self.PORT_TX_DEFAULT]
        self.rx_intf = self.port_map[self.PORT_RX_DEFAULT]
    log.info('Subscriber %s, rx interface %s, uplink interface %s' %(name, self.rx_intf, self.tx_intf))
    Channels.__init__(self, num, channel_start = channel_start,
                      iface = self.rx_intf, iface_mcast = self.tx_intf,
                      mcast_cb = mcast_cb)
    self.name = name
    self.service = service
    self.service_map = {}
    # Whitespace-separated service names become membership flags.
    services = self.service.strip().split(' ')
    for s in services:
        self.service_map[s] = True
    self.loginType = loginType
    ##start streaming channels
    self.join_map = {}
    ##accumulated join recv stats
    self.join_rx_stats = Stats()
    self.recv_timeout = False
def k_means(self, n_clusters, n_data, dataset=None):
    """Cluster recipes with KMeans, print the top ingredients and a few
    sample recipes per cluster, and return the fitted model."""
    if dataset is None:
        _, _, dataset = self.get_dataset(n_data)
    # Learning
    kmeans = KMeans(n_clusters=n_clusters).fit(dataset)
    # Printing
    for label in range(n_clusters):
        label_index = [
            i for i, x in enumerate(kmeans.labels_) if x == label
        ]
        recipes = [self.jr.recipes[index] for index in label_index]
        ingredients_ranked = Stats(self.jr).get_ingr_rank(recipes)
        print("\n\nCluster", label, ":")
        ranking_string = ""
        for index in range(10):
            ingredient = ingredients_ranked[index]
            # 'unique' entries are skipped — presumably a sentinel rank.
            if ingredient[0] != 'unique':
                ranking_string += ingredient[1][0] + " (" + str(
                    ingredient[1][1]) + "), "
        print(ranking_string + '\n')
        # Print up to four sample recipes from this cluster.
        for index_recipe in label_index[:4]:
            self.jr.id = index_recipe
            self.jr.read_recipe()
    # NOTE(review): indentation reconstructed from flattened source —
    # confirm the return belongs outside the cluster loop.
    return kmeans
def send_message(self, event):
    """Filter, normalise and emit a single event to Output/Stats.

    Cleanup: removed stale commented-out EventIdm type check and the
    redundant trailing ``return`` — behavior is unchanged.
    """
    if self._event_os_cached(event):
        return
    if self._exclude_event(event):
        return
    # use default values for some empty attributes
    event = self._plugin_defaults(event)
    Output.event(event)
    Stats.new_event(event)
def setUp(self):
    """Build a baseline Stats fixture with valid base-stat values."""
    self.valid_hp = 34
    self.valid_attack = 49
    self.valid_defense = 49
    self.valid_speed = 45
    self.valid_special = 65
    self.valid_stat = Stats(self.valid_hp, self.valid_attack, \
                            self.valid_defense, self.valid_speed, self.valid_special)
def reconnect(self, attempts=0, waittime=10.0):
    '''Reset the current connection by closing and reopening it'''
    # Bail out if another connect attempt is already in flight.
    if self.__runningConnect:
        return
    self.close()
    time.sleep(1)
    Stats.server_reconnect(self.server_ip)
    for _attempt in range(MAX_TRIES):
        if self.connect(attempts, waittime) is not None:
            break
    else:
        # Every retry failed: stop this worker for good.
        self.__stopped = True
        self.__keep_working = False
def post(self):
    """Authenticate a user by email (or username) and password.

    The "email" field may contain either an email address or a username;
    both are tried in that order.  Returns ``{"user": ...}`` with the
    user's stats embedded on success, otherwise ``{"error": ...}``.

    Idiom fix: querysets are tested for truthiness instead of
    ``len(...) == 0``; the password branch uses a guard clause.
    """
    user_request_parser = RequestParser(bundle_errors=True)
    user_request_parser.add_argument("password", required=True)
    user_request_parser.add_argument("email", required=True)
    args = user_request_parser.parse_args()
    users = User.objects(email=args["email"])
    if not users:
        # No email match — maybe the caller supplied a username.
        users = User.objects(username=args["email"])
    if not users:
        return {"error": "Incorrect username/email"}
    if not User.check_password(users[0].password, args["password"]):
        return {"error": "Incorrect Password"}
    user = User.return_helper(users[0])
    stats = Stats.objects(userId=user["id"])
    user["stats"] = Stats.return_helper(stats[0])
    return {"user": user}
class Reddit:
    """Scrape fresh r/memeeconomy submissions and count '!invest' replies.

    NOTE(review): the praw client and Stats are created at class-definition
    time from environment variables — importing this module requires the
    CLIENT_ID/CLIENT_SECRET/REDDIT_* variables to be set.
    """
    reddit = praw.Reddit(client_id=os.environ['CLIENT_ID'],
                         client_secret=os.environ['CLIENT_SECRET'],
                         password=os.environ['REDDIT_PASSWORD'],
                         user_agent=os.environ['USER_AGENT'],
                         username=os.environ['REDDIT_USERNAME'])
    data = MemeData()
    collect_data = True
    stats = Stats(reddit)

    def get_investments(self, comment):
        """Recursively count '!invest' replies beneath *comment*."""
        cnt = 0
        comment.replies.replace_more(limit=100)
        for reply in comment.replies:
            if '!invest' in reply.body:
                cnt += 1
            # Recurse into sub-threads answered by the investor bot.
            for reply in reply.replies:
                if reply.author.name == 'MemeInvestor_bot':
                    cnt += self.get_investments(reply)
        return cnt

    def scan(self):
        """Return metadata dicts for submissions younger than 4 minutes,
        including their current investment ratio."""
        retour = []
        for submission in self.reddit.subreddit('memeeconomy').new(limit=15):
            # Age of the submission in minutes.
            time_delta = (
                int(datetime.datetime.timestamp(datetime.datetime.today()))
                - submission.created_utc) / 60
            posted_at = datetime.datetime.fromtimestamp(
                submission.created_utc).strftime('%H:%M:%S')
            # .new() is newest-first, so everything after this is older too.
            if time_delta > 4:
                break
            investments = 0
            submission.comments.replace_more(limit=None)
            for comment in submission.comments:
                if comment.author.name == 'MemeInvestor_bot':
                    invest_comment = comment
                    investments = self.get_investments(invest_comment)
                    break
            ratio = investments / time_delta
            meme = {
                'id': str(submission.id),
                'title': submission.title,
                'updoots': submission.ups,
                'investements': investments,
                'time': posted_at,
                'time_stamp': str(submission.created_utc),
                'ratio': str(ratio),
                'flair': str(submission.author_flair_text),
                'upvotes': None
            }
            # Sample only memes in the 3-4 minute window for the dataset.
            if self.collect_data and 3 <= time_delta < 4:
                #self.stats.post_stats(meme)
                self.data.add(meme)
            retour.append(meme)
        return retour
def __init__(self, root: object, size=50):
    """Create a simulation with the given field size.

    :root: tkinter.Tk graphics object
    """
    self.size = size
    self._sapiens = []  # all sapiens in the simulation
    self._field = Field(size)
    self.step = 0
    self._view = SimulatorView(root, size)
    # Display colour per epidemic state.
    self._colours = {
        State.SUSCEPTIBLE: 'slate blue',
        State.INFECTED: 'red',
        State.RECOVERED: 'spring green',
        State.DEAD: 'black'
    }
    self._stats = Stats()
    self.reset()
def post(self):
    """Register a new user, rejecting duplicate email or username.

    On success a fresh Stats document is created for the account and
    embedded in the returned user payload.

    Idiom fix: querysets are tested for truthiness instead of
    ``len(...) > 0``.
    """
    user_request_parser = RequestParser(bundle_errors=True)
    user_request_parser.add_argument("password", required=True)
    user_request_parser.add_argument("username", required=True)
    user_request_parser.add_argument("email", required=True)
    args = user_request_parser.parse_args()
    if User.objects(email=args["email"]):
        return {"error": "That email is taken"}
    if User.objects(username=args["username"]):
        return {"error": "That username is taken"}
    user = User(email=args["email"], username=args["username"],
                password=User.set_password(args["password"]))
    user.save()
    user = User.return_helper(user)
    # Every new account starts with an empty Stats document.
    stat = Stats(userId=user['id'])
    stat.save()
    user["stats"] = Stats.return_helper(stat)
    return {"user": user}
def handle_success(self, line):
    """Handle a success acknowledgement line from the device.

    NOTE(review): indentation reconstructed from flattened source —
    confirm that ``ws.send_device_stats`` runs on every call and not
    only inside the inner branch.
    """
    print('')
    # Skip error/echo lines — presumably '!'/space markers; confirm protocol.
    if line[2] != '!' and line[1] != ' ':
        pending = commands.remove_from_buffer(self.device)
        if pending == 0 and self.data[0] in ['A', 'M', 'D']:
            # Last pending command acknowledged: record fresh stats.
            self.update_device_stats(Stats.create(self.device, self.data))
        else:
            if pending == 0:
                mqtt.publish_stats(self.device)
    ws.send_device_stats(self.device)
def evaluate(self, rule_name):
    """Evaluate a monitor watch-rule and emit an event when it matches.

    Returns True when evaluation is finished for this cycle (match found
    or nothing to compare), False when no match was found.

    NOTE(review): inside the try block the local ``value`` is rebound to
    ``int(self.watch_rule["value"])``, shadowing the monitor value used
    in the condition — harmless here (only used for the log string) but
    confusing.
    """
    if self.first_value is None:
        logger.debug(
            "Can not extract value (arg1) from monitor response or no initial value to compare with"
        )
        return True
    value = None
    monitor_response = self.get_data(rule_name)
    if not monitor_response:
        logger.warning("No data received from monitor")
        return True
    else:
        value = self.get_value(monitor_response, rule_name)
        if value is None:
            return True
        #if not value:
        #    continue
        if self.eval_condition(cond=self.watch_rule["condition"],
                               arg1=self.first_value,
                               arg2=value,
                               value=int(self.watch_rule["value"])):
            self.watch_rule["type"] = "monitor"
            try:
                cond = self.watch_rule["condition"]
                arg1 = self.first_value
                arg2 = value
                value = int(self.watch_rule["value"])
                comm = self.queries
                log = "Monitor Command: %s , Monitor expresion evaluation: %s(arg2) <%s> %s(arg1) + %s(value)? , Command Response: %s" % (
                    str(comm), str(arg2), str(cond), str(arg1), str(value),
                    monitor_response.replace("\n", "\r"))
            except:
                # Best-effort log construction; fall back to a marker string.
                log = "Monitor Exception"
            self.watch_rule = self._plugin_defaults(self.watch_rule, log)
            Output.event(self.watch_rule)
            Stats.new_event(self.watch_rule)
            return True
    logger.debug("No data matching the watch-rule received from monitor")
    return False
def findfhg(sport=sport, date=today):
    """Scan live first-half games (minute 31-45) still at 0-0 and send
    a Telegram alert; watch-list games ('fhg' flag) get a dedicated
    message.

    NOTE(review): the default arguments bind module-level ``sport`` and
    ``today`` at definition time; the bare ``except`` hides all stats
    errors, not just a missing file.
    """
    print('Finding FHG for {} at {}'.format(date, sport))
    allgamesdf = pd.read_csv(
        path + 'datafiles/{}/{}/allgames_{}_{}.csv'.format(date, sport, date, sport))
    livegames = allgamesdf[(allgamesdf['islive'] == 'x')]
    # First half only.
    livefhgames = livegames[(livegames['timer'].astype(int) < 46)]
    print('\n{} live FH games found'.format(len(livefhgames)))
    latefhgames = livefhgames[(livefhgames['timer'].astype(int) > 30)]
    fhg_watch = latefhgames[latefhgames['fhg'] == 'x']
    if not latefhgames.empty:
        print('###################################')
        print('# Late FHG                        #')
        print('###################################')
        msg_list_title = '# FHG from List Alert #\n'
        msg_late00_title = '# FH still 0-0 Alert #\n'
        for index, row in latefhgames.iterrows():
            summary = 'Reached {}\' in {} - {} at {}-{}'.format(row['timer'],row['team_home'],row['team_away'],row['hgoals'],row['ggoals']) + '\n' + \
                      'Check stats at https://777score.com/{}\n'.format(row['777url'])
            try:
                s = Stats(row['match_id']).flatjsonfile
                stats = summarytext(s)
            except:
                stats = 'No stats file found'
            print('{}\' {}-{} {} - {} '.format(row['timer'], row['hgoals'],
                                               row['ggoals'], row['team_home'],
                                               row['team_away']))
            if row['fhg'] == 'x' and row['hgoals'] == '0' and row['ggoals'] == '0':
                # Watch-list game still goalless: high-priority alert.
                msg_list = msg_list_title + summary + stats
                #TwitterMsg().senddm(userids=userids_list, msg=(row['match_id'], "FHG_list", msg_list))
                Telegram().send_message(chat_id=telegram_chat_id,
                                        msg=(row['match_id'], "FHG_list", msg_list))
            elif row['hgoals'] == '0' and row['ggoals'] == '0':
                msg_late00 = msg_late00_title + summary + stats
                #TwitterMsg().senddm(userids=userids, msg=(row['match_id'], "FHG", msg_late00))
                #Telegram().send_message(chat_id=telegram_chat_id, msg=(row['match_id'], "FHG", msg_late00))
    else:
        print("no FHG candidates \n")
class Player:
    """A playable character: name, stats, avatar, inventory and skills."""

    def __init__(self, name):
        """Create a player with default warrior avatar and no skills yet."""
        self.name = name
        self.skill_tree = None   # assigned later — presumably on class selection
        self.skill_point = 0     # unspent skill points
        self.stats = Stats(self)
        self.avatar = WarriorAvatar()
        self.backpack = Backpack(self)
        self.equipments = Equipments(self)

    def draw_player_avatar(self):
        """Print a titled ASCII rendering of the player's avatar."""
        print_title_fancily(None, "player avatar")
        self.avatar.show_avatar()

    def draw_player_skill_tree(self):
        """Render the skill tree (requires skill_tree to be set)."""
        self.skill_tree.show()

    def show_player_stats(self):
        """Print all player statistics followed by a blank line."""
        self.stats.print_all_stats()
        print()
def handle_endtag(self, tag):
    """HTMLParser end-tag hook: accumulate table cells into playerData,
    record header columns, and persist a Stats row per player <tr>.

    Python 2 code: uses ``dict.has_key`` and ``unicode``.
    NOTE(review): indentation reconstructed from flattened source —
    confirm ``self.currentData = ""`` resets on every end tag.
    """
    # print "Encountered an end tag :", tag
    if tag == "table":
        self.section = "End"
    elif self.section == "Table":
        if tag == "td" and self.subSection > 3:
            # Empty cells are recorded as literal "0".
            if self.currentData == "":
                self.playerData += "0,"
            else:
                self.playerData += self.currentData + ","
        elif tag == "th" and self.subSection == 3:
            theHeader = self.currentData
            # Disambiguate duplicate column headers by suffixing 'a'.
            while self.headers.has_key(theHeader):
                theHeader += 'a'
            self.headers[theHeader] = self.index
            #print (theHeader + ":%d" % self.index)
            self.index += 1
        elif tag == "tr" and self.subSection > 3:
            #encounted an end of player row
            fieldArray = self.playerData.split(',')
            #query for player based on name (field 1) and date (field 4)
            #print 'processing ' + unicode(fieldArray[1], errors="ignore")
            newPlayer = Stats.findPlayer(unicode(fieldArray[1], errors="ignore"), fieldArray[4], self.year)
            if newPlayer is None:
                newPlayer = Stats()
            newPlayer.initFromString(self.year, self.playerData, self.headers)
            newPlayer.save()
            self.playerData = ""
    self.currentData = ""
def load(self) -> ModeConfiguration:
    """
    Load all configuration from config.ini file specified in config directory
    :rtype: ModeConfiguration
    """
    config = configparser.ConfigParser()
    config.read(os.path.join('config', 'config.ini'))
    dictionary = {}
    for section in config.sections():
        dictionary[section] = {}
        for option in config.options(section):
            try:
                # Every option is expected to parse as an integer.
                dictionary[section][option] = int(
                    config.get(section, option))
            except ValueError as ve:
                Stats.log(message="CONFIGURATION FILE IS NOT CORRECT!")
                Stats.log(message=f"ERROR: {ve}")
                # NOTE(review): prefer sys.exit / raising in library code.
                exit(1)
    # Only the ModelArgs section is retained on the instance.
    self.__configuration = dictionary['ModelArgs']
    return self
def processFolder(datasetPath, binaryRootPath):
    """Call your executable for all sequences in all categories.

    Also writes a per-video F-score to <datasetPath>/fscore.txt.

    Fixes: the output path is built with ``os.path.join`` instead of a
    hard-coded ``'\\'`` separator (which only works on Windows), and the
    file is opened with a ``with`` block so it is closed even when the
    comparison raises.
    """
    stats = Stats(datasetPath)  #STATS
    with open(os.path.join(datasetPath, 'fscore.txt'), 'w') as f:
        for category in getDirectories(datasetPath):
            stats.addCategories(category)  #STATS
            categoryPath = os.path.join(datasetPath, category)
            for video in getDirectories(categoryPath):
                videoPath = os.path.join(categoryPath, video)
                binaryPath = os.path.join(binaryRootPath, category, video)
                if isValidVideoFolder(videoPath):
                    confusionMatrix = compareWithGroungtruth(videoPath, binaryPath)
                    stats.update(category, video, confusionMatrix)
                    # alpha guards against a zero denominator (no detections).
                    alpha = 0.000001
                    fscore = (2.0 * confusionMatrix[0]) / (((2.0 * confusionMatrix[0]) + confusionMatrix[1] + confusionMatrix[2]) + alpha)
                    f.write(video + ' : ' + str(fscore) + '\n')
                else:
                    print ('Invalid folder : ' + videoPath)
            stats.writeCategoryResult(category)
        stats.writeOverallResults()
def evaluate(self, rule_name):
    """Evaluate a monitor watch-rule and emit an event when it matches.

    Returns True when this cycle is finished (match emitted or nothing
    to compare), False when no match was found.

    NOTE(review): inside the try block ``value`` is rebound to
    ``int(self.watch_rule["value"])``, shadowing the monitor value —
    only used for the log string here, but confusing.
    """
    if self.first_value is None:
        logger.debug("Can not extract value (arg1) from monitor response or no initial value to compare with")
        return True
    value = None
    monitor_response = self.get_data(rule_name)
    if not monitor_response:
        logger.warning("No data received from monitor")
        return True
    else:
        value = self.get_value(monitor_response, rule_name)
        if value is None:
            return True
        #if not value:
        #    continue
        if self.eval_condition(cond=self.watch_rule["condition"],
                               arg1=self.first_value,
                               arg2=value,
                               value=int(self.watch_rule["value"])):
            self.watch_rule["type"] = "monitor"
            try:
                cond = self.watch_rule["condition"]
                arg1 = self.first_value
                arg2 = value
                value = int(self.watch_rule["value"])
                comm = self.queries
                log = "Monitor Command: %s , Monitor expresion evaluation: %s(arg2) <%s> %s(arg1) + %s(value)? , Command Response: %s" % (str(comm), str(arg2), str(cond), str(arg1), str(value), monitor_response.replace("\n", "\r"))
            except:
                # Best-effort log construction; fall back to a marker string.
                log = "Monitor Exception"
            self.watch_rule = self._plugin_defaults(self.watch_rule, log)
            Output.event(self.watch_rule)
            Stats.new_event(self.watch_rule)
            return True
    logger.debug("No data matching the watch-rule received from monitor")
    return False
def generate_request(self, env: Environment):
    """SimPy process: generate charging-car arrivals forever, logging
    progress roughly once per percent of the configured simulation length."""
    progress = 1
    step_size = defines.SIMULATION_LEN / 100
    tmp_step = step_size
    while True:
        Stats.requests.append(1)
        station_index = self.choose_station()
        station = self.resource_list[station_index]
        Stats.chosen_stations.append(station_index)
        if env.now > tmp_step:
            Stats.log(message=f'Progress {progress} %', target='stdout')
            tmp_step = tmp_step + step_size
            progress = progress + 1
        # The car registers its own process on the environment.
        ChargingCar(env, station)
        is_night = defines.is_night(env.now)
        if is_night:
            # Night inter-arrival time differs from daytime.
            yield env.timeout(defines.arriveTimesPerNight)
        else:
            yield env.timeout(defines.arriveTimesPerDay)
def printTotalStats(h, out):
    """Print averaged simulation statistics for step size *h*, append a
    space-separated record to *out*, and reset the accumulators."""
    stats = Stats.Stats()
    print("Total stats for simulation with h = {0}:".format(h))
    # Gather all averages up front, then report them.
    cr = stats.clientRequestsTotalAvg()
    ct = stats.clientTimeWaitedTotalAvg()/1000
    rf = stats.reconstructedFilesAvg()
    lm = stats.lostSwitchMessagesAvg()
    print(" * Average client requests: {0:.1f}".format(cr))
    print(" * Average client time waited: {0:.1f} ms".format(ct))
    print(" * Average reconstructed files: {0:.0f}".format(rf))
    print(" * Average lost switch messages: {0:.0f}".format(lm))
    out.write("{0} {1} {2} {3} {4}\n".format(h, cr, ct, rf, lm))
    stats.reset()
    sys.stdout.flush()
def create_group(stocks=("UAL", "JBLU"), weights=(50, 50), group_name="Custom",
                 start="1989-12-31", end="2049-12-31"):
    """Build a weighted portfolio from *stocks* and return its Stats.

    :param stocks: tickers to include (any sequence)
    :param weights: initial dollar weight per ticker, same order as stocks
    :param group_name: label passed through to the Stats object
    :param start, end: date-string slice applied to the return series
    :returns: ``Stats`` over the per-stock and portfolio daily returns

    Fix: mutable list default arguments replaced by tuples (shared
    mutable defaults are a classic Python pitfall); converted back to
    lists internally so pandas indexing behaves exactly as before.
    """
    stocks = list(stocks)
    weights = list(weights)
    g = stk.r[stocks]
    g = g[start:end]
    # calculate group return
    cum_g = (1 + g).cumprod() * weights
    # Seed the day before the window so pct_change has a baseline row.
    cum_g.loc[g.index.min() - timedelta(days=1)] = weights
    cum_g.insert(g.shape[1], "Portfolio", cum_g.sum(axis=1))
    cum_g.sort_index(inplace=True)
    port_r = cum_g["Portfolio"].pct_change()
    g.insert(g.shape[1], "Portfolio", port_r)
    return Stats(g, "daily", group_name)
def handle_data(self, line):
    """Parse a data line from the device and update local device state.

    ``self.data`` holds the payload; *line* is the raw line — presumably
    with a status prefix ('*' marks lines to ignore); confirm protocol.
    """
    if self.data[0] == '?' and self.device.is_available():
        # Device requested a sync: push missing timers/settings, flush queue.
        self.device.request_missing_timers()
        self.device.request_missing_settings()
        commands.send(self.device)
    elif line[0] != '*' and (self.data[0] == 'D' or self.data[0] == 'A') and self.data[1] == ' ':
        # 'D '/'A ' payloads carry fresh device statistics.
        self.update_device_stats(Stats.create(self.device, self.data))
    elif len(self.data) >= 5 and self.data[1] == '[' and self.data[
            4] == ']' and self.data[5] == '=':
        # Payload shape: X[nn]=value
        if self.data[0] in ['G', 'S']:
            self.device.set_setting(self.data[2:4], self.data[6:])
        if self.data[0] in ['R', 'W']:
            self.device.set_timer(int(self.data[2:3]), int(self.data[3:4]),
                                  self.data[6:])
def test_decrease_life(self):
    """Stats.decrease_life(x) must subtract x from _hp and never go below 0."""
    # Test valid values: 100 random decrements in [0, valid_hp].
    for _ in range(100):
        x = randrange(self.valid_hp + 1)
        valid_stats = Stats(self.valid_hp, self.valid_attack,
                            self.valid_defense, self.valid_speed, self.valid_special)
        valid_stats.decrease_life(x)
        self.assertEqual(valid_stats._hp, self.valid_hp - x)
    # Test crazy values: removing exactly all HP must floor at zero.
    valid_stats = Stats(self.valid_hp, self.valid_attack, \
        self.valid_defense, self.valid_speed, self.valid_special)
    valid_stats.decrease_life(self.valid_hp)
    self.assertEqual(0, valid_stats._hp)
    # NOTE(review): this trailing construction has no decrease_life call or
    # assertion — the test body appears truncated at this point; confirm
    # against the full file.
    valid_stats = Stats(self.valid_hp, self.valid_attack, \
        self.valid_defense, self.valid_speed, self.valid_special)
# --- Test-harness banner: describes the Monte-Carlo star-fitting setup ---
# (Python 2 print statements; NumTries/ImWidth etc. are defined above this
# excerpt.)
print "Num Tries =", NumTries, "number of cases per star shape"
print "CCD Size =", ImWidth, "x", ImWidth, "pixels"
print
print "Each try is a star whose center is randomly distributed over"
print "a range of -FWHM/2, FWHM/2 with respect to the"
print "center of the CCD = the center of the slit."
print
print "Reported errors and statistics on these errors are in percent:"
print "reported error (%) = 100 * (meas value - act value) / act value"
print
print "The slit is along y."


def pctErr(meas, act):
    # Percent error of a measured value relative to the actual value.
    return (meas - act) * 100.0 / float(act)


# Accumulators for fit-quality statistics and bad-fit counters.
fwhmStats = Stats()
amplStats = Stats()
bkgndStats = Stats()
nBadCtr = 0
nBad = 0
print
print "fwhm ampl bg xCtr yCtr maskWid xCtMeas yCtMeas fitFWHM fitAmpl fitBg chiSq fwhmErr amplErr bgErr msgs"
# Background level is sky counts plus the CCD bias offset.
bkgnd = Sky + CCDInfo.bias
# Sweep every (amplitude, FWHM, mask width) combination.
for ampl in AmplValues:
    for fwhm in FWHMValues:
        sigma = fwhm / PyGuide.FWHMPerSigma
        for maskMult in MaskWidthsPerFWHM:
            maskWidth = maskMult * fwhm
            maskRad = int(maskWidth / 2.0)
            # Reset the slit mask for this combination.
            # NOTE(review): the loop body continues beyond this excerpt.
            mask[:,:] = 0
def run(self):
    """Watchdog main loop: periodically report agent time/timezone and the
    run state of every configured plugin, restarting stopped plugins that
    are enabled and not stopped by the server.

    Loops until Watchdog.__shutdown_running is set; sleeps self.interval
    seconds between passes.
    """
    first_run = True
    while not Watchdog.__shutdown_running:
        tzone = str(self.conf.get("plugin-defaults", "tzone"))
        if tzone in all_timezones:
            agent_tz = timezone(tzone)
            local_dt = agent_tz.localize(datetime.now())
            str_tzone = time.strftime("%z")
            matches = self.patternlocalized.match(str_tzone)
            # Fix: the original test was `type(matches) is not None`, which
            # is always true (type() never returns None), so a failed match
            # crashed on matches.group(...) instead of falling back to 0.
            if matches is not None:
                tzone_symbol = matches.group("tzone_symbol")
                tzone_hour = matches.group("tzone_hour")
                tzone_min = matches.group("tzone_min")
                # Convert the "+HHMM"-style offset to fractional hours.
                tzone = (float(tzone_hour) * 60 + float(tzone_min)) / 60
                if tzone_symbol == "-":
                    tzone = -1 * tzone
            else:
                tzone = 0
                logger.info("Warning: TimeZone doesn't match: %s --set to 0" % tzone)
        else:
            logger.info("Warning: Agent invalid agent tzone: %s --set to 0" % tzone)
            tzone = 0
        # Report current agent time + tz offset to the server and the log.
        t = datetime.now()
        Output.plugin_state(self.AGENT_DATE % (str(mktime(t.timetuple())), tzone))
        logger.info(self.AGENT_DATE % (str(time.mktime(t.timetuple())), tzone))
        for plugin in self.plugins:
            id = plugin.get("config", "plugin_id")
            process = plugin.get("config", "process")
            process_aux = plugin.get("config", "process_aux")
            name = plugin.get("config", "name")
            logger.debug("Checking process %s for plugin %s." \
                % (process, name))
            # Was this plugin explicitly stopped by the server?
            sttopedBySrv = False
            for pid in Watchdog.__pluginID_stoppedByServer:
                if pid == id:
                    sttopedBySrv = True
                    break
            # 1) unknown process to monitoring
            if not process:
                logger.debug("plugin (%s) has an unknown state" % (name))
                Output.plugin_state(self.PLUGIN_UNKNOWN_STATE_MSG % (id))
            # 2) process is running
            elif self.pidof(process, process_aux) is not None:
                logger.debug("plugin (%s) is running" % (name))
                Output.plugin_state(self.PLUGIN_START_STATE_MSG % (id))
                # check for for plugin restart
                self._restart_services(plugin)
            # 3) process is not running
            else:
                logger.debug("plugin (%s) is not running" % (name))
                Output.plugin_state(self.PLUGIN_STOP_STATE_MSG % (id))
                # restart services (if start=yes in plugin
                # configuration and plugin is enabled)
                if plugin.getboolean("config", "start") and \
                        plugin.getboolean("config", "enable") and not sttopedBySrv:
                    self.start_process(plugin)
                    # Count the restart only if it actually came back up and
                    # this is not the initial pass.
                    if self.pidof(process, process_aux) is not None and not first_run:
                        Stats.watchdog_restart(process)
            # send plugin enable/disable state
            if plugin.getboolean("config", "enable"):
                logger.debug("plugin (%s) is enabled" % (name))
                Output.plugin_state(self.PLUGIN_ENABLE_STATE_MSG % (id))
            else:
                logger.debug("plugin (%s) is disabled" % (name))
                Output.plugin_state(self.PLUGIN_DISABLE_STATE_MSG % (id))
        time.sleep(float(self.interval))
        first_run = False
def testingTFimpact():
    """Analyse JAVELIN lag estimates for several transfer functions (TFs).

    For each TF result file this fits a skewed Gaussian to the estimate
    distribution, reports maximum-likelihood and median lag statistics, and
    saves a per-TF histogram figure plus one combined comparison figure.
    (Python 2 code; relies on module-level np/opt/plt/tck/Stacking/Stats.)
    """
    # These are the data we'll be using
    folder = '/Users/uqnsomme/Documents/Obelix/FakeAGN/TFs/'
    names = ['0Phi', '001Phi', '010Phi', '025Phi', '01TH', 'Gamma', 'Gauss', 'Sphere']
    nn = len(names)
    maxLag = 1000.0
    # Initialisation
    bins = np.arange(0.0, maxLag, 10)   # Grid/x-axis for histograms
    binssmall = np.arange(0.0, maxLag)  # Grid/x-axis for accurate median calculation
    est = np.zeros((nn, 1000))          # Array to hold all the time lag estimates from Javelin
    n = np.zeros((nn, 99))              # y-axis for hisograms
    nsmall = np.zeros((nn, 999))        # y-axis for accurate median calculation
    # We'll be making the same kind of plot for all of the TFs
    for i in range(nn):
        print 'About to start reading ' + names[i]
        # Import data
        lagMatrName = folder + 'TFs' + names[i] + '.dat'
        lagMatr = np.loadtxt(lagMatrName)   # Load in results file
        true = lagMatr[:, 0]                # Time lag, as created by me
        est[i,:] = lagMatr[:, 1]            # Estimates found by Javelin
        # Depending on what physical scenario we're considering, choose
        # appropriate axes and labels for figure
        if (i==0):
            maxN = 4e-2
            ymal = 1e-2
            ymil = 5e-3
        else:
            maxN = 1e-2
            ymal = 2e-3
            ymil = 1e-3
        # End if-statement
        # Calculate histograms to do some maths
        # NOTE(review): np.histogram(est, ...) flattens the FULL (nn, 1000)
        # array, mixing estimates from every TF (and the zero rows not yet
        # filled); this was probably meant to be est[i,:] — confirm.
        n[i,:], _ = np.histogram(est, bins=bins, normed=True)
        nsmall[i,:], _ = np.histogram(est, bins=binssmall, normed=True)
        # Fit skewed Gaussian distribution to the result to later calculate median
        skewFits, skewCovar = opt.curve_fit(Stacking.skewGaussian, bins[0:-1], n[i,:],
            p0=(1./np.std(n[i,:]), np.argmax(n[i,:]), 0, 0, 1))
        ySkew = Stacking.skewGaussian(binssmall, skewFits[0], skewFits[1], skewFits[2], skewFits[3], skewFits[4])
        # Determine maximum likelihood and its uncertainties
        llhood = Stats()
        llhood.best = -1000  # sentinel; overwritten by likeLimits1D
        llhood.minus = 0
        llhood.plus = 0
        llhood.likeLimits1D(ySkew, binssmall, 1)
        # Determine median and error estimates
        median, minus, plus = Stats.getMedianEstimates(binssmall, nsmall[i,:])
        # Print distribution properties to screen
        print "=" * 60, "\n"
        print "\tMaximum likelihood: tau0 =", llhood.best
        print "\tDifference to lower limit:", llhood.minus
        print "\tDifference to upper limit:", llhood.plus
        print "\tMedian:", median
        print "\tMedian lower:", minus
        print "\tMedian upper:", plus
        print "\n", "=" * 60
        # Create figure showing the distribution of estimates for a particular TF
        # NOTE(review): plt.hist(est, ...) also plots the full 2-D array
        # rather than est[i,:] — confirm intent (see histogram note above).
        fig = plt.figure(figsize=(6,10))
        plt.hist(est, bins=bins, normed=True, label='JAVELIN estimates')
        plt.plot(binssmall, ySkew, linewidth=2, label="Best skewed Gaussian fit")
        plt.plot([true[i], true[i]], [0, maxN], linewidth=2, label='True lag')
        # Specify figure properties
        plt.axis([0, maxLag, 0, maxN])
        plt.xlabel('Time lag [days]', fontsize='large')
        plt.ylabel('Number of instances', fontsize='large')
        plt.legend()
        ax = plt.axes()
        ax.xaxis.set_major_locator(tck.MultipleLocator(200))
        ax.xaxis.set_minor_locator(tck.MultipleLocator(100))
        ax.yaxis.set_major_locator(tck.MultipleLocator(ymal))
        ax.yaxis.set_minor_locator(tck.MultipleLocator(ymil))
        ax.set_aspect(maxLag/maxN)
        plt.savefig(folder+names[i]+'.png', dpi=500, bbox_inches='tight')
    # End for-loop
    # Create plot showing the different estimate distributions for all of the TFs
    # NOTE(review): est[4,:] is plotted twice (labelled both 'Face-on ring'
    # and 'Gauss'); the 'Gauss' entry is index 6 in `names`, so est[6,:] was
    # probably intended — confirm.
    fig = plt.figure(figsize=(6,10))
    plt.hist(est[4,:], bins=bins, normed=True, histtype='step', color='darkblue', label=r'$\mathrm{Face-on\ ring}$')
    plt.hist(est[7,:], bins=bins, normed=True, histtype='step', color='paleturquoise', label=r'$\mathrm{Sphere}$')
    plt.hist(est[1,:], bins=bins, normed=True, histtype='step', color='mediumorchid', label=r'$\phi=\pi/100$')
    plt.hist(est[2,:], bins=bins, normed=True, histtype='step', color='darkgreen', label=r'$\phi=\pi/10$')
    plt.hist(est[3,:], bins=bins, normed=True, histtype='step', color='black', label=r'$\phi=\pi/4$')
    plt.hist(est[5,:], bins=bins, normed=True, histtype='step', color='olivedrab', label=r'$\mathrm{Gamma}$')
    plt.hist(est[4,:], bins=bins, normed=True, histtype='step', color='pink', label=r'$\mathrm{Gauss}$')
    # `i` and `true` here are leftovers from the last loop iteration.
    plt.plot([true[i], true[i]], [0, maxN], 'r', label=r'$\mathrm{True\ lag}$')
    # Specify figure properties
    plt.axis([0, maxLag, 0, 0.009])
    plt.xlabel(r'$\mathrm{Time\ lag\ [days]}$', fontsize='large')
    plt.ylabel(r'$N_{norm}$', fontsize='large')
    plt.legend()
    ax = plt.axes()
    ax.xaxis.set_major_locator(tck.MultipleLocator(200))
    ax.xaxis.set_minor_locator(tck.MultipleLocator(100))
    ax.yaxis.set_major_locator(tck.MultipleLocator(0.001))
    ax.yaxis.set_minor_locator(tck.MultipleLocator(0.0005))
    ax.set_aspect(maxLag/maxN)
    plt.savefig('/Users/uqnsomme/Documents/ManyHists.png', dpi=500, bbox_inches='tight')
def test_start(self):
    """A single start() call must register exactly one invocation."""
    tracker = Stats()
    tracker.start('one', 'two')
    expected = {'one': 1}
    self.assertEqual(tracker.getStats(), expected)
class Subscriber(Channels):
    """Test-harness subscriber that joins/leaves multicast channels and
    accumulates per-channel packet/latency statistics.

    Python 2 code (uses dict.has_key). Depends on the Channels base class
    for join/leave/jump/recv and on module-level log/Stats/monotonic/
    g_subscriber_port_map/assert_equal.
    """
    # Default switch ports and veth interfaces for uplink (tx) / downlink (rx).
    PORT_TX_DEFAULT = 2
    PORT_RX_DEFAULT = 1
    INTF_TX_DEFAULT = 'veth2'
    INTF_RX_DEFAULT = 'veth0'
    # Indices into the per-channel 4-tuple of Stats in join_map.
    STATS_RX = 0
    STATS_TX = 1
    STATS_JOIN = 2
    STATS_LEAVE = 3
    SUBSCRIBER_SERVICES = 'DHCP IGMP TLS'

    def __init__(self, name = 'sub', service = SUBSCRIBER_SERVICES, port_map = None,
                 num = 1, channel_start = 0,
                 tx_port = PORT_TX_DEFAULT, rx_port = PORT_RX_DEFAULT,
                 iface = INTF_RX_DEFAULT, iface_mcast = INTF_TX_DEFAULT,
                 mcast_cb = None, loginType = 'wireless'):
        self.tx_port = tx_port
        self.rx_port = rx_port
        self.port_map = port_map or g_subscriber_port_map
        # Fall back to the default ports if the requested ones are unmapped.
        try:
            self.tx_intf = self.port_map[tx_port]
            self.rx_intf = self.port_map[rx_port]
        except:
            self.tx_intf = self.port_map[self.PORT_TX_DEFAULT]
            self.rx_intf = self.port_map[self.PORT_RX_DEFAULT]
        log.info('Subscriber %s, rx interface %s, uplink interface %s' %(name, self.rx_intf, self.tx_intf))
        Channels.__init__(self, num, channel_start = channel_start,
                          iface = self.rx_intf, iface_mcast = self.tx_intf, mcast_cb = mcast_cb)
        self.name = name
        self.service = service
        # Map of service name -> enabled, parsed from the space-separated list.
        self.service_map = {}
        services = self.service.strip().split(' ')
        for s in services:
            self.service_map[s] = True
        self.loginType = loginType
        ##start streaming channels
        self.join_map = {}
        ##accumulated join recv stats
        self.join_rx_stats = Stats()
        self.recv_timeout = False

    def has_service(self, service):
        # Case-tolerant lookup: try as given, then upper-cased.
        if self.service_map.has_key(service):
            return self.service_map[service]
        if self.service_map.has_key(service.upper()):
            return self.service_map[service.upper()]
        return False

    def channel_join_update(self, chan, join_time):
        # Fresh (rx, tx, join, leave) Stats tuple, then record the join time.
        self.join_map[chan] = ( Stats(), Stats(), Stats(), Stats() )
        self.channel_update(chan, self.STATS_JOIN, 1, t = join_time)

    def channel_join(self, chan = 0, delay = 2):
        '''Join a channel and create a send/recv stats map'''
        if self.join_map.has_key(chan):
            del self.join_map[chan]
        self.delay = delay
        chan, join_time = self.join(chan)
        self.channel_join_update(chan, join_time)
        return chan

    def channel_join_next(self, delay = 2):
        '''Joins the next channel leaving the last channel'''
        if self.last_chan:
            if self.join_map.has_key(self.last_chan):
                del self.join_map[self.last_chan]
        self.delay = delay
        chan, join_time = self.join_next()
        self.channel_join_update(chan, join_time)
        return chan

    def channel_jump(self, delay = 2):
        '''Jumps randomly to the next channel leaving the last channel'''
        if self.last_chan is not None:
            if self.join_map.has_key(self.last_chan):
                del self.join_map[self.last_chan]
        self.delay = delay
        chan, join_time = self.jump()
        self.channel_join_update(chan, join_time)
        return chan

    def channel_leave(self, chan = 0):
        # Drop accumulated stats for the channel before leaving it.
        if self.join_map.has_key(chan):
            del self.join_map[chan]
        self.leave(chan)

    def channel_update(self, chan, stats_type, packets, t=0):
        # Accept a single channel number or an iterable of channels.
        if type(chan) == type(0):
            chan_list = (chan,)
        else:
            chan_list = chan
        for c in chan_list:
            if self.join_map.has_key(c):
                self.join_map[c][stats_type].update(packets = packets, t = t)

    def channel_receive(self, chan, cb = None, count = 1, timeout = 5):
        log.info('Subscriber %s on port %s receiving from group %s, channel %d' %
                 (self.name, self.rx_intf, self.gaddr(chan), chan))
        r = self.recv(chan, cb = cb, count = count, timeout = timeout)
        if self.recv_timeout:
            ##Negative test case is disabled for now
            assert_equal(len(r), 0)

    def recv_channel_cb(self, pkt):
        """Per-packet callback: validate the group and record join latency."""
        ##First verify that we have received the packet for the joined instance
        log.info('Packet received for group %s, subscriber %s, port %s' %
                 (pkt[IP].dst, self.name, self.rx_intf))
        if self.recv_timeout:
            return
        chan = self.caddr(pkt[IP].dst)
        assert_equal(chan in self.join_map.keys(), True)
        # Latency from join to first packet, in microseconds.
        recv_time = monotonic.monotonic() * 1000000
        join_time = self.join_map[chan][self.STATS_JOIN].start
        delta = recv_time - join_time
        self.join_rx_stats.update(packets=1, t = delta, usecs = True)
        self.channel_update(chan, self.STATS_RX, 1, t = delta)
        log.debug('Packet received in %.3f usecs for group %s after join' %(delta, pkt[IP].dst))
def test_getInvocationCount(self):
    """getInvocationCount must grow by one for each start() on the same key."""
    tracker = Stats()
    tracker.start('two', 'three')
    self.assertEqual(tracker.getInvocationCount('two'), 1)
    tracker.start('two', 'four')
    self.assertEqual(tracker.getInvocationCount('two'), 2)
# NOTE(review): this excerpt begins mid-way through a parser.add_option(...)
# call — the opening of the call is outside this view.
                  help="Number of tests", dest="T", default=50)
(options, args) = parser.parse_args()
# Run one percolation experiment with the requested grid/options and draw it.
p = Percolation(options.L, options.R, options.N, options.O, (options.I, options.I))
print(p.run())
p.draw()
if options.T:
    # Repeat the experiment T times with N = L*L to collect threshold stats.
    p.N = options.L * options.L
    for _ in range(options.T):
        p.run()
    tests = sorted(p.tests)
    # Sample roughly 100 points of the empirical CDF of the results.
    pace = max(options.T // 100, 1)
    stats = {}
    for i in range(0, options.T, pace):
        stats[tests[i]] = float(i) / options.T
    l = list(zip(*list(stats.items())))
    import matplotlib.pyplot as plt
    plt.plot(l[0], l[1], 'ro')
    plt.axis([0, options.L * options.L, 0, 1])
    plt.show()
    # Summary statistics over the sampled test values.
    print("Mean: ", Stats.mean(l[0]))
    print("Deviation: ", Stats.stddev(l[0]))
    print("95 percent confidence interval: ", Stats.confidence(l[0]))
import pygame
from pygame.locals import *

# Minimal interactive test rig: build a ship from parts and drive it with
# the keyboard. RESOLUTION/MAX_FPS and the part classes come from above.
main_surface = pygame.display.set_mode(RESOLUTION)
test_fuel_tank_1 = FuelTank('small_tank')
test_fuel_tank_2 = FuelTank('medium_tank')
test_engine = Engine('basic_engine')
# NOTE(review): test_fuel_tank_1 (the same object) is passed twice; if
# FuelTank instances hold mutable fuel state the two slots share it —
# confirm whether two distinct small tanks were intended.
test_ship = Ship([test_fuel_tank_1, test_fuel_tank_1, test_fuel_tank_2, test_engine])
keyboard = Controller()
stats = Stats(test_ship)
clock = pygame.time.Clock()
running = True
# Main loop: poll input, clear the frame, apply throttle controls.
while running:
    clock.tick(MAX_FPS)
    keyboard.update()
    main_surface.fill([255, 255, 255])
    if keyboard.exit:
        running = False
    if K_LSHIFT in keyboard.pressed_keys:
        test_ship.change_throttle_by(1/90)
    # NOTE(review): the body of this branch is cut off at the end of the
    # excerpt (presumably the symmetric throttle-down call).
    if K_LCTRL in keyboard.pressed_keys:
# --- Test-harness banner for the centroid-accuracy Monte Carlo sweep ---
# (Python 2 print statements; configuration values come from above.)
print "though the final number reported is a normalized version:"
print " asymm = sum(var(r) * numPts(r)) / (pixNoise^2 * totPts)"
print "where"
print " pixNoise is the noise/pixel due to read noise and sky"
print " totPts = sum(numPts(r))"
print
print "The centroids are randomly distributed over"
print "a range of -FWHM/2, FWHM/2 with respect to the"
print "center of the CCD = the center of the slit."
print
print "The slit is along y so the error along y should be smaller than x."
print
print "fwhm ampl maskWid xErr yErr xUncert yUncert asymm totPix totCts rad msgs"
nBad = 0
# Accumulators for x/y centroid error statistics.
ctrXStats = Stats()
ctrYStats = Stats()
# Sweep every (amplitude, FWHM, mask width) combination.
for ampl in AmplValues:
    for fwhm in FWHMValues:
        sigma = fwhm / PyGuide.FWHMPerSigma
        for maskMult in MaskWidthsPerFWHM:
            maskWidth = maskMult * fwhm
            maskRad = int(maskWidth / 2.0)
            # Rebuild the slit mask: a horizontal band of width 2*maskRad+1
            # centred on the nominal centre.
            mask[:,:] = 0
            if maskRad > 0:
                mask[nomCtr[0] - maskRad: nomCtr[0] + maskRad + 1, :] = 1
            # Fixed seed so every combination sees the same random centres.
            numpy.random.seed(1)
            for ii in range(NumTries):
                # Actual star centre, jittered within +/- FWHM/2 of nominal.
                # NOTE(review): the inner loop body continues beyond this excerpt.
                actCtr = numpy.random.uniform(-fwhm/2.0, fwhm/2.0, size=(2,)) + nomCtr
def fitSkewedGaussianAndComputeMaxLLH(self, n, bins, nbins, fileName, number, latex=False, string=' ', plotFigures=False):
    """Fit a skewed Gaussian to a lag histogram and report max-likelihood
    lag estimates, comparing against the Javelin fit stored in *fileName*.

    Returns a 6-element array: [llh best, llh minus, llh plus,
    javelin best, javelin lower, javelin upper] (sentinels -1000/0/0 when
    *number* is None). Python 2 code (mixes print statements and print()).
    """
    # Read data from file (columns: _, best fit, lower limit, upper limit).
    data = np.loadtxt(fileName)
    bestFits = data[:, 1]
    lowLimit = data[:, 2]
    highLimit = data[:, 3]
    # Compute estimates for the requested entry.
    # NOTE(review): if number is None, numpy fancy indexing bestFits[None]
    # yields a (1, N) array rather than raising, so best/lower/upper become
    # arrays here — the `number != None` guards below suggest None is an
    # expected input; confirm intended behavior.
    best = bestFits[number]
    lower = bestFits[number] - lowLimit[number]
    upper = highLimit[number] - bestFits[number]
    # Initialise maximum likelihood parameters (sentinels, overwritten by
    # likeLimits1D below).
    llhood = Stats()
    llhood.best = -1000
    llhood.minus = 0
    llhood.plus = 0
    # Fit skewed Gaussian distribution to the result
    # Note that the number of bins is one larger than the number of results, so we need to remove the last bin
    # Find approximate position of the maximum and the width of the curve to easy the fitting process
    curveInfo = (np.max(bins), np.argmax(n[1:-10]), 0, 0, 1)
    skewFits, skewCovar = opt.curve_fit(self.skewGaussian, bins[1:-10], n[1:-10], p0=curveInfo)
    ySkew = self.skewGaussian(bins, skewFits[0], skewFits[1], skewFits[2], skewFits[3], skewFits[4])
    # Determine maximum likelihood and its uncertainties based on skewed Gaussian fit
    llhood.likeLimits1D(ySkew, bins, 1)
    ## OUTPUT
    # Plot if wanted (adds the fit curve to the caller's current figure).
    if (plotFigures == True):
        plt.plot(bins, ySkew, 'k--', linewidth=2, label="Skewed Gaussian fit")
        plt.legend(loc="center right", bbox_to_anchor=(1.018, 1.07), ncol=2)
    # End if-statement
    # Print distribution properties to screen
    if (latex==False):
        # Print in readable format
        print "~" * 60, "\n"
        print "\tstackResults"
        print "\tMaximum likelihood: tau0 =", llhood.best
        print "\tDifference to lower limit:", llhood.minus
        print "\tDifference to upper limit:", llhood.plus
        if (number != None):
            print "\tJavelin best fit value:", best
            print "\tDifference to Javelin lower limit:", lower
            print "\tDifference to Javelin upper limit:", upper
        # End if-statement
        print "\n", "~" * 60
    else:
        # Print in LaTeX format (a table row). Under Python 2 this print()
        # call prints a tuple — presumably tolerated in this code base.
        print(' ', string, '& $',round(llhood.best,1),'_{-',
              round(llhood.minus,1),'}^{+',round(llhood.plus,1),'} $ & $',
              round(best,1),'_{-',round(lower,1),'}^{+',
              round(upper,1),'} $ \\')
    # End if-statement
    # Decide what output to give from function
    if (number != None):
        return np.array([llhood.best, llhood.minus, llhood.plus, best, lower, upper])
    else:
        return np.array([llhood.best, llhood.minus, llhood.plus, -1000, 0, 0 ])
# Driver script: generate fake OzDES AGN light curves, run Javelin on them,
# and stack the recovered lags. (Python 2 print statements; helper functions
# and QSOtemplate/resDir/fName/params/string come from above this excerpt.)
section = 'oneline'
emLine = 'Hb'
# Magnitude bin edges used for the luminosity binning.
binends = np.array([-26.15, -25.75, -25.35, -24.95, -24.55, -24.15, -23.75,
                    -23.35, -22.85, -22.35])#, -21.55])
pm = 1.3
# [z_min, z_max, M_min, M_max] selection box.
limits = np.array([0., 0.74, -22.90-pm, -22.90])#MagBins[30], MagBins[31]])
number = findAGNnumber(limits, QSOtemplate)
#number = 1
print ' '
print 'Limits:', limits
print 'Number of AGN within limits:', number
# Initialisation
AGN = createOzDESAGN(QSOtemplate)
stack = Stacking()
stats = Stats()
conv = Convergence()
plotOzDESAGN(QSOtemplate, plotIndividuals=False, binends=binends, name='MgII')
# Create AGN
AGN.generateAGN(limits, True, number, resDir, fName)
output = AGN.generateLightCurveParameters(section, resDir+fName)
AGN.printLightCurveParametersToFile(output, resDir+params)
lag = printLagValueInParameterBin(resDir+params, limits, emLine=emLine)
# Do the Javelin magic
runJavelinOnFakeData(resDir+params, limits, number, resDir, string, emLine=emLine)
# Stack and analyse
# NOTE(review): this call is cut off at the end of the excerpt — the
# remaining arguments continue beyond this view.
LLH, lags, lagMaxes = stack.stackData(resDir, string, number, resDir+params,
                                      limits, meanTrueLag=lag, binsize=10,