def GET(self):
    """Render the Goal Stats page: one row per goal with its total count,
    percentage occurrence across all games, description, and leaders."""
    web.header("Content-Type", "text/html; charset=utf-8")
    db = utils.get_mongo_database()
    gstats_db = db.goal_stats
    goal_stats = list(gstats_db.find())
    # Denominator for the "% Occurrence" column.
    # NOTE(review): if the games collection is empty this raises
    # ZeroDivisionError below — confirm a non-empty DB is guaranteed.
    n = db.games.count()
    ret = standard_heading("CouncilRoom.com: Goal Stats")
    ret += '<span class="subhead">Goal Stats</span>\n<p>\n'
    ret += '<table width="50%">'
    ret += '<tr><td><th>Goal Name<th width="1%">Total Times Achieved<th width="1%">% Occurrence<th>Description<th>Leaders'
    # Most frequently achieved goals first.
    for g in sorted(goal_stats, key=lambda k: k['count'], reverse=True):
        goal_name = g['_id']
        ret += '<tr><td>'
        ret += '<img src="%s" alt="%s"/>' % (
            goals.GetGoalImageFilename(goal_name), goal_name)
        ret += '<th>%s<td align="right">%d<td align="right">%.2f<td align="center">%s' % (
            goal_name, g['count'], g['count']*100./n,
            goals.GetGoalDescription(goal_name))
        rank = 1
        ret += '<td>'
        # g['top'] is a list of (players, count) groups; players within one
        # group share the same count and are rendered as tied.
        for (players, count) in g['top']:
            if len(players)==1:
                ret += "%d) %s (%d)<br />" % (
                    rank, game.PlayerDeck.PlayerLink(players[0]), count)
            else:
                ret += "%d) %d tied with %d (" % (rank, len(players), count)
                links = [game.PlayerDeck.PlayerLink(player) for player in players]
                ret += ', '.join(links)
                ret += ')<br/>'
            # Ties consume multiple rank slots.
            rank += len(players)
    return ret
def test_rawgames_integration(self): """Validate IsotropicScraper for raw games""" # Careful, as right now this touches bits of production, such # as the S3 buckets. # TODO: Figure out how to get this pointed at a database for # use in integration tests. iso = isotropic.IsotropicScraper(utils.get_mongo_database(), 'unittest_raw_games') self.assertTrue(iso.is_rawgames_in_s3(datetime.date(2010, 10, 15))) self.assertRaisesRegexp(HTTPError, 'HTTP Error 404: Not Found', iso.copy_rawgames_to_s3, datetime.date(2009, 10, 15)) content = iso.get_rawgames_from_s3(datetime.date(2010, 10, 15)) self.assertEquals(content[0:7], 'BZh91AY', "Tar file signature") # TODO: Figure out how to run the lengthy tests that hit # actual files on Isotropic only in certain limited modes if None: iso.copy_rawgames_to_s3(datetime.date(2012, 10, 3)) # Scrape and insert a whole gameday count = iso.scrape_and_store_rawgames(datetime.date(2012, 9, 15)) self.assertEquals(count, 9999, "Inserted expected number of rawgames")
def get(self, request, *args, **kwargs):
    """Search view.

    Filters car documents in MongoDB by the submitted form parameters,
    resolves each hit back to its SQL ``Car`` row via ``sql_id``, and
    renders a paginated result list (10 per page).

    Fix: removed leftover debug ``print`` calls and the dead, unused
    example query dict ``a`` that was never passed to ``find``.
    """
    db = get_mongo_database()
    mongocars = db.cars
    form = SearchForm(request.GET)
    res = []
    context = {'form': form}
    if request.method == 'GET':
        if len(request.GET) > 0:
            # Translate submitted form fields into a Mongo query dict.
            searching_parametr = get_params_from_request(request.GET)
            for car in mongocars.find(searching_parametr):
                # Mongo documents only hold a back-reference to the SQL row.
                car_result = Car.objects.get(id=car.get('sql_id'))
                res.append(car_result)
    # Pagination: fall back to the first page on a non-integer 'page'
    # and to the last page when 'page' is out of range.
    paginator = Paginator(res, 10)
    page = self.request.GET.get('page')
    try:
        cars = paginator.page(page)
    except PageNotAnInteger:
        cars = paginator.page(1)
    except EmptyPage:
        cars = paginator.page(paginator.num_pages)
    context.update({
        'search_res': cars,
    })
    return render(request, 'search.html', context)
def main(parsed_args):
    """Aggregate per-goal statistics into db.goal_stats.

    For each known goal, stores its total achievement count and the top
    (>= 3) non-AI achievers, grouping ties at the same count.
    """
    db = utils.get_mongo_database()
    goal_db = db.goals
    gstats_db = db.goal_stats
    all_goals = goals.goal_check_funcs.keys()
    total_pcount = collections.defaultdict(int)
    goal_scanner = incremental_scanner.IncrementalScanner('goals', db)
    stat_scanner = incremental_scanner.IncrementalScanner('goal_stats', db)
    if not parsed_args.incremental:
        # Full rebuild: drop scan progress and previously computed stats.
        log.warning('resetting scanner and db')
        stat_scanner.reset()
        gstats_db.remove()
    log.info("Starting run: %s", stat_scanner.status_msg())
    # TODO: The following logic doesn't work now that goal calculation
    # doesn't happen with a scanner.
    # if goal_scanner.get_max_game_id() == stat_scanner.get_max_game_id():
    #     log.info("Stats already set! Skip")
    #     exit(0)
    log.info('all_goals %s', all_goals)
    for goal_name in all_goals:
        log.info("Working on %s", goal_name)
        found_goals_cursor = goal_db.find({'goals.goal_name': goal_name},
                                          {'goals.player': 1, '_id': 0})
        total = found_goals_cursor.count()
        log.info("Found %d instances of %s", total, goal_name)
        # Per-player achievement counts for this goal.
        pcount = collections.defaultdict(int)
        for goal in found_goals_cursor:
            player = goal['goals'][0]['player']
            pcount[player] += 1
            total_pcount[player] += 1
        psorted = sorted(pcount.iteritems(), key=operator.itemgetter(1),
                         reverse=True)
        # Walk the ranking until at least three leaders are collected,
        # grouping players tied at the same count; AI players are skipped.
        top = []
        leaders = 0
        i = 0
        while leaders < 3 and i < len(psorted):
            (player, count) = psorted[i]
            players = []
            if player not in AIs.names:
                players = [player]
            i += 1
            while i < len(psorted) and psorted[i][1] == count:
                if psorted[i][0] not in AIs.names:
                    players.append(psorted[i][0])
                i += 1
            leaders += len(players)
            if len(players) > 0:
                top.append((players, count))
        mongo_val = {'_id': goal_name, 'count': total, 'top': top}
        gstats_db.save(mongo_val)
    stat_scanner.set_max_game_id(goal_scanner.get_max_game_id())
    stat_scanner.save()
    log.info("Ending run: %s", stat_scanner.status_msg())
def main(parsed_args):
    """ Scan and update buy data"""
    start = time.time()
    db = utils.get_mongo_database()
    games = db.games
    output_db = db
    overall_stats = DeckBuyStats()
    scanner = incremental_scanner.IncrementalScanner(BUYS_COL_NAME, output_db)
    buy_collection = output_db[BUYS_COL_NAME]
    if not parsed_args.incremental:
        # Full rebuild: drop scan progress and previously stored stats.
        log.warning('resetting scanner and db')
        scanner.reset()
        buy_collection.drop()
    start_size = scanner.get_num_games()
    log.info("Starting run: %s", scanner.status_msg())
    do_scan(scanner, games, overall_stats, parsed_args.max_games)
    log.info("Ending run: %s", scanner.status_msg())
    end_size = scanner.get_num_games()
    if parsed_args.incremental:
        # Merge newly scanned stats into what is already stored in the DB.
        existing_overall_data = DeckBuyStats()
        utils.read_object_from_db(existing_overall_data, buy_collection, '')
        overall_stats.merge(existing_overall_data)
        def deck_freq(data_set):
            # Estate 'available' frequency is used here as a proxy for the
            # number of decks seen — presumably every deck records Estate
            # availability; TODO confirm against DeckBuyStats.
            return data_set[dominioncards.Estate].available.frequency()
        log.info('existing %s decks', deck_freq(existing_overall_data))
        log.info('after merge %s decks', deck_freq(overall_stats))
    utils.write_object_to_db(overall_stats, buy_collection, '')
    scanner.save()
def get_context_data(self, **kwargs):
    """Build template context for the basket page.

    Renders the basket contents and total price, and handles the
    'clear' and 'buy' URL actions.
    """
    context = super(ShowBasket, self).get_context_data(**kwargs)
    action = kwargs.get('action')
    if self.request.session.get('basket'):
        basket = self.request.session.get('basket')
        car_list = []
        total = 0
        for i in basket:
            # NOTE(review): indexing the basket with int(i) - 1 assumes each
            # stored entry doubles as a 1-based position in the list —
            # verify against the code that appends to session['basket'];
            # this looks like it may have been meant to be `car_id = i`.
            car_id = basket[int(i) - 1]
            car = get_object_or_404(Car, pk=car_id)
            car_list.append(car)
            total += int(car.price)
        context.update({'carlist': car_list, 'total': total})
    else:
        # First visit: seed the session with an empty basket.
        self.request.session.update({'basket': []})
    if action == "clear":
        self.request.session.update({'basket': []})
        context.update({'carlist': [], 'total': 0})
    if action == "buy":
        car_id = kwargs.get('car_id')
        car = get_object_or_404(Car, pk=car_id)
        # Remove the car from both the SQL table and the Mongo mirror,
        # then empty the basket.
        car.delete()
        db = get_mongo_database()
        mongocars = db.cars
        mongocars.remove({"sql_id": car_id})
        self.request.session.update({'basket': []})
        context.update({'carlist': [], 'total': 0})
    return context
def main(parsed_args):
    """Aggregate per-goal statistics into db.goal_stats.

    For each known goal, stores its total achievement count and the top
    (>= 3) achievers, grouping ties at the same count. No AI filtering
    is applied in this variant.
    """
    db = utils.get_mongo_database()
    goal_db = db.goals
    gstats_db = db.goal_stats
    all_goals = goals.goal_check_funcs.keys()
    total_pcount = collections.defaultdict(int)
    goal_scanner = incremental_scanner.IncrementalScanner('goals', db)
    stat_scanner = incremental_scanner.IncrementalScanner('goal_stats', db)
    if not parsed_args.incremental:
        # Full rebuild: drop scan progress and previously computed stats.
        log.warning('resetting scanner and db')
        stat_scanner.reset()
        gstats_db.remove()
    log.info("Starting run: %s", stat_scanner.status_msg())
    # TODO: The following logic doesn't work now that goal calculation
    # doesn't happen with a scanner.
    # if goal_scanner.get_max_game_id() == stat_scanner.get_max_game_id():
    #     log.info("Stats already set! Skip")
    #     exit(0)
    log.info('all_goals %s', all_goals)
    for goal_name in all_goals:
        log.info("Working on %s", goal_name)
        found_goals_cursor = goal_db.find({'goals.goal_name': goal_name}, {
            'goals.player': 1,
            '_id': 0
        })
        total = found_goals_cursor.count()
        log.info("Found %d instances of %s", total, goal_name)
        # Per-player achievement counts for this goal.
        pcount = collections.defaultdict(int)
        for goal in found_goals_cursor:
            player = goal['goals'][0]['player']
            pcount[player] += 1
            total_pcount[player] += 1
        psorted = sorted(pcount.iteritems(), key=operator.itemgetter(1),
                         reverse=True)
        # Walk the ranking until at least three leaders are collected,
        # grouping players tied at the same count.
        top = []
        leaders = 0
        i = 0
        while leaders < 3 and i < len(psorted):
            (player, count) = psorted[i]
            players = [player]
            i += 1
            while i < len(psorted) and psorted[i][1] == count:
                players.append(psorted[i][0])
                i += 1
            leaders += len(players)
            top.append((players, count))
        mongo_val = {'_id': goal_name, 'count': total, 'top': top}
        gstats_db.save(mongo_val)
    stat_scanner.set_max_game_id(goal_scanner.get_max_game_id())
    stat_scanner.save()
    log.info("Ending run: %s", stat_scanner.status_msg())
def main(args):
    """Recompute summarized game statistics for every player in args.players."""
    db = utils.get_mongo_database()
    games_collection = db.games
    stats_collection = db.game_stats
    for raw_name in args.players:
        log.debug("Processing top level player name %s", raw_name)
        # Player names are normalized before being matched against the DB.
        normalized = norm_name(raw_name)
        matching = list(games_collection.find({keys.PLAYERS: normalized}))
        calculate_game_stats(matching, stats_collection)
def GET(self):
    """JSON API: per-card gain statistics, optionally conditioned on up to
    two other cards being present in the supply.

    Query params:
      targets - comma-separated card names (default: all cards)
      cond1/cond2 - optional conditioning card names
      format - only 'json' is supported
    """
    web.header("Content-Type", "text/html; charset=utf-8")
    web.header("Access-Control-Allow-Origin", "*")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    # params:
    # targets, opt
    # cond1, opt
    # cond2, opt
    # format? json, csv?
    db = utils.get_mongo_database()
    targets = query_dict.get('targets', '').split(',')
    if sum(len(t) for t in targets) == 0:
        # No targets supplied: default to every known card.
        targets = card_info.card_names()
    # print targets
    def str_card_index(card_name):
        # Map a card name to its numeric index as a string; '' if unknown.
        title = card_info.sane_title(card_name)
        if title:
            return str(card_info.card_index(title))
        return ''
    target_inds = map(str_card_index, targets)
    # print targets, target_inds
    cond1 = str_card_index(query_dict.get('cond1', ''))
    cond2 = str_card_index(query_dict.get('cond2', ''))
    # Normalize condition order (string comparison) so the pair matches the
    # canonical key format stored in the card_supply collection.
    if cond1 < cond2:
        cond1, cond2 = cond2, cond1
    card_stats = {}
    for target_ind in target_inds:
        # Keys: '<target>;', '<target>;<cond1>', or '<target>;<cond1>,<cond2>'.
        key = target_ind + ';'
        if cond1:
            key += cond1
        if cond2:
            key += ',' + cond2
        db_val = db.card_supply.find_one({'_id': key})
        if db_val:
            small_gain_stat = SmallGainStat()
            small_gain_stat.from_primitive_object(db_val['vals'])
            card_name = card_info.card_names()[int(target_ind)]
            card_stats[card_name] = small_gain_stat
    format = query_dict.get('format', 'json')
    if format == 'json':
        readable_card_stats = {}
        for card_name, card_stat in card_stats.iteritems():
            readable_card_stats[card_name] = (
                card_stat.to_readable_primitive_object())
        return json.dumps(readable_card_stats)
    return 'unsupported format ' + format
def retrieve_test_game(game_id):
    """Store the raw game for the passed game id in the test data dir. """
    database = utils.get_mongo_database()
    document = database.raw_games.find_one({'_id': game_id})
    # Guard clause: nothing to write if the game is unknown.
    if document is None:
        print('could not find game ' + game_id)
        return
    # Raw game text is stored bz2-compressed; decode to unicode for the file.
    decompressed = bz2.decompress(document['text']).decode('utf-8')
    target_path = 'testing/testdata/' + game_id
    with codecs.open(target_path, encoding='utf-8', mode='w') as out:
        out.write(decompressed)
def main(args): """ Update analysis statistics. By default, do so incrementally, unless --noincremental argument is given.""" commit_after = 25000 database = utils.get_mongo_database() games = database.games output_collection_name = 'analysis' output_collection = database[output_collection_name] game_analysis = GamesAnalysis() scanner = incremental_scanner.IncrementalScanner(output_collection_name, database) if args.incremental: utils.read_object_from_db(game_analysis, output_collection, '') else: log.warning('resetting scanner and db') scanner.reset() output_file_name = 'static/output/all_games_card_stats.js' if not os.path.exists('static/output'): os.makedirs('static/output') log.info("Starting run: %s", scanner.status_msg()) for idx, raw_game in enumerate( utils.progress_meter(scanner.scan(games, {}))): try: game_analysis.analyze_game(Game(raw_game)) if args.max_games >= 0 and idx >= args.max_games: log.info("Reached max_games of %d", args.max_games) break if idx % commit_after == 0 and idx > 0: start = time.time() game_analysis.max_game_id = scanner.get_max_game_id() game_analysis.num_games = scanner.get_num_games() utils.write_object_to_db(game_analysis, output_collection, '') scanner.save() log.info("Committed calculations to the DB in %5.2fs", time.time() - start) except int, exception: log.exception('Exception occurred for %s in raw game %s', Game(raw_game).isotropic_url(), raw_game) raise
def GET(self):
    """Return a JSON array of {game: decks, id} for the requested ?player=."""
    web.header("Content-Type", "text/plain; charset=utf-8")
    params = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    requested_player = params['player']
    db = utils.get_mongo_database()
    # Player names are normalized before the lookup.
    canonical = norm_name(requested_player)
    cursor = db.games.find({PLAYERS: canonical})
    from pymongo import json_util
    records = []
    for doc in cursor:
        records.append({'game': doc[DECKS], 'id': doc['_id']})
    # json_util knows how to serialize Mongo-specific types (e.g. ObjectId).
    return json.dumps(records, default=json_util.default)
def GET(self):
    """Dump every game for the requested ?player= as a JSON list."""
    web.header("Content-Type", "text/plain; charset=utf-8")
    params = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    requested_player = params['player']
    db = utils.get_mongo_database()
    # Normalize the player name to match how games are stored.
    canonical = norm_name(requested_player)
    cursor = db.games.find({'players': canonical})
    from pymongo import json_util
    payload = [{'game': doc['decks'], 'id': doc['_id']} for doc in cursor]
    # json_util handles Mongo-specific types (e.g. ObjectId) in the dump.
    return json.dumps(payload, default=json_util.default)
def GET(self):
    """Render the openings page: TrueSkill ratings for two-card openings,
    optionally filtered to openings containing a selected card."""
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    db = utils.get_mongo_database()
    selected_card = ''
    if 'card' in query_dict:
        selected_card = query_dict['card']
    results = db.trueskill_openings.find({'_id': {'$regex': '^open:'}})
    openings = list(results)
    card_list = dominioncards.opening_cards()
    def split_opening(o):
        # '_id' looks like 'open:CardA+CardB'; returns singular card names.
        ret = o['_id'][len('open:'):].split('+')
        if ret == ['']:
            return []
        # Convert the __repr__() representation stored in the
        # database to the singular version of the card name.
        return [dominioncards.get_card(card).singular for card in ret]
    if selected_card not in ('All cards', ''):
        openings = [o for o in openings if selected_card in split_opening(o)]
    # Drop degenerate entries with no cards at all.
    openings = [o for o in openings if split_opening(o)]
    for opening in openings:
        # Conservative skill interval: mu +/- 3 sigma.
        floor = opening['mu'] - opening['sigma'] * 3
        ceil = opening['mu'] + opening['sigma'] * 3
        opening['level_key'] = make_level_key(floor, ceil)
        opening['level_str'] = make_level_str(floor, ceil)
        opening['skill_str'] = skill_str(opening['mu'], opening['sigma'])
        opening['cards'] = split_opening(opening)
        # Sort alphabetically, then (stably) by descending cost.
        opening['cards'].sort()
        opening['cards'].sort(key=lambda card: dominioncards.get_card(card).cost,
                              reverse=True)
        costs = [str(dominioncards.get_card(card).cost)
                 for card in opening['cards']]
        while len(costs) < 2:
            costs.append('-')
        opening['cost'] = '/'.join(costs)
    openings.sort(key=lambda opening: opening['level_key'])
    openings.reverse()
    if selected_card == '':
        # BUG FIX: this previously compared op['_id'] (a string such as
        # 'open:Silver+Silver') against a list, which is never equal, so the
        # Silver/Silver baseline was silently dropped from the default view.
        # Compare the parsed card list instead.
        openings = [op for op in openings
                    if op['level_key'][0] != 0
                    or op['cards'] == ['Silver', 'Silver']]
    render = web.template.render('')
    return render.openings_template(openings, card_list, selected_card)
def main(args): """ Update analysis statistics. By default, do so incrementally, unless --noincremental argument is given.""" commit_after = 25000 database = utils.get_mongo_database() games = database.games output_collection_name = 'analysis' output_collection = database[output_collection_name] game_analysis = GamesAnalysis() scanner = incremental_scanner.IncrementalScanner(output_collection_name, database) if args.incremental: utils.read_object_from_db(game_analysis, output_collection, '') else: log.warning('resetting scanner and db') scanner.reset() output_file_name = 'static/output/all_games_card_stats.js' if not os.path.exists('static/output'): os.makedirs('static/output') log.info("Starting run: %s", scanner.status_msg()) for idx, raw_game in enumerate(utils.progress_meter(scanner.scan(games, {}))): try: game_analysis.analyze_game(Game(raw_game)) if args.max_games >= 0 and idx >= args.max_games: log.info("Reached max_games of %d", args.max_games) break if idx % commit_after == 0 and idx > 0: start = time.time() game_analysis.max_game_id = scanner.get_max_game_id() game_analysis.num_games = scanner.get_num_games() utils.write_object_to_db(game_analysis, output_collection, '') scanner.save() log.info("Committed calculations to the DB in %5.2fs", time.time() - start) except int, exception: log.exception('Exception occurred for %s in raw game %s', Game(raw_game).isotropic_url(), raw_game) raise
def save_to_mongodb(sender, instance, **kwargs):
    """Signal handler: mirror a saved Car row into the Mongo 'cars' collection."""
    collection = get_mongo_database().cars
    # Drop any stale copy of this car before inserting the fresh document.
    collection.remove({"sql_id": instance.id})
    mirrored_fields = ('name', 'mpg', 'cylinders', 'displacement',
                       'horsepower', 'weight', 'acceleration', 'year',
                       'price', 'origin')
    document = dict((field, getattr(instance, field))
                    for field in mirrored_fields)
    document['sql_id'] = instance.id
    collection.insert(document)
def calc_goals_for_days(days):
    """Examines games and determines if any goals were achieved, storing them in the DB.

    Takes a list of one or more days in the format "YYYYMMDD" or
    datetime.date, and generates tasks to calculate the goals achieved in
    each of the individual games that occurred on those days.

    Skips days where there are no games available. Skips games that are
    already present in the goal collection.

    Returns the number of individual games referred for searching.
    """
    game_count = 0
    db = utils.get_mongo_database()
    games_col = db.games
    goals_col = db.goals
    games_col.ensure_index('game_date')
    for day in days:
        if type(day) is datetime.date:
            day = day.strftime('%Y%m%d')
        games_to_process = games_col.find({'game_date': day}, {'_id': 1})
        if games_to_process.count() < 1:
            log.info('no games to search for goals on %s', day)
            continue
        log.info('%s games to search for goals on %s',
                 games_to_process.count(), day)
        chunk = []
        for game in games_to_process:
            if len(chunk) >= CALC_GOALS_CHUNK_SIZE:
                # Flush a full chunk as an async task before accumulating more.
                calc_goals.delay(chunk, day)
                chunk = []
            # Only queue games that have not already been goal-checked.
            if goals_col.find({'_id': game['_id']}).count() == 0:
                chunk.append(game['_id'])
                game_count += 1
        if len(chunk) > 0:
            # Flush the final partial chunk for this day.
            calc_goals.delay(chunk, day)
    return game_count
def summarize_games(game_ids, day):
    """Summarize the passed list of games"""
    log.info("Summarizing %d games from %s", len(game_ids), day)
    db = utils.get_mongo_database()
    found = []
    for gid in game_ids:
        document = db.games.find_one({'_id': gid})
        if not document:
            # Ids come from an earlier scan; a miss means the game vanished.
            log.warning('Found nothing for game id %s', gid)
            continue
        found.append(document)
    return game_stats.calculate_game_stats(found, db.game_stats)
def summarize_game_stats_for_days(days):
    """Examines games and determines if need to be summarized.

    Takes a list of one or more days in the format "YYYYMMDD" or
    datetime.date, and generates tasks to summarize each of the individual
    games that occurred on those days.

    Skips days where there are no games available. Skips games that are
    already present in the games_stats collection.

    NOTE(review): the already-summarized check is commented out below, so
    in this variant every game is re-queued; the docstring's "skips games"
    claim does not currently hold.

    Returns the number of individual games referred for summarizing.
    """
    game_count = 0
    db = utils.get_mongo_database()
    games_col = db.games
    game_stats_col = db.game_stats
    games_col.ensure_index('game_date')
    for day in days:
        if type(day) is datetime.date:
            day = day.strftime('%Y%m%d')
        games_to_process = games_col.find({'game_date': day}, {'_id': 1})
        if games_to_process.count() < 1:
            log.info('No games available to summarize on %s', day)
            continue
        log.info('%s games to summarize on %s',
                 games_to_process.count(), day)
        chunk = []
        for game in games_to_process:
            if len(chunk) >= SUMMARIZE_GAMES_CHUNK_SIZE:
                # Flush a full chunk as an async task before accumulating more.
                summarize_games.delay(chunk, day)
                chunk = []
            # Is this really slow? Does it need to be fixed?
            #if game_stats_col.find({'_id.game_id': game['_id']}).count() == 0:
            chunk.append(game['_id'])
            game_count += 1
        if len(chunk) > 0:
            summarize_games.delay(chunk, day)
    return game_count
def GET(self):
    """Render the openings page: TrueSkill ratings for two-card openings,
    optionally filtered to openings containing a selected card."""
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    db = utils.get_mongo_database()
    selected_card = ''
    if 'card' in query_dict:
        selected_card = query_dict['card']
    if selected_card not in ('All cards', ''):
        query = db.trueskill_openings.find({'cards': selected_card})
    else:
        query = db.trueskill_openings.find({})
    #offset = db.trueskill_openings.find_one({'name':
    #    'open:Silver+Silver'})['mu']
    # Offset rebasing against the Silver/Silver baseline is disabled.
    offset = 0
    openings = list(query)
    card_list = card_info.OPENING_CARDS
    for opening in openings:
        for stat in ('mu', 'floor', 'ceil'):
            opening[stat] -= offset
        floor = opening['floor']
        ceil = opening['ceil']
        opening['level_key'] = make_level_key(floor, ceil)
        opening['level_str'] = make_level_str(floor, ceil)
        opening['skill_str'] = skill_str(opening['mu'], opening['sigma'])
        # Sort alphabetically, then (stably) by descending cost.
        opening['cards'].sort()
        opening['cards'].sort(key=lambda card: (card_info.Cost(card)),
                              reverse=True)
        costs = [str(card_info.Cost(card)) for card in opening['cards']]
        # Pad one-card openings so the cost column always shows two slots.
        while len(costs) < 2:
            costs.append('-')
        opening['cost'] = '/'.join(costs)
    openings.sort(key=lambda opening: opening['level_key'])
    openings.reverse()
    if selected_card == '':
        # In the default view, drop level-0 openings except the
        # Silver/Silver baseline.
        openings = [
            op for op in openings
            if op['level_key'][0] != 0 or op['cards'] == ['Silver', 'Silver']
        ]
    render = web.template.render('')
    return render.openings_template(openings, card_list, selected_card)
def parse_days(days):
    """Parses rawgames into games records and stores them in the DB.

    Takes a list of one or more days in the format "YYYYMMDD" or
    datetime.date, and generates tasks to parse the games that occurred on
    those days.

    Skips days where there are no rawgames available. Skips days where the
    parsed game collection has more than 65% of the quantity of rawgames,
    as this suggests the date has already been parsed.

    Returns the number of individual games referred for parsing.
    """
    game_count = 0
    db = utils.get_mongo_database()
    raw_games_col = db.raw_games
    games_col = db.games
    raw_games_col.ensure_index('game_date')
    for day in days:
        if type(day) is datetime.date:
            day = day.strftime('%Y%m%d')
        games_to_parse = raw_games_col.find({'game_date': day}, {'_id': 1})
        raw_games_qty = games_to_parse.count()
        if raw_games_qty < 1:
            log.info('no games to parse in %s', day)
            continue
        # Heuristic: if most rawgames already have parsed counterparts,
        # assume the day was processed and skip it.
        parsed_games_qty = games_col.find({'game_date': day}).count()
        if float(parsed_games_qty) / float(raw_games_qty) > 0.65:
            log.info(
                'Looks like raw games for %s have already been parsed. Found %5.2f%% in games collection.',
                day, 100.0 * parsed_games_qty / raw_games_qty)
            continue
        game_count += games_to_parse.count()
        log.info('%s games to parse in %s', games_to_parse.count(), day)
        # Dispatch async parse tasks in fixed-size chunks of game ids.
        for chunk in utils.segments([x['_id'] for x in games_to_parse],
                                    PARSE_GAMES_CHUNK_SIZE):
            parse_games.delay(chunk, day)
    return game_count
def summarize_game_stats_for_days(days):
    """Examines games and determines if need to be summarized.

    Takes a list of one or more days in the format "YYYYMMDD" or
    datetime.date, and generates tasks to summarize each of the individual
    games that occurred on those days.

    Skips days where there are no games available. Skips games that are
    already present in the games_stats collection.

    Returns the number of individual games referred for summarizing.
    """
    game_count = 0
    db = utils.get_mongo_database()
    games_col = db.games
    game_stats_col = db.game_stats
    games_col.ensure_index('game_date')
    for day in days:
        if type(day) is datetime.date:
            day = day.strftime('%Y%m%d')
        games_to_process = games_col.find({'game_date': day}, {'_id': 1})
        if games_to_process.count() < 1:
            log.info('No games available to summarize on %s', day)
            continue
        log.info('%s games to summarize on %s',
                 games_to_process.count(), day)
        chunk = []
        for game in games_to_process:
            if len(chunk) >= SUMMARIZE_GAMES_CHUNK_SIZE:
                # Flush a full chunk as an async task before accumulating more.
                summarize_games.delay(chunk, day)
                chunk = []
            # Only queue games that have not already been summarized.
            if game_stats_col.find({'_id.game_id': game['_id']}).count() == 0:
                chunk.append(game['_id'])
                game_count += 1
        if len(chunk) > 0:
            summarize_games.delay(chunk, day)
    return game_count
def main(args):
    """Scan games and update optimal card-ratio statistics.

    Streams games through an incremental scanner, integrates per-game
    ratio results into a DB-backed tracker, and commits periodically.
    """
    commit_after = 25000
    database = utils.get_mongo_database()
    games = database.games
    collection = database.optimal_card_ratios
    # Created lazily on the first scanned game.
    db_tracker = None
    scanner = incremental_scanner.IncrementalScanner('optimal_card_ratios',
                                                     database)
    if not args.incremental:
        log.warning('resetting scanner and db')
        scanner.reset()
    log.info("Starting run: %s", scanner.status_msg())
    for ind, game in enumerate(utils.progress_meter(scanner.scan(games, {}))):
        if not db_tracker:
            log.debug("Initializing db tracker manager")
            db_tracker = DBCardRatioTrackerManager(collection,
                                                   args.incremental)
            log.debug("DB tracker manager initialized")
        result = process_game(Game(game))
        for final_ratio_dict, progressive_ratio_dict, win_points in result:
            # 'final' = end-of-game ratios; 'progressive' = during-game ratios.
            db_tracker.integrate_results('final', final_ratio_dict,
                                         win_points)
            db_tracker.integrate_results('progressive',
                                         progressive_ratio_dict, win_points)
        if args.max_games >= 0 and ind >= args.max_games:
            log.info("Reached max_games of %d", args.max_games)
            break
        if ind % commit_after == 0 and ind > 0:
            # Periodic checkpoint of tracker state and scan position.
            start = time.time()
            db_tracker.save()
            scanner.save()
            log.info("Committed calculations to the DB in %5.2fs",
                     time.time() - start)
    log.info("Ending run: %s", scanner.status_msg())
    # Final commit (db_tracker is None when no games were scanned).
    if db_tracker:
        db_tracker.save()
    scanner.save()
def GET(self):
    """Render the openings page: TrueSkill ratings for two-card openings,
    optionally filtered to openings containing a selected card."""
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    db = utils.get_mongo_database()
    selected_card = ''
    if 'card' in query_dict:
        selected_card = query_dict['card']
    if selected_card not in ('All cards', ''):
        query = db.trueskill_openings.find({'cards': selected_card})
    else:
        query = db.trueskill_openings.find({})
    #offset = db.trueskill_openings.find_one({'name':
    #    'open:Silver+Silver'})['mu']
    # Offset rebasing against the Silver/Silver baseline is disabled.
    offset = 0
    openings = list(query)
    card_list = card_info.OPENING_CARDS
    for opening in openings:
        for stat in ('mu', 'floor', 'ceil'):
            opening[stat] -= offset
        floor = opening['floor']
        ceil = opening['ceil']
        opening['level_key'] = make_level_key(floor, ceil)
        opening['level_str'] = make_level_str(floor, ceil)
        opening['skill_str'] = skill_str(opening['mu'], opening['sigma'])
        # Sort alphabetically, then (stably) by descending cost.
        opening['cards'].sort()
        opening['cards'].sort(key=lambda card: (card_info.Cost(card)),
                              reverse=True)
        costs = [str(card_info.Cost(card)) for card in opening['cards']]
        # Pad one-card openings so the cost column always shows two slots.
        while len(costs) < 2:
            costs.append('-')
        opening['cost'] = '/'.join(costs)
    openings.sort(key=lambda opening: opening['level_key'])
    openings.reverse()
    if selected_card == '':
        # In the default view, drop level-0 openings except the
        # Silver/Silver baseline.
        openings = [op for op in openings
                    if op['level_key'][0] != 0
                    or op['cards'] == ['Silver', 'Silver']]
    render = web.template.render('')
    return render.openings_template(openings, card_list, selected_card)
def GET(self):
    """Emit a JSON array of {game: decks, id} records for ?player=."""
    web.header("Content-Type", "text/plain; charset=utf-8")
    params = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    requested_player = params['player']
    db = utils.get_mongo_database()
    # Normalize the player name to match how games are stored.
    normalized = NormName(requested_player)
    cursor = db.games.find({'players': normalized})
    import simplejson as json
    from pymongo import json_util
    records = [{'game': doc['decks'], 'id': doc['_id']} for doc in cursor]
    # json_util handles Mongo-specific types (e.g. ObjectId) in the dump.
    return json.dumps(records, default=json_util.default)
def GET(self):
    """Render the optimal-card-ratios page for a pair of cards.

    Looks up the ratio tracker document keyed by the alphabetically
    ordered pair, and renders both final-deck and progressive tables.
    """
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    card_list = sorted(set(dominioncards.all_cards()) -
                       set(dominioncards.TOURNAMENT_WINNINGS))
    card_x_card = dominioncards.get_card(query_dict.get('card_x', 'Minion'))
    card_y_card = dominioncards.get_card(query_dict.get('card_y', 'Chapel'))
    card_x = str(card_x_card)
    card_y = str(card_y_card)
    # DB keys store the pair in a canonical (string-sorted) order; remember
    # whether the requested orientation was swapped so axes render correctly.
    if card_x < card_y:
        db_id = card_x + ':' + card_y
        swap_x_and_y = False
    else:
        db_id = card_y + ':' + card_x
        swap_x_and_y = True
    db = utils.get_mongo_database()
    db_val = db.optimal_card_ratios.find_one({'_id': db_id})
    if not db_val:
        return 'No stats for "' + card_x + '" and "' + card_y + '".'
    tracker = DBCardRatioTracker()
    tracker.from_primitive_object(db_val)
    # Final-deck table: total games is the sum over all ratio buckets;
    # buckets below 0.2% of games are filtered out as noise.
    num_games = sum(meanvarstat.frequency()
                    for meanvarstat in tracker.final.itervalues())
    num_games_threshold = int(round(num_games * .002))
    final_table = self.getHtmlTableForStats(
        tracker.final, swap_x_and_y, num_games, num_games_threshold)
    # Progressive table: a game hits many buckets as it evolves, so the
    # largest single bucket frequency is used as the game-count baseline.
    num_games = max(meanvarstat.frequency()
                    for meanvarstat in tracker.progressive.itervalues())
    num_games_threshold = int(round(num_games * .002))
    progressive_table = self.getHtmlTableForStats(
        tracker.progressive, swap_x_and_y, num_games, num_games_threshold)
    render = web.template.render('')
    return render.optimal_card_ratios_template(
        card_list, card_x_card, card_y_card, final_table, progressive_table)
def parse_days(days):
    """Parses rawgames into games records and stores them in the DB.

    Takes a list of one or more days in the format "YYYYMMDD" or
    datetime.date, and generates tasks to parse the games that occurred on
    those days.

    Skips days where there are no rawgames available. Skips days where the
    parsed game collection has more than 85% of the quantity of rawgames,
    as this suggests the date has already been parsed.

    Returns the number of individual games referred for parsing.
    """
    game_count = 0
    db = utils.get_mongo_database()
    raw_games_col = db.raw_games
    games_col = db.games
    raw_games_col.ensure_index('game_date')
    for day in days:
        if type(day) is datetime.date:
            day = day.strftime('%Y%m%d')
        games_to_parse = raw_games_col.find({'game_date': day}, {'_id': 1})
        raw_games_qty = games_to_parse.count()
        if raw_games_qty < 1:
            log.info('no games to parse in %s', day)
            continue
        # Heuristic: if most rawgames already have parsed counterparts,
        # assume the day was processed and skip it. (Docstring previously
        # claimed 95%, but the code checks 0.85 — docstring corrected.)
        parsed_games_qty = games_col.find({'game_date': day}).count()
        if float(parsed_games_qty) / float(raw_games_qty) > 0.85:
            log.info('Looks like raw games for %s have already been parsed. Found %5.2f%% in games collection.',
                     day, 100.0 * parsed_games_qty / raw_games_qty)
            continue
        game_count += games_to_parse.count()
        log.info('%s games to parse in %s', games_to_parse.count(), day)
        # Dispatch async parse tasks in fixed-size chunks of game ids.
        for chunk in utils.segments([x['_id'] for x in games_to_parse],
                                    PARSE_GAMES_CHUNK_SIZE):
            parse_games.delay(chunk, day)
    return game_count
def main(args):
    """Scan games and update optimal card-ratio statistics.

    Streams games through an incremental scanner, integrates per-game
    ratio results into a DB-backed tracker, and commits periodically.
    """
    commit_after = 25000
    database = utils.get_mongo_database()
    games = database.games
    collection = database.optimal_card_ratios
    # Created lazily on the first scanned game.
    db_tracker = None
    scanner = incremental_scanner.IncrementalScanner('optimal_card_ratios',
                                                     database)
    if not args.incremental:
        log.warning('resetting scanner and db')
        scanner.reset()
    log.info("Starting run: %s", scanner.status_msg())
    for ind, game in enumerate(
        utils.progress_meter(scanner.scan(games, {}))):
        if not db_tracker:
            log.debug("Initializing db tracker manager")
            db_tracker = DBCardRatioTrackerManager(collection,
                                                   args.incremental)
            log.debug("DB tracker manager initialized")
        result = process_game(Game(game))
        for final_ratio_dict, progressive_ratio_dict, win_points in result:
            # 'final' = end-of-game ratios; 'progressive' = during-game ratios.
            db_tracker.integrate_results('final', final_ratio_dict,
                                         win_points)
            db_tracker.integrate_results('progressive',
                                         progressive_ratio_dict, win_points)
        if args.max_games >= 0 and ind >= args.max_games:
            log.info("Reached max_games of %d", args.max_games)
            break
        if ind % commit_after == 0 and ind > 0:
            # Periodic checkpoint of tracker state and scan position.
            start = time.time()
            db_tracker.save()
            scanner.save()
            log.info("Committed calculations to the DB in %5.2fs",
                     time.time() - start)
    log.info("Ending run: %s", scanner.status_msg())
    # Final commit (db_tracker is None when no games were scanned).
    if db_tracker:
        db_tracker.save()
    scanner.save()
def main():
    """Write margin.txt: one line per clean two-player game containing the
    first-player margin, the turn-ordered player names, and the supply.

    Fixes: the output file handle was never closed (leaked on any
    exception during the scan) — now managed with a `with` block; also
    removed the unused `game_summaries` local.
    """
    db = utils.get_mongo_database()
    with open('margin.txt', 'w') as output_file:
        for idx, raw_game in enumerate(
            utils.progress_meter(db.games.find({}), 1000)):
            g = game.Game(raw_game)
            # Only clean, exactly-two-player games are considered.
            if g.dubious_quality():
                continue
            if len(g.get_player_decks()) != 2:
                continue
            try:
                joined_names = ','.join(
                    p.Name().encode('utf-8')
                    for p in g.get_player_decks(sort_by_turn_order=True))
                output_file.write('%f:%s:%s\n' % (first_player_margin(g),
                                                  joined_names,
                                                  ','.join(g.get_supply())))
            except UnicodeDecodeError:
                # Best effort: skip games whose player names fail to encode.
                pass
def GET(self):
    """Render the buy-stats page, optionally comparing one player's buy
    behaviour (?player=) against the global statistics."""
    import count_buys
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    db = utils.get_mongo_database()
    stats = count_buys.DeckBuyStats()
    # Load the precomputed global buy statistics.
    utils.read_object_from_db(stats, db.buys, '')
    player_buy_summary = None
    if 'player' in query_dict:
        targ_name = NormName(query_dict['player'])
        games = map(game.Game, list(db.games.find({'players': targ_name})))
        player_buy_summary = count_buys.DeckBuyStats()
        # Only accumulate decks belonging to the requested player.
        match_name = lambda g, name: NormName(name) == targ_name
        count_buys.accum_buy_stats(games, player_buy_summary, match_name)
        # Annotate the player's stats relative to the global baseline.
        count_buys.add_effectiveness(player_buy_summary, stats)
    render = web.template.render('', globals={'round': round})
    return render.buy_template(stats, player_buy_summary)
def scrape_raw_games(date):
    """Download the specified raw game archive, store it in S3, and load
    it into MongoDB.

    date is a datetime.date object
    """
    scraper = isotropic.IsotropicScraper(utils.get_mongo_database())
    try:
        new_records = scraper.scrape_and_store_rawgames(date)
        if new_records > 0:
            # Also need to parse the raw games for the days where we
            # inserted new records.
            parse_days.delay([date])
        return new_records
    except isotropic.ScrapeError:
        log.info("Data for %s is not yet available", date)
        return None
def GET(self):
    """Render the card buy-statistics page, optionally augmented with a
    single player's personal buy summary (via the 'player' query arg)."""
    import count_buys
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    db = utils.get_mongo_database()

    # Global buy statistics, precomputed into the 'buys' collection.
    stats = count_buys.DeckBuyStats()
    utils.read_object_from_db(stats, db.buys, '')

    player_buy_summary = None
    if 'player' in query_dict:
        targ_name = norm_name(query_dict['player'])
        player_games = [game.Game(raw_doc)
                        for raw_doc in db.games.find({'players': targ_name})]
        player_buy_summary = count_buys.DeckBuyStats()
        # Accumulate only the decks played by the requested player.
        match_name = lambda g, name: norm_name(name) == targ_name
        count_buys.accum_buy_stats(player_games, player_buy_summary,
                                   match_name)
        # Score the player's buys against the global averages.
        count_buys.add_effectiveness(player_buy_summary, stats)

    render = web.template.render('', globals={'round': round})
    return render.buy_template(stats, player_buy_summary)
def GET(self):
    """Render a debug page of goal frequencies and top 'rareness' scores.

    A player's score rewards attaining goals that are globally rare:
    each goal contributes (total_games / goal_count) scaled by a
    logistic function of how often the player achieved it.
    """
    web.header("Content-Type", "text/html; charset=utf-8")
    db = utils.get_mongo_database()
    goal_freq = collections.defaultdict(int)
    # player -> goal_name -> number of times that player attained it
    attainments_by_player = collections.defaultdict(
        lambda: collections.defaultdict(int))
    for goal_doc in db.goals.find():
        for goal in goal_doc['goals']:
            player = goal['player']
            goal_name = goal['goal_name']
            goal_freq[goal_name] += 1
            attainments_by_player[player][goal_name] += 1

    player_scores = {}
    # float() so the rareness division below isn't integer division.
    tot_games = float(db.games.count())
    for player in attainments_by_player:
        score = 0
        player_goal_freqs = attainments_by_player[player]
        for goal in player_goal_freqs:
            # Rarer goals (low global count) are worth more.
            global_rareness = tot_games / goal_freq[goal]
            player_goal_freq = player_goal_freqs[goal]
            # Logistic damping: repeated attainments give diminishing
            # returns rather than scaling linearly.
            score += global_rareness / (1 + math.exp(-player_goal_freq))
        player_scores[player] = score

    # Goals listed from least to most frequent.
    goal_freq = goal_freq.items()
    goal_freq.sort(key = lambda x: x[1])
    ret = ''
    for goal, freq in goal_freq:
        ret += goal + ' ' + str(freq) + '<br>'
    ret += '<br>'
    # Top ten players by descending rareness score.
    player_scores = player_scores.items()
    player_scores.sort(key = lambda x: -x[1])
    for player, score in player_scores[:10]:
        ret += player + ' ' + '%.3f' % score + '<br>'
    return ret
def main():
    """Dump first-player margins for all clean two-player games.

    Writes one line per game to margin.txt in the form
    'margin:player1,player2:card1,card2,...' with players in turn order.
    Skips dubious-quality games, non-two-player games, and games whose
    player names fail UTF-8 encoding.
    """
    db = utils.get_mongo_database()
    game_summaries = []
    # Fix: the original opened margin.txt and never closed it; 'with'
    # guarantees the handle is flushed and closed even if the scan fails.
    with open('margin.txt', 'w') as output_fn:
        for idx, raw_game in enumerate(
            utils.progress_meter(db.games.find({}), 1000)):
            g = game.Game(raw_game)
            if g.dubious_quality():
                continue
            if len(g.get_player_decks()) != 2:
                continue
            try:
                joined_names = ','.join(
                    p.Name().encode('utf-8')
                    for p in g.get_player_decks(sort_by_turn_order=True))
                output_fn.write('%f:%s:%s\n' % (
                    first_player_margin(g), joined_names,
                    ','.join(g.get_supply())))
            except UnicodeDecodeError:
                # Best-effort: drop games whose names can't be encoded.
                pass
def main(args):
    """Run the analyze2 event pipeline over newly seen games.

    Streams games through the 'analyze2' incremental scanner, feeds them
    into an EventAccumulator, then persists both the accumulated stats
    and the scanner position. A non-incremental run first drops every
    event-detector output collection.
    """
    database = utils.get_mongo_database()
    scanner = incremental_scanner.IncrementalScanner('analyze2', database)

    if not args.incremental:
        log.warning('resetting scanner and db')
        scanner.reset()
        for collection_name, _ in event_detectors:
            database[collection_name].drop()

    log.info("Starting run: %s", scanner.status_msg())

    stream = analysis_util.games_stream(scanner, database.games)
    acc = EventAccumulator()
    accumulate_card_stats(stream, acc, args.max_games)

    log.info('saving to database')
    log.debug('saving accumulated stats')
    acc.update_db(database)
    log.info('saving the game scanner state')
    scanner.save()
    log.info("Ending run: %s", scanner.status_msg())
def GET(self):
    """Run a game search from the query string and render the matching
    game snippets as an HTML page."""
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    db = utils.get_mongo_database()
    games = db.games

    # Assemble the page from pieces and join once at the end.
    pieces = ['<html><head><title>Game Search Results</title></head><body>',
              '<a href="/search_query">Back to search query page</a><BR><BR>']
    matcher = query_matcher.QueryMatcher(**query_dict)
    num_hits = 0
    for game_match in matcher.query_db(games):
        num_hits += 1
        pieces.append(game_match.display_game_snippet() + '<br>')
    if num_hits == 0:
        pieces.append('Your search returned no matches<br>')
    pieces.append('<a href="/search_query">Back to search query page</a>')
    return ''.join(pieces)
def GET(self):
    """Run a game search from the query string and render the matching
    game snippets as an HTML page (legacy CamelCase matcher API)."""
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    db = utils.get_mongo_database()
    games = db.games
    ret = '<html><head><title>Game Search Results</title></head><body>'
    ret += '<a href="/search_query">Back to search query page</a><BR><BR>'
    # All query-string parameters are forwarded to the matcher.
    matcher = query_matcher.QueryMatcher(**query_dict)
    found_any = False
    for idx, game_match in enumerate(matcher.QueryDB(games)):
        found_any = True
        ret += game_match.DisplayGameSnippet() + '<br>'
    if not found_any:
        ret += 'Your search returned no matches<br>'
    ret += '<a href="/search_query">Back to search query page</a>'
    return ret
def fetch_conditional_stats(self, target_inds, interaction_tuples):
    """Fetch conditional card-gain stats for each (target, condition) pair.

    Args:
        target_inds: iterable of card-index strings for the target cards.
        interaction_tuples: iterable of tuples of card-index strings that
            describe the supply condition.

    Returns:
        A list of dicts with keys 'card_name', 'condition' (list of card
        names) and 'stats' (a SmallGainStat), capped at 1000 lookups.
    """
    db = utils.get_mongo_database()

    # Fix: hoisted out of the inner loop — the original re-created this
    # closure on every iteration. It captures no loop state, so behavior
    # is unchanged.
    def name_getter(ind_str):
        # Map a stringified card index back to the card's singular name.
        return dominioncards.index_to_card(int(ind_str)).singular

    card_stats = []
    count_searched = 0
    for target_ind in target_inds:
        for interaction_tuple in interaction_tuples:
            count_searched += 1
            # Safety valve: bound the number of DB lookups per request.
            if count_searched > 1000:
                return card_stats
            key = target_ind + ';' + (','.join(interaction_tuple))
            db_val = db.card_supply.find_one({'_id': key})
            if db_val:
                small_gain_stat = SmallGainStat()
                small_gain_stat.from_primitive_object(db_val['vals'])
                card_name = name_getter(target_ind)
                condition = map(name_getter, interaction_tuple)
                stat_with_context = {'card_name': card_name,
                                     'condition': condition,
                                     'stats': small_gain_stat}
                card_stats.append(stat_with_context)
    return card_stats
def GET(self):
    """Serve an annotated view of a stored raw game log.

    Expects 'game_id' in the query string; an optional 'debug' flag is
    forwarded to the annotator.
    """
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    debug = int(query_dict.get('debug', 0))
    game_id = query_dict['game_id']
    if game_id.endswith('.gz'):
        # Accept legacy ids that still carry the '.gz' suffix.
        game_id = game_id[:-len('.gz')]
    # NOTE(review): yyyymmdd is computed but not used below — confirm
    # whether it is needed.
    yyyymmdd = game.Game.get_date_from_id(game_id)
    db = utils.get_mongo_database()
    raw_games_col = db.raw_games
    rawgame = raw_games_col.find_one({'_id': game_id})
    if rawgame is None:
        return 'could not find game ' + game_id
    # Raw logs are stored bz2-compressed in MongoDB.
    contents = bz2.decompress(rawgame['text']).decode('utf-8')
    body_err_msg = ('<body><b>Error annotating game, tell '
                    '[email protected]!</b>')
    try:
        return annotate_game.annotate_game(contents, game_id, debug)
    except parse_game.BogusGameError, b:
        # On failure, serve the raw log with an error banner prepended.
        return contents.replace('<body>',
                                body_err_msg + ': foo? ' + str(b))
def fetch_conditional_stats(self, target_inds, interaction_tuples):
    """Fetch conditional card-gain stats for each (target, condition) pair.

    Args:
        target_inds: iterable of card-index strings for the target cards.
        interaction_tuples: iterable of tuples of card-index strings that
            describe the supply condition.

    Returns:
        A list of dicts with keys 'card_name', 'condition' (list of card
        names) and 'stats' (a SmallGainStat), capped at 1000 lookups.
    """
    db = utils.get_mongo_database()
    card_stats = []
    count_searched = 0
    for target_ind in target_inds:
        for interaction_tuple in interaction_tuples:
            count_searched += 1
            # Safety valve: bound the number of DB lookups per request.
            if count_searched > 1000:
                return card_stats
            key = target_ind + ';' + (','.join(interaction_tuple))
            db_val = db.card_supply.find_one({'_id': key})
            if db_val:
                small_gain_stat = SmallGainStat()
                small_gain_stat.from_primitive_object(db_val['vals'])
                def name_getter(ind_str):
                    # Map a stringified card index to the card's name.
                    return card_info.card_names()[int(ind_str)]
                card_name = name_getter(int(target_ind))
                condition = map(name_getter, interaction_tuple)
                stat_with_context = {'card_name': card_name,
                                     'condition': condition,
                                     'stats': small_gain_stat}
                card_stats.append(stat_with_context)
    return card_stats
def GET(self):
    """Serve an annotated view of a stored raw game log.

    Expects 'game_id' in the query string; an optional 'debug' flag is
    forwarded to the annotator.
    """
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    debug = int(query_dict.get('debug', 0))
    game_id = query_dict['game_id']
    if game_id.endswith('.gz'):
        # Accept legacy ids that still carry the '.gz' suffix.
        game_id = game_id[:-len('.gz')]
    # NOTE(review): yyyymmdd is computed but not used below — confirm
    # whether it is needed.
    yyyymmdd = game.Game.get_date_from_id(game_id)
    db = utils.get_mongo_database()
    raw_games_col = db.raw_games
    rawgame = raw_games_col.find_one({'_id': game_id})
    if rawgame is None:
        return 'could not find game ' + game_id
    # Raw logs are stored bz2-compressed in MongoDB.
    contents = bz2.decompress(rawgame['text']).decode('utf-8')
    body_err_msg = ('<body><b>Error annotating game, please send the details of this message to the '
                    '<a href="mailto:[email protected]?Subject=Game%20Annotation%20Error">'
                    'Council Room Developers</a></b>')
    try:
        return annotate_game.annotate_game(contents, game_id, debug)
    except parse_common.BogusGameError, b:
        # On failure, serve the raw log with an error banner prepended.
        return contents.replace('<body>',
                                body_err_msg + ': foo? ' + str(b))
def GET(self):
    """Serve an annotated view of a stored raw game log.

    Expects 'game_id' in the query string; an optional 'debug' flag is
    forwarded to the annotator.
    """
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    debug = int(query_dict.get('debug', 0))
    game_id = query_dict['game_id']
    if game_id.endswith('.gz'):
        # Accept legacy ids that still carry the '.gz' suffix.
        game_id = game_id[:-len('.gz')]
    # NOTE(review): yyyymmdd is computed but not used below — confirm
    # whether it is needed.
    yyyymmdd = game.Game.get_date_from_id(game_id)
    db = utils.get_mongo_database()
    raw_games_col = db.raw_games
    rawgame = raw_games_col.find_one({'_id': game_id})
    if rawgame is None:
        return 'could not find game ' + game_id
    # Raw logs are stored bz2-compressed in MongoDB.
    contents = bz2.decompress(rawgame['text']).decode('utf-8')
    body_err_msg = ('<body><b>Error annotating game, please send the details of this message to the '
                    '<a href="mailto:[email protected]?Subject=Game%20Annotation%20Error">'
                    'Council Room Developers</a></b>')
    try:
        return annotate_game.annotate_game(contents, game_id, debug)
    except parse_game.BogusGameError, b:
        # On failure, serve the raw log with an error banner prepended.
        return contents.replace('<body>',
                                body_err_msg + ': foo? ' + str(b))
def main(args):
    """Check goals for newly scanned games and store them in db.goals.

    Two modes:
      * args.goals set: re-check only the named goals over games already
        covered by the main 'goals' scanner, using a throwaway 'subgoals'
        scanner that stops at the main scanner's max game id.
      * otherwise: incrementally check every goal for new games (a
        non-incremental run wipes the output collection first).
    """
    db = utils.get_mongo_database()
    games_collection = db.games
    output_collection = db.goals
    total_checked = 0
    # goal_name -> number of times attained during this run.
    checker_output = collections.defaultdict(int)

    if args.goals:
        # Validate every requested goal name before doing any work.
        valid_goals = True
        for goal_name in args.goals:
            if goal_name not in goal_check_funcs:
                valid_goals = False
                log.error("Unrecognized goal name '%s'", goal_name)
        if not valid_goals:
            exit(-1)
        goals_to_check = args.goals
        # Fresh 'subgoals' scanner so the recheck walks from the start.
        scanner = incremental_scanner.IncrementalScanner('subgoals', db)
        scanner.reset()
        # Stop once we reach the last game the main scanner has seen.
        main_scanner = incremental_scanner.IncrementalScanner('goals', db)
        last = main_scanner.get_max_game_id()
    else:
        goals_to_check = None
        scanner = incremental_scanner.IncrementalScanner('goals', db)
        last = None
        if not args.incremental:
            scanner.reset()
            output_collection.remove()

    output_collection.ensure_index('goals.player')

    log.info("Starting run: %s", scanner.status_msg())

    for g in utils.progress_meter(scanner.scan(games_collection, {})):
        total_checked += 1
        game_val = game.Game(g)
        # Get existing goal set (if exists)
        game_id = game_val.get_id()
        mongo_val = output_collection.find_one({'_id': game_id})
        if mongo_val is None:
            mongo_val = collections.defaultdict( dict )
            mongo_val['_id'] = game_id
            mongo_val['goals'] = []

        # If rechecking, delete old values
        if goals_to_check is not None:
            goals = mongo_val['goals']
            # Iterate backwards so deletions don't shift pending indices.
            for ind in range(len(goals) - 1, -1, -1):
                goal = goals[ind]
                if goal['goal_name'] in goals_to_check:
                    del goals[ind]

        # Get new values
        goals = check_goals(game_val, goals_to_check)

        # Write new values
        for goal in goals:
            goal_name = goal['goal_name']
            mongo_val['goals'].append(goal)
            checker_output[goal_name] += 1

        # Convert back to a plain dict before saving to MongoDB.
        mongo_val = dict(mongo_val)
        output_collection.save(mongo_val)

        if last and game_id == last:
            break
        if args.max_games >= 0 and total_checked >= args.max_games:
            break

    log.info("Ending run: %s", scanner.status_msg())
    scanner.save()
    print_totals(checker_output, total_checked)
def main():
    """Load archived leaderboard snapshots into per-player skill history.

    Walks static/leaderboard/*.html.bz2 in date order, parses each page
    with both the Isotropic and the Goko row formats, and $pushes one
    [date, skill_mean, skill_error, rank, eligible_games_played] entry
    per player into the leaderboard_history collection. Progress is
    tracked via the 'leaderboard_history' scanner document.
    """
    # Archive files are named like '2012-09-15.html.bz2'.
    filename_pattern = re.compile(r'^(?P<date>\d\d\d\d-\d\d-\d\d)\.html\.bz2$')
    # Row format of the old Isotropic leaderboard HTML.
    iso_leaderboard_pattern = re.compile(r'<td>(?P<skill_mean>-?\d+\.\d+) ± ' + \
        r'(?P<skill_error>-?\d+\.\d+)</td><td class=c2>' + \
        r'(?P<rank>\d+)</td><td class=c>' + \
        r'(?P<eligible_games_played>\d+)</td><td>' + \
        r'(?P<nickname>[^<]*) <')
    # Row format of the Goko leaderboard HTML (no error / games-played).
    goko_leaderboard_pattern = re.compile(
        r'\s+<td class="leaders-table-item table-item-rank">(?P<rank>\d+)</td>\s*\n' +
        r'\s*<td class="leaders-table-item table-item-name"><img [^>]*>(?P<nickname>.*)</td>\s*\n' +
        r'\s*<td class="leaders-table-item table-item-points">(?P<skill_mean>\d+)</td>')
    database = utils.get_mongo_database()
    history_collection = database.leaderboard_history
    scanner_collection = database.scanner
    db_val = scanner_collection.find_one({'_id': 'leaderboard_history'})
    # Resume after the last date already loaded.
    last_date = db_val['last_date'] if db_val else '0000-00-00'
    directory = 'static/leaderboard/'
    filenames = os.listdir(directory)
    filenames.sort()
    bad_leaderboard_dates = utils.get_bad_leaderboard_dates()
    for filename in filenames:
        match = filename_pattern.search(filename)
        if not match:
            continue
        date = match.group('date')
        if date in bad_leaderboard_dates:
            # don't load data from when the leaderboard was messed up
            log.warning("Skipping %s because the leaderboard was messed up",
                        date)
            continue
        if date <= last_date:
            log.warning("Date %s is less than last date %s", date, last_date)
            continue
        log.info('Processing %s', date)
        file_obj = bz2.BZ2File(directory + filename)
        content = file_obj.read().decode('utf-8')
        file_obj.close()
        nickname_to_entry = {}
        num_matches = 0
        last_rank = -1
        # First pass: Isotropic-format rows.
        pos = 0
        while True:
            match = iso_leaderboard_pattern.search(content, pos)
            if not match:
                break
            num_matches += 1
            skill_mean = float(match.group('skill_mean'))
            skill_error = float(match.group('skill_error'))
            rank = int(match.group('rank'))
            eligible_games_played = int(match.group('eligible_games_played'))
            nickname = match.group('nickname')
            normed_nickname = name_merger.norm_name(nickname)
            if normed_nickname not in nickname_to_entry:
                nickname_to_entry[normed_nickname] = [date, skill_mean,
                    skill_error, rank, eligible_games_played]
            else:
                log.info('normed nickname %s already exists for %s',
                         normed_nickname, date)
            last_rank = rank
            pos = match.end()
        # Second pass: Goko-format rows. Error and games-played aren't in
        # this format, so they are recorded as 0; names are not normed.
        pos = 0
        while True:
            match = goko_leaderboard_pattern.search(content, pos)
            if not match:
                break
            num_matches += 1
            skill_mean = float(match.group('skill_mean'))
            skill_error = 0
            rank = int(match.group('rank'))
            eligible_games_played = 0
            nickname = match.group('nickname')
            normed_nickname = nickname
            if normed_nickname not in nickname_to_entry:
                nickname_to_entry[normed_nickname] = [date, skill_mean,
                    skill_error, rank, eligible_games_played]
            else:
                log.info('normed nickname %s already exists for %s',
                         normed_nickname, date)
            last_rank = rank
            pos = match.end()
        log.info('%d entries matched', num_matches)
        # Sanity checks: zero matches or a rank/count mismatch means the
        # page layout changed and the regexes no longer fit.
        if num_matches == 0:
            log.error('No entries found, so the regex is probably not doing its job anymore.')
            break
        if num_matches != last_rank:
            log.error('ERROR: # entries does not match last rank, so the regex is probably not doing its job anymore.')
            break
        for nickname, data in nickname_to_entry.iteritems():
            history_collection.update({'_id': nickname},
                                      {'$push': {'history': data}},
                                      upsert=True)
        log.info('%d player histories updated', len(nickname_to_entry))
        last_date = date
    # Persist how far we got so the next run resumes from here.
    scanner_collection.update({'_id': 'leaderboard_history'},
                              {'$set': {'last_date': last_date}},
                              upsert=True)
def main():
    """Load archived Isotropic leaderboard snapshots into per-player
    skill history.

    Walks static/leaderboard/*.html.bz2 in date order, parses each page
    with the Isotropic row format, and $pushes one
    [date, skill_mean, skill_error, rank, eligible_games_played] entry
    per player into the leaderboard_history collection. Progress is
    tracked via the 'leaderboard_history' scanner document.
    """
    # Archive files are named like '2012-09-15.html.bz2'.
    filename_pattern = re.compile(r'^(?P<date>\d\d\d\d-\d\d-\d\d)\.html\.bz2$')
    # Row format of the Isotropic leaderboard HTML.
    leaderboard_pattern = re.compile(r'<td>(?P<skill_mean>-?\d+\.\d+) ± ' + \
        r'(?P<skill_error>-?\d+\.\d+)</td><td class=c2>' + \
        r'(?P<rank>\d+)</td><td class=c>' + \
        r'(?P<eligible_games_played>\d+)</td><td>' + \
        r'(?P<nickname>[^<]*) <')
    database = utils.get_mongo_database()
    history_collection = database.leaderboard_history
    scanner_collection = database.scanner
    db_val = scanner_collection.find_one({'_id': 'leaderboard_history'})
    # Resume after the last date already loaded.
    last_date = db_val['last_date'] if db_val else '0000-00-00'
    directory = 'static/leaderboard/'
    filenames = os.listdir(directory)
    filenames.sort()
    bad_leaderboard_dates = utils.get_bad_leaderboard_dates()
    for filename in filenames:
        match = filename_pattern.search(filename)
        if not match:
            continue
        date = match.group('date')
        if date in bad_leaderboard_dates:
            # don't load data from when the leaderboard was messed up
            log.warning("Skipping %s because the leaderboard was messed up",
                        date)
            continue
        if date <= last_date:
            log.warning("Date %s is less than last date %s", date, last_date)
            continue
        log.info('Processing %s', date)
        file_obj = bz2.BZ2File(directory + filename)
        content = file_obj.read().decode('utf-8')
        file_obj.close()
        nickname_to_entry = {}
        num_matches = 0
        last_rank = -1
        pos = 0
        # Scan the page row by row, resuming each search where the last
        # match ended.
        while True:
            match = leaderboard_pattern.search(content, pos)
            if not match:
                break
            num_matches += 1
            skill_mean = float(match.group('skill_mean'))
            skill_error = float(match.group('skill_error'))
            rank = int(match.group('rank'))
            eligible_games_played = int(match.group('eligible_games_played'))
            nickname = match.group('nickname')
            normed_nickname = name_merger.norm_name(nickname)
            if normed_nickname not in nickname_to_entry:
                nickname_to_entry[normed_nickname] = [
                    date, skill_mean, skill_error, rank,
                    eligible_games_played
                ]
            else:
                log.info('normed nickname %s already exists for %s',
                         normed_nickname, date)
            last_rank = rank
            pos = match.end()
        log.info('%d entries matched', num_matches)
        # Sanity checks: zero matches or a rank/count mismatch means the
        # page layout changed and the regex no longer fits.
        if num_matches == 0:
            log.error(
                'No entries found, so the regex is probably not doing its job anymore.'
            )
            break
        if num_matches != last_rank:
            log.error(
                'ERROR: # entries does not match last rank, so the regex is probably not doing its job anymore.'
            )
            break
        for nickname, data in nickname_to_entry.iteritems():
            history_collection.update({'_id': nickname},
                                      {'$push': {
                                          'history': data
                                      }}, upsert=True)
        log.info('%d player histories updated', len(nickname_to_entry))
        last_date = date
    # Persist how far we got so the next run resumes from here.
    scanner_collection.update({'_id': 'leaderboard_history'},
                              {'$set': {
                                  'last_date': last_date
                              }}, upsert=True)
def GET(self):
    """Render a player's profile page.

    Expects 'player' in the query string. Builds win/loss/tie records
    overall, by game size, by turn order and by recency, lists aliases,
    goals, recent games, and a per-opponent record table.
    """
    web.header("Content-Type", "text/html; charset=utf-8")
    query_dict = dict(urlparse.parse_qsl(web.ctx.env['QUERY_STRING']))
    target_player = query_dict['player'].decode('utf-8')

    db = utils.get_mongo_database()
    games = db.games
    norm_target_player = NormName(target_player)
    games_coll = games.find({'players': norm_target_player})

    # normalized opponent name -> list of (opp_name, our_name, game).
    keyed_by_opp = collections.defaultdict(list)
    # normalized opponent name -> display-name usage counts (to pick the
    # most common spelling later).
    real_name_usage = collections.defaultdict(
        lambda: collections.defaultdict(int))

    game_list = []
    aliases = set()

    overall_record = RecordSummary()
    rec_by_game_size = collections.defaultdict(RecordSummary)
    rec_by_date = collections.defaultdict(RecordSummary)
    rec_by_turn_order = collections.defaultdict(RecordSummary)

    # "Recent record" windows, in days.
    date_buckets = (1, 3, 5, 10)
    for g in games_coll:
        game_val = game.Game(g)
        if game_val.DubiousQuality():
            continue
        all_player_names = game_val.AllPlayerNames()
        norm_names = map(NormName, all_player_names)
        # Skip games where two participants normalize to the same name.
        if len(set(norm_names)) != len(all_player_names):
            continue
        target_player_cur_name_cand = [
            n for n in all_player_names
            if NormName(n) == norm_target_player]
        if len(target_player_cur_name_cand) != 1:
            continue
        game_list.append(game_val)
        target_player_cur_name = target_player_cur_name_cand[0]
        aliases.add(target_player_cur_name)
        for p in game_val.PlayerDecks():
            if p.Name() != target_player_cur_name:
                # Opponent deck: index the game under the opponent.
                other_norm_name = NormName(p.Name())
                keyed_by_opp[other_norm_name].append(
                    (p.Name(), target_player_cur_name, game_val))
                real_name_usage[other_norm_name][p.Name()] += 1
            else:
                # Our own deck: accumulate all the record summaries.
                res = game_val.WinLossTie(p.Name())
                overall_record.RecordResult(res, p.WinPoints())
                game_len = len(game_val.PlayerDecks())
                rec_by_game_size[game_len].RecordResult(res, p.WinPoints())
                rec_by_turn_order[p.TurnOrder()].RecordResult(
                    res, p.WinPoints())
                for delta in date_buckets:
                    # Count the game toward every window it falls inside.
                    delta_padded_date = (
                        game_val.Date() +
                        datetime.timedelta(days=delta)).date()
                    today = datetime.datetime.now().date()
                    if (delta_padded_date >= today):
                        rec_by_date[delta].RecordResult(res, p.WinPoints())

    # Opponents ordered by games played together (desc), then name.
    keyed_by_opp_list = keyed_by_opp.items()
    keyed_by_opp_list.sort(key=lambda x: (-len(x[1]), x[0]))

    ret = ('<html><head><title>CouncilRoom.com: Dominion Stats: '
           '%s</title></head>\n' % target_player)
    ret += '<body><A HREF="/">Back to CouncilRoom.com</A><BR><BR>'
    ret += """
Player: <form action='/player' method='get'>
<input type="text" name="player" style="width:100px;" />
<input type="submit" value="Submit" />
</form>
"""
    if len(aliases) > 1:
        ret += 'Player aliases: ' + ', '.join(aliases) + '<br>\n'
    ret += RenderRecordTable('Record by game size', overall_record,
                             rec_by_game_size,
                             lambda game_size: '%d players' % game_size)
    ret += RenderRecordTable('Recent Record', overall_record, rec_by_date,
                             lambda num_days: 'Last %d days' % num_days)
    ret += RenderRecordTable('Record by turn order', overall_record,
                             rec_by_turn_order,
                             lambda pos: 'Table position %d' % pos)
    ret += '<div style="clear: both;"> </div>'
    ret += goals.MaybeRenderGoals(db, norm_target_player)

    ret += '<h2>Most recent games</h2>\n'
    game_list.sort(key=game.Game.Id, reverse=True)
    qm = query_matcher.QueryMatcher(p1_name=target_player)
    for g in game_list[:3]:
        ret += (query_matcher.GameMatcher(g, qm).DisplayGameSnippet() +
                '<br>')
    ret += ('<A HREF="/search_result?p1_name=%s">(See more)</A>' %
            target_player)

    ret += '<h2>Record by opponent</h2>'
    ret += '<table border=1>'
    ret += '<tr><td>Opponent</td><td>Record</td></tr>'
    for opp_norm_name, game_list in keyed_by_opp_list:
        # record = [wins, losses, ties] vs. this opponent.
        record = [0, 0, 0]
        for opp_name, targ_player_cur_name, g in game_list:
            record[g.WinLossTie(targ_player_cur_name, opp_name)] += 1
        ret += '<tr>'
        opp_cannon_name = max(  # Get most freq used name for opponent
            real_name_usage[opp_norm_name].iteritems(),
            key=lambda x: x[1])[0]
        # Ten game cells per table row; the name/record cells span them.
        row_span = (len(game_list) - 1) / 10 + 1
        ret += '<td rowspan=%d>%s</td>' % (
            row_span, game.PlayerDeck.PlayerLink(opp_cannon_name))
        ret += '<td rowspan=%d>%d-%d-%d</td>' % (row_span, record[0],
                                                 record[1], record[2])
        for idx, (opp_name, targ_player_cur_name, g) in enumerate(
            game_list):
            if idx % 10 == 0 and idx > 0:
                ret += '</tr><tr>'
            ret += g.ShortRenderCellWithPerspective(targ_player_cur_name,
                                                    opp_name)
        ret += '</tr>\n'
    ret += '</table></body></html>'
    return ret