def main(): """ Scan and update buy data""" start = time.time() con = pymongo.Connection() games = con.test.games output_db = con.test parser = utils.incremental_parser() parser.add_argument('--max_games', default=-1, type=int) args = parser.parse_args() overall_stats = DeckBuyStats() scanner = incremental_scanner.IncrementalScanner('buys', output_db) buy_collection = output_db['buys'] if not args.incremental: print 'resetting scanner and db' scanner.Reset() buy_collection.drop() start_size = scanner.NumGames() print scanner.StatusMsg() do_scan(scanner, games, overall_stats, args.max_games) print scanner.StatusMsg() end_size = scanner.NumGames() if args.incremental: existing_overall_data = DeckBuyStats() utils.read_object_from_db(existing_overall_data, buy_collection, '') overall_stats.Merge(existing_overall_data) def deck_freq(data_set): return data_set['Estate'].available.Frequency() print 'existing', deck_freq(existing_overall_data), 'decks' print 'after merge', deck_freq(overall_stats), 'decks' utils.write_object_to_db(overall_stats, buy_collection, '') scanner.Save() time_diff = time.time() - start games_diff = end_size - start_size print 'took', time_diff, 'seconds for', games_diff, 'games for a rate' \ ' of', games_diff / time_diff, 'games/sec'
def main():
    """ Scan and update buy data"""
    # NOTE(review): this chunk is a verbatim duplicate of the preceding
    # main() definition in this mangled source -- likely a paste artifact;
    # confirm against the original file before removing either copy.
    start = time.time()  # wall-clock start, used for the rate report below
    con = pymongo.Connection()
    games = con.test.games  # source collection of raw game documents
    output_db = con.test
    parser = utils.incremental_parser()
    # -1 presumably means "no limit" -- confirm how do_scan treats it.
    parser.add_argument('--max_games', default=-1, type=int)
    args = parser.parse_args()
    overall_stats = DeckBuyStats()
    scanner = incremental_scanner.IncrementalScanner('buys', output_db)
    buy_collection = output_db['buys']
    if not args.incremental:
        # Full rescan: discard scanner progress and accumulated buy stats.
        print 'resetting scanner and db'
        scanner.Reset()
        buy_collection.drop()
    start_size = scanner.NumGames()
    print scanner.StatusMsg()
    do_scan(scanner, games, overall_stats, args.max_games)
    print scanner.StatusMsg()
    end_size = scanner.NumGames()
    if args.incremental:
        # Merge the stats already stored in the db into this run's stats
        # so that the write below replaces the stored aggregate in full.
        existing_overall_data = DeckBuyStats()
        utils.read_object_from_db(existing_overall_data, buy_collection, '')
        overall_stats.Merge(existing_overall_data)
        def deck_freq(data_set):
            # Estate availability frequency is used as a deck counter
            # -- presumably every deck records Estate availability; confirm.
            return data_set['Estate'].available.Frequency()
        print 'existing', deck_freq(existing_overall_data), 'decks'
        print 'after merge', deck_freq(overall_stats), 'decks'
    utils.write_object_to_db(overall_stats, buy_collection, '')
    scanner.Save()
    time_diff = time.time() - start
    games_diff = end_size - start_size
    print 'took', time_diff, 'seconds for', games_diff, 'games for a rate' \
        ' of', games_diff / time_diff, 'games/sec'
else: log.info('normed nickname %s already exists for %s', normed_nickname, date) last_rank = rank pos = match.end() log.info('%d entries matched', num_matches) if num_matches == 0: log.error('No entries found, so the regex is probably not doing its job anymore.') break if num_matches != last_rank: log.error('ERROR: # entries does not match last rank, so the regex is probably not doing its job anymore.') break for nickname, data in nickname_to_entry.iteritems(): history_collection.update({'_id': nickname}, {'$push': {'history': data}}, upsert=True) log.info('%d player histories updated', len(nickname_to_entry)) last_date = date scanner_collection.update({'_id': 'leaderboard_history'}, {'$set': {'last_date': last_date}}, upsert=True) if __name__ == '__main__': parser = utils.incremental_parser() args = parser.parse_args() dominionstats.utils.log.initialize_logging(args.debug) main()
if num_matches != last_rank: log.error( 'ERROR: # entries does not match last rank, so the regex is probably not doing its job anymore.' ) break for nickname, data in nickname_to_entry.iteritems(): history_collection.update({'_id': nickname}, {'$push': { 'history': data }}, upsert=True) log.info('%d player histories updated', len(nickname_to_entry)) last_date = date scanner_collection.update({'_id': 'leaderboard_history'}, {'$set': { 'last_date': last_date }}, upsert=True) if __name__ == '__main__': parser = utils.incremental_parser() args = parser.parse_args() dominionstats.utils.log.initialize_logging(args.debug) main()
def ensure_all_indexes(db):
    """Ensure all expected indexes are in place, for all tables"""
    # INDEXES is presumably a module-level mapping of collection name to a
    # list of index specs -- defined outside this chunk; confirm.
    for table_name, index_list in INDEXES.items():
        for index in index_list:
            log.info("Ensuring %s index for %s", index, table_name)
            db[table_name].ensure_index(index)


def main():
    # Connect and make sure every expected index exists on the test db.
    con = utils.get_mongo_connection()
    ensure_all_indexes(con.test)


if __name__ == '__main__':
    args = utils.incremental_parser().parse_args()
    # Base the log filename on the script name (minus extension).
    script_root = os.path.splitext(sys.argv[0])[0]
    # Create the basic logger
    #logging.basicConfig()
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    # Log to a file, rotated nightly.
    fh = logging.handlers.TimedRotatingFileHandler(script_root + '.log',
                                                   when='midnight')
    if args.debug:
        fh.setLevel(logging.DEBUG)
    else:
        fh.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
    # NOTE(review): this chunk appears truncated -- the formatter is created
    # but never attached, and main() is never invoked within the visible
    # source; the remainder presumably follows in the original file.