def _log_missing_char(self, missing, bid, realm):
    """Log transitions of a character link (bid, realm) going missing or being
    found again in the api data.

    When `missing`, first scans self.get() for a team member whose legacy_link
    matches the key while also having a character_link -- that contradictory
    "both missing and found" case is silently ignored (logging disabled below).
    Otherwise logs MISSING and remembers the key in self.MISSING so a later
    call with missing=False can log FOUND.
    """
    if self.LOG_BAD:
        basename = 'api-fail-missing-char.txt'
        key = (bid, realm)
        if missing:
            # Check if missing and found at the same time.
            for t in self.get():
                for m in t['member']:
                    char = m.get('character_link', {})
                    legacy = m['legacy_link']
                    if char and (legacy['id'], legacy['realm']) == key:
                        # Disabled due to too common.
                        # self._write_to_file(basename, "BOTH MISSING AND FOUND %s IN %s %s" %
                        #                     (key, self.url, utcnow()))
                        return
            self._write_to_file(basename,
                                "MISSING %s IN %s %s" % (key, self.url, utcnow()),
                                dump_data=False)
            # Remember so the FOUND transition can be logged later.
            self.MISSING.add(key)
        else:
            if key in self.MISSING:
                self._write_to_file(basename,
                                    "FOUND %s IN %s %s" % (key, self.url, utcnow()),
                                    dump_data=False)
def save_ranking(self, cpp, ranking, queue_length):
    """Persist the c++ ranking data and stats, mark the ranking complete, then
    ping the local server so it reloads the new ranking.

    :param cpp: c++ ranking data object holding the computed ranking
    :param ranking: Ranking model instance to finalize
    :param queue_length: number of pending updates, for logging only
    """
    ranking.set_data_time(ranking.season.reload(), cpp)
    logger.info(
        "saving ranking %d, %d updates left in queue not included, new data_time is %s" %
        (ranking.id, queue_length, ranking.data_time))
    cpp.save_data(ranking.id, ranking.season_id, to_unix(utcnow()))
    cpp.save_stats(ranking.id, to_unix(utcnow()))
    ranking.status = Ranking.COMPLETE_WITH_DATA
    ranking.save()
    # Ping server to reload ranking.
    try:
        raw = request_tcp('localhost', 4747,
                          json.dumps({'cmd': 'refresh'}).encode('utf-8'),
                          timeout=self.server_ping_timeout)
        response = json.loads(raw.decode('utf-8'))
        code = response.get('code')
        if code == 'ok':
            logger.info("refresh ping returned ok")
        else:
            logger.warning("refresh ping returned %s" % code)
    except OSError as e:
        # Best effort -- the ranking is already saved; a failed ping only
        # delays the server picking it up.
        logger.warning("refresh ping to server failed: " + str(e))
def run(self, args, logger):
    """Re-save data and stats for every ranking from season 28 onwards."""
    ranking_data = sc2.RankingData(get_db_name(), Enums.INFO)
    rankings = Ranking.objects.filter(season_id__gte=28)
    for r in rankings:
        ranking_data.load(r.id)
        ranking_data.save_data(r.id, r.season_id, to_unix(utcnow()))
        ranking_data.save_stats(r.id, to_unix(utcnow()))
    return 0
def create_ranking(self, **kwargs):
    """Create a Ranking with sensible defaults, remember it on self and return it."""
    defaults = dict(created=utcnow(),
                    data_time=utcnow(),
                    min_data_time=utcnow(),
                    max_data_time=utcnow(),
                    status=Ranking.COMPLETE_WITH_DATA,
                    season=self.season)
    merged = merge_args(defaults, **kwargs)
    self.ranking = Ranking.objects.create(**merged)
    return self.ranking
def process_ladder(self, load=False, save=False, region=Region.EU, fetch_time=None,
                   mode=Mode.TEAM_1V1, version=Version.HOTS, league=League.GOLD,
                   season=None, tier=0, members=None, **kwargs):
    """ Update a ranking building single member with kwargs or use members if set.

    :param load: load existing ranking data first via self.load()
    :param save: persist to ranking afterwards via self.save_to_ranking()
    :param members: list of member dicts; defaults to one gen_member(**kwargs)
    """
    season = season or self.db.season
    fetch_time = fetch_time or utcnow()
    members = members or [gen_member(**kwargs)]
    # Lazily create the c++ ranking data object and reuse it across calls.
    if not getattr(self, 'cpp', None):
        self.cpp = sc2.RankingData(self.db.db_name, Enums.INFO)
    if load:
        self.load()
    self.cpp.update_with_ladder(0,  # bid
                                0,  # source_id
                                region,
                                mode,
                                league,
                                tier,
                                version,
                                season.id,
                                to_unix(fetch_time),
                                fetch_time.date().isoformat(),
                                Mode.team_size(mode),
                                members)
    if save:
        self.save_to_ranking()
def setUp(self):
    """Reset the db and create seasons 15-17 plus ladders for the two seasons
    that have start dates."""
    super().setUp()
    self.db.delete_all()
    self.now = utcnow()
    self.today = self.now.date()
    self.s15 = self.db.create_season(id=15, start_date=self.date(days=-120),
                                     end_date=self.date(days=-21),
                                     version=Version.LOTV)
    self.s16 = self.db.create_season(id=16, start_date=self.date(days=-20),
                                     end_date=None, version=Version.LOTV)
    self.s17 = self.db.create_season(id=17, start_date=None, end_date=None,
                                     version=Version.LOTV)
    # Create ladders for existing seasons to make code eligible to switch to season.
    for season, base_bid in ((self.s15, 1500), (self.s16, 1600)):
        for bid in range(base_bid, base_bid + 10):
            self.db.create_ladder(bid=bid, season=season)
def mock_current_season(self, status=200, season_id=None, start_time=None, fetch_time=None):
    """Replace bnet.fetch_current_season with a Mock returning a canned
    SeasonResponse (defaults: the db's current season, starting and fetched now,
    zero fetch duration)."""
    self.bnet.fetch_current_season = \
        Mock(return_value=SeasonResponse(status,
                                         ApiSeason({'seasonId': season_id or self.db.season.id,
                                                    'startDate': to_unix(start_time or utcnow())},
                                                   'http://fake-url'),
                                         fetch_time or utcnow(),
                                         0))
def refetch_past_seasons(check_stop=lambda: None, bnet_client=None, now=None, skip_fetch_new=False):
    """Refetch ladder data for closed seasons.

    For a while after the previous season closes, also fetch brand-new ladders
    for it in every ranking region. Then refetch all seasons (id > 14) whose
    end date is older than REFETCH_PAST_MIN_DAYS_AFTER_SEASON_END, newest first.

    :param check_stop: callable raising to abort processing
    :param now: injectable current time, defaults to utcnow()
    """
    bnet_client = bnet_client or BnetClient()
    now = now or utcnow()
    # Wait for this date before refetching season.
    season_end_limit = now - timedelta(
        days=Season.REFETCH_PAST_MIN_DAYS_AFTER_SEASON_END)
    # Fetch new for a while after season close to make sure we got all ladders. Since we don't save non 200 leagues
    # we can still miss ladders here, but unlikely. There is no point in continuing after need_refetch_limit since they
    # will not be picked up in the ranking anyway.
    prev_season = Season.get_current_season().get_prev()
    need_refetch_limit = prev_season.end_time() + timedelta(
        days=Season.REFETCH_PAST_UNTIL_DAYS_AFTER_SEASON_END)
    if not skip_fetch_new and prev_season.end_time() < now <= need_refetch_limit:
        for region in Region.ranking_ids:
            fetch_new_in_region(check_stop, bnet_client, prev_season, region)
    # Refetch all past seasons.
    for season in Season.objects.filter(
            id__gt=14, end_date__lt=season_end_limit).order_by('-id'):
        refetch_past_season(season, now, check_stop, bnet_client)
        check_stop()
def setUp(self):
    """Reset the db (keeping seasons) and create the single ladder shared by
    all test cases."""
    super().setUp()
    self.now = utcnow()
    self.db.delete_all(keep=[Season])
    # Always using same ladder.
    self.ladder = self.db.create_ladder(bid=1,
                                        created=self.datetime(seconds=1))
def get(self, request, mode_id=None):
    """Serve cached ranking stats json for a mode, honoring If-Modified-Since.

    Returns 404 for modes without v1 stats, 304 when the client's copy is
    still current, otherwise the cached json payload. All non-404 responses
    carry 24h caching headers.
    """
    mode_id = int(mode_id)
    # Idiom fix: was `if not (mode_id in ...)`.
    if mode_id not in Mode.stat_v1_ids:
        return HttpResponse(status=404)

    last_updated = to_unix(cache_value("ranking_stats_last_modified", 600,
                                       ranking_stats_last_modified))
    now = to_unix(utcnow())

    try:
        if_modified_since = parse_http_date(request.META['HTTP_IF_MODIFIED_SINCE'])
    except (ValueError, KeyError):
        # Header missing or malformed -- treat as "never fetched".
        if_modified_since = 0

    if if_modified_since >= last_updated:
        response = HttpResponse("", content_type="application/json", status=304)
    else:
        response = HttpResponse(cache_value("ranking_stats_%d" % mode_id, 600,
                                            rankings_view_client, 'ranking_stats', mode_id),
                                content_type="application/json")

    response['Cache-Control'] = "max-age=86400"
    response['Date'] = http_date(now)
    response['Expires'] = http_date(now + 86400)
    response['Last-Modified'] = http_date(last_updated)
    return response
def update_ranking_stats(self, ranking_id=None):
    """ Will build ranking stats based of the ranking by calling c++. """
    rid = self.ranking.id if ranking_id is None else ranking_id
    cpp = sc2.RankingData(self.db_name, Enums.INFO)
    cpp.load(rid)
    cpp.save_stats(rid, to_unix(utcnow()))
    cpp.release()
def get_thought(self):
    """Build a TextThought from the cleaned form data; tags are split on commas
    and stripped."""
    if not self.is_valid():
        # throw exception?
        pass
    data = self.cleaned_data
    thought = TextThought(title=data['title'], content=data['content'])
    thought.tags = {tag.strip() for tag in data['tags'].split(",")}
    thought.last_update = utcnow()
    return thought
def gen_api_data(l_bid, *p_bids, mmq="HOTS_SOLO", league="SILVER", race="ZERG",
                 join_time=None, join_times=None, season="currentSeason", team_size=None):
    """ Generate api data from blizzard. Returns <ladder_data, player_ladder_data>. Player ladder
    data will contain one team.

    :param l_bid: ladder bid; also used as the player bid in the urls (see note at end)
    :param p_bids: player bids, one member generated per bid
    :param join_times: cycled over members; defaults to [join_time or utcnow()]
    :param team_size: defaults to len(p_bids)
    """
    join_time = join_time or utcnow()
    join_times = join_times or [join_time]
    team_size = team_size or len(p_bids)
    names = [uniqueid(10) for _ in p_bids]
    pld = {
        "ladderId": l_bid,
        "league": league,
        "matchMakingQueue": mmq,
        "wins": 20,
        "losses": 22
    }
    ppds = [{
        "id": p_bid,
        "realm": 1,
        "displayName": name,
        "profilePath": "/profile/%d/1/%s/" % (p_bid, name)
    } for p_bid, name in zip(p_bids, names)]
    # NOTE(review): strftime("%s") is a non-portable libc extension and formats
    # in the local timezone -- confirm this matches downstream parsing.
    members = [{
        "character": {
            "id": p_bid,
            "realm": 1,
            "profilePath": "/profile/%d/1/%s/" % (p_bid, name)
        },
        "joinTimestamp": int(jt.strftime("%s")),
        "points": 102.2,
        "wins": 20,
        "losses": 22,
        "favoriteRaceP1": race
    } for p_bid, jt, name in zip(p_bids, itertools.cycle(join_times), names)]
    # Teams larger than 1 carry a favorite race per additional slot.
    for member in members:
        if team_size > 1:
            member['favoriteRaceP2'] = race
        if team_size > 2:
            member['favoriteRaceP3'] = race
        if team_size > 3:
            member['favoriteRaceP4'] = race
    l = {"ladderMembers": members}
    p = {season: [{"ladder": [pld], "characters": ppds}]}
    # To simplify l_bid = p_bid.
    return ApiLadder(l, '/ladder/%d/' % l_bid), ApiPlayerLadders(
        p, '/profile/%d/' % l_bid)
def update_age(data):
    """ Update age of ladder data to present it as correctly as possible (to not be cached in
    outer layers). """
    now = to_unix(utcnow())
    for team in data['teams']:
        age_seconds = now - int(team["data_time"])
        if age_seconds >= 3600:
            team["age"] = "%dh" % (age_seconds // 3600)
        else:
            # Round up to the nearest minute, minimum one minute.
            team["age"] = "%dm" % max((age_seconds + 60) // 60, 1)
def _log_bad_mmr(self, bad, bid, realm, url):
    """Log transitions of a (bid, realm, url) key between bad and good mmr.

    Mirrors the MISSING tracking in _log_missing_char: the key is remembered
    in self.BAD_MMR so repeat offenders log STILL BAD and recoveries log
    NOW GOOD. The original never updated BAD_MMR, leaving the STILL BAD and
    NOW GOOD branches dead unless populated elsewhere.
    """
    if self.LOG_BAD:
        basename = 'api-fail-bad-mmr.txt'
        key = (bid, realm, url)
        if bad:
            if key in self.BAD_MMR:
                self._write_to_file(basename, 'STILL BAD %s %s' % (key, utcnow()),
                                    dump_data=False)
            else:
                self._write_to_file(basename, 'NEW BAD %s %s' % (key, utcnow()),
                                    dump_data=False)
                # Remember so later calls can distinguish NEW/STILL/NOW GOOD.
                self.BAD_MMR.add(key)
        else:
            if key in self.BAD_MMR:
                self._write_to_file(basename, 'NOW GOOD %s %s' % (key, utcnow()),
                                    dump_data=False)
                self.BAD_MMR.discard(key)
def run(self, args, logger):
    """Nagios-style check: for each region, how many days ago its games-played
    count last changed compared to the newest ranking.

    Walks rankings newest-first; the first ranking seeds the per-region latest
    counts, and each region is done once an older ranking's count differs.
    Age > 4 days is CRITICAL, > 1 day WARNING. Skipped near season start.
    """
    log_region('ALL')
    rankings = Ranking.objects.order_by('-id')
    status = OK
    # Counts are unstable right after a season starts, so skip the check.
    if rankings[0].season.near_start(utcnow(), days=5):
        print(f"{TEXT[status]} - skipping check, close to season start")
        return status
    latest_time_by_region = {}
    latest_count_by_region = {}
    diff_time_by_region = {}
    get = sc2.Get(settings.DATABASES['default']['NAME'], Enums.INFO, 0)
    regions = {r for r in args.regions}
    for ranking in rankings:
        counts = get.games_played(ranking.id)
        if not latest_count_by_region:
            # First (newest) ranking: baseline time and counts per region.
            latest_time_by_region = {
                region: ranking.data_time
                for region in regions
            }
            latest_count_by_region = {
                region: counts.get(region, 0)
                for region in regions
            }
            logger.info(f"start at {ranking.data_time}")
        else:
            for region in list(regions):
                # A count change marks the last time this region got new data.
                if latest_count_by_region[region] != counts.get(region):
                    diff_time_by_region[region] = ranking.data_time
                    regions.remove(region)
                    logger.info(
                        f"region {region} differs in ranking {ranking.id} at {ranking.data_time}"
                    )
        if not regions:
            break
    ages = {}
    for region in args.regions:
        age = (latest_time_by_region[region] - diff_time_by_region[region]).days
        logger.info(f"region {region} age {age} days")
        ages[Region.key_by_ids[region]] = age
        if age > 4:
            status = max(status, CRITICAL)
        elif age > 1:
            status = max(status, WARNING)
    print(f"{TEXT[status]} - age by region in days: {ages}")
    return status
def create_ranking_data(self, raw=True, **kwargs):
    """Create a RankingData row for self.ranking (or kwargs['ranking']).

    Team ranks in kwargs['data'] get defaults filled in and their source
    caches linked to the ranking; the raw data is saved through c++. When
    raw is False the c++ object also loads and re-saves the processed data.
    """
    kwargs = merge_args(dict(ranking=self.ranking, updated=utcnow()), kwargs)
    data = kwargs.pop('data', [])
    ranking = kwargs['ranking']
    for team_rank in data:
        self._default_team_rank(team_rank)
        ranking.sources.add(self.get(Cache, pk=team_rank['source_id']))
    self.ranking_data = RankingData.objects.create(**kwargs)
    sc2.save_ranking_data_raw(self.db_name, ranking.id, 0, data, True)
    if not raw:
        cpp = sc2.RankingData(self.db_name, Enums.INFO)
        cpp.load(ranking.id)
        cpp.save_data(ranking.id, ranking.season_id, to_unix(utcnow()))
        cpp.release()
    return self.ranking_data
def mock_fetch_league(self, status=200, fetch_time=None, season_id=None,
                      t0_bids=None, t1_bids=None, t2_bids=None):
    """Replace bnet.fetch_league with a Mock returning a canned LeagueResponse
    with three tiers whose divisions are built from t0/t1/t2 ladder bids
    (empty tiers by default)."""
    season_id = season_id or self.db.season.id
    self.bnet.fetch_league = \
        Mock(return_value=LeagueResponse(status,
                                         ApiLeague({'tier': [
                                             {'id': 0, 'division': [{'ladder_id': lid} for lid in t0_bids or []]},
                                             {'id': 1, 'division': [{'ladder_id': lid} for lid in t1_bids or []]},
                                             {'id': 2, 'division': [{'ladder_id': lid} for lid in t2_bids or []]},
                                         ]}, url="http://fake-url", bid=season_id * 100000),
                                         fetch_time or utcnow(),
                                         0))
def setUpClass(self):
    """Class-level fixture: db handle, base time, required cache/ladder objects
    and seasons 15-16.

    NOTE(review): receives the class through a parameter named `self` --
    presumably decorated with @classmethod outside this view; confirm.
    """
    super(Test, self).setUpClass()
    self.db = Db()
    self.now = utcnow()
    self.today = self.now.date()
    # Required objects, not actually used in test cases.
    self.db.create_cache()
    self.db.create_ladder()
    self.s15 = self.db.create_season(id=15, start_date=self.date(days=-200),
                                     end_date=self.date(days=-101))
    self.s16 = self.db.create_season(id=16, start_date=self.date(days=-100),
                                     end_date=None)
def delete_old_cache_data(self, keep_days=30):
    """ Delete all cache data that is no longer linked from rankings or ladders but only if older
    than 30 days.

    :param keep_days: minimum age in days before unreferenced data is deleted

    Only successfully fetched (status 200) objects of the listed types are
    considered. Actual deletion only happens when self.do_delete is set;
    the count is always logged.
    """
    with transaction.atomic():
        objects = Cache.objects.filter(ladder__isnull=True,
                                       ranking__isnull=True,
                                       status=200,
                                       type__in=(Cache.LADDER, Cache.PLAYER,
                                                 Cache.PLAYER_LADDERS, Cache.SEASON),
                                       updated__lt=utcnow() - timedelta(days=keep_days))
        count = objects.count()
        logger.info("%sremoving unreferenced %d cache objects" % (self.prefix, count))
        if self.do_delete:
            objects.delete()
def create_ladder(self, **kwargs):
    """Create a Ladder with sensible defaults, remember it on self and return it."""
    defaults = {
        'bid': 1,
        'region': Region.EU,
        'strangeness': Ladder.GOOD,
        'league': League.GOLD,
        'tier': 0,
        'version': Version.HOTS,
        'mode': Mode.TEAM_1V1,
        'season': self.season,
        'first_join': utcnow(),
        'last_join': utcnow(),
        'created': utcnow(),
        'updated': utcnow(),
        'max_points': 20,
    }
    ladder = Ladder(**merge_args(defaults, **kwargs))
    ladder.save()
    self.ladder = ladder
    return ladder
def __init__(self):
    """Command that archives caches of old rankings to files and deletes them
    from the db."""
    super().__init__(
        "Archive caches for old rankings to files and remove the from db to save space.",
        pid_file=True,
        stoppable=True)
    # NOTE(review): default=utcnow() is evaluated once at import time, not at
    # run time -- confirm that is acceptable for this argument.
    self.add_argument('--now',
                      dest="now",
                      type=utcdatetime,
                      default=utcnow(),
                      help="The date (YYYY-MM-DD HH:MM:SS) in utc to use for the processing.")
def fetch_current_season(self, region, timeout=60):
    """ Fetch current season information.

    :return: <status code, ApiSeasonInfo or None, fetch time, fetch duration>
    """
    url = '%s/season/current' % self.REGION_URL_PREFIXES_2[region]
    timer = Timer()
    status, data = self.http_get_json(url, timeout, ACCESS_TOKEN_AUTH)
    season = ApiSeason(data, url)
    return SeasonResponse(status, season, utcnow(), timer.end())
def create_cache(self, type=Cache.LADDER, members=None, **kwargs):
    """Create a Cache row with defaults, remember it on self and return it.

    :param members: if set (and no explicit data), generate ladder data from it

    Fix: the default bid used randint(1, 1e6); random.randint requires int
    bounds (float bounds raise on Python >= 3.12).
    """
    data = kwargs.pop('data', None)
    if data is None and members is not None:
        data = gen_ladder_data(members)
    kwargs = merge_args(
        {
            'bid': randint(1, 10**6),
            'url': 'http://bnet/' + uniqueid(10),
            'type': type,
            'region': Region.EU,
            'created': utcnow(),
            'updated': utcnow(),
            'status': 200,
            'retry_count': 0
        }, **kwargs)
    kwargs['data'] = json.dumps(data)
    self.cache = Cache(**kwargs)
    self.cache.save()
    return self.cache
def fetch_player_ladders(self, region, player_path, timeout=60):
    """ Fetch player ladders from blizzard api.

    :return: <status code, ApiPlayerLadders or None, fetch time, fetch duration>
    """
    url = '%s%sladders' % (self.REGION_URL_PREFIXES_1[region], player_path)
    timer = Timer()
    status, data = self.http_get_json(url, timeout, API_KEY_AUTH)
    ladders = ApiPlayerLadders(data, url)
    return PlayerLaddersResponse(status, ladders, utcnow(), timer.end())
def fetch_ladder(self, region, bid, timeout=60):
    """ Fetch ladder from blizzard api.

    :return: <status code, ApiLadder or None, fetch time, fetch duration>
    """
    url = "%s/data/sc2/ladder/%s" % (self.REGION_URL_PREFIXES[region], bid)
    timer = Timer()
    status, data = self.http_get_json(url, timeout, ACCESS_TOKEN_AUTH)
    ladder = ApiLadder(data, url)
    return LadderResponse(status, ladder, utcnow(), timer.end())
def setUpClass(self):
    """Class-level fixture creating the three test seasons.

    Fix: `self.today` was computed with self.now.today() -- datetime.today()
    is a classmethod returning the current *local* datetime, ignoring
    self.now entirely. Use .date() of the shared `now`, matching the other
    fixtures in this file.
    """
    super().setUpClass()
    self.now = utcnow()
    self.today = self.now.date()
    self.s1 = self.db.create_season(id=1, start_date=self.date(days=-499),
                                    end_date=self.date(days=-300))
    self.s2 = self.db.create_season(id=2, start_date=self.date(days=-299),
                                    end_date=self.date(days=-100))
    self.s3 = self.db.create_season(id=3, start_date=self.date(days=-99),
                                    end_date=self.date(days=100))
def handle(self, *args, **options):
    """Generate random thoughts plus per-author tag counts and bulk-insert them
    into mongo. Honors the `number` and `cleanup` options.

    Fix: the original was Python-2-only -- long() is gone, random.choice()
    needs a sequence (not dict_keys), dict.iteritems() is gone, and
    dict_values objects are not orderable as a sort key.
    """
    if 'number' in options:
        self._number = options.pop('number')
    if 'cleanup' in options:
        self._cleanup = options.pop('cleanup')

    thoughts = []
    user_tags = {}
    self.stdout.write("Generating %d entries.\n" % self._number)
    for _ in range(self._number):
        thought = {
            'author_id': int(random.choice(list(self.AUTHORS.keys()))),
            'title': self._get_random_text(random.randint(self.MIN_TITLE, self.MAX_TITLE)),
            'content': self._get_random_text(random.randint(self.MIN_CONTENT, self.MAX_CONTENT)),
            'last_update': utcnow(),
            'tags': self._get_random_tags(random.randint(self.MIN_TAG, self.MAX_TAG)),
        }
        thought['author'] = self.AUTHORS[thought['author_id']]
        author_id = thought['author_id']
        if author_id not in user_tags:
            user_tags[author_id] = []
        # Count tag occurrences for this author; `index` maps tag -> position.
        index = []
        for tag in thought['tags']:
            if tag not in index:
                user_tags[author_id].append({tag: 1})
                index.append(tag)
            else:
                pos = index.index(tag)  # was shadowing the outer loop variable
                user_tags[author_id][pos][tag] += 1
        thoughts.append(thought)

    user_tags_ext = []
    for author_id, tags in user_tags.items():
        user_tags_ext.append({
            '_id': author_id,
            # Sort most-used tags first; list() because dict_values can't be compared.
            'tags': sorted(tags, key=lambda t: list(t.values()), reverse=True),
        })

    client = MongoClient()
    db = client[settings.DBNAME]
    if self._cleanup:
        self.stdout.write("Performing cleanup.\n")
        db.text_thought.remove()
        db.user_tags.remove()
    self.stdout.write("Saving %d thoughts and %s tag entries to db.\n" %
                      (len(thoughts), len(user_tags_ext)))
    db.text_thought.insert(thoughts)
    db.user_tags.insert(user_tags_ext)
    client.close()
def _get_season_list():
    """Return a json array of seasons (id > 14 with a start date) carrying unix
    start/end times and an alternating display color; an open season "ends" now."""
    seasons = Season.objects.filter(id__gt=14,
                                    start_date__isnull=False).order_by('id')
    entries = []
    for season in seasons:
        if season.end_date:
            end = to_unix(season.end_time())
        else:
            end = to_unix(utcnow())
        entries.append({
            "id": season.id,
            "number": season.number,
            "year": season.year,
            "start": to_unix(season.start_time()),
            "end": end,
            "color": "#ff6666" if season.id % 2 == 0 else "#6666ff",
        })
    return json.dumps(entries)
def gen_member(**kwargs):
    """ Generate member as returned from ApiLadder and used by process_ladder.

    Fix: bid used randint(1, 1e6); random.randint requires int bounds (float
    bounds raise on Python >= 3.12).
    """
    return merge_args(
        {
            "bid": randint(1, 10**6),
            "realm": 0,
            "name": uniqueid(length=12),
            "clan": uniqueid(length=32),
            "tag": uniqueid(length=6),
            "join_time": int(to_unix(utcnow())),
            "points": float(randint(0, 2000)),
            "wins": randint(0, 200),
            "mmr": randint(1000, 5000),
            "losses": randint(0, 200),
            "race": choice([Race.ZERG, Race.PROTOSS, Race.TERRAN, Race.RANDOM])
        }, **kwargs)
def test_to_dict(self):
    """to_dict exposes exactly tags, content, title and a recent pub_date."""
    self.assertEqual(TextThought.objects.count(), 0)

    thought = TextThought()
    thought.author_id = self.user.id
    thought.author = self.user.username
    thought.content = 'test content'
    thought.title = 'test title'
    thought.tags = {'tag1', 'tag2', 'tag3'}
    thought.save()

    d = thought.to_dict()
    self.assertEqual(len(d), 4)
    self.assertEqual(set(d['tags']), {'tag1', 'tag2', 'tag3'})
    self.assertEqual(d['content'], 'test content')
    self.assertEqual(d['title'], 'test title')
    now = timestamp(utcnow())
    # pub_date should be within the last five seconds.
    self.assertTrue(now >= d['pub_date'] >= now - 5)
def setUp(self):
    """Reset the db and create seasons 16 (open) and 17 (unscheduled) plus one
    present ladder."""
    super().setUp()
    self.db.delete_all()
    self.now = utcnow()
    self.today = self.now.date()
    self.s16 = self.db.create_season(id=16, start_date=self.date(days=-20),
                                     end_date=None, version=Version.LOTV)
    self.s17 = self.db.create_season(id=17, start_date=None, end_date=None,
                                     version=Version.LOTV)
    # Create a present ladder.
    self.l99 = self.db.create_ladder(bid=99, season=self.s16)
def setUp(self):
    """Reset the db and create seasons 35-37: two closed and one just opened."""
    super().setUp()
    self.db.delete_all()
    self.now = utcnow()
    self.today = self.now.date()
    season_ranges = {
        35: (self.date(days=-120), self.date(days=-21)),
        36: (self.date(days=-20), self.date(days=-2)),
        37: (self.date(days=-1), None),
    }
    for sid, (start, end) in season_ranges.items():
        season = self.db.create_season(id=sid, start_date=start, end_date=end,
                                       version=Version.LOTV)
        setattr(self, 's%d' % sid, season)
def test_add_thought(self):
    """Saving a TextThought persists it and makes it findable by phrase search
    with all fields intact and a recent generation time."""
    self.assertEqual(TextThought.objects.count(), 0)
    thought = TextThought()
    thought.author_id = self.user.id
    thought.author = self.user.username
    thought.content = 'test content'
    thought.title = 'test title'
    thought.tags = {'tag1', 'tag2', 'tag3'}
    thought.save()
    self.assertEqual(TextThought.objects.count(), 1)
    # Give the search index time to pick up the new document.
    time.sleep(1)
    # test if thought was indexed
    thought_ids = search_by_phrase(self.user.id, 'test content')
    self.assertEqual(len(thought_ids), 1)
    thought = TextThought.objects.get(id=thought_ids[0])
    self.assertEqual(thought.author_id, self.user.id)
    self.assertEqual(thought.author, self.user.username)
    self.assertEqual(thought.content, 'test content')
    self.assertEqual(thought.title, 'test title')
    self.assertEqual(thought.tags, {'tag1', 'tag2', 'tag3'})
    # The ObjectId's generation time should be within the last five seconds.
    now = utcnow()
    just_now = now - timedelta(seconds=5)
    self.assertTrue(now > thought.id.generation_time > just_now)
def save(self, force_insert=False, validate=True, clean=True, write_concern=None,
         cascade=None, cascade_kwargs=None, _refs=None, **kwargs):
    """Stamp last_update with the current utc time, then delegate to the
    mongoengine Document save (arguments passed through positionally)."""
    self.last_update = utcnow()
    super(TextThought, self).save(force_insert, validate, clean, write_concern,
                                  cascade, cascade_kwargs, _refs, **kwargs)