def log_events(have_logged, seen, local, remote):
    """Write history entries to the remote DB for any events not yet logged.

    have_logged: set of history descriptors already present remotely.
    seen: set of card ids; reward cards discovered here are added to it.
    local: DB-API-style connection to the local game data.
    remote: context manager yielding a SQLAlchemy-style session.
    """
    # map: event id -> event_stub_t
    events = {k[0]: k for k in prime_from_cursor(
        "event_stub_t", local.execute(QUERY_GET_EVENT_STUBS))}

    # Generate a set of all event ids with the type bitfield set correctly.
    event_h_ids = set(map(
        lambda x: (models.HISTORY_TYPE_EVENT << 28) | (x & 0x0FFFFFFF), events))
    # XOR the top 4 bits (i.e., the history type id) of each descriptor by 7,
    # which just turns the 2 (HISTORY_TYPE_EVENT) to a 5 (HISTORY_TYPE_EVENT_END).
    # Kept for the (currently disabled) event-end markers below.
    event_end_h_ids = set(map(lambda x: x ^ 0x70000000, event_h_ids))
    # FIX: removed `need_to_add`, which was computed here but never used.

    def ge_cards(event_id):
        # Hand-maintained override list takes precedence over the local DB.
        if event_id in overridden_events:
            rl = list(get_overridden(event_id))
        else:
            rl = [k for k, in local.execute(
                QUERY_GET_REWARDS_FOR_EVENT, (event_id,)).fetchall()]
        seen.update(rl)
        return json.dumps({"event": rl})

    def eti(event_id):
        base = (events[event_id].type - 1) & 0x7
        # TODO figure out where to get token attribute/medley focus...
        return base

    with remote as s:
        # Add event markers.
        for desc in event_h_ids - have_logged:
            # Skip placeholder events with an absurd start year.
            if starlight.JST(events[internal_id(desc)].event_start).year >= 2099:
                continue
            s.add(models.HistoryEventEntry(
                descriptor=desc,
                extra_type_info=eti(internal_id(desc)),
                added_cards=ge_cards(internal_id(desc)),
                event_name=events[internal_id(desc)].name,
                start_time=starlight.JST(events[internal_id(desc)].event_start).timestamp(),
                end_time=starlight.JST(events[internal_id(desc)].event_end).timestamp()
            ))
        # NOTE: event-end markers (event_end_h_ids) are intentionally not
        # written yet; the disabled implementation lives in version control.
        s.commit()
def log_lastresort(have_logged, seen, local, remote):
    """Last-resort pass: bucket any cards not yet attributed to an event or
    gacha by their story start date and log each bucket as an ADD_N entry."""
    root_ids = {k for k, in local.execute(QUERY_GET_ROOTS).fetchall()}
    leftover = root_ids - seen

    by_timestamp = defaultdict(list)
    id_spec = ",".join(str(cid) for cid in leftover)
    for card, datestr in local.execute(QUERY_GET_STORY_START_DATES.format(id_spec)):
        by_timestamp[starlight.JST(datestr).timestamp()].append(card)
        seen.add(card)

    with remote as s:
        for ts, cards in by_timestamp.items():
            # hours since the epoch, hopefully will last us long enough lul
            pk = int(ts / (60 * 60))
            s.add(models.HistoryEventEntry(
                descriptor=pk | (models.HISTORY_TYPE_ADD_N << 28),
                extra_type_info=0,
                added_cards=json.dumps({"new": cards}),
                event_name=None,
                start_time=ts,
                end_time=0,
            ))
        s.commit()
def get(self):
    """Render the main page: the current event (if any), the currently
    running gachas, and a preview card id for each gacha reward."""
    eda = starlight.cached_db(starlight.ark_data_path("event_data.csv"))
    now = pytz.utc.localize(datetime.utcnow())
    # NOTE(review): Feb 29 is skipped forward a day — presumably to dodge a
    # leap-day quirk in the date comparisons below; confirm.
    if now.day == 29 and now.month == 2:
        now += timedelta(days=1)
    for event in eda:
        if starlight.JST(event.event_start) < now < starlight.JST(event.event_end):
            break
    else:
        event = None
    # FIXME this is ridiculous. i just want to convert a f*****g timestamp to a f*****g UTC timestamp.
    if event:
        evedt = starlight.JST(event.event_end)
        event_end = timegm(evedt.timetuple())
    else:
        event_end = None
    local_gachas = list(filter(
        lambda x: starlight.JST(x.start_date) < now < starlight.JST(x.end_date),
        self.gachas))
    card_ids = itertools.chain(
        *([x.reward_id for x in llist] for llist in [x.clist for x in local_gachas]))
    # BUG FIX: the old code used filter(bool, [(idp, card_db.get(idp)), ...]),
    # but a 2-tuple is always truthy, so unknown cards were never dropped and
    # `None.series_id` would raise. Filter on the looked-up card instead.
    s_cards = {
        idp: starlight.evolutionary_chains[card.series_id][0]
        for idp, card in ((x, starlight.card_db.get(x)) for x in card_ids)
        if card is not None
    }
    self.render("main.html",
                history=HISTORY,
                has_event=bool(event),
                event=event,
                event_end=event_end,
                gachas={"lg": local_gachas, "ci": s_cards},
                **self.settings)
    self.settings["analytics"].analyze_request(self.request, self.__class__.__name__)
def get(self):
    """Plain-text endpoint: write the current event's end time (JST wall
    clock, not converted to UTC), or "None" when no event is running."""
    self.set_header("Content-Type", "text/plain; charset=utf-8")
    eda = starlight.cached_db(starlight.ark_data_path("event_data.csv"))
    now = pytz.utc.localize(datetime.utcnow())
    for event in eda:
        if starlight.JST(event.event_start) < now < starlight.JST(event.event_end):
            break
    else:
        event = None
    # FIXME this is ridiculous. i just want to convert a f*****g timestamp to a f*****g UTC timestamp.
    if event:
        evedt = starlight.JST(event.event_end, to_utc=0)
        # FIX: removed a duplicate set_header call (already set above) and the
        # pointless "{1}".format(event.name, ...) whose first argument was
        # never used — output is byte-identical.
        self.write(evedt.strftime("%B %d, %Y %H:%M"))
    else:
        self.write("None")
def update_add_set(s, gacha, add_set):
    """Upsert a GachaLookupEntry row for every card in this gacha's add set.

    s: SQLAlchemy session.
    gacha: gacha stub exposing .id, .start_date, .end_date.
    add_set: dict with optional "other" and "limited" card-id lists.
    """
    # flag 0 -> "other", flag 1 -> "limited" (matches is_limited column).
    for flag, key in enumerate(("other", "limited")):
        for c in add_set.get(key, []):
            # BUG FIX: the original wrote
            #   .filter(card_id == c and is_limited == flag)
            # Python's `and` short-circuits on the (truthy) first clause's
            # expression object and evaluates to only the second clause, so
            # the card_id predicate was silently discarded. Passing both
            # clauses as separate arguments ANDs them in SQL.
            row = s.query(models.GachaLookupEntry).filter(
                models.GachaLookupEntry.card_id == c,
                models.GachaLookupEntry.is_limited == flag).all()
            if row:
                # Existing entry: bump the most-recent availability window.
                row[0].last_gacha_id = gacha.id
                row[0].last_available = starlight.JST(gacha.end_date).timestamp()
                s.add(row[0])
            else:
                s.add(models.GachaLookupEntry(
                    card_id=c,
                    first_gacha_id=gacha.id,
                    last_gacha_id=gacha.id,
                    first_available=starlight.JST(gacha.start_date).timestamp(),
                    last_available=starlight.JST(gacha.end_date).timestamp(),
                    is_limited=flag))
def log_gachas(have_logged, seen, seen_in_gacha, local, remote):
    """Log history entries for gachas not yet recorded, splitting each
    gacha's reward cards into "limited"/"other" and adopting orphan cards
    into the earliest new gacha that contains them."""
    # Gacha ids already present in the history log.
    # NOTE(review): the literal 3 is assumed to equal models.HISTORY_TYPE_GACHA
    # (the descriptors below are built with that constant) — confirm.
    have_gacha_set = {internal_id(x) for x in have_logged if htype(x) == 3}

    # map: gacha id -> gacha_stub_t
    gachas = {k[0]: k for k in prime_from_cursor(
        "gacha_stub_t", local.execute(QUERY_GET_NORMAL_GACHAS))}
    # { gacha id -> { "limited": [...], "other": [...] } }
    add_sets = {k: defaultdict(list) for k in gachas}

    new_gachas = set(gachas.keys()) - have_gacha_set
    gachas_in_chrono_order = sorted(
        new_gachas, key=lambda x: starlight.JST(gachas[x].start_date))
    orphans = {k for k, in local.execute(QUERY_GET_ROOTS).fetchall()} - seen_in_gacha
    is_limited = {}

    # Check limited/featured and collect reward cards per gacha.
    for gid in new_gachas:
        sort_keys = {}
        my_add_set = add_sets[gid]
        for a_card, lim_flag, order in local.execute(QUERY_GET_GACHA_REWARD_META, (gid,)):
            my_add_set["limited" if lim_flag else "other"].append(a_card)
            seen.add(a_card)
            seen_in_gacha.add(a_card)
            # FIX: set.discard replaces the old try/remove/except KeyError.
            orphans.discard(a_card)
            sort_keys[a_card] = order
            if lim_flag:
                # Mark the gacha as limited.
                is_limited[gid] = 1
        # Now sort the add set by in-gacha display order.
        if "limited" in my_add_set:
            my_add_set["limited"].sort(key=sort_keys.get)
        if "other" in my_add_set:
            my_add_set["other"].sort(key=sort_keys.get)

    # Attach each remaining orphan to the earliest new gacha containing it.
    gspec = ",".join(map(str, new_gachas))
    for orphan in orphans:
        havers = [k for k, in local.execute(
            QUERY_FIND_CONTAINING_GACHA.format(gspec), (orphan,))]
        for gid in gachas_in_chrono_order:
            if gid in havers:
                break
        else:
            # print("orphan:", orphan)
            continue
        seen.add(orphan)
        seen_in_gacha.add(orphan)
        add_sets[gid]["other"].append(orphan)

    with remote as s:
        for gid in new_gachas:
            s.add(models.HistoryEventEntry(
                descriptor=gid | (models.HISTORY_TYPE_GACHA << 28),
                extra_type_info=is_limited.get(gid, 0),
                added_cards=json.dumps(add_sets[gid]) if add_sets[gid] else None,
                event_name=gachas[gid].name,
                start_time=starlight.JST(gachas[gid].start_date).timestamp(),
                end_time=starlight.JST(gachas[gid].end_date).timestamp()
            ))
        s.commit()
def log_events(have_logged, seen, local, remote):
    """Log history entries for events not yet recorded, plus per-card
    EventLookupEntry rows categorized by acquisition type.

    have_logged: set of history descriptors already present remotely.
    seen: set of card ids; every reward card discovered here is added to it.
    """
    # map: event id -> event_stub_t
    events = {k[0]: k for k in prime_from_cursor(
        "event_stub_t", local.execute(QUERY_GET_EVENT_STUBS))}

    # Generate a set of all event ids with the type bitfield set correctly.
    event_h_ids = set(map(
        lambda x: (models.HISTORY_TYPE_EVENT << 28) | (x & 0x0FFFFFFF), events))
    # XOR the top 4 bits (i.e., the history type id) of each descriptor by 7,
    # which just turns the 2 (HISTORY_TYPE_EVENT) to a 5 (HISTORY_TYPE_EVENT_END).
    # Kept for the (currently disabled) event-end markers below.
    event_end_h_ids = set(map(lambda x: x ^ 0x70000000, event_h_ids))
    # FIX: removed `need_to_add`, which was computed here but never used.

    def from_event_available(sql, event_id):
        # Generic fallback: every reward row for the event, under "event".
        return {"event": [k for k, in sql.execute(
            QUERY_GET_REWARDS_FOR_EVENT, (event_id,)).fetchall()]}

    def ge_cards(event_id, type_info):
        # Pick a per-event-type specialized reward query, defaulting to the
        # generic one above.
        queryer = EVENT_REWARD_SPECIALIZATIONS.get(type_info & 0xFF, from_event_available)
        if event_id in overridden_events:
            groups = {"event": list(get_overridden(event_id))}
        else:
            groups = queryer(local, event_id)
        if not groups.get("event", []):
            if "event" not in groups:
                print("error: specialization didn't return an event list. this will cause UI problems")
            print("warning: specialization returned no data for", event_id, "trying generic")
            groups = from_event_available(local, event_id)
        for rl in groups.values():
            seen.update(rl)
        return groups

    def eti(event_id):
        base = (events[event_id].type - 1) & 0xFF
        # TODO figure out where to get token attribute/medley focus...
        return base

    with remote as s:
        # Add event markers.
        for desc in event_h_ids - have_logged:
            # Skip placeholder events with an absurd start year.
            if starlight.JST(events[internal_id(desc)].event_start).year >= 2099:
                continue
            cats = ge_cards(internal_id(desc), eti(internal_id(desc)))
            s.add(models.HistoryEventEntry(
                descriptor=desc,
                extra_type_info=eti(internal_id(desc)),
                added_cards=json.dumps(cats),
                event_name=events[internal_id(desc)].name,
                start_time=starlight.JST(events[internal_id(desc)].event_start).timestamp(),
                end_time=starlight.JST(events[internal_id(desc)].event_end).timestamp()))
            # BUG FIX: this per-event set was previously named `seen`, which
            # rebound the `seen` parameter. Because ge_cards closes over that
            # same variable, its seen.update(...) calls went into the wrong
            # set for every event after the first, so the caller's tracking
            # set silently stopped receiving reward cards.
            categorized = set()
            # Collapse the three copy-pasted loops into one table-driven pass.
            for acq_type, category in ((1, "progression"), (2, "ranking"), (3, "gacha")):
                for cid in cats.get(category, []):
                    s.merge(models.EventLookupEntry(
                        card_id=cid,
                        event_id=internal_id(desc),
                        acquisition_type=acq_type))
                    categorized.add(cid)
            # Anything left in the generic "event" bucket gets type 0.
            for cid in cats.get("event", []):
                if cid not in categorized:
                    s.merge(models.EventLookupEntry(
                        card_id=cid,
                        event_id=internal_id(desc),
                        acquisition_type=0))
        # NOTE: event-end markers (event_end_h_ids) are intentionally not
        # written yet; the disabled implementation lives in version control.
        s.commit()