def prepare(self) -> None:
    """Populate `fav_format` and `num_matches` on every person in self.people.

    Results are cached per-person in redis for one hour; on a cache miss the
    values are recomputed from the match database and written back.
    """
    for p in self.people:
        key = f'logsite:people:{p.id}'
        data = redis.get_container(key, ex=3600)
        if data:
            # Cache hit: reuse the precomputed summary verbatim.
            p.fav_format = data.fav_format
            p.num_matches = data.num_matches
        else:
            p.num_matches = match.get_recent_matches_by_player(p.name).count()
            # Per-format match counts for this player.
            # NOTE(review): there is no ORDER BY here, yet p.formats[0] below is
            # treated as the favourite — this relies on the engine's implicit
            # GROUP BY ordering. Confirm, or add ORDER BY num_matches DESC.
            stmt = text("""
                SELECT f.name, COUNT(*) AS num_matches
                FROM match_players AS mp
                INNER JOIN `match` AS m ON mp.match_id = m.id
                INNER JOIN format AS f ON m.format_id = f.id
                WHERE mp.user_id = :pid
                GROUP BY f.id;
            """)
            p.formats = db.DB.session.query('name', 'num_matches').from_statement(stmt).params(pid=p.id).all()
            if p.formats:
                p.fav_format = '{0} ({1} matches)'.format(p.formats[0][0], p.formats[0][1])
            else:
                # Em-dash-like placeholder shown when the player has no recorded formats.
                p.fav_format = '⸺'
            redis.store(key, {
                'fav_format': p.fav_format,
                'num_matches': p.num_matches
            }, ex=3600)
def subreddit(start_date: datetime.datetime, end_date: datetime.datetime, max_items: int = sys.maxsize) -> List[Container]:
    """Return subreddit posts as news items within [start_date, end_date].

    Entries are read from the feed newest-first; items newer than end_date are
    skipped, and iteration stops at the first item older than start_date.
    Results are cached in redis for one hour. Returns [] if redis is unreachable.
    """
    try:
        redis_key = 'decksite:news:subreddit'
        items = redis.get_container_list(redis_key)
        if items:
            # Cached dates were serialized as timestamps; rehydrate to datetimes.
            for item in items:
                item.date = dtutil.ts2dt(item.date)
            # NOTE(review): on a cache hit the start_date/end_date/max_items
            # arguments are NOT re-applied — the cached window is returned
            # as-is. Confirm all callers use the same window.
            return items
        feed = fetcher.subreddit()
        items = []
        for entry in feed.entries:
            item = Container({
                'title': entry.title,
                'date': dtutil.parse(entry.updated, '%Y-%m-%dT%H:%M:%S+00:00', dtutil.UTC_TZ),
                'url': entry.link,
                'type': 'subreddit-post'
            })
            if item.date > end_date:
                continue
            if item.date < start_date:
                # Feed is newest-first, so everything after this is older too.
                break
            items.append(item)
            if len(items) >= max_items:
                break
        redis.store(redis_key, items, ex=3600)
        return items
    except ConnectionError:
        # redis down — degrade to "no news" rather than erroring out.
        return []
def set_issue_bbt(number: int, text: Optional[str]) -> None:
    """Record the bug-blog text for an issue, or forget it when text is None.

    The value is kept both in the in-process ISSUE_CODES map and in redis
    (with a 20-minute expiry).
    """
    key = f'modobugs:bug_blog_text:{number}'
    if text is not None:
        ISSUE_CODES[number] = text
        redis.store(key, text, ex=1200)
        return
    # Clearing: drop both the local entry (if any) and the redis copy.
    ISSUE_CODES.pop(number, None)
    redis.clear(key)
async def on_member_update(self, before: Member, after: Member) -> None:
    """Keep guild roles in sync when a member's presence/activity changes.

    Handles three concerns in order: the 'Currently Streaming' role, the
    'Linked Magic Online' role (granted when PDM knows the member), and
    per-achievement trophy roles on the PD server.
    """
    if before.bot:
        return
    # streamers.
    streaming_role = await get_role(before.guild, 'Currently Streaming')
    if streaming_role:
        # Add/remove the streaming role based on the member's new activity.
        if not isinstance(after.activity, Streaming) and streaming_role in before.roles:
            await after.remove_roles(streaming_role)
        if isinstance(after.activity, Streaming) and not streaming_role in before.roles:
            await after.add_roles(streaming_role)
    # Achievements
    role = await get_role(before.guild, 'Linked Magic Online')
    # Only do the (potentially expensive) person lookup when the member comes online.
    if role and before.status == Status.offline and after.status == Status.online:
        data = None
        # Linked to PDM
        if role is not None and not role in before.roles:
            if data is None:
                data = await fetcher.person_data_async(before.id)
            if data.get('id', None):
                await after.add_roles(role)
        key = f'discordbot:achievements:players:{before.id}'
        # redis flag rate-limits the achievements fetch to once per 4 hours per member.
        if is_pd_server(before.guild) and not redis.get_bool(key) and not data:
            data = await fetcher.person_data_async(before.id)
            redis.store(key, True, ex=14400)
        # Trophies
        if is_pd_server(before.guild) and data is not None and data.get('achievements', None) is not None:
            expected: List[Role] = []  # trophy roles the member should have
            remove: List[Role] = []    # trophy roles the member should lose

            async def achievement_name(key: str) -> str:
                # Resolve an achievement key to its display title, refreshing
                # the in-memory cache on a miss.
                name = self.achievement_cache.get(key, None)
                if name is None:
                    self.achievement_cache.update(await fetcher.achievement_cache_async())
                    name = self.achievement_cache[key]
                return f'🏆 {name["title"]}'

            for name, count in data['achievements'].items():
                if int(count) > 0:
                    trophy = await achievement_name(name)
                    role = await get_role(before.guild, trophy, create=True)
                    if role is not None:
                        expected.append(role)
            # Anything already held is removed from `expected` (nothing to do);
            # any other trophy role the member holds is stale and gets removed.
            for role in before.roles:
                if role in expected:
                    expected.remove(role)
                elif '🏆' in role.name:
                    remove.append(role)
            # NOTE(review): these act on `before` while the earlier role changes
            # act on `after` — both reference the same member; confirm intent.
            await before.remove_roles(*remove)
            await before.add_roles(*expected)
def get_file_contents(file: str) -> List[str]:
    """Return the lines of `file`, caching the result in redis for one week."""
    cache_key = f'decksite:rotation:file:{file}'
    cached = redis.get_list(cache_key)
    if cached is not None:
        return cached
    with open(file) as handle:
        lines = handle.readlines()
    redis.store(cache_key, lines, ex=604800)
    return lines
def do_push() -> None:
    """Publish rotation legality files to the gh-pages repo and file a checklist issue.

    Clones the pages repo if absent, copies the legal-cards files in, commits
    and pushes, then runs the post-rotation follow-ups; anything that fails or
    can't be automated is appended to a manual checklist issue.
    """
    gh_repo = os.path.join(configuration.get_str('legality_dir'), 'gh_pages')
    if not os.path.exists(gh_repo):
        subprocess.run(['git', 'clone', 'https://github.com/PennyDreadfulMTG/pennydreadfulmtg.github.io.git', gh_repo], check=True)
    setcode = rotation.next_rotation_ex().mtgo_code
    files = ['legal_cards.txt', f'{setcode}_legal_cards.txt']
    for fn in files:
        source = os.path.join(configuration.get_str('legality_dir'), fn)
        dest = os.path.join(gh_repo, fn)
        shutil.copy(source, dest)
    os.chdir(gh_repo)
    subprocess.run(['git', 'add'] + files, check=True)
    subprocess.run(['git', 'commit', '-m', f'{setcode} rotation'], check=True)
    subprocess.run(['git', 'push'], check=True)
    checklist = f"""{setcode} rotation checklist
https://pennydreadfulmagic.com/admin/rotation/
- [ ] upload legal_cards.txt to S3
- [ ] upload {setcode}_legal_cards.txt to S3
- [ ] ping scryfall
- [ ] email mtggoldfish
- [ ] ping tappedout
"""
    # If the discordbot is running (it registers its commit id), ask it to
    # reboot itself; otherwise a human has to restart it.
    if redis.get_str('discordbot:commit_id'):
        redis.store('discordbot:do_reboot', True)
    else:
        checklist += '- [ ] restart discordbot'
    ds = os.path.expanduser('~/decksite/')
    failed = False
    try:
        if os.path.exists(ds):
            os.chdir(ds)
            subprocess.run(['python3', 'run.py', 'maintenance', 'post_rotation'], check=True)
        else:
            failed = True
    except Exception:  # pylint: disable=broad-except
        # Best-effort: any failure just adds a manual checklist item below.
        failed = True
    if failed:
        checklist += '- [ ] run post_rotation\n'
    try:
        fetch_tools.post('https://gatherling.com/util/updateDefaultFormats.php')
    except fetch_tools.FetchException:
        checklist += '- [ ] Update Gatherling legal cards list'
    # Touch the first uwsgi vassal config that exists to trigger a decksite
    # reload; the for/else adds a manual item only if none was found.
    for path in ['/etc/uwsgi/vassals/decksite.ini', '/home/discord/vassals/decksite.ini']:
        srv = pathlib.Path(path)
        if srv.exists():
            srv.touch()
            break
    else:
        checklist += '- [ ] touch /etc/uwsgi/vassals/decksite.ini\n'
    repo.create_issue(checklist, 'rotation script', 'rotation')
def get_parents(repo: Repository, sha: str) -> List[str]:
    """Return the parent-commit shas of `sha`, memoized in redis for one week."""
    cache_key = f'github:parents:{repo.full_name}:{sha}'
    cached = redis.get_list(cache_key)
    if cached is not None:
        return cached
    parents = [parent.sha for parent in repo.get_commit(sha).parents]
    redis.store(cache_key, list(parents), ex=604800)
    return parents
def search_scryfall(query: str, exhaustive: bool = False) -> Tuple[int, List[str]]:
    """Returns a tuple. First member is an integer indicating how many cards match the query total,
       second member is a list of card names up to the maximum that could be fetched in a timely fashion.
       Supply exhaustive=True to instead retrieve the full list (potentially very slow).

       Results are cached in redis for one hour. Returns (False, []) on an
       empty query or any Scryfall error (False == 0, so the int contract holds).
    """
    if query == '':
        return False, []
    redis_key = f'scryfall:query:{query}:' + ('exhaustive' if exhaustive else 'nonexhaustive')
    cached = redis.get_list(redis_key)
    result_data: List[Dict]
    if cached:
        total_cards, result_data = int(cached[0]), cached[1]
    else:
        url = 'https://api.scryfall.com/cards/search?q=' + fetch_tools.escape(query)
        result_data = []
        while True:
            # Retry transient fetch failures up to three times.
            result_json = None
            for _ in range(3):
                try:
                    result_json = fetch_tools.fetch_json(url)
                    break
                except FetchException as c:
                    print(c)
            if result_json is None:
                # BUG FIX: previously `result_json` was left unbound when all
                # three attempts raised, causing a NameError below. Treat an
                # exhausted retry loop as "no results" instead.
                print('Error fetching scryfall data: all retries failed')
                return False, []
            if 'code' in result_json.keys():  # The API returned an error
                if result_json['status'] == 404:  # No cards found
                    return False, []
                print('Error fetching scryfall data:\n', result_json)
                return False, []
            for warning in result_json.get('warnings', []):  # scryfall-provided human-readable warnings
                print(warning)  # Why aren't we displaying these to the user?
            result_data += result_json['data']
            total_cards = int(result_json['total_cards'])
            if not exhaustive or len(result_data) >= total_cards:
                break
            sleep(0.1)  # be polite to the API between pages
            url = result_json['next_page']
        redis.store(redis_key, [total_cards, result_data], ex=3600)
    # Penny-legal cards sort first.
    result_data.sort(key=lambda x: x['legalities']['penny'])

    def get_frontside(scr_card: Dict) -> str:
        """If card is transform, returns first name. Otherwise, returns name.
        This is to make sure cards are later found in the database"""
        # not sure how to handle meld cards
        if scr_card['layout'] in ['transform', 'flip', 'adventure', 'modal_dfc']:
            return scr_card['card_faces'][0]['name']
        return scr_card['name']
    result_cardnames = [get_frontside(obj) for obj in result_data]
    return total_cards, result_cardnames
def get_pr_from_commit(repo: Repository, sha: str) -> Optional[PullRequest]:
    """Find the open PR whose head is `sha`, if any.

    A redis mapping from head sha to PR number (1h expiry) lets us skip the
    full PR scan on repeat lookups; the scan refreshes that mapping as it goes.
    """
    cached_number = redis.get_int(f'github:head:{sha}')
    if cached_number:
        try:
            candidate = repo.get_pull(cached_number)
            # Only trust the cache if the PR still points at this sha and is open.
            if candidate.head.sha == sha and candidate.state == 'open':
                return candidate
        except UnknownObjectException:
            pass  # Stale cache entry — fall through to the scan.
    for pr in repo.get_pulls():
        head_sha = pr.head.sha
        redis.store(f'github:head:{head_sha}', pr.number, ex=3600)
        if head_sha == sha:
            return pr
    return None
def whatsinstandard() -> WISSchemaType:
    """Return the whatsinstandard v6 payload.

    Served from a 24h redis cache when possible. A second, never-expiring
    redis copy is kept as a fallback for when the remote fetch fails.
    """
    info = redis.get_container('magic:fetcher:whatisinstandard_6')
    if info is not None:
        return info
    try:
        fetched = fetch_tools.fetch_json('http://whatsinstandard.com/api/v6/standard.json')
    except FetchException:
        # Remote unavailable — fall back to the last successful fetch, if any.
        fallback = redis.get_container('magic:fetcher:whatisinstandard_noex')
        if fallback is None:
            raise
        return fallback
    redis.store('magic:fetcher:whatisinstandard_6', fetched, ex=86400)
    redis.store('magic:fetcher:whatisinstandard_noex', fetched)
    return fetched
def __init__(self, **kwargs: Any) -> None:
    """Set up the Discord bot: record launch info, configure commands, and schedule background tasks."""
    self.launch_time = perf.start()
    # Publish the running commit so ops tooling (e.g. rotation) can tell the
    # bot is alive and ask it to reboot.
    commit_id = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode()
    redis.store('discordbot:commit_id', commit_id)
    help_command = commands.DefaultHelpCommand(dm_help=None, no_category='Commands')
    super().__init__(command_prefix=commands.when_mentioned_or('!'), help_command=help_command, case_insensitive=True, **kwargs)
    self.voice = None
    # Cache of achievement key -> metadata, filled lazily (see on_member_update).
    self.achievement_cache: Dict[str, Dict[str, str]] = {}
    for task in TASKS:
        asyncio.ensure_future(task(self), loop=self.loop)
    discordbot.commands.setup(self)
def calculate_similar_decks(ds: List[Deck]) -> None:
    """Attach a `similar_decks` list to every deck in `ds`.

    Candidates are decks sharing at least one card; a candidate qualifies when
    its similarity score (0-100) reaches the threshold. Each computed list is
    also cached in redis for 48 hours.
    """
    min_score = 20
    escaped = ', '.join(sqlescape(name) for name in all_card_names(ds))
    if not escaped:
        # No cards at all means nothing can be similar.
        for deck in ds:
            deck.similar_decks = []
        return
    candidates = load_decks(f'd.id IN (SELECT deck_id FROM deck_card WHERE card IN ({escaped}))')
    for deck in ds:
        for candidate in candidates:
            candidate.similarity_score = round(similarity_score(deck, candidate) * 100)
        similar = [c for c in candidates if c.similarity_score >= min_score and c.id != deck.id]
        similar.sort(key=lambda c: c.similarity_score, reverse=True)
        deck.similar_decks = similar
        redis.store(f'decksite:deck:{deck.id}:similar', similar, ex=172800)
def rotation_redis_store() -> Tuple[int, int, List[Card]]:
    """Aggregate the rotation run files and push the summary into redis.

    Returns (runs completed, percentage of total runs, scored cards).
    Returns (0, 0, []) when no run files exist yet.
    """
    lines = []
    fs = files()
    if len(fs) == 0:
        if not os.path.isdir(os.path.expanduser(configuration.get_str('legality_dir'))):
            print('WARNING: Could not find legality_dir.')
        return (0, 0, [])
    # The newest file determines which cards are currently on the list.
    with open(fs[-1], 'r') as f:
        latest_list = f.read().splitlines()
    for filename in fs:
        for line in get_file_contents(filename):
            line = text.sanitize(line)
            lines.append(line.strip())
    # Each appearance of a card name is one "hit"; the top count equals the
    # number of runs completed so far (assumes at least one ever-present card
    # — TODO confirm).
    scores = Counter(lines).most_common()
    runs = scores[0][1]
    runs_percent = round(round(runs / TOTAL_RUNS, 2) * 100)
    cs = oracle.cards_by_name()
    cards = []
    card_names_by_status: Dict[str, List[str]] = {}
    for name, hits in scores:
        c = process_score(name, hits, cs, runs, latest_list)
        if c is not None:
            cards.append(c)
            classify_by_status(c, card_names_by_status)
    redis.store('decksite:rotation:summary:runs', runs, ex=604800)
    redis.store('decksite:rotation:summary:runs_percent', runs_percent, ex=604800)
    redis.store('decksite:rotation:summary:cards', cards, ex=604800)
    # NOTE(review): sadd with an ex= kwarg — presumably a project wrapper
    # around redis SADD that also sets a TTL; confirm against the redis module.
    if 'Undecided' in card_names_by_status:
        redis.sadd('decksite:rotation:summary:undecided', *card_names_by_status['Undecided'], ex=604800)
    if 'Legal' in card_names_by_status:
        redis.sadd('decksite:rotation:summary:legal', *card_names_by_status['Legal'], ex=604800)
    if 'Not Legal' in card_names_by_status:
        redis.sadd('decksite:rotation:summary:notlegal', *card_names_by_status['Not Legal'], ex=604800)
    return (runs, runs_percent, cards)
def code_merges(start_date: datetime.datetime, end_date: datetime.datetime, max_items: int = sys.maxsize) -> List[Container]:
    """Return merged pull requests as news items, via a 1h redis cache.

    PRs labeled 'Not News' are excluded. Returns [] if redis is unreachable.
    """
    try:
        merges = redis.get_container_list('decksite:news:merges')
        if merges is None:
            merges = [
                Container({
                    'date': dtutil.UTC_TZ.localize(pull.merged_at),
                    'title': pull.title,
                    'url': pull.html_url,
                    'type': 'code-release'
                })
                for pull in repo.get_pull_requests(start_date, end_date, max_items)
                # Idiom fix: `x not in y`, not `not x in y` (PEP 8).
                if 'Not News' not in [label.name for label in pull.as_issue().labels]
            ]
            redis.store('decksite:news:merges', merges, ex=3600)
        else:
            # Cached dates were serialized as timestamps; rehydrate to datetimes.
            for merge in merges:
                merge.date = dtutil.ts2dt(merge.date)
        return merges
    except ConnectionError:
        # redis down — degrade to "no news" rather than erroring out.
        return []
def load_decks_heavy(
        where: str = 'TRUE',
        having: str = 'TRUE',
        order_by: Optional[str] = None,
        limit: str = '',
        season_id: Optional[Union[str, int]] = None) -> List[Deck]:
    """Load decks matching the given SQL fragments, fully hydrated.

    `where`, `having`, `order_by` and `limit` are spliced into the query
    verbatim (callers are responsible for escaping). Each loaded deck is also
    written to redis (short TTL while its run is in progress, 1h otherwise).
    """
    if order_by is None:
        order_by = 'active_date DESC, d.finish IS NULL, d.finish'
    sql = """
        SELECT
            d.id,
            d.name AS original_name,
            d.created_date,
            d.updated_date,
            SUM(CASE WHEN dm.games > IFNULL(odm.games, 0) THEN 1 ELSE 0 END) AS wins,
            SUM(CASE WHEN dm.games < odm.games THEN 1 ELSE 0 END) AS losses,
            SUM(CASE WHEN dm.games = odm.games THEN 1 ELSE 0 END) AS draws,
            d.finish,
            d.archetype_id,
            d.url AS source_url,
            d.competition_id,
            c.name AS competition_name,
            c.end_date AS competition_end_date,
            c.top_n AS competition_top_n,
            ct.name AS competition_type_name,
            d.identifier,
            {person_query} AS person,
            p.id AS person_id,
            p.banned,
            p.discord_id,
            d.decklist_hash,
            d.retired,
            d.reviewed,
            s.name AS source_name,
            IFNULL(a.name, '') AS archetype_name,
            cache.normalized_name AS name,
            cache.colors,
            cache.colored_symbols,
            cache.color_sort,
            cache.legal_formats,
            ROUND(cache.omw * 100, 2) AS omw,
            season.id AS season_id,
            IFNULL(MAX(m.date), d.created_date) AS active_date
        FROM
            deck AS d
        LEFT JOIN person AS p ON d.person_id = p.id
        LEFT JOIN source AS s ON d.source_id = s.id
        LEFT JOIN archetype AS a ON d.archetype_id = a.id
        {competition_join}
        LEFT JOIN deck_cache AS cache ON d.id = cache.deck_id
        LEFT JOIN deck_match AS dm ON d.id = dm.deck_id
        LEFT JOIN `match` AS m ON dm.match_id = m.id
        LEFT JOIN deck_match AS odm ON odm.deck_id <> d.id AND dm.match_id = odm.match_id
        {season_join}
        WHERE
            ({where}) AND ({season_query})
        GROUP BY
            d.id,
            d.competition_id, -- Every deck has only one competition_id but if we want to use competition_id in the HAVING clause we need this.
            season.id -- In theory this is not necessary as all decks are in a single season and we join on the date but MySQL cannot work that out so give it the hint it needs.
        HAVING
            {having}
        ORDER BY
            {order_by}
        {limit}
    """.format(person_query=query.person_query(),
               competition_join=query.competition_join(),
               season_join=query.season_join(),
               where=where,
               season_query=query.season_query(season_id, 'season.id'),
               having=having,
               order_by=order_by,
               limit=limit)
    # Raise the GROUP_CONCAT limit so large aggregated columns are not truncated.
    db().execute('SET group_concat_max_len=100000')
    rows = db().select(sql)
    decks = []
    for row in rows:
        d = Deck(row)
        d.maindeck = []
        d.sideboard = []
        d.competition_top_n = Top(d.competition_top_n or 0)
        # JSON-encoded columns from deck_cache — decode with safe defaults.
        d.colored_symbols = json.loads(d.colored_symbols or '[]')
        d.colors = json.loads(d.colors or '[]')
        d.legal_formats = set(json.loads(d.legal_formats or '[]'))
        d.active_date = dtutil.ts2dt(d.active_date)
        d.created_date = dtutil.ts2dt(d.created_date)
        d.updated_date = dtutil.ts2dt(d.updated_date)
        if d.competition_end_date:
            d.competition_end_date = dtutil.ts2dt(d.competition_end_date)
        # Divine Intervention is the only card that can force an intentional draw.
        d.can_draw = 'Divine Intervention' in [card.name for card in d.all_cards()]
        d.wins = int(d.wins)
        d.losses = int(d.losses)
        d.draws = int(d.draws)
        decks.append(d)
    load_cards(decks)
    load_competitive_stats(decks)
    for d in decks:
        # Decks still in a run change often, so cache them only briefly.
        expiry = 60 if d.is_in_current_run() else 3600
        redis.store('decksite:deck:{id}'.format(id=d.id), d, ex=expiry)
    return decks
def rotate() -> str:
    """Flag the discordbot to reboot itself and report success."""
    redis.store('discordbot:do_reboot', True)
    return 'True'
def lock(channel_id: int) -> None:
    """Mark the channel's p1p1 as in progress for up to five minutes."""
    redis.store(f'discordbot:p1p1:{channel_id}', True, ex=300)