def times_from_location(q: str, twentyfour: bool) -> Dict[str, List[str]]:
    """Geocode `q` with the Google Maps API and return a one-entry dict
    mapping the formatted local time there to [q].

    Raises NotConfiguredException when no API key is configured and
    TooFewItemsException when the place or its timezone cannot be resolved.
    """
    api_key = configuration.get('google_maps_api_key')
    if not api_key:
        raise NotConfiguredException('No value found for google_maps_api_key')
    url = 'https://maps.googleapis.com/maps/api/geocode/json?address={q}&key={api_key}&sensor=false'.format(
        q=internal.escape(q), api_key=api_key)
    info = internal.fetch_json(url)
    if 'error_message' in info:
        # NOTE(review): returns a plain str here, not the annotated Dict —
        # callers appear to rely on this; left as-is.
        return info['error_message']
    try:
        location = info['results'][0]['geometry']['location']
    except IndexError as e:
        raise TooFewItemsException(e)
    # BUG FIX: the separator before the timestamp parameter had been mangled
    # into the single character '×' ('&times' rendered as an HTML entity),
    # producing an invalid query string. It must be '&timestamp='.
    url = 'https://maps.googleapis.com/maps/api/timezone/json?location={lat},{lng}&timestamp={timestamp}&key={api_key}&sensor=false'.format(
        lat=internal.escape(str(location['lat'])),
        lng=internal.escape(str(location['lng'])),
        timestamp=internal.escape(str(dtutil.dt2ts(dtutil.now()))),
        api_key=api_key)
    timezone_info = internal.fetch_json(url)
    if 'error_message' in timezone_info:
        return timezone_info['error_message']
    if timezone_info['status'] == 'ZERO_RESULTS':
        raise TooFewItemsException(timezone_info['status'])
    try:
        timezone = dtutil.timezone(timezone_info['timeZoneId'])
    except KeyError:
        raise TooFewItemsException(f'Unable to find a timezone in {timezone_info}')
    return {current_time(timezone, twentyfour): [q]}
def fetch_paginated_list(uri):
    """Fetch a paginated API resource, following `next_page` links while
    `has_more` is set, and return the concatenation of every page's `data`."""
    page = fetcher_internal.fetch_json(uri)
    collected = page['data']
    while page['has_more']:
        page = fetcher_internal.fetch_json(page['next_page'])
        collected += page['data']
    return collected
def all_sets() -> List[Dict[str, Any]]:
    """Return all Magic sets: loaded from a local sets.json when present,
    otherwise fetched from the Scryfall API.

    The response must fit in a single page — a paginated result here would
    silently drop sets, hence the has_more assertion.
    """
    try:
        # Context manager closes the handle promptly; the old code leaked it.
        with open('sets.json') as f:
            d = json.load(f)
    except FileNotFoundError:
        d = internal.fetch_json('https://api.scryfall.com/sets')
    assert not d['has_more']
    return d['data']
def search_scryfall(query):
    """Returns a tuple. First member is an integer indicating how many cards
    match the query total, second member is a list of card names up to the
    maximum that could be fetched in a timely fashion, third member is the
    list of Scryfall-provided warnings. On error or empty query returns
    (False, [], [])."""
    if query == '':
        # BUG FIX: error paths used to return 2-tuples while the success path
        # returns a 3-tuple, crashing any caller that unpacks three values.
        return False, [], []
    print(f'Searching scryfall for `{query}`')
    result_json = internal.fetch_json('https://api.scryfall.com/cards/search?q=' + internal.escape(query), character_encoding='utf-8')
    if 'code' in result_json.keys():  # The API returned an error
        if result_json['status'] == 404:  # No cards found
            return False, [], []
        print('Error fetching scryfall data:\n', result_json)
        return False, [], []
    for warning in result_json.get('warnings', []):  # scryfall-provided human-readable warnings
        print(warning)
    result_data = result_json['data']
    result_data.sort(key=lambda x: x['legalities']['penny'])

    def get_frontside(scr_card):
        """If card is transform, returns first name. Otherwise, returns name.
        This is to make sure cards are later found in the database"""
        # not sure how to handle meld cards
        if scr_card['layout'] in ['transform', 'flip']:
            return scr_card['card_faces'][0]['name']
        return scr_card['name']
    result_cardnames = [get_frontside(obj) for obj in result_data]
    return result_json['total_cards'], result_cardnames, result_json.get('warnings', [])
def scryfall_last_updated() -> datetime.datetime:
    """Return the timestamp of Scryfall's most recent default-cards bulk export."""
    d = internal.fetch_json('https://api.scryfall.com/bulk-data')
    entry = next((o for o in d['data'] if o['type'] == 'default_cards'), None)
    if entry is not None:
        return dtutil.parse_rfc3339(entry['updated_at'])
    raise InvalidDataException(
        f'Could not get the last updated date from Scryfall: {d}')
def make_final_list() -> None:
    """Aggregate the per-run legality files into legal_cards.txt.

    Discards planes/phenomena (fetched from Scryfall), keeps cards that
    appear in at least half of TOTAL_RUNS, unions in the previous legal list
    for supplemental rotations, and writes the sorted result to
    legality_dir/legal_cards.txt.
    """
    planes = fetcher_internal.fetch_json(
        'https://api.scryfall.com/cards/search?q=t:plane%20or%20t:phenomenon'
    )['data']
    plane_names = [p['name'] for p in planes]
    files = rotation.files()
    lines: List[str] = []
    for line in fileinput.input(files):
        line = text.sanitize(line)
        if line in plane_names:
            print(f'DISCARDED: [{line}] is a plane.')
            continue
        lines.append(line)
    scores = Counter(lines).most_common()
    # A card passes when it showed up in at least half of the runs.
    passed: List[str] = [name for name, count in scores if count >= TOTAL_RUNS / 2]
    final = list(passed)
    if is_supplemental():
        final = list(set(passed).union(c + '\n' for c in fetcher.legal_cards()))
    final.sort()
    path = os.path.join(configuration.get_str('legality_dir'), 'legal_cards.txt')
    # `with` guarantees the handle is closed even on a write error
    # (the old code opened and closed it manually).
    with open(path, mode='w', encoding='utf-8') as h:
        h.write(''.join(final))
    print('Generated legal_cards.txt. {0}/{1} cards.'.format(
        len(passed), len(scores)))
def catalog_cardnames() -> List[str]:
    """Return every card name from Scryfall's card-name catalog, additionally
    appending each face of split names ('A // B') as its own entry."""
    result_json = internal.fetch_json('https://api.scryfall.com/catalog/card-names')
    names: List[str] = result_json['data']
    # BUG FIX: the old code extended `names` while iterating over it, which
    # mutates the list under the active iterator (it only terminated because
    # the appended faces contain no ' // '). Collect the faces first instead.
    faces = [face for n in names if ' // ' in n for face in n.split(' // ')]
    names.extend(faces)
    return names
def all_cards() -> List[CardDescription]:
    """Return every Scryfall default card: loaded from a local
    all-default-cards.json when present, otherwise fetched from the
    Scryfall archive."""
    try:
        # Context manager closes the file; the old code leaked the handle.
        with open('all-default-cards.json') as f:
            return json.load(f)
    except FileNotFoundError:
        return internal.fetch_json(
            'https://archive.scryfall.com/json/scryfall-default-cards.json',
            character_encoding='utf-8')
def whatsinstandard() -> Dict[str, Union[bool, List[Dict[str, str]]]]:
    """Return the whatsinstandard.com set info, cached in redis for a day."""
    key = 'magic:fetcher:whatisinstandard'
    cached = redis.get_container(key)
    if cached is None:
        cached = internal.fetch_json('http://whatsinstandard.com/api/v5/sets.json')
        redis.store(key, cached, ex=86400)
    return cached
def bugged_cards() -> Optional[List[Dict[str, Any]]]:
    """Fetch the modo-bugs bug list; None when it can't be fetched or is empty."""
    try:
        return internal.fetch_json(
            'https://pennydreadfulmtg.github.io/modo-bugs/bugs.json')
    except FetchException:
        print("WARNING: Couldn't fetch bugs")
        return None
def time(q: str) -> str:
    """Return the current time (h:MM AM/PM) for `q`.

    A query longer than three characters is geocoded via the Google Maps
    APIs; otherwise `q` is treated as a timezone abbreviation.
    Raises TooFewItemsException when the place or timezone can't be found.
    """
    if len(q) > 3:
        url = 'http://maps.googleapis.com/maps/api/geocode/json?address={q}&sensor=false'.format(q=internal.escape(q))
        info = internal.fetch_json(url)
        try:
            location = info['results'][0]['geometry']['location']
        except IndexError as e:
            raise TooFewItemsException(e)
        # BUG FIX: '&timestamp=' had been mangled into '×tamp=' (an HTML
        # entity rendering of '&times'), breaking the query string.
        url = 'https://maps.googleapis.com/maps/api/timezone/json?location={lat},{lng}&timestamp={timestamp}&sensor=false'.format(
            lat=internal.escape(str(location['lat'])),
            lng=internal.escape(str(location['lng'])),
            timestamp=internal.escape(str(dtutil.dt2ts(dtutil.now()))))
        timezone_info = internal.fetch_json(url)
        if timezone_info['status'] == 'ZERO_RESULTS':
            raise TooFewItemsException(timezone_info['status'])
        timezone = dtutil.timezone(timezone_info['timeZoneId'])
    else:
        try:
            timezone = dtutil.timezone(q.upper())
        except pytz.exceptions.UnknownTimeZoneError:  # type: ignore
            raise TooFewItemsException('Not a recognized timezone: {q}'.format(q=q))
    return dtutil.now(timezone).strftime('%l:%M %p')
def tournament(comp: Dict[str, Any]) -> None:
    """Import one competition: create it if needed and, when the stored copy
    is missing decks, store each deck from the fetched payload."""
    comp = fetcher_internal.fetch_json(comp['url'])
    start = dtutil.ts2dt(comp['start_date'])
    end = dtutil.ts2dt(comp['end_date'])
    competition_id = competition.get_or_insert_competition(
        start, end, comp['name'], comp['series_name'], comp['url'],
        top.Top(comp['top_n']))
    print(f"{comp['name']} = {competition_id}")
    stored = competition.load_competition(competition_id)
    if stored.num_decks < comp['num_decks']:
        for raw_deck in comp['decks']:
            store_deck(raw_deck)
def search_scryfall(query: str, exhaustive: bool = False) -> Tuple[int, List[str]]:
    """Returns a tuple. First member is an integer indicating how many cards
    match the query total, second member is a list of card names up to the
    maximum that could be fetched in a timely fashion. Supply exhaustive=True
    to instead retrieve the full list (potentially very slow). Returns (0, [])
    for an empty query or an API error."""
    if query == '':
        # BUG FIX: was `return False, []` — 0 matches the declared
        # Tuple[int, ...] and compares equal to False, so callers are safe.
        return 0, []
    redis_key = f'scryfall:query:{query}:' + ('exhaustive' if exhaustive else 'nonexhaustive')
    cached = redis.get_list(redis_key)
    result_data: List[Dict]
    if cached:
        # Cached value is stored as [total_cards, result_data] below.
        total_cards, result_data = int(cached[0]), cached[1]
    else:
        url = 'https://api.scryfall.com/cards/search?q=' + internal.escape(
            query)
        result_data = []
        while True:
            result_json = internal.fetch_json(url, character_encoding='utf-8')
            if 'code' in result_json.keys():  # The API returned an error
                if result_json['status'] == 404:  # No cards found
                    return 0, []
                print('Error fetching scryfall data:\n', result_json)
                return 0, []
            for warning in result_json.get(
                    'warnings', []):  # scryfall-provided human-readable warnings
                print(warning)  # Why aren't we displaying these to the user?
            result_data += result_json['data']
            total_cards = int(result_json['total_cards'])
            if not exhaustive or len(result_data) >= total_cards:
                break
            sleep(0.1)  # be polite to the API between pages
            url = result_json['next_page']
        redis.store(redis_key, [total_cards, result_data], ex=3600)
    result_data.sort(key=lambda x: x['legalities']['penny'])

    def get_frontside(scr_card: Dict) -> str:
        """If card is transform, returns first name. Otherwise, returns name.
        This is to make sure cards are later found in the database"""
        # not sure how to handle meld cards
        if scr_card['layout'] in ['transform', 'flip']:
            return scr_card['card_faces'][0]['name']
        return scr_card['name']
    result_cardnames = [get_frontside(obj) for obj in result_data]
    return total_cards, result_cardnames
def fetch_deck_details(raw_deck: DeckType) -> DeckType:
    """Fetch the full deck record for `raw_deck` from the tappedout API."""
    url = 'https://tappedout.net/api/collection/collection:deck/{slug}/'.format(
        slug=raw_deck['slug'])
    return fetcher_internal.fetch_json(url)
def fetch_decks() -> List[DeckType]:
    """Fetch the latest penny-dreadful decks from the tappedout API."""
    url = 'https://tappedout.net/api/deck/latest/penny-dreadful/'
    return fetcher_internal.fetch_json(url)
def card_by_id(id):
    """Look up a single card on Scryfall by its id.

    NOTE(review): the parameter shadows the builtin `id`; left unchanged
    because renaming would break keyword-argument callers.
    """
    url = 'https://api.scryfall.com/cards/{0}'.format(id)
    return fetcher_internal.fetch_json(url)
def card_price(cardname: str) -> Dict[str, Any]:
    """Fetch price data for `cardname` from the price service.

    Split cards use '-split-' in place of '//' in the URL path.
    """
    slug = cardname.replace('//', '-split-')
    return internal.fetch_json('http://vorpald20.com:5800/{0}/'.format(slug))
def bugged_cards() -> Optional[List[Dict[str, Any]]]:
    """Fetch the modo-bugs bug list; propagates None if the fetch yields nothing."""
    bugs = internal.fetch_json('https://pennydreadfulmtg.github.io/modo-bugs/bugs.json')
    # The old explicit `if bugs is None: return None` branch was a no-op;
    # returning the value directly is identical.
    return bugs
def disabled() -> None:
    """Import every competition listed by pennydreadfulmagic.com, oldest first."""
    competitions = fetcher_internal.fetch_json(
        'https://pennydreadfulmagic.com/api/competitions')
    # Iterate in reverse rather than reversing the list in place.
    for c in reversed(competitions):
        tournament(c)
def sitemap() -> List[str]:
    """Fetch the decksite sitemap listing."""
    url = decksite_url('/api/sitemap/')
    return internal.fetch_json(url)
def rulings(cardname: str) -> List[Dict[str, str]]:
    """Fetch Scryfall rulings for the card exactly named `cardname`."""
    card = internal.fetch_json(
        'https://api.scryfall.com/cards/named?exact={name}'.format(name=cardname))
    rulings_json = internal.fetch_json(card['uri'] + '/rulings')
    return rulings_json['data']
def mtgjson_version() -> str:
    """Fetch the current mtgjson data version string."""
    version = internal.fetch_json('https://mtgjson.com/json/version.json')
    return cast(str, version)