def test_break_object_cache(baddata: bytes) -> None:
    """Corrupting the on-disk object cache must not change the fetched result."""
    first = fetcher.scryfetch()
    # Overwrite the cache file with garbage so the next fetch cannot reuse it.
    cache_path = fetcher._cache_path(fetcher.OBJECT_CACHE_URL)
    with open(cache_path, "wb") as cache_handle:
        cache_handle.write(baddata)
    second = fetcher.scryfetch()
    assert first == second
def test_scryfetch() -> None:
    """Repeated fetches are identical and match the generated test data."""
    first = fetcher.scryfetch()
    second = fetcher.scryfetch()
    assert first == second
    # Set codes must match exactly the sets selected for the test corpus.
    assert {s.code for s in first.sets} == gen_testdata.TEST_SETS_TO_CARDS.keys()
    expected_cards = {
        (set_code, card_name)
        for set_code, card_names in gen_testdata.TEST_SETS_TO_CARDS.items()
        for card_name in card_names
    }
    assert {(c.set, c.name) for c in first.cards} == expected_cards
def test_data_fixtures(
    scryfall_data: ScryfallDataSet,
    sets_data: List[ScrySet],
    cards_data: List[ScryCard],
) -> None:
    """A fresh fetch agrees with the pytest fixtures, as a whole and per field."""
    fetched = fetcher.scryfetch()
    assert fetched == scryfall_data
    assert fetched.sets == sets_data
    assert fetched.cards == cards_data
def get_oracle(set_types: Set[ScrySetType], include_digital: bool) -> Oracle:
    """Get a card_db with current mtgjson data."""
    data = bundles.filter_set_types(fetcher.scryfetch(), set_types)
    if include_digital:
        return Oracle(data)
    # Digital-only printings are stripped unless explicitly requested.
    return Oracle(bundles.remove_digital(data))
def _write_json_file(payload: object, path: str) -> None:
    """Write *payload* to *path* as pretty-printed, key-sorted JSON plus newline."""
    with open(path, "wt", encoding="utf-8") as json_file:
        json.dump(payload, json_file, ensure_ascii=False, indent=2, sort_keys=True)
        json_file.write("\n")


def main() -> None:  # pylint: disable=too-many-locals
    """Read scryfall data and write a subset for use as test data."""
    print("Fetching scryfall data")
    scrydata = fetcher.scryfetch()
    bulk_json = fetcher._fetch_endpoint(
        fetcher.BULK_DATA_ENDPOINT, dirty=False, write_cache=False
    )
    bulk_data = cast(
        List[models.ScryBulkData],
        cast(models.ScryObjectList, fetcher._deserialize_object(bulk_json)).data,
    )

    print("Selecting sets")
    accepted_sets = sorted(
        (s for s in scrydata.sets if s.code in TEST_SETS_TO_CARDS),
        key=lambda sset: sset.code,
    )
    missing_sets = set(TEST_SETS_TO_CARDS.keys()) - {s.code for s in accepted_sets}
    if missing_sets:
        raise Exception("Missing sets: " + str(missing_sets))

    print("Selecting cards")
    accepted_cards = sorted(
        (c for c in scrydata.cards if c.name in TEST_SETS_TO_CARDS.get(c.set, set())),
        key=lambda card: (card.set, card.name, card.collector_number, card.id),
    )
    # Deep-copy so we can destructively discard found cards without mutating
    # the module-level TEST_SETS_TO_CARDS mapping.
    missing_cards = copy.deepcopy(TEST_SETS_TO_CARDS)
    for card in accepted_cards:
        missing_cards[card.set].discard(card.name)
    missing_cards = {k: v for k, v in missing_cards.items() if v}
    if missing_cards:
        raise Exception("Missing cards: " + str(missing_cards))

    print("Selecting bulk data")
    accepted_bulk = sorted(
        (bd for bd in bulk_data if bd.type == fetcher.BULK_TYPE),
        key=lambda bulk: bulk.type,
    )

    print("Adjusting sets")
    # Rewrite each set's card_count to reflect only the accepted card subset.
    accepted_sets = [
        dataclasses.replace(
            cset,
            card_count=len([c for c in accepted_cards if c.set == cset.code]),
        )
        for cset in accepted_sets
    ]

    print("Writing sets")
    sets_list = models.ScryObjectList(
        data=accepted_sets,
        has_more=False,
        next_page=None,
        total_cards=None,
        warnings=None,
    )
    # Split the set list across two pages to exercise pagination handling.
    sets_list1 = models.ScryObjectList(
        data=accepted_sets[:5],
        has_more=True,
        next_page=models.URI(SETS_NEXTPAGE_URL),
        total_cards=None,
        warnings=None,
    )
    sets_list2 = models.ScryObjectList(
        data=accepted_sets[5:],
        has_more=False,
        next_page=None,
        total_cards=None,
        warnings=None,
    )
    os.makedirs(TEST_DATA_DIR, exist_ok=True)
    _write_json_file(_OBJECT_SCHEMA.dump(sets_list).data, TARGET_SETS_FILE)
    _write_json_file(_OBJECT_SCHEMA.dump(sets_list1).data, TARGET_SETS_FILE1)
    _write_json_file(_OBJECT_SCHEMA.dump(sets_list2).data, TARGET_SETS_FILE2)

    print("Writing cards")
    _write_json_file(
        [_OBJECT_SCHEMA.dump(c).data for c in accepted_cards], TARGET_CARDS_FILE
    )

    print("Writing bulk data")
    bulk_list = models.ScryObjectList(
        data=accepted_bulk,
        has_more=False,
        next_page=None,
        total_cards=None,
        warnings=None,
    )
    _write_json_file(_OBJECT_SCHEMA.dump(bulk_list).data, TARGET_BULK_FILE)
    print("Done")
def _write_model_json(model, path: str) -> None:
    """Serialize a pydantic *model* to *path* as pretty JSON plus trailing newline.

    NOTE(review): *model* is assumed to expose pydantic's ``.json()`` with
    json.dumps pass-through kwargs — all call sites here are pydantic models.
    """
    with open(path, "wt", encoding="utf-8") as out_file:
        out_file.write(
            model.json(
                ensure_ascii=False,
                indent=2,
                sort_keys=True,
                exclude_none=True,
            )
        )
        out_file.write("\n")


def main() -> None:  # pylint: disable=too-many-locals
    """Read scryfall data and write a subset for use as test data."""
    print("Fetching scryfall data")
    scrydata = fetcher.scryfetch()
    bulk_json = fetcher._fetch_endpoint(
        fetcher.BULK_DATA_ENDPOINT, dirty=False, write_cache=False
    )
    bulk_data = models.ScryObjectList[models.ScryBulkData].parse_obj(bulk_json).data

    print("Selecting sets")
    accepted_sets = sorted(
        (s for s in scrydata.sets if s.code in TEST_SETS_TO_CARDS),
        key=lambda sset: sset.code,
    )
    missing_sets = set(TEST_SETS_TO_CARDS.keys()) - {s.code for s in accepted_sets}
    if missing_sets:
        raise Exception("Missing sets: " + str(missing_sets))

    print("Selecting cards")
    accepted_cards = sorted(
        (c for c in scrydata.cards if c.name in TEST_SETS_TO_CARDS.get(c.set, set())),
        key=lambda card: (card.set, card.name, card.collector_number, card.id),
    )
    # Deep-copy so we can destructively discard found cards without mutating
    # the module-level TEST_SETS_TO_CARDS mapping.
    missing_cards = copy.deepcopy(TEST_SETS_TO_CARDS)
    for card in accepted_cards:
        missing_cards[card.set].discard(card.name)
    missing_cards = {k: v for k, v in missing_cards.items() if v}
    if missing_cards:
        raise Exception("Missing cards: " + str(missing_cards))

    print("Selecting bulk data")
    accepted_bulk = sorted(
        (bd for bd in bulk_data if bd.type == fetcher.BULK_TYPE),
        key=lambda bulk: bulk.type,
    )

    print("Adjusting sets")
    # Rewrite each set's card_count to reflect only the accepted card subset.
    for cset in accepted_sets:
        cset.card_count = len([c for c in accepted_cards if c.set == cset.code])

    print("Writing sets")
    sets_list = models.ScryObjectList[models.ScrySet](
        data=accepted_sets,
        has_more=False,
        next_page=None,
        total_cards=None,
        warnings=None,
    )
    # Split the set list across two pages to exercise pagination handling.
    sets_list1 = models.ScryObjectList[models.ScrySet](
        data=accepted_sets[:5],
        has_more=True,
        next_page=SETS_NEXTPAGE_URL,
        total_cards=None,
        warnings=None,
    )
    sets_list2 = models.ScryObjectList[models.ScrySet](
        data=accepted_sets[5:],
        has_more=False,
        next_page=None,
        total_cards=None,
        warnings=None,
    )
    os.makedirs(TEST_DATA_DIR, exist_ok=True)
    _write_model_json(sets_list, TARGET_SETS_FILE)
    _write_model_json(sets_list1, TARGET_SETS_FILE1)
    _write_model_json(sets_list2, TARGET_SETS_FILE2)

    print("Writing cards")
    root_list: models.ScryRootList[models.ScryCard] = models.ScryRootList(
        __root__=accepted_cards
    )
    _write_model_json(root_list, TARGET_CARDS_FILE)

    print("Writing bulk data")
    bulk_list = models.ScryObjectList[models.ScryBulkData](
        data=accepted_bulk,
        has_more=False,
        next_page=None,
        total_cards=None,
        warnings=None,
    )
    _write_model_json(bulk_list, TARGET_BULK_FILE)
    print("Done")