def create_standard_only_output() -> Dict[str, Any]:
    """
    Build an AllSets-style dictionary restricted to Standard-legal sets.

    Queries the whatsinstandard API for sets whose enter/exit window
    brackets the current time, then loads each matching set's compiled
    JSON output from disk.
    :return: AllSets for Standard only
    """
    standard_data: Dict[str, Any] = {}

    # Ask whatsinstandard which sets are currently in the format.
    api_response = util.get_generic_session().get(STANDARD_API_URL)

    # NOTE(review): dates are compared lexically as strings; this assumes
    # the API's enter/exit date strings sort correctly against the local
    # "%Y-%m-%d %H:%M:%S" timestamp — confirm against the API's format.
    standard_set_codes = [
        set_obj["code"].upper()
        for set_obj in json.loads(api_response.text)["sets"]
        if str(set_obj["enter_date"])
        < datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        < str(set_obj["exit_date"])
    ]

    for set_code in standard_set_codes:
        set_file = mtgjson4.COMPILED_OUTPUT_DIR.joinpath(
            util.win_os_fix(set_code) + ".json"
        )
        if not set_file.is_file():
            # Skip sets whose per-set output was never compiled.
            LOGGER.warning(
                "Set {} not found in compiled outputs (Standard)".format(set_code)
            )
            continue

        with set_file.open("r", encoding="utf-8") as f:
            standard_data[set_code] = json.load(f)

    return standard_data
def create_standard_only_output() -> Dict[str, Any]:
    """
    Use whatsinstandard to determine all sets that are legal in the
    standard format, and assemble an AllSets version containing only
    those sets.
    :return: AllSets for Standard only
    """
    # Download the full set list from the whatsinstandard API.
    response = util.get_generic_session().get(STANDARD_API_URL)
    all_sets = json.loads(response.text)["sets"]

    result: Dict[str, Any] = {}
    for set_obj in all_sets:
        # Lexical string comparison: keep sets whose enter/exit window
        # contains "now". NOTE(review): assumes the API date strings
        # sort consistently against this timestamp format — confirm.
        now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        if not (str(set_obj["enter_date"]) < now < str(set_obj["exit_date"])):
            continue

        set_code = set_obj["code"]
        set_file = mtgjson4.COMPILED_OUTPUT_DIR.joinpath(
            win_os_fix(set_code) + ".json"
        )
        if not set_file.is_file():
            # The per-set file must already be compiled; warn and move on.
            LOGGER.warning(
                "Set {} not found in compiled outputs (Standard)".format(set_code)
            )
            continue

        with set_file.open("r", encoding="utf-8") as f:
            result[set_code] = json.load(f)

    return result
def build_and_write_decks(decks_url: str) -> Iterator[Dict[str, Any]]:
    """
    Given the URL to the precons list, this will compile them in MTGJSONv4
    format and write out the decks to the "decks/" folder.
    :param decks_url: URL of the preconstructed-deck index (JSON)
    :return Each deck completed, one by one
    """
    session = util.get_generic_session()
    response: Any = session.get(url=decks_url, timeout=5.0)
    util.print_download_status(response)

    # Location of AllSets.json -- Must be compiled before decks!
    all_sets_path: pathlib.Path = mtgjson4.COMPILED_OUTPUT_DIR.joinpath(
        mtgjson4.ALL_SETS_OUTPUT + ".json"
    )

    file_loaded: bool = False
    # Does the file exist
    if all_sets_path.is_file():
        # Is the file > 100MB? (Ensure we have all sets in it)
        if all_sets_path.stat().st_size > 1e8:
            with all_sets_path.open("r") as f:
                # Stash the parsed AllSets data in the context variable —
                # presumably read later by build_single_card; verify.
                SET_SESSION.set(json.load(f))
                file_loaded = True

    if not file_loaded:
        # Abort the generator: without a full AllSets file, deck cards
        # cannot be resolved.
        LOGGER.warning("AllSets must be fully compiled before decks. Aborting.")
        return

    # Fan per-card construction out across worker processes.
    with multiprocessing.Pool(processes=8) as pool:
        for deck in response.json():
            # Skeleton of the MTGJSONv4 deck object; boards filled below.
            deck_to_output = {
                "name": deck["name"],
                "code": deck["set_code"].upper(),
                "type": deck["type"],
                "releaseDate": deck["release_date"],
                "mainBoard": [],
                "sideBoard": [],
                "meta": {
                    "version": mtgjson4.__VERSION__,
                    "date": mtgjson4.__VERSION_DATE__,
                    "pricesDate": mtgjson4.__PRICE_UPDATE_DATE__,
                },
            }

            # Pool main board first
            results: List[Any] = pool.map(build_single_card, deck["cards"])
            for cards in results:
                # Each worker returns a list of cards; flatten them in order.
                for card in cards:
                    deck_to_output["mainBoard"].append(card)

            # Now pool side board
            results = pool.map(build_single_card, deck["sideboard"])
            for cards in results:
                for card in cards:
                    deck_to_output["sideBoard"].append(card)

            LOGGER.info("Finished deck {}".format(deck["name"]))
            yield deck_to_output
def download_from_wizards(url: str) -> str:
    """
    Fetch a page from a Wizards URL and return its body as text.

    :param url: URL to download (prob from Wizards website)
    :return: Text from page
    """
    session = util.get_generic_session()
    response: Any = session.get(url=url, timeout=5.0)
    util.print_download_status(response)
    # Grab the body before tearing the session down.
    page_text = str(response.text)
    session.close()
    return page_text
def get_cards(multiverse_id: str) -> List[GathererCard]:
    """Get card(s) matching a given multiverseId."""
    # Request the printed (non-Oracle) card page from Gatherer.
    request_params = {"multiverseid": multiverse_id, "printed": "true"}
    session = util.get_generic_session()
    response = session.get(url=GATHERER_CARD, params=request_params, timeout=5.0)
    LOGGER.info("Retrieved: %s", response.url)
    session.close()
    return parse_cards(response.text)
def download_from_wizards(url: str) -> str:
    """
    Generic download helper for Wizards URLs.

    :param url: URL to download (prob from Wizards website)
    :return: Text from page
    """
    session = util.get_generic_session()
    page = session.get(url=url, timeout=5.0)
    # Wizards serves these pages in a legacy code page; force the decode.
    page.encoding = "windows-1252"  # WHY DO THEY DO THIS
    LOGGER.info("Downloaded URL: {0}".format(page.url))
    session.close()
    body = page.text
    return body
def get_cards(multiverse_id: str) -> List[GathererCard]:
    """Get card(s) matching a given multiverseId."""
    session = util.get_generic_session()
    # "printed" asks Gatherer for the as-printed card text.
    query = {"multiverseid": multiverse_id, "printed": "true"}
    response: Any = session.get(url=GATHERER_CARD, params=query, timeout=8.0)
    util.print_download_status(response)
    session.close()
    return parse_cards(response.text)
def download(url: str, encoding: Optional[str] = None, timeout: float = 8.0) -> str:
    """
    Download a file from a specified source using our generic session.

    :param url: URL to download
    :param encoding: URL encoding (if necessary)
    :param timeout: Seconds to wait for the server before giving up
                    (new, defaulted — previously this request had no
                    timeout and could block forever on a stalled host)
    :return: URL content
    """
    session = util.get_generic_session()
    # Bug fix: every other request in this module passes a timeout; a
    # missing one here would hang the whole build on a dead server.
    response: Any = session.get(url, timeout=timeout)
    if encoding:
        # Caller-specified charset overrides whatever requests detected.
        response.encoding = encoding
    util.print_download_status(response)
    return str(response.text)
def get_cards(multiverse_id: str) -> List[GathererCard]:
    """Return the parsed Gatherer card page(s) for one multiverseId."""
    gatherer_session = util.get_generic_session()
    # Key order preserved: it determines the query-string order sent.
    gatherer_params = {"multiverseid": multiverse_id, "printed": "true"}
    response: Any = gatherer_session.get(
        url=GATHERER_CARD, params=gatherer_params, timeout=8.0
    )
    util.print_download_status(response)
    gatherer_session.close()
    return parse_cards(response.text)
def get_modern_sets() -> List[str]:
    """
    Pull the modern legal page from Gamepedia and parse it out to get
    the sets that are legal in modern.
    :return: List of set codes legal in modern
    """
    session = util.get_generic_session()
    response: Any = session.get(url=MODERN_GAMEPEDIA_URL, timeout=5.0)
    util.print_download_status(response)

    # The set names live as <a> links inside one specific column <div>.
    page = bs4.BeautifulSoup(response.text, "html.parser")
    column_div = page.find("div", class_="div-col columns column-width")
    set_links = column_div.find_all("a")

    # Resolve each displayed set name to its set code via Scryfall.
    return [
        scryfall.get_set_header(strip_bad_sf_chars(link.text)).get("code", "").upper()
        for link in set_links
    ]