import time

# Assumed imports: the mtgsdk Card client aliased as api_cards so it does not
# clash with the local Django Card model; the models module path is a guess.
from mtgsdk import Card as api_cards

from .models import Card, Set


def sync_all_cards():
    start_time = time.time()
    count = 0
    cards = api_cards.all()
    for card in cards:
        # Renamed from `set` to avoid shadowing the built-in.
        card_set = Set.objects.get(abbreviation=card.set)
        body = {
            'name': card.name,
            'mana_cost': card.mana_cost,
            'converted_mana_cost': card.cmc,
            'abilities': card.text,
            'power': card.power,
            'toughness': card.toughness,
            'loyalty': None if card.loyalty == 0 else card.loyalty,
            'rarity': card.rarity,
            'number': card.number,
            'img_url': card.image_url,
            'type': card.type,
            'flavor_text': card.flavor,
            'set': card_set,
        }
        Card.objects.update_or_create(defaults=body, set_id=card_set.id, number=card.number)
        count += 1
    end_time = time.time()
    print('{0} seconds to sync {1} cards.'.format(end_time - start_time, count))
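# A minimal sketch of wiring sync_all_cards() into a Django management command
# so the sync can be run with `python manage.py sync_cards`. The command name
# and module layout are assumptions, not part of the original project;
# sync_all_cards is assumed importable from the module defined above.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Sync every card from the MTG API into the local database.'

    def handle(self, *args, **options):
        sync_all_cards()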
import pickle

from mtgsdk import Card


def update(self, filename='cardnames.txt'):
    """Refresh the cached card names; this will take a long time (20+ minutes)."""
    cards = Card.all()
    # Get a set of all the card names, lower-cased.
    cs = {card.name.lower() for card in cards}
    self.card_names = cs  # save to the current instance
    with open(filename, 'w') as fi:
        for a_card_name in cs:
            fi.write("%s\n" % a_card_name)
    # Dump all the cards to disk as well.
    with open("cards.p", "wb") as fp:
        pickle.dump(cards, fp)
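# A small sketch of reading the cache back, assuming the two files written by
# update() above; the load_cached_cards name is illustrative only.
import pickle


def load_cached_cards(filename='cardnames.txt'):
    with open(filename) as fi:
        card_names = {line.strip() for line in fi}
    with open("cards.p", "rb") as fp:
        cards = pickle.load(fp)
    return card_names, cards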
from mtgsdk import Card


def Index():
    # Get all cards.
    cards = Card.all()

    # Filter cards. You can chain where() clauses together; each keyword
    # argument is the URL parameter you are filtering on.
    cards = Card.where(supertypes='legendary') \
                .where(types='creature') \
                .where(colors='red,white') \
                .all()
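# The same chaining pattern works for other query parameters the MTG API
# accepts; the name and paging filters below are common ones, shown as a
# sketch rather than an exhaustive list.
from mtgsdk import Card

lotus_cards = Card.where(name='Black Lotus').all()
first_page = Card.where(types='creature').where(page=1).where(pageSize=100).all()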
import json

from mtgsdk import Card, Set, Subtype

cards = Card.all()
print("{")
print("\"allcards\":[")
for card in cards:
    print("  {")
    card_Id = card.id
    print("  \"cardID\":" + "\"" + card_Id + "\",")
    card_name = "none"
    if card.name is not None:
        card_name = card.name
    print("  \"name\":" + "\"" + card_name + "\",")
    card_mainType = "none"
    if card.type is not None:
        card_mainType = card.type
    print("  \"mainType\":" + "\"" + card_mainType + "\",")
    slist = ["none"]
    if card.subtypes is not None:
        slist = list()
        for stype in card.subtypes:
            slist.append(stype)
    card_subtype = json.dumps(slist)
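# Printing JSON by hand-concatenating strings is fragile (quotes and commas
# are easy to get wrong); below is a sketch of the same export built as a
# plain dict and serialized with json.dumps, reusing the field names above.
import json

from mtgsdk import Card

all_cards = []
for card in Card.all():
    all_cards.append({
        "cardID": card.id,
        "name": card.name or "none",
        "mainType": card.type or "none",
        "subtypes": card.subtypes or ["none"],
    })
print(json.dumps({"allcards": all_cards}, indent=2))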
import os
import pickle
import urllib.request

import cv2
from mtgsdk import Card

# dhash, app_print, and file_path are assumed to be defined elsewhere in the
# same module.


def download(self):
    # Make the data folder if it doesn't exist already.
    os.makedirs("data", exist_ok=True)
    # If the dictionary index is None, download Magic: The Gathering card data.
    if self.di is None:
        app_print("downloading cards...")
        self.cards = Card.all()
        print(".")
        self.di = 0
        pickle.dump(self, open(file_path, "wb"), -1)
        print(".")
    # Loop through all cards.
    app_print("downloading images and generating dictionary...")
    total = len(self.cards)
    while self.di < total:
        c = self.cards[self.di]
        out = "{}/{}: {}/{}".format(self.di, total, c.set, c.name)
        app_print(out)
        # Make a folder for the current card's set if it doesn't exist already.
        filename = "data/{}/{}{}.jpg".format(c.set, c.name, c.multiverse_id)
        if not os.path.exists(os.path.dirname(filename)):
            directory = os.path.dirname(filename)
            os.makedirs(directory, exist_ok=True)
        # If the card's image has already been downloaded, add it to the
        # dictionary using its art's hash as a key.
        if os.path.isfile(filename):
            app_print(out + " : file exists")
            image = cv2.imread(filename)
            self.dictionary[dhash(image[37:172, 20:204])] = self.di
            self.di += 1
            if self.di % 50 == 0:
                pickle.dump(self, open(file_path, "wb"), -1)
                app_print("saved card database")
            continue
        # If the card's image is not available, skip it.
        if c.image_url is None:
            app_print(out + " : image N/A")
            self.di += 1
            continue
        # Keep trying to download the card's image until successful, adding it
        # to the dictionary using its art's hash as a key when done and saving
        # every 50 cards.
        print_message = True
        while True:
            try:
                urllib.request.urlretrieve(c.image_url, filename)
                image = cv2.imread(filename)
                self.dictionary[dhash(image[37:172, 20:204])] = self.di
                self.di += 1
                if self.di % 50 == 0:
                    pickle.dump(self, open(file_path, "wb"), -1)
                break
            except Exception:
                if print_message:
                    print_message = False
                    app_print(out + " : retrying @ " + c.image_url)
    # Set the dictionary index to None to denote completion, then save everything.
    self.di = None
    pickle.dump(self, open(file_path, "wb"), -1)
    app_print("saved dictionary")
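# A short sketch of how the dictionary built above might be queried: hash the
# same art region of a card image and look up the index of the matching card.
# The image path and lookup_card name are hypothetical, and dhash is the
# project's own helper, so this only illustrates the lookup pattern.
import cv2


def lookup_card(db, image_path):
    image = cv2.imread(image_path)
    key = dhash(image[37:172, 20:204])  # same crop used when building the dictionary
    index = db.dictionary.get(key)
    return None if index is None else db.cards[index]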