def find_nice_twins(self, max_check=1000):
    """Pair each unmatched evil-twin deck with its standard counterpart."""
    session = mv_model.Session()
    open_twins = list(
        session.query(mv_model.TwinDeck).filter(
            mv_model.TwinDeck.standard_key.is_(None)))
    random.shuffle(open_twins)
    print(f"Analyzing {len(open_twins)} twin decks")
    for potential_twin in open_twins[:max_check]:
        potential_twin_deck = potential_twin.evil_deck
        house_decks = session.query(mv_model.Deck).filter(
            mv_model.Deck.expansion == 496)
        # Reversed LIKE: select decks whose name is a substring of the
        # evil twin deck's name.
        house_decks = house_decks.filter(
            concat(potential_twin_deck.name).like(
                concat('%', mv_model.Deck.name, '%')))
        for d in house_decks:
            if d.name == potential_twin_deck.name:
                continue
            print("found!", potential_twin_deck.name, '-', d.name)
            pt_card_names = sorted(
                [c.name for c in potential_twin_deck.cards])
            t_card_names = sorted([c.name for c in d.cards])
            if pt_card_names == t_card_names:
                print(" - matching cards")
                potential_twin.standard_key = d.key
                session.add(potential_twin)
                session.commit()
                #self.alert(f'Found twin: {potential_twin.standard_key}, {potential_twin.evil_key}', ['discord'])
                break
        time.sleep(0.5)
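# A note on the reversed LIKE in find_nice_twins above: the twin deck's
# *literal* name is the left-hand side of LIKE, and the pattern is built from
# the Deck.name column, so the filter selects decks whose name is a substring
# of the evil twin's name. Below is a self-contained sketch of the same idiom
# against an in-memory SQLite table; the table and deck names are illustrative,
# not the real schema, and string `+` on a column renders as SQL concatenation,
# equivalent to the concat() calls used above.
import sqlalchemy as sa

def _demo_reversed_like():
    engine = sa.create_engine("sqlite://")
    meta = sa.MetaData()
    decks = sa.Table("decks", meta, sa.Column("name", sa.String))
    meta.create_all(engine)
    with engine.begin() as conn:
        conn.execute(decks.insert(),
                     [{"name": "Titanic Hypnotist"}, {"name": "Other Deck"}])
        # Renders as: 'Evil Titanic Hypnotist' LIKE '%' || decks.name || '%'
        rows = conn.execute(
            sa.select(decks.c.name).where(
                sa.literal("Evil Titanic Hypnotist").like(
                    "%" + decks.c.name + "%"))).fetchall()
    return rows  # -> [('Titanic Hypnotist',)]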
def count_decks(self):
    logging.debug("counting decks")
    session = mv_model.Session()
    self._count_decks_expansion(session)
    logging.debug("--getting distinct")
    for expansion in session.query(mv_model.Deck.expansion).distinct():
        self._count_decks_expansion(session, expansion)
    session.commit()
    logging.debug(">>decks counted")
def deck_scrape_lag(self):
    from mastervault import mastervault
    session = mv_model.Session()
    deckq = session.query(mv_model.Deck)
    total_scraped = deckq.count()
    total_official = mastervault.get_mastervault_deck_count()
    lag = total_official - total_scraped + 1
    print(f"{total_official} {total_scraped} {lag}")
    if lag > 25:
        self.discord_alert(
            f"(Test) Deck scrape is {lag} decks behind. "
            f"Official count: {total_official} "
            f"Scrape count: {total_scraped}")
import os
import json
import time
from datetime import datetime, timedelta

from sqlalchemy.sql import and_, not_, func, update
from sqlalchemy.sql.expression import bindparam

import __updir__
from models import wiki_card_db, mv_model
from models import shared

session = mv_model.Session()


def fix_scrape_dates():
    """Backfill a fixed scrape_date on decks that have none."""
    query = session.query(mv_model.Deck).filter(
        mv_model.Deck.scrape_date.is_(None))
    print(query.count())
    for deck in query.all():
        deck.scrape_date = datetime.utcnow() - timedelta(days=287)
        session.add(deck)
    session.commit()


def fix_deck_card_counts():
    query = session.query(mv_model.DeckCard, mv_model.Deck)
    query = query.filter(mv_model.DeckCard.deck_key == mv_model.Deck.key,
                         mv_model.DeckCard.card_key == mv_model.Card.key)
    print("start querying!")
    bulk_update = []
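# fix_deck_card_counts is cut off above after `bulk_update = []`. What follows
# is a minimal sketch of how the bulk write could be finished with the
# already-imported `update` and `bindparam` helpers; the parameter-dict keys
# and the `count` column are assumptions for illustration, not the original
# code.
def _flush_deck_card_counts(session, bulk_update):
    # bulk_update is assumed to hold dicts such as
    # {"b_deck": deck_key, "b_card": card_key, "b_count": n}
    stmt = (update(mv_model.DeckCard)
            .where(mv_model.DeckCard.deck_key == bindparam("b_deck"))
            .where(mv_model.DeckCard.card_key == bindparam("b_card"))
            .values(count=bindparam("b_count")))
    # Passing a list of parameter dicts runs this as an executemany bulk UPDATE.
    session.execute(stmt, bulk_update)
    session.commit()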
def scrape_cards_locale(self, locale, card_title=None, rescrape=False):
    """Updates card definitions by retrieving the decks that contain the
    cards using the given locale."""
    # Get all cards that need to be updated... all cards
    session = mv_model.Session()
    if rescrape:
        query = session.query(mv_model.Card)
    else:
        query = session.query(mv_model.Card, mv_model.LocaleCard)
    if card_title:
        query = query.filter(mv_model.Card.name == card_title)

    def lookat_card(card):
        # Skip mavericks and enhanced printings, with one known exception.
        return (not (card.data["is_maverick"] or card.data["is_enhanced"])
                or (card.name in ["Exchange Officer"]
                    and card.data["house"] == "Dis"))

    if not rescrape:
        query = query.outerjoin(
            mv_model.LocaleCard,
            and_(mv_model.LocaleCard.en_name == mv_model.Card.name,
                 mv_model.LocaleCard.locale == locale))
        print(query)
        cards = [
            row[0] for row in query.all()
            if not row[1] and lookat_card(row[0])
        ]
    else:
        cards = [card for card in query.all() if lookat_card(card)]
    deck_pages = {}
    handled_cards = {}
    # For each card, find a deck that contains it and that deck's page.
    for i, card in enumerate(cards):
        print(i, "/", len(cards))
        if card.key in handled_cards:
            continue
        decks = session.query(mv_model.DeckCard).filter(
            mv_model.DeckCard.card_key == card.key).limit(1)
        deck_key = decks.first().deck_key
        deck = session.query(
            mv_model.Deck).filter(mv_model.Deck.key == deck_key).first()
        page = deck.page
        deck_pages[page] = 1
        # Every card on that deck's page will be covered when the page is
        # fetched, so mark them all as handled.
        for deck in session.query(
                mv_model.Deck).filter(mv_model.Deck.page == page).all():
            for card_id in deck.data["_links"]["cards"]:
                handled_cards[card_id] = 1
    # Pull the selected pages from the Master Vault and upsert the cards
    # with the right locale.
    for i, page in enumerate(sorted(deck_pages.keys())):
        print("GET PAGE", page, i, '/', len(deck_pages.keys()))
        while 1:
            try:
                # Rate limit: at most one locale call every 10 seconds.
                while (self.last_locale_call
                       and time.time() - self.last_locale_call < 10):
                    time.sleep(1)
                self.last_locale_call = time.time()
                decks, cards, proxy = self.get_decks_with_cards(
                    "", page, locale)
                update_cards = []
                for card in cards:
                    if card["is_legacy"]:
                        continue
                    en_card = session.query(mv_model.Card).filter(
                        mv_model.Card.key == card["id"]).first()
                    locale_card = mv_model.LocaleCard(
                        en_name=en_card.name,
                        key=card["id"],
                        data=card,
                        locale=locale,
                        deck_expansion=card["deck_expansion"])
                    update_cards.append(locale_card)
                print("adding", len(cards))
                mv_model.postgres_upsert(session, mv_model.LocaleCard,
                                         update_cards)
                session.commit()
                break
            except Exception:
                # Any failure (network, proxy, parsing): wait, then retry
                # the same page.
                time.sleep(10)
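# The `while 1: try/except` in scrape_cards_locale retries a failing page
# forever. A minimal sketch of how that loop could be factored into a helper
# with a retry cap, so one permanently broken page cannot stall the whole
# scrape; the helper name and defaults are assumptions, not existing API.
import time

def fetch_with_retry(fetch, max_retries=10, wait=10):
    """Call fetch() until it succeeds or max_retries attempts have failed."""
    last_error = None
    for _ in range(max_retries):
        try:
            return fetch()
        except Exception as exc:
            last_error = exc
            time.sleep(wait)  # back off before the next attempt
    raise RuntimeError(
        f"giving up after {max_retries} retries") from last_error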
def new_cards(self, cards=None, savedb=True, only_new_edits=False):
    # TODO refactor this method into a class/module
    import util
    import tool_update_cards
    import connections
    scope = mv_model.UpdateScope()
    session = mv_model.Session()
    recognized_sets = list(shared.get_set_numbers())
    if not cards:
        cardq = session.query(mv_model.Card)
        cardq = cardq.filter(
            or_(
                mv_model.Card.deck_expansion.notin_(
                    [str(x) for x in recognized_sets]),
                mv_model.Card.deck_expansion.in_(
                    [str(x) for x in shared.NEW_SETS])))
        cardq = cardq.order_by(mv_model.Card.name, mv_model.Card.key)
        cards = cardq.all()
    logging.debug("\nChecking for new cards:\n")
    card_batch = [
        card for card in cards
        if (card.is_from_current_set and not card.is_maverick
            and not card.is_enhanced)
    ]
    logging.debug([card.name for card in card_batch])
    card_datas = wiki_card_db.process_mv_card_batch(card_batch)
    wp = connections.get_wiki()
    processed_cards = {}
    changes = []
    for new_card_data in card_datas:
        new_card = wiki_card_db.add_card(new_card_data, wiki_card_db.cards)
        card = wiki_card_db.cards[new_card["card_title"]]
        processed_cards[new_card["card_title"]] = card
        logging.debug("%s - %s", new_card["card_title"], new_card["house"])
        if (any(new_set not in recognized_sets for new_set in card)
                and len(card) > 1):
            # Card already exists under another set: refresh its set list only.
            print("old set")
            logging.debug("> updating old set: %s, %s",
                          new_card["card_title"], card.keys())
            if tool_update_cards.update_card_page_cargo(
                    wp, card, "updating reprint with new sets", "carddb",
                    only_sets=True, pause=False):
                changes.append(("reprint", new_card["card_title"]))
                self.alert(
                    "Worker updated reprint %s" % new_card["card_title"],
                    ['discord'])
        else:
            print("new set")
            logging.debug("< create new data %s", new_card["card_title"])
            # Look for locale data before building the page.
            self._update_locales(new_card["card_title"])
            wiki_card_db.build_localization(
                scope, wiki_card_db.cards, wiki_card_db.locales,
                from_cards=session.query(mv_model.LocaleCard).filter(
                    mv_model.LocaleCard.en_name ==
                    new_card["card_title"]).all())
            any_changes = 0
            if tool_update_cards.update_card_page_cargo(
                    wp, card, "updating new card", "carddb", pause=False,
                    only_new_edits=only_new_edits):
                any_changes += 1
            # TODO CardLocaleData is all on one page; it's inefficient to
            # try and edit the page 11 times.
            for locale in wiki_card_db.locale_db:
                if locale == "en":
                    continue
                if new_card["card_title"] in wiki_card_db.locales[locale]:
                    locale_card = wiki_card_db.locales[locale][
                        new_card["card_title"]]
                    card = wiki_card_db.cards[new_card["card_title"]]
                    if tool_update_cards.upload_image_for_card(
                            wp, locale, locale_card):
                        any_changes += 1
                    if tool_update_cards.update_card_page_cargo(
                            wp, card, "updating new card", "carddb",
                            pause=False, locale=locale,
                            only_new_edits=only_new_edits):
                        any_changes += 1
            #if tool_update_cards.update_cards_v2(
            #        wp,
            #        card_name=new_card["card_title"],
            #        update_reason="add card view for new card",
            #        data_to_update="update_card_views",
            #        upload_image=True,
            #        pause=False,
            #        only_new_edits=only_new_edits):
            #    any_changes += 1
            if any_changes:
                changes.append(("new", new_card["card_title"]))
                self.alert(
                    "Worker updated new card %s" % new_card["card_title"],
                    ['discord'])
    if savedb:
        print("Saving json")
        wiki_card_db.build_links(processed_cards)
        #wiki_card_db.add_artists_from_text(wiki_card_db.cards)
        wiki_card_db.clean_fields(wiki_card_db.cards, {})
        wiki_card_db.save_json(wiki_card_db.cards, wiki_card_db.locales)
    logging.debug("Done: %s", len(card_datas))