def close_connections(self):
    """Commit any pending transaction, then release the cursor and connection.

    On a database error the failure is reported and the process exits.
    """
    try:
        self.conn.commit()
        self.cur.close()
        self.conn.close()
    except pymysql.err.DatabaseError as err:
        Writer.error("Critical error closing database connection:", err)
        exit()
    else:
        Writer.action("Database connection closed.")
def query(self, query):
    """Execute *query* and commit it immediately.

    Exits the process on database or Unicode-encoding errors, reporting
    the failing SQL via Writer.SQL_error.
    """
    try:
        self.cur.execute(query)
        self.conn.commit()
    except pymysql.err.DatabaseError as err:
        Writer.SQL_error("Critical error working with database:", err, query)
        exit()
    except UnicodeEncodeError as err:
        Writer.SQL_error("Critical encoding error:", err, query)
        exit()
def query(self, query):
    """Execute *query* (without committing), optionally echoing it first.

    When the instance was created verbose, the SQL text is printed before
    execution.  Exits the process on database or encoding errors.
    """
    if self.verbose:
        Writer.action_with_highlight("Performing SQL Query: ", query, ".")
    try:
        self.cur.execute(query)
    except pymysql.err.DatabaseError as err:
        Writer.SQL_error("Critical error working with database:", err, query)
        exit()
    except UnicodeEncodeError as err:
        Writer.SQL_error("Critical encoding error:", err, query)
        exit()
def __init__(self, **kwargs):
    """Open the database connection described by ``database.identity``.

    Keyword Args:
        verbose: any truthy value makes query() echo each SQL statement.

    Exits the process if the connection cannot be established.
    """
    # bool() replaces the redundant `True if ... else False` ternary.
    self.verbose = bool(kwargs.get('verbose'))
    try:
        self.conn = pymysql.connect(host=database.identity.host,
                                    user=database.identity.user,
                                    passwd=database.identity.passwd,
                                    db=database.identity.db)
        self.cur = self.conn.cursor()
    except pymysql.err.DatabaseError as e:
        Writer.error("Critical error connecting to database", e)
        exit()
    else:
        Writer.action("Connected to database.")
    # Only reached on success (the except branch exits the process).
    self.conn.set_charset('utf8')
def __init__(self):
    """Connect using the module-level ``identity`` credentials.

    A DictCursor is requested so rows come back as dictionaries.  On
    failure the error is reported and the process exits; on success the
    connection is switched to utf8 and the DataSource base is initialised.
    """
    try:
        self.conn = pymysql.connect(host=identity.host,
                                    user=identity.user,
                                    passwd=identity.passwd,
                                    db=identity.db,
                                    cursorclass=pymysql.cursors.DictCursor)
        self.cur = self.conn.cursor()
    except pymysql.err.DatabaseError as err:
        Writer.error("\nCritical error connecting to database", err)
        exit()
    else:
        pass  # Writer.action("Connected to database.")
    self.conn.set_charset('utf8')
    DataSource.__init__(self, "Database")
def __init__(self, name, **kwargs):
    """Initialise a named data source and load its set collection.

    Args:
        name: display name, printed padded to a 13-character column.

    Keyword Args:
        update: truthy to force re-download of source data
                (consumed later by sets_initiate).
    """
    self.name = name
    self.update = kwargs.get('update')
    # Progress line: padded name, then the set count, then a newline.
    Writer.action(Writer.pad_right(self.name, 13))
    self.sets = self.sets_initiate()
    Writer.highlight(len(self.sets))
    Writer.newline()
def validate_set(cls, subject, legend):
    """Compare a data-source set record (*subject*) against the database
    row described by *legend*, field by field.

    Field names are translated where the two schemas differ
    ("type" -> "set_type", "release_date" -> "released_at"); the "id"
    field is never compared.  A source value of None is treated as
    equivalent to a stored 'null'.

    Returns:
        True when every compared field matches, otherwise False after
        reporting the first mismatch via Writer.error.
    """
    for key in subject:
        if key == "id":
            continue  # ids are source-specific; never compared
        # Map source field names onto their database column names.
        if key == "type":
            check_key = "set_type"
        elif key == "release_date":
            check_key = "released_at"
        else:
            check_key = key
        source_val = str(subject[key])
        db_val = str(Helper.select(legend, check_key))
        if source_val == db_val:
            continue
        # BUG FIX: the original wrote `str(... is 'null')`, an identity
        # test against a literal whose str() result ('False') is always
        # truthy — so any source None passed regardless of the database
        # value.  Compare the stringified database value properly.
        if source_val == 'None' and db_val == 'null':
            continue
        Writer.error(
            "data mismatch on " + subject['code'] + "[" + key + "]",
            " data-source: " + str(subject[key]) + ", database: "
            + str(Helper.select(legend, check_key)) + "\n")
        return False
    return True
def download_set_data(self):
    """Download the remote set archive, unpack it, strip the per-set card
    lists, and leave a slimmed ``sets.txt`` in local storage.

    Exits the process if the download fails.  All intermediate files
    (sets.zip, sets.json) are removed when done.
    """
    try:
        urlretrieve(self.api_url, self.local_storage + "sets.zip")
    except urllib.error.HTTPError as e:
        Writer.error("Critical error retrieving source data", e)
        exit()
    # Context manager guarantees the archive handle is closed even if
    # extraction raises (the original leaked it on error).
    with zipfile.ZipFile(self.local_storage + "sets.zip", 'r') as zip_ref:
        zip_ref.extractall(self.local_storage)
    os.rename(self.local_storage + "AllSets-x.json",
              self.local_storage + "sets.json")
    os.remove(self.local_storage + "sets.zip")
    # Remove cards from the set file:
    # TODO: The cards will need to be retained somehow.
    with open(self.local_storage + 'sets.json', encoding="utf8") as data_file:
        data = json.load(data_file)
    # Renamed from `set` — the original shadowed the builtin.
    for set_code in data:
        data[set_code].pop('cards')
    with open(self.local_storage + 'sets.txt', 'w') as outfile:
        json.dump(data, outfile)
    os.remove(self.local_storage + "sets.json")
def sets_initiate(self):
    """Ensure set data exists locally, then load and return it.

    Downloads fresh data when none is cached or an update was requested;
    otherwise reuses the local copy.  Either way the data is returned via
    load_set_data() — the duplicated return in each branch of the
    original has been hoisted.
    """
    if not self.set_data_exists() or self.update:
        Writer.option(Writer.pad_right("[download]", 13))
        self.download_set_data()
    else:
        Writer.option(Writer.pad_right(" [local]", 13))
    return self.load_set_data()
def sets_initiate(self):
    """Return every row of the ``sets`` table, creating it first if absent.

    A failed SELECT (ProgrammingError) is taken to mean the table does
    not exist yet; it is then built before the rows are fetched.
    """
    try:
        self.cur.execute("SELECT * FROM sets")
        Writer.option(Writer.pad_right(" [exists]", 13))
    except pymysql.err.ProgrammingError:
        # Table missing — build it, then fall through to the fetch below.
        Writer.option(Writer.pad_right(" [created]", 13))
        self.build_sets_table()
    return self.cur.fetchall()
import argparse from urllib.request import urlretrieve import os from model.sources import scryfall, mtgjson, database import pprint from writer_service import Writer from helper_service import Helper parser = argparse.ArgumentParser() parser.add_argument("--update", help="force download of source data", action="store_true") args = parser.parse_args() Writer.action("DATA SOURCE | SETS | COUNT | \n") Scryfall = scryfall.Scryfall(update=args.update) Mtgjson = mtgjson.Mtgjson(update=args.update) Database = database.Database() primary = Scryfall.sets for set in primary: try: set_2 = Mtgjson.set_by_code(set['code']) except KeyError: set_2 = {} #Look for set in database try: if not Helper.validate_set(Database.set_by_code(set['code']), [set, set_2]):
# Raw unmodified JSON data DATA_CARDS = JsonService.import_data() DATA_SETS = JsonService.create_set_data(DATA_CARDS) # Drops and recreates the database tables if we are building. if args.build: DatabaseService.build_database() current_card_count = 0 previous_sets_total = 0 variant_builder = {} # only used when building # Output header if args.build or args.images: Writer.action_with_highlight_stub("[", " PROGRESS ", "]" + " " * 56) else: Writer.action_stub(" " * 43) Writer.action_highlight_alternating("|", "CARDS", "|", " DB_ACTION ", "|", " IMAGES", "|", " IMG_ACTION ", "|",) # Main street for s_index, set in enumerate(DATA_SETS): if args.build: DatabaseService.add_set(set) img_count = 0 for card in DATA_CARDS[set['code']]["cards"]: current_card_count += 1 translated_card = TranslatorService(card) if args.build:
def build_database(self):
    """Drop and recreate every application table from scratch.

    The drop and create passes shared identical echo/apply loops in the
    original; they are deduplicated into _apply_to_tables.
    """
    tables = ('cards', 'names', 'sets', 'colors', 'color_identities',
              'supertypes', 'types', 'subtypes', 'variations')
    self._apply_to_tables(tables, "Dropping tables ", self.drop_table)
    self._apply_to_tables(tables, "Creating tables ", self.create_table)

def _apply_to_tables(self, tables, label, action):
    """Print *label*, apply *action* to each table while echoing its name,
    then terminate the progress line with a period."""
    Writer.action_stub(label)
    for table in tables:
        # The final table name gets no trailing space after its backtick.
        Writer.action_with_highlight_stub(
            "`", table, "` " if table != tables[-1] else "`")
        action(table)
    Writer.action('.')
# -*- coding: utf-8 -*- import json from pokemon_service import Pokemon from writer_service import Writer from move_service import Move Writer.clean() Writer.init() with open('master/move.json') as m: moves = json.load(m) move_ids, move_names = [], [] for i in range(len(moves)): move = Move(moves[i]) move_ids.append(move.get_id()) move_names.append(move.get_name()) Writer.output_append(Writer.moves_path, "INSERT INTO moves VALUES(" + str(i+1) + ", " + move.get_name() + ", " + move.get_power() + ", " + move.get_cooldown() + ", " + move.get_type() + ", " + move.get_style() + ");\n") with open('master/pokemon.json') as f: data = json.load(f) a = 0 #types counter