def has_cached_live_details():
    """Return True if the local live_detail_cache table exists, else False."""
    try:
        db.cachedb.execute_and_fetchall("SELECT * FROM live_detail_cache")
    except sqlite3.OperationalError:
        # Table missing: the cache has never been initialized locally.
        logger.debug("No local cached live details found")
        return False
    return True
def import_from_gameid(game_id):
    """Import a game profile's card collection into the owned_card table.

    game_id must be a 9-digit game ID. Returns the list of imported
    (idolized) card IDs on success, or None when the ID is invalid or the
    import fails; failures are logged rather than raised.
    """
    try:
        # Validate explicitly instead of with assert (asserts are stripped
        # under -O). Any ValueError here is caught and logged below.
        if len(str(game_id)) != 9 or not 0 <= int(game_id) <= 999999999:
            raise ValueError("Invalid game ID: {}".format(game_id))
        logger.info(
            "Trying to import from ID {}, this might take a while".format(
                game_id))
        cards = list(map(int, get_cards(game_id)))
        # Normalize every card to its idolized (even) ID.
        for idx, card in enumerate(cards):
            if card % 2 == 1:
                cards[idx] += 1
        card_dict = defaultdict(int)
        for card in cards:
            card_dict[card] += 1
        for card_id, number in card_dict.items():
            # Owned copies of the idolized (card_id) and unidolized
            # (card_id - 1) forms.
            owned = list(
                zip(*db.cachedb.execute_and_fetchall(
                    "SELECT number FROM owned_card WHERE card_id = ? OR card_id = ?",
                    [card_id, card_id - 1])))[0]
            # Only top up when we own fewer copies than the import reports.
            if owned[0] + owned[1] < number:
                db.cachedb.execute(
                    """
                    INSERT OR REPLACE INTO owned_card (card_id, number)
                    VALUES (?,?)
                    """, [card_id, number - owned[0]])
        db.cachedb.commit()
        logger.info("Imported {} cards successfully".format(len(card_dict)))
        return list(card_dict.keys())
    except Exception:
        # BUG FIX: traceback.print_exc() prints to stderr and returns None,
        # so the old logger.debug(...) logged "None". format_exc() returns
        # the traceback text; also narrowed the bare except.
        logger.debug(traceback.format_exc())
        logger.info("Failed to import cards")
def _update_manifest():
    """Fetch the latest manifest and write the decompressed db to MANIFEST_PATH."""
    logger.debug("Updating manifest.db")
    response = cgss_query.get_manifests()
    with storage.get_writer(MANIFEST_PATH, 'wb') as writer:
        writer.write(decompress(response.content))
    logger.info("manifest.db updated")
def initialize_index(self):
    """Build the whoosh quicksearch index from the cached card keyword fields."""
    rows = db.cachedb.execute_and_fetchall(
        "SELECT card_id, fields FROM card_index_keywords")
    schema = Schema(
        title=ID(stored=True),
        idolized=BOOLEAN,
        short=TEXT,
        owned=NUMERIC,
        chara=TEXT,
        rarity=TEXT,
        color=TEXT,
        skill=TEXT,
        carnival=TEXT,
        leader=TEXT,
        fes=TEXT,
        noir=TEXT,
        blanc=TEXT,
        main_attribute=TEXT,
        time_prob_key=TEXT,
        content=TEXT(analyzer=SimpleAnalyzer()))
    ix = create_in(INDEX_PATH, schema)
    writer = ix.writer()
    logger.debug("Initializing quicksearch index for {} cards".format(
        len(rows)))
    for card_id, raw_fields in rows:
        # fields were persisted as a stringified dict; evaluate it back.
        fields = ast.literal_eval(raw_fields)
        content = " ".join(fields[key] for key in KEYWORD_KEYS_STR_ONLY)
        writer.add_document(title=str(card_id), content=content, **fields)
    writer.commit()
    self.index = ix
    logger.debug("Quicksearch index initialized for {} cards".format(
        len(rows)))
def dropEvent(self, e):
    """Accept card lists dragged from the unit editor into the calculator."""
    mimetext = e.mimeData().text()
    if not mimetext.startswith(UNIT_EDITOR_UNIT):
        # Not a unit-editor payload; let Qt handle the proposed action.
        e.acceptProposedAction()
        return
    card_ids = ast.literal_eval(mimetext[len(UNIT_EDITOR_UNIT):])
    logger.debug("Dragged {} into calculator".format(card_ids))
    self.calculator_view.add_unit(card_ids)
def insert_unit(self):
    """Append a new empty unit row at the bottom of the calculator table."""
    new_row = self.widget.rowCount()
    self.widget.insertRow(new_row)
    unit_widget = CalculatorUnitWidget(self, None, size=32)
    # After insertRow, new_row is the index of the freshly added last row.
    self.widget.setCellWidget(new_row, 0, unit_widget)
    logger.debug("Inserted empty unit at {}".format(
        self.widget.rowCount()))
    self.widget.setColumnWidth(0, 40 * 6)
def add_unit_int(self, cards):
    """Place cards into the first empty unit row, appending a row if none is free."""
    empty_slot = [None] * 6
    for row in range(self.widget.rowCount()):
        if self.widget.cellWidget(row, 0).card_ids == empty_slot:
            logger.debug("Empty calculator unit at row {}".format(row))
            self.set_unit(row=row, cards=cards)
            return
    # No free row: append one and fill it.
    self.model.add_empty_unit(AddEmptyUnitEvent(self.model))
    self.set_unit(row=self.widget.rowCount() - 1, cards=cards)
def cleanup(self):
    """Delete the on-disk quicksearch index directory.

    Returns True when the directory existed and was removed, False when
    removal is blocked (PermissionError, e.g. index files still open).
    """
    try:
        if INDEX_PATH.exists():
            shutil.rmtree(str(INDEX_PATH))
            logger.debug("Index cleaned up.")
            # NOTE(review): True is returned only when the directory existed;
            # a missing directory falls through and returns None (falsy) —
            # confirm callers treat that as intended.
            return True
    except PermissionError:
        return False
def call_searchengine(self, query):
    """Run a song search and narrow the song view to the matching IDs."""
    # An empty query means "match everything".
    effective_query = "*" if query == "" else query
    live_detail_ids = search_engine.song_query(effective_query)
    logger.debug("Query: {}".format(query))
    logger.debug("Result: {}".format(live_detail_ids))
    self._song_view.show_only_ids(live_detail_ids)
def handle_lost_mime(self, mime_text):
    """Route a dropped mime payload to the unit editor.

    Single units are prefixed with CALCULATOR_UNIT, grand units with
    CALCULATOR_GRANDUNIT; the prefix is stripped before forwarding.
    """
    if mime_text.startswith(CALCULATOR_UNIT):
        payload = mime_text[len(CALCULATOR_UNIT):]
        logger.debug("Dragged {} into unit editor".format(payload))
        self.add_unit(payload)
    elif mime_text.startswith(CALCULATOR_GRANDUNIT):
        # BUG FIX: the payload passed to add_units was sliced with
        # len(CALCULATOR_UNIT), leaving part of the CALCULATOR_GRANDUNIT
        # prefix attached, while the debug line stripped the correct prefix.
        payload = mime_text[len(CALCULATOR_GRANDUNIT):]
        logger.debug("Dragged {} into unit editor".format(payload))
        self.add_units(payload)
def update_musicscores():
    """Sync the local musicscore databases with the remote manifest.

    Downloads new and changed score DBs into MUSICSCORES_PATH, deletes local
    files for scores no longer in the manifest, and records each downloaded
    score's hash in the score_cache table.
    """
    logger.debug("Updating all musicscores")
    if not storage.exists(MANIFEST_PATH):
        logger.debug("manifest.db not found, updating metadata")
        meta_updater.update_database()
    with db.CustomDB(meta_updater.get_manifestdb_path()) as manifest_conn:
        all_musicscores = manifest_conn.execute_and_fetchall("""
            SELECT name,hash
            FROM manifests
            WHERE (name LIKE "musicscores\_m___.bdb" ESCAPE '\\')
            """)
        # Map "musicscores_mXXX" (name without extension) -> manifest hash.
        all_musicscores = {_[0].split(".")[0]: _[1] for _ in all_musicscores}
    if not _score_cache_db_exists():
        # First run: every manifest entry is new.
        _initialize_score_cache_db()
        new_scores = all_musicscores.keys()
        updated_scores = set()
    else:
        scores_meta = db.cachedb.execute_and_fetchall(
            "SELECT score_id, score_hash FROM score_cache")
        scores_meta = {_: __ for _, __ in scores_meta}
        # Scores cached locally but gone from the manifest.
        deleted_scores = set(scores_meta.keys()).difference(
            all_musicscores.keys())
        if len(deleted_scores) > 0:
            logger.info(
                "Found {} defunct musicscores, removing them...".format(
                    len(deleted_scores)))
            for deleted_score in deleted_scores:
                path = MUSICSCORES_PATH / "{}.db".format(deleted_score)
                path.unlink()
            # NOTE(review): score_cache rows for deleted scores are not
            # removed here — confirm that is handled elsewhere.
        new_scores = set(all_musicscores.keys()).difference(scores_meta.keys())
        # Known scores whose manifest hash changed.
        updated_scores = [
            _ for _ in set(all_musicscores.keys()).intersection(
                scores_meta.keys()) if scores_meta[_] != all_musicscores[_]
        ]
    logger.info(
        "Found {} musicscores, {} of them are new, {} are updated...".format(
            len(all_musicscores), len(new_scores), len(updated_scores)))
    if len(new_scores) + len(updated_scores) > 50:
        logger.info("It will take some time to download, please wait...")
    for musicscore_name in set(new_scores).union(set(updated_scores)):
        musicscore_hash = all_musicscores[musicscore_name]
        musicscore_response = cgss_query.get_db(musicscore_hash)
        with storage.get_writer(
                MUSICSCORES_PATH / "{}.db".format(musicscore_name),
                'wb') as fwb:
            fwb.write(decompress(musicscore_response.content))
        # Record the downloaded hash so unchanged scores are skipped next run.
        db.cachedb.execute(
            """
            INSERT OR REPLACE INTO score_cache (score_id, score_hash)
            VALUES (?,?)
            """, [musicscore_name, musicscore_hash])
    db.cachedb.commit()
    logger.info("All musicscores updated")
def dropEvent(self, e):
    """Handle drops of calculator units (single or grand) into the unit editor."""
    mimetext = e.mimeData().text()
    if mimetext.startswith(CALCULATOR_UNIT):
        payload = mimetext[len(CALCULATOR_UNIT):]
        logger.debug("Dragged {} into unit editor".format(payload))
        self.unit_view.add_unit(payload)
    elif mimetext.startswith(CALCULATOR_GRANDUNIT):
        # BUG FIX: the payload passed to add_units was sliced with
        # len(CALCULATOR_UNIT), leaving part of the CALCULATOR_GRANDUNIT
        # prefix attached, while the debug line stripped the correct prefix.
        payload = mimetext[len(CALCULATOR_GRANDUNIT):]
        logger.debug("Dragged {} into unit editor".format(payload))
        self.unit_view.add_units(payload)
    e.ignore()
def _check_remote_cache(url):
    """Seed the live detail cache from a remote CSV dump, if one is published."""
    initialize_score_db()
    response = requests.get(url)
    if response.status_code == 404:
        logger.debug("No remote live detail cache found at {}".format(url))
        return
    remote_rows = pd.read_csv(StringIO(response.content.decode("utf-8")))
    logger.debug("Remote live detail cache found at {}, {} rows".format(
        url, len(remote_rows)))
    for _, row in remote_rows.iterrows():
        _insert_into_live_detail_cache(row)
    db.cachedb.commit()
def update_all(sleep=0.1):
    """Download card icons for all cards (normal and +1 idolized IDs) in parallel.

    sleep is forwarded to update_image as the per-download delay.
    """
    logger.info("Updating images, please wait...")
    _try_extract_cache()
    for image_dir in (IMAGE_PATH64, IMAGE_PATH32):
        if not image_dir.exists():
            image_dir.mkdir()
    card_data = _base_query("list/card_t")['result']
    logger.debug("Getting icons for {} cards".format(len(card_data)))
    base_ids = [int(card['id']) for card in card_data]
    # Each card also has an idolized variant at id + 1.
    all_ids = base_ids + [card_id + 1 for card_id in base_ids]
    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        for card_id in all_ids:
            executor.submit(update_image, card_id, sleep)
def _update_masterdb():
    """Download the latest master.db referenced by the manifest.

    Looks up the hash of master.mdb in the manifest database, downloads the
    blob, and writes the decompressed database to MASTERDB_PATH.
    """
    logger.debug("Updating master.db")
    manifest_conn = sqlite3.connect(get_manifestdb_path())
    try:
        manifest_c = manifest_conn.cursor()
        manifest_c.execute('SELECT hash FROM manifests WHERE name="master.mdb"')
        master_hash = manifest_c.fetchone()[0]
        manifest_c.close()
    finally:
        # Close the connection even when the query fails; the original
        # leaked it on any exception. It is not needed for the download.
        manifest_conn.close()
    master_response = cgss_query.get_db(master_hash)
    with storage.get_writer(MASTERDB_PATH, 'wb') as fwb:
        fwb.write(decompress(master_response.content))
    logger.info("master.db updated")
def call_searchengine(self, query):
    """Run an advanced card search and narrow the card view to the results."""
    opts = self.options
    # With no text and no filter checkboxes active, search for everything.
    no_filters = (not opts["ssr"].isChecked()
                  and not opts["idolized"].isChecked()
                  and not opts["owned_only"].isChecked())
    if query == "" and no_filters:
        query = "*"
    card_ids = search_engine.advanced_single_query(
        query,
        ssr=opts["ssr"].isChecked(),
        idolized=opts["idolized"].isChecked(),
        partial_match=opts["partial_match"].isChecked(),
        owned_only=opts["owned_only"].isChecked())
    logger.debug("Query: {}".format(query))
    logger.debug("Result: {}".format(card_ids))
    self._card_view.show_only_ids(card_ids)
def get_probability(self, idx=None):
    """Return skill activation probabilities, computed lazily and cached.

    With idx None returns the cached array for all cards; with an int idx
    returns that card's probability.
    """
    if self.probabilities is None:
        # One row per unit slot (5), one column per color (3).
        card_probabilities = np.zeros((5, 3))
        for card_idx, card in enumerate(self.unit.all_cards()):
            card_probabilities[card_idx, card.color.value] = card.skill.probability
        self.get_bonuses()
        # NOTE(review): when idx is None, self.bonuses[None, 4, :] inserts a
        # new axis instead of selecting a row — confirm the first call always
        # supplies an int idx.
        probability_bonus = self.bonuses[idx, 4, :]
        # Apply the percentage bonus, scale from basis points, and collapse
        # the color axis (each card has a value in only one color column).
        card_probabilities = (card_probabilities * (1 + probability_bonus / 100) / 10000).max(axis=1)
        logger.debug("Card probabilities: {}".format(card_probabilities))
        self.probabilities = np.clip(card_probabilities, a_min=0, a_max=1)
    if idx is None:
        return self.probabilities
    return self.probabilities[idx]
def initialize_chart_index(self):
    """Build the whoosh quicksearch index (indexname "score") for all cached charts."""
    results = db.cachedb.execute_and_fetchall(
        "SELECT live_detail_id, performers, special_keys, jp_name, name, level, color, difficulty FROM live_detail_cache"
    )
    schema = Schema(title=ID(stored=True),
                    live_detail_id=NUMERIC,
                    performers=TEXT,
                    special_keys=TEXT,
                    jp_name=TEXT,
                    name=TEXT,
                    difficulty=TEXT,
                    level=NUMERIC,
                    color=TEXT,
                    content=TEXT(analyzer=SimpleAnalyzer()))
    ix = create_in(INDEX_PATH, schema, indexname="score")
    writer = ix.writer()
    logger.debug("Initializing quicksearch index for {} charts".format(
        len(results)))
    for result in results:
        # Last column holds the Difficulty enum value.
        difficulty = Difficulty(result[-1]).name.lower()
        # Strip commas so multi-performer strings tokenize as words.
        performers = result[1].replace(",", "") if result[1] else ""
        # color column is 1-based; the Color enum is 0-based.
        color = Color(result[6] - 1).name.lower()
        # Free-text blob searched by the quicksearch box.
        content = " ".join([
            performers, result[2] if result[2] else "", result[3], result[4],
            difficulty, color, str(result[5])
        ])
        writer.add_document(
            title=str(result[0]),
            content=content,
            live_detail_id=result[0],
            performers=performers,
            special_keys=result[2],
            jp_name=result[3],
            name=result[4],
            level=result[5],
            color=color,
            difficulty=difficulty,
        )
    writer.commit()
    self.song_index = ix
    logger.debug("Quicksearch index initialized for {} charts".format(
        len(results)))
def update_cache_scores():
    """Populate live_detail_cache with chart statistics for uncached songs.

    Seeds from the remote cache when no local cache exists, then parses each
    uncached chart CSV out of its musicscore db, counts notes by type, and
    inserts one row per live detail ID.
    """
    if not has_cached_live_details():
        _check_remote_cache(REMOTE_CACHE_SCORES_URL)
    song_list = _get_song_list()
    expanded_song_list = _expand_song_list(song_list)
    cached_live_detail_ids = {
        _[0]
        for _ in db.cachedb.execute_and_fetchall(
            "SELECT live_detail_id FROM live_detail_cache")
    }
    new_live_detail_ids = set(
        expanded_song_list.keys()).difference(cached_live_detail_ids)
    logger.debug("Uncached live detail IDs: {}".format(new_live_detail_ids))
    for ldid in new_live_detail_ids:
        live_data = expanded_song_list[ldid]
        with db.CustomDB(MUSICSCORES_PATH / "musicscores_m{:03d}.db".format(
                live_data["live_id"])) as score_conn:
            try:
                score = score_conn.execute_and_fetchone("""
                    SELECT * FROM blobs WHERE name LIKE "musicscores/m{:03d}/{:d}_{:d}.csv"
                    """.format(live_data["live_id"], live_data["live_id"],
                               live_data["diff"]))[1]
            except TypeError:
                # fetchone returned None: no chart for this difficulty.
                logger.debug(
                    "Cannot find chart for live detail ID {} difficulty {}".
                    format(ldid, live_data["diff"]))
                continue
        notes_data = pd.read_csv(StringIO(score.decode()))
        # Song duration = timestamp of the last event in the chart.
        live_data["duration"] = notes_data.iloc[-1]['sec']
        # Rows with type >= 10 are non-note events; drop them.
        notes_data = notes_data[notes_data["type"] < 10].reset_index(drop=True)
        notes_data['note_type'] = notes_data.apply(classify_note, axis=1)
        note_count = dict(notes_data['note_type'].value_counts())
        # Store one count column per NoteType, defaulting to 0.
        for note_type in NoteType:
            key_str = note_type.name.capitalize()
            if note_type in note_count:
                live_data[key_str] = int(note_count[note_type])
            else:
                live_data[key_str] = 0
        live_data['difficulty'] = live_data['diff']
        _insert_into_live_detail_cache(live_data)
    _overwrite_song_name(expanded_song_list)
    db.cachedb.commit()
def reindex(self, card_ids=None):
    """Refresh index documents for card_ids, or for every card when None.

    Each card's existing document is deleted and re-added from the cached
    keyword fields.
    """
    if card_ids is not None:
        # BUG FIX: the log line previously ran len(card_ids) before the None
        # check, raising TypeError whenever reindex() was called with the
        # default argument.
        logger.debug("Reindexing for {} cards".format(len(card_ids)))
        results = db.cachedb.execute_and_fetchall(
            """
            SELECT card_id, fields FROM card_index_keywords
            WHERE card_id IN ({})
            """.format(','.join(['?'] * len(card_ids))), card_ids)
    else:
        logger.debug("Reindexing all cards")
        results = db.cachedb.execute_and_fetchall(
            "SELECT card_id, fields FROM card_index_keywords")
    writer = self.index.writer()
    for result in results:
        fields = ast.literal_eval(result[1])
        content = " ".join([fields[key] for key in KEYWORD_KEYS_STR_ONLY])
        # Delete-then-add replaces the existing document for this card.
        writer.delete_by_term('title', str(result[0]))
        writer.add_document(title=str(result[0]), content=content, **fields)
    writer.commit()
def add_unit(self, cards):
    """Insert cards into the first free (sub)unit slot in the calculator.

    A 15-card list fills a fully empty grand-unit row; a 5/6-card list fills
    the first empty 5-card subunit (a 6th card, if present, is dropped).
    """
    if len(cards) == 6:
        cards = cards[:5]
    if len(cards) == 15:
        # Duplicate an entire grand unit: needs a completely empty row.
        for row in range(self.widget.rowCount()):
            if self.widget.cellWidget(row, 0).card_ids == [None] * 15:
                logger.debug("Empty calculator unit at row {}".format(row))
                self.set_unit(row=row, unit=0, cards=cards)
                return
        self.model.add_empty_unit(AddEmptyUnitEvent(self.model))
        self.set_unit(row=self.widget.rowCount() - 1, unit=0, cards=cards)
        return
    # Single unit: scan each row's three 5-card subunit slots.
    for row in range(self.widget.rowCount()):
        card_ids = self.widget.cellWidget(row, 0).card_ids
        for unit_idx in range(3):
            if card_ids[unit_idx * 5:(unit_idx + 1) * 5] == [None] * 5:
                logger.debug("Empty calculator unit at row {}.{}".format(
                    row, unit_idx))
                self.set_unit(row=row, unit=unit_idx, cards=cards)
                return
    self.model.add_empty_unit(AddEmptyUnitEvent(self.model))
    self.set_unit(row=self.widget.rowCount() - 1, unit=0, cards=cards)
33: {"id": 33, "name": "Cool Ensemble", "keywords": ["ens"], "color": (63, 63, 204)}, 34: {"id": 34, "name": "Passion Ensemble", "keywords": ["ens"], "color": (204, 162, 65)}, 35: {"id": 35, "name": "Vocal Motif", "color": (255, 94, 94)}, 36: {"id": 36, "name": "Dance Motif", "color": (0, 201, 212)}, 37: {"id": 37, "name": "Visual Motif", "color": (255, 178, 84)}, 38: {"id": 38, "name": "Tricolor Symphony", "keywords": ["sym"], "color": (255, 0, 238)}, 39: {"id": 39, "name": "Alternate", "keywords": ["alt"], "color": (158, 158, 158)}, 40: {"id": 40, "name": "Refrain", "keywords": ["ref"], "color": (100, 26, 20)}, 41: {"id": 41, "name": "Magic", "keywords": ["mag"], "color": (185, 242, 136)}, } SKILL_COLOR_BY_NAME = { v['name']: v['color'] for v in SKILL_BASE.values() } logger.debug("Creating chihiro.skill_keywords...") db.cachedb.execute(""" DROP TABLE IF EXISTS skill_keywords """) db.cachedb.execute(""" CREATE TABLE IF NOT EXISTS skill_keywords ( "id" INTEGER UNIQUE PRIMARY KEY, "skill_name" TEXT, "keywords" TEXT ) """) for skill_id, skill_data in SKILL_BASE.items(): db.cachedb.execute(""" INSERT OR IGNORE INTO skill_keywords ("id", "skill_name", "keywords") VALUES (?,?,?) """, [skill_id, skill_data['name'],
def get_masterdb_path():
    """Return the path to master.db, downloading it first when missing."""
    if not storage.exists(MASTERDB_PATH):
        # BUG FIX: the old message said "triggering manifest updater" but
        # this triggers the master db updater (compare get_manifestdb_path).
        logger.debug("master.db not found, triggering master db updater")
        _update_masterdb()
    return MASTERDB_PATH
def get_manifestdb_path():
    """Return the path to manifest.db, downloading it first when missing."""
    if storage.exists(MANIFEST_PATH):
        return MANIFEST_PATH
    logger.debug("manifest.db not found, triggering manifest updater")
    _update_manifest()
    return MANIFEST_PATH
def initialize_index_db(self, card_list=None):
    """Rebuild the card_index_keywords table with quicksearch fields per card.

    Joins the cached card tables with the attached master db to compute the
    keyword fields for each (optionally filtered) card and stores them as a
    stringified dict keyed by card_id.
    """
    logger.info("Building quicksearch index, please wait...")
    # Chara IDs currently receiving the carnival bonus, for the SQL IN clause.
    carnival_idols = ",".join(
        map(str, Live.static_get_chara_bonus_set(get_name=False)))
    db.cachedb.execute("""ATTACH DATABASE "{}" AS masterdb""".format(
        get_masterdb_path()))
    query = """
    SELECT cdc.id,
        LOWER(cnc.card_short_name) as short,
        oc.number as owned,
        LOWER(cc.full_name) as chara,
        LOWER(rt.text) as rarity,
        LOWER(ct.text) as color,
        CASE WHEN cdc.rarity % 2 == 0 THEN 1 ELSE 0 END idolized,
        CASE WHEN pk.id IS NOT NULL THEN sd.condition || pk.short ELSE '' END time_prob_key,
        IFNULL(LOWER(sk.keywords), "") as skill,
        IFNULL(LOWER(lk.keywords), "") as leader,
        CASE WHEN cdc.leader_skill_id IN (70,71,72,73,81,82,83,84,104,105,106,113,117,118) AND cdc.rarity > 6 THEN "fes" ELSE "" END fes,
        CASE WHEN cdc.leader_skill_id IN (70,71,72,73,81,82,83,84,104,105,106,113,117) AND cdc.rarity > 6 THEN "blanc" ELSE "" END blanc,
        CASE WHEN cdc.leader_skill_id IN (118) AND cdc.rarity > 6 THEN "noir" ELSE "" END noir,
        CASE WHEN cdc.chara_id IN ({}) THEN "carnival" ELSE "" END carnival,
        CASE WHEN 1.0 * cdc.vocal_min / (cdc.vocal_min + cdc.visual_min + cdc.dance_min) > 0.39 THEN "vocal"
             WHEN 1.0 * cdc.visual_min / (cdc.vocal_min + cdc.visual_min + cdc.dance_min) > 0.39 THEN "visual"
             WHEN 1.0 * cdc.dance_min / (cdc.vocal_min + cdc.visual_min + cdc.dance_min) > 0.39 THEN "dance"
             ELSE "balance" END main_attribute
    FROM card_data_cache as cdc
    INNER JOIN card_name_cache cnc on cdc.id = cnc.card_id
    INNER JOIN owned_card oc on oc.card_id = cnc.card_id
    INNER JOIN chara_cache cc on cdc.chara_id = cc.chara_id
    INNER JOIN rarity_text rt on cdc.rarity = rt.id
    INNER JOIN color_text ct on cdc.attribute = ct.id
    LEFT JOIN masterdb.skill_data sd on cdc.skill_id = sd.id
    LEFT JOIN probability_keywords pk on pk.id = sd.probability_type
    LEFT JOIN skill_keywords sk on sd.skill_type = sk.id
    LEFT JOIN leader_keywords lk on cdc.leader_skill_id = lk.id
    """.format(carnival_idols)
    if card_list is not None:
        query += "WHERE cdc.id IN ({})".format(','.join(['?'] * len(card_list)))
        data = db.cachedb.execute_and_fetchall(query, card_list, out_dict=True)
    else:
        data = db.cachedb.execute_and_fetchall(query, out_dict=True)
    # NOTE(review): the table is dropped and recreated even when card_list is
    # given, so a partial rebuild discards rows for all other cards — confirm
    # this is the intended behavior for callers passing card_list.
    db.cachedb.execute("DROP TABLE IF EXISTS card_index_keywords")
    db.cachedb.execute("""
        CREATE TABLE IF NOT EXISTS card_index_keywords (
        "card_id" INTEGER UNIQUE PRIMARY KEY,
        "fields" BLOB
        )
        """)
    logger.debug("Initializing quicksearch db for {} cards".format(
        len(data)))
    for card in data:
        card_id = card['id']
        # Persist the keyword fields as a stringified dict (read back with
        # ast.literal_eval when the whoosh index is built).
        fields = {_: card[_] for _ in KEYWORD_KEYS}
        db.cachedb.execute(
            """
            INSERT OR REPLACE INTO card_index_keywords ("card_id", "fields")
            VALUES (?,?)
            """, [card_id, str(fields)])
    db.cachedb.commit()
    logger.debug(
        "Quicksearch db transaction for {} cards completed".format(
            len(data)))
    db.cachedb.execute("DETACH DATABASE masterdb")
import customlogger as logger
from db import db

# Skill proc-rate tiers: id -> (display name, short search keyword).
PROBABILITY_BASE = {
    1: ("Very Low", "vl"),
    2: ("Low", "lo"),
    3: ("Medium", "med"),
    4: ("High", "hi"),
    5: ("Very High", "vh")
}

logger.debug("Creating chihiro.probability_keywords...")
db.cachedb.execute("""
    DROP TABLE IF EXISTS probability_keywords
    """)
db.cachedb.execute("""
    CREATE TABLE IF NOT EXISTS probability_keywords (
    "id" INTEGER UNIQUE PRIMARY KEY,
    "keywords" TEXT UNIQUE,
    "short" TEXT UNIQUE
    )
    """)
for tier_id, (tier_name, tier_short) in PROBABILITY_BASE.items():
    db.cachedb.execute(
        """
        INSERT OR IGNORE INTO probability_keywords ("id", "keywords", "short")
        VALUES (?,?,?)
        """, [tier_id, tier_name, tier_short])
db.cachedb.commit()
logger.debug("chihiro.probability_keywords created.")
class Color(Enum):
    """Card attribute colors; ALL matches any attribute."""
    CUTE = 0
    COOL = 1
    PASSION = 2
    ALL = 3


# RGB tuples used by the GUI for each attribute.
CARD_GUI_COLORS = {
    "Cute": (204, 3, 93),
    "Cool": (4, 82, 231),
    "Passion": (252, 169, 38),
}

logger.debug("Creating chihiro.color_text...")
db.cachedb.execute("""
    DROP TABLE IF EXISTS color_text
    """)
db.cachedb.execute("""
    CREATE TABLE IF NOT EXISTS color_text (
    "id" INTEGER UNIQUE PRIMARY KEY,
    "text" TEXT UNIQUE
    )
    """)
# Stored ids are 1-based (enum value + 1), names capitalized.
for attribute in Color:
    db.cachedb.execute(
        """
        INSERT OR IGNORE INTO color_text ("id", "text")
        VALUES (?,?)
        """, [attribute.value + 1, attribute.name.capitalize()])
db.cachedb.commit()
106: "Resonance Makeup", 107: "CutexCool", 108: "CutexPassion", 109: "CoolxCute", 110: "CoolxPassion", 111: "PassionxCute", 112: "PassionxCool", 113: "Cinderella Yell", 114: "Tricolor Ability", 115: "Cinderella Charm", 116: "World Level", 117: "Cinderella Wish", 118: "Cinderella Bless", } logger.debug("Creating chihiro.leader_keywords...") db.cachedb.execute(""" DROP TABLE IF EXISTS leader_keywords """) db.cachedb.execute(""" CREATE TABLE IF NOT EXISTS leader_keywords ( "id" INTEGER UNIQUE PRIMARY KEY, "keywords" TEXT ) """) for skill_id, skill_data in SKILL_BASE.items(): db.cachedb.execute( """ INSERT OR IGNORE INTO leader_keywords ("id", "keywords") VALUES (?,?) """, [skill_id, skill_data]) db.cachedb.commit()
import customlogger as logger
from db import db


class Rarity(Enum):
    """Card rarity tiers; the *U variants are the unidolized forms."""
    NU = 1
    N = 2
    RU = 3
    R = 4
    SRU = 5
    SR = 6
    SSRU = 7
    SSR = 8


logger.debug("Creating chihiro.rarity_text...")
db.cachedb.execute("""
    DROP TABLE IF EXISTS rarity_text
    """)
db.cachedb.execute("""
    CREATE TABLE IF NOT EXISTS rarity_text (
    "id" INTEGER UNIQUE PRIMARY KEY,
    "text" TEXT UNIQUE
    )
    """)
for tier in Rarity:
    db.cachedb.execute("""
        INSERT OR IGNORE INTO rarity_text ("id", "text")
        VALUES (?,?)
        """, [tier.value, tier.name.lower()])
db.cachedb.commit()
def execute_query(self, query_str, limit=None):
    """Parse query_str against the index schema, run the search, return hits."""
    parsed = QueryParser("content", self._ix.schema).parse(query_str)
    hits = self._searcher.search(parsed, limit=limit)
    logger.debug("Query '{}' took {} to run.".format(query_str, hits.runtime))
    return hits