def _process_skills(self, db: DbWrapper):
    """Upsert every leader/active skill referenced by any server's version of an ownable card."""
    css_by_skill_id = {css.skill_id: css for css in self.data.skills}

    # Collect the skill IDs currently in use across all three servers.
    leader_ids = set()
    active_ids = set()
    for card in self.data.ownable_cards:
        for variant in (card.jp_card, card.na_card, card.kr_card):
            if variant.leader_skill_id:
                leader_ids.add(variant.leader_skill_id)
            if variant.active_skill_id:
                active_ids.add(variant.active_skill_id)

    logger.warning('loading %s in-use leader skills', len(leader_ids))
    for skill_id in leader_ids:
        css = css_by_skill_id[skill_id]
        calc_skill = self.data.calculated_skills.get(skill_id)
        db.insert_or_update(LeaderSkill.from_css(css, calc_skill))

    logger.warning('loading %s in-use active skills', len(active_ids))
    for skill_id in active_ids:
        css = css_by_skill_id[skill_id]
        calc_skill = self.data.calculated_skills.get(skill_id)
        db.insert_or_update(ActiveSkill.from_css(css, calc_skill))
def _process_subdungeons(self, db: DbWrapper):
    """Upsert a SubDungeon row for every sub-dungeon of every dungeon."""
    logger.info('loading sub_dungeons')
    for dungeon in self.data.dungeons:
        for sub_dungeon_item in SubDungeon.from_csd(dungeon):
            db.insert_or_update(sub_dungeon_item)
    logger.info('done loading subdungeons')
def _maybe_insert_encounters(self, db: DbWrapper, dungeon: CrossServerDungeon,
                             sub_dungeon: CrossServerSubDungeon, result_floor: ResultFloor):
    """Insert Encounter and Drop rows for one sub-dungeon's wave result.

    Runs only once per (dungeon, sub_dungeon): if any encounters already exist
    for the pair, the whole floor is skipped, so re-runs never duplicate or
    refresh spawn data.
    """
    sql = 'SELECT count(*) FROM encounters WHERE dungeon_id={} and sub_dungeon_id={}'.format(
        dungeon.dungeon_id, sub_dungeon.sub_dungeon_id)
    encounter_count = db.get_single_value(sql, int)
    if encounter_count:
        logger.debug('Skipping encounter insert for {}-{}'.format(
            dungeon.dungeon_id, sub_dungeon.sub_dungeon_id))
        return
    logger.warning('Executing encounter insert for {}-{}'.format(
        dungeon.dungeon_id, sub_dungeon.sub_dungeon_id))
    for stage in result_floor.stages:
        for slot in stage.slots:
            csc = self.data.card_by_monster_id(slot.monster_id)
            card = csc.jp_card.card
            enemy = card.enemy()
            turns = card.enemy_turns
            # Technical dungeons use the alternate turn counter when one is set.
            if dungeon.jp_dungeon.full_dungeon_type == RawDungeonType.TECHNICAL and card.enemy_turns_alt:
                turns = card.enemy_turns_alt
            # Stats come from the enemy's level curve scaled by the sub-dungeon multipliers.
            sd = sub_dungeon.jp_sub_dungeon
            hp = int(
                round(sd.hp_mult * enemy.hp.value_at(slot.monster_level)))
            atk = int(
                round(sd.atk_mult * enemy.atk.value_at(slot.monster_level)))
            defence = int(
                round(sd.def_mult * enemy.defense.value_at(slot.monster_level)))
            # TODO: add comments based on slot data
            encounter = Encounter(
                dungeon_id=dungeon.dungeon_id,
                sub_dungeon_id=sub_dungeon.sub_dungeon_id,
                enemy_id=slot.monster_id,
                monster_id=slot.visible_monster_id(),
                stage=stage.stage_idx,
                comment_jp=None,
                comment_na=None,
                comment_kr=None,
                # amount is only meaningful when the spawn count is fixed.
                amount=slot.min_spawn if slot.min_spawn == slot.max_spawn else None,
                order_idx=slot.order,
                turns=turns,
                level=slot.monster_level,
                hp=hp,
                atk=atk,
                defence=defence)
            db.insert_or_update(encounter, force_insert=True)
            drops = Drop.from_slot(slot, encounter)
            for drop in drops:
                db.insert_or_update(drop)
def upsert_active_skill_data(db: DbWrapper, skill: CrossServerSkill):
    """Upsert an active skill along with its subskills, parts, and join-table rows."""
    db.insert_or_update(ActiveSkill.from_css(skill))
    for sub_idx, subskill in enumerate(skill.cur_skill.subskills):
        db.insert_or_update(ActiveSubskill.from_as(subskill))
        for part_idx, part in enumerate(subskill.parts):
            db.insert_or_update(ActivePart.from_as(part))
            # Join row linking this part to its subskill, ordered by position.
            db.insert_or_update(ActiveSubskillsParts.from_css(subskill, part, part_idx))
        # Join row linking this subskill to the parent skill, ordered by position.
        db.insert_or_update(ActiveSkillsSubskills.from_css(skill, subskill, sub_idx))
def process(self, db: DbWrapper):
    """Upsert every static dimension-table row, group by group."""
    dimension_groups = (D_ATTRIBUTES, D_TYPES, D_SERVERS, D_EVENT_TYPES, D_EGG_MACHINE_TYPES)
    for group in dimension_groups:
        for item in group:
            db.insert_or_update(item)
def _process_auto_override(self, db: DbWrapper):
    """Apply diacritic-stripped NA name overrides where no override is set yet.

    For each ownable card whose NA name contains diacritics, writes the
    cleaned name into monsters.name_en_override unless one already exists.
    """
    logger.info('checking for auto name overrides')
    for m in self.data.ownable_cards:
        name = m.na_card.card.name
        name_clean = remove_diacritics(name)
        if name == name_clean:
            continue
        existing_name = db.get_single_value(
            'select name_en_override from monsters where monster_id = {}'.format(m.monster_id),
            fail_on_empty=False)
        if existing_name:
            # A manual or earlier override wins; never clobber it.
            continue
        logger.info('applying name override (%s): %s -> %s',
                    m.monster_id, name, name_clean)
        # Escape double quotes so a name containing `"` cannot break out of
        # the quoted SQL literal (previously this was interpolated raw).
        # Ideally this would be a parameterized query instead.
        db.update_item('update monsters set name_en_override = "{}" where monster_id = {}'.format(
            name_clean.replace('"', '\\"'), m.monster_id))
def main(args):
    """Set up the global DB config/connection/wrapper, then start the web app."""
    global db_config, connection, db_wrapper
    with open(args.db_config) as f:
        db_config = base_json.load(f)
    connection = utils.connect(db_config)
    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)
    app.run(host='0.0.0.0', port=int(args.port))
def process(self, db: DbWrapper):
    """Record each tracked table's newest tstamp in the `timestamps` table."""
    logger.warning('timestamp update of %s tables', len(_UPDATE_TABLES))
    for table in _UPDATE_TABLES:
        max_tstamp_sql = 'SELECT MAX(tstamp) AS tstamp FROM `{}`'.format(table)
        tstamp = db.get_single_value(max_tstamp_sql, op=int)
        if tstamp is None:
            # MAX() over an empty table is NULL; previously that formatted the
            # literal string 'None' into the upsert SQL below. Skip instead.
            # (Assumes get_single_value passes NULL through — TODO confirm.)
            logger.warning('no rows in %s; skipping tstamp update', table)
            continue
        update_sql = ("INSERT INTO timestamps (name, tstamp) values ('{}', {})"
                      " ON DUPLICATE KEY UPDATE tstamp = {}").format(table, tstamp, tstamp)
        rows_updated = db.update_item(update_sql)
        if rows_updated:
            logger.info('Updated tstamp for {} to {}'.format(table, tstamp))
    logger.warning('done updating timestamps')
def _process_skills(self, db: DbWrapper):
    """Upsert each ownable card's leader skill and active skill data, logging totals."""
    logger.info('loading skills for %s cards', len(self.data.ownable_cards))
    leader_total = 0
    active_total = 0
    for card in self.data.ownable_cards:
        if card.leader_skill:
            leader_total += 1
            db.insert_or_update(LeaderSkill.from_css(card.leader_skill))
        if card.active_skill:
            active_total += 1
            upsert_active_skill_data(db, card.active_skill)
    logger.info('loaded %s leader skills and %s active skills', leader_total, active_total)
def _process_skills(self, db: DbWrapper):
    """Upsert leader and active skill rows for every ownable card, counting each kind."""
    logger.info('loading skills for %s cards', len(self.data.ownable_cards))
    leader_count = 0
    active_count = 0
    for csc in self.data.ownable_cards:
        leader = csc.leader_skill
        if leader:
            leader_count += 1
            db.insert_or_update(LeaderSkill.from_css(leader))
        active = csc.active_skill
        if active:
            active_count += 1
            db.insert_or_update(ActiveSkill.from_css(active))
    logger.info('loaded %s leader skills and %s active skills', leader_count, active_count)
def load_es_quick_and_die(args):
    """Load enemy skill behavior data (JP data standing in for all servers), then exit."""
    with open(args.db_config) as f:
        db_config = json.load(f)

    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    # JP substitutes for NA/KR here; this path only needs enemy skill data.
    cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database)

    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)

    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_enemy_data(args.es_dir)
    print('done loading ES')
    exit(0)
def process(self, db: DbWrapper):
    """Process every dungeon the finder query reports as ready."""
    for row in db.fetch_data(FIND_DUNGEONS_SQL):
        dungeon = self.data.dungeon_by_id(row['dungeon_id'])
        self._process_dungeon(db, dungeon)
def process(self, db: DbWrapper):
    """Upsert purchase rows per server, then record each monster's MP buy cost.

    The cost map is built KR first, then overwritten by NA, then JP — so JP
    wins whenever servers disagree on a monster's cost.
    """
    for server, purchase_map in self.purchase_data.items():
        logger.debug('Process {} purchases'.format(server.name.upper()))
        for raw in purchase_map:
            logger.debug('Creating purchase: %s', raw)
            db.insert_or_update(Purchase.from_raw_purchase(raw))

    monster_id_to_mp = {}
    # Precedence: KR < NA < JP (later updates overwrite earlier ones).
    for server in (Server.kr, Server.na, Server.jp):
        monster_id_to_mp.update(
            {purchase.monster_id: purchase.cost for purchase in self.purchase_data[server]})

    for monster_id, mp_cost in monster_id_to_mp.items():
        db.insert_or_update(MonsterWithMPValue(monster_id=monster_id, buy_mp=mp_cost))
def process(self, db: DbWrapper):
    """Upsert each server's egg machines together with their monster roll tables."""
    for server, egg_machine_list in self.egg_machines.items():
        logger.debug('Process {} egg machines'.format(server.name.upper()))
        for egg_machine in egg_machine_list:
            machine_row = EggMachine.from_eem(egg_machine, server)
            egg_machine_id = db.insert_or_update(machine_row)

            # Store the machine's contents (monster -> roll chance).
            id_mapper = server_monster_id_fn(server)
            for monster_key, roll_chance in egg_machine.contents.items():
                db.insert_or_update(EggMachinesMonster(
                    egg_machine_monster_id=None,
                    monster_id=id_mapper(monster_key),
                    roll_chance=roll_chance,
                    egg_machine_id=egg_machine_id))
def _process_dungeon_contents(self, db: DbWrapper):
    """Compute wave results per sub-dungeon; store wave data, encounters, and dungeon icon."""
    for dungeon in self.data.dungeons:
        # Periodic progress marker; the scan is long.
        if dungeon.dungeon_id % 250 == 0:
            logger.info('scanning dungeon:%s', dungeon.dungeon_id)

        stored_wave_items = []
        for sub_dungeon in dungeon.sub_dungeons:
            result_floor = self._compute_result_floor(db, dungeon, sub_dungeon)
            if not result_floor:
                continue
            wave_item = SubDungeonWaveData.from_waveresult(result_floor, sub_dungeon)
            db.insert_or_update(wave_item)
            stored_wave_items.append(wave_item)
            self._maybe_insert_encounters(db, dungeon, sub_dungeon, result_floor)

        if stored_wave_items:
            # The dungeon-level icon comes from its highest-numbered sub-dungeon.
            newest = max(stored_wave_items, key=lambda sd: sd.sub_dungeon_id)
            db.insert_or_update(DungeonWaveData(dungeon_id=dungeon.dungeon_id,
                                               icon_id=newest.icon_id))
def load_data(args):
    """Dump encounter spawn data from the database into encounter_data.json."""
    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper()
    db_wrapper.connect(db_config)

    output = {}
    for encounter in db_wrapper.fetch_data(ENCOUNTER_QUERY):
        sdgid = encounter['sdgid']
        floor = encounter['floor']
        spawn = {'id': encounter['enemy_id'], 'lv': encounter['level']}
        # Group spawns as {sdgid: {name, floors: {floor: {spawns: [...]}}}},
        # de-duplicating identical spawns within a floor.
        stage = output.setdefault(sdgid, {'name': encounter['stage_name'], 'floors': {}})
        spawns = stage['floors'].setdefault(floor, {'spawns': []})['spawns']
        if spawn not in spawns:
            spawns.append(spawn)

    with open(os.path.join(args.output_dir, "encounter_data.json"), 'w+') as f:
        json.dump(output, f)
def main(args):
    """Initialize global DB state and es_dir, optionally mount the web UI, then serve."""
    global db_config, connection, db_wrapper, es_dir
    with open(args.db_config) as f:
        db_config = base_json.load(f)
    connection = utils.connect(db_config)
    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)
    es_dir = args.es_dir
    # Serve the static web UI alongside the API when a web dir is supplied.
    if args.web_dir:
        app.static('/', os.path.join(args.web_dir, 'index.html'))
        app.static('', args.web_dir)
    app.run(host='0.0.0.0', port=int(args.port))
def _process_dungeons(self, db: DbWrapper):
    """Upsert dungeons and sub-dungeons, plus fixed-team rows where defined."""
    for dungeon in self.data.dungeons:
        db.insert_or_update(Dungeon.from_csd(dungeon))
        for subdungeon in dungeon.sub_dungeons:
            db.insert_or_update(SubDungeon.from_cssd(subdungeon, dungeon.dungeon_id))
            if not subdungeon.cur_sub_dungeon.fixed_monsters:
                continue
            db.insert_or_update(FixedTeam.from_cssd(subdungeon))
            # Six team slots; .get() yields None for empty slots, which
            # from_fc presumably tolerates — TODO confirm.
            for slot_idx in range(6):
                fixed = subdungeon.cur_sub_dungeon.fixed_monsters.get(slot_idx)
                db.insert_or_update(FixedTeamMonster.from_fc(fixed, slot_idx, subdungeon))
def _print_bad_enemies(self, desc: str, dungeon, sub_dungeon, db: DbWrapper, sql: str):
    """Report and auto-delete stored encounters (and their drops) flagged stale by `sql`."""
    bad_stored_encounters = db.fetch_data(sql)
    if not bad_stored_encounters:
        return

    id_csv = ','.join(str(row['encounter_id']) for row in bad_stored_encounters)
    info_csv = ','.join('{}/{}'.format(row['encounter_id'], row['enemy_id'])
                        for row in bad_stored_encounters)

    delete_drops_sql = 'DELETE FROM drops WHERE encounter_id IN ({});'.format(id_csv)
    delete_encounters_sql = 'DELETE FROM encounters WHERE encounter_id IN ({});'.format(
        id_csv)

    human_fix_logger.warning('Found bad %s stored encounters for %s: [%s] - %s\n%s\n%s',
                             desc,
                             dungeon.na_dungeon.clean_name,
                             sub_dungeon.na_sub_dungeon.clean_name,
                             info_csv,
                             delete_drops_sql,
                             delete_encounters_sql)

    # Drops first to avoid orphaned references to the deleted encounters.
    deleted_drops = db.update_item(delete_drops_sql)
    deleted_encounters = db.update_item(delete_encounters_sql)
    human_fix_logger.warning('Auto deleted {} drops and {} encounters'.format(
        deleted_drops, deleted_encounters))
def process(self, db: DbWrapper):
    """Purge schedule and deleted_rows entries older than four weeks.

    Previously this ran the DELETEs through fetch_data and read the counts
    back via SELECT ROW_COUNT(). update_item executes the statement and
    returns the affected-row count directly (and respects the wrapper's
    dry-run mode), matching the newer purge processor in this codebase.
    """
    cutoff = date2tstamp(datetime.now() - timedelta(weeks=4))
    schedule_deletes = db.update_item(
        "DELETE FROM `schedule` WHERE end_timestamp < {}".format(cutoff))
    deleted_row_deletes = db.update_item(
        "DELETE FROM `deleted_rows` WHERE tstamp < {}".format(cutoff))
    logger.info("purged {} old schedules and {} old deleted_rows".format(
        schedule_deletes, deleted_row_deletes))
def _try_group(self, db: DbWrapper):
    """Infer missing monster series from other members of the same PAD group.

    A monster with series 0 (unset) inherits a series when the classified
    monsters sharing its group_id agree on exactly one series.
    """
    monster_id_to_series_id = db.load_to_key_value('monster_id', 'series_id', 'monsters')

    # Pass 1: collect the known (non-zero) series present in each PAD group.
    group_id_to_series_ids = defaultdict(set)
    for csc in self.data.ownable_cards:
        series_id = monster_id_to_series_id.get(csc.monster_id)
        if series_id is None:
            monster_id_to_series_id[csc.monster_id] = 0
            logger.warning('Series was null for monster %d', csc.monster_id)
            continue
        if series_id == 0:
            # Unset; contributes no information.
            continue
        group_id_to_series_ids[csc.jp_card.card.group_id].add(series_id)

    # Pass 2: assign a series to unset monsters whose group is unambiguous.
    for csc in self.data.ownable_cards:
        if monster_id_to_series_id[csc.monster_id] != 0:
            # Already has a series.
            continue
        candidates = group_id_to_series_ids[csc.jp_card.card.group_id]
        if len(candidates) == 1:
            inferred_series_id = next(iter(candidates))
            db.insert_or_update(MonsterWithSeries(monster_id=csc.monster_id,
                                                 series_id=inferred_series_id))
def _compute_result_floor(self, db: DbWrapper, dungeon: CrossServerDungeon,
                          sub_dungeon: CrossServerSubDungeon) -> Optional[ResultFloor]:
    """Convert stored wave_data rows for this floor into a ResultFloor, or None if absent."""
    floor_id = sub_dungeon.sub_dungeon_id % 1000
    wave_sql = 'SELECT * FROM wave_data WHERE dungeon_id={} and floor_id={}'.format(
        dungeon.dungeon_id, floor_id)
    wave_items = db.custom_load_multiple_objects(WaveItem, wave_sql)
    if not wave_items:
        return None
    # Common-monster inference only applies to early normal/technical dungeons.
    is_normal_or_tech = dungeon.cur_dungeon.full_dungeon_type in (
        RawDungeonType.NORMAL, RawDungeonType.TECHNICAL)
    try_common_monsters = is_normal_or_tech and dungeon.cur_dungeon.dungeon_id < 1000
    return self.converter.convert(wave_items, try_common_monsters)
def load_data(args):
    """Re-insert egg machine monster rows parsed from each machine's stored contents blob."""
    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper(not args.doupdates)
    db_wrapper.connect(db_config)

    for machine_sql in db_wrapper.fetch_data("SELECT * FROM egg_machines"):
        egg_machine_id = machine_sql['egg_machine_id']
        # `contents` is a stringified dict whose keys look like "(1234)".
        contents = ast.literal_eval(machine_sql['contents'])
        for monster_key, roll_chance in contents.items():
            db_wrapper.insert_or_update(EggMachinesMonster(
                egg_machine_monster_id=None,
                monster_id=int(monster_key.strip("()")),
                roll_chance=roll_chance,
                egg_machine_id=egg_machine_id))
def process(self, db: DbWrapper):
    """Delete schedule and deleted_rows records older than four weeks."""
    print('Starting deletion of old records')
    print('schedule size:', db.get_single_value('select count(*) from schedule'))
    print('deleted_rows size', db.get_single_value('select count(*) from deleted_rows'))

    # This is a hint to mysql that we shouldn't insert into deleted_rows
    # while purging. The client should handle deleting old events in bulk.
    db.fetch_data('set @TRIGGER_DISABLED=true')

    cutoff = date2tstamp(datetime.now() - timedelta(weeks=4))
    print('deleting before', cutoff)
    purged_schedules = db.update_item(
        "DELETE FROM `schedule` WHERE end_timestamp < {}".format(cutoff))
    purged_deleted_rows = db.update_item(
        "DELETE FROM `deleted_rows` WHERE tstamp < {}".format(cutoff))
    logger.info("purged {} old schedules and {} old deleted_rows".format(
        purged_schedules, purged_deleted_rows))
def pull_data(args):
    """Repeatedly enter one dungeon floor via the PAD API and record wave spawns.

    Each loop iteration enters (dungeon_id, floor_id) using a card outside the
    current deck as the helper, then inserts one wave_data row per spawned
    monster.
    """
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    server = args.server.upper()
    endpoint = None
    if server == 'NA':
        endpoint = pad_api.ServerEndpoint.NA
    elif server == 'JP':
        # NOTE: the JP server's endpoint enum is named JA in pad_api.
        endpoint = pad_api.ServerEndpoint.JA
    else:
        raise Exception('unexpected server:' + args.server)
    api_client = pad_api.PadApiClient(endpoint, args.user_uuid, args.user_intid)
    print('login')
    api_client.login()
    print('load_player_data')
    api_client.load_player_data()
    # Any owned card not in the current deck serves as the "friend" helper.
    friend_card = api_client.get_any_card_except_in_cur_deck()
    dungeon_id = args.dungeon_id
    floor_id = args.floor_id
    loop_count = args.loop_count
    # One pull_id (epoch seconds) groups every entry made by this invocation.
    pull_id = int(time.time())
    print('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    dry_run = False
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)
    # Continue entry numbering after the highest stored entry_id.
    entry_id = int(
        db_wrapper.get_single_value("SELECT MAX(entry_id) FROM wave_data;"))
    print('entering dungeon', dungeon_id, 'floor', floor_id, loop_count, 'times')
    for e_idx in range(loop_count):
        print('entering', e_idx)
        entry_id += 1
        entry_json = api_client.enter_dungeon(dungeon_id, floor_id, self_card=friend_card)
        wave_response = pad_api.extract_wave_response_from_entry(entry_json)
        leaders = entry_json['entry_leads']
        for stage_idx, floor in enumerate(wave_response.floors):
            for monster_idx, monster in enumerate(floor.monsters):
                wave_item = WaveItem(pull_id=pull_id, entry_id=entry_id, server=server,
                                     dungeon_id=dungeon_id, floor_id=floor_id,
                                     stage=stage_idx, slot=monster_idx, monster=monster,
                                     leader_id=leaders[0], friend_id=leaders[1])
                db_wrapper.insert_item(wave_item.insert_sql())
        # Throttle between entries on non-NA servers — presumably to avoid
        # server-side rate limiting; confirm against API behavior.
        if server != 'NA':
            time.sleep(.5)
def process(self, db: DbWrapper):
    """Upsert every static dimension object."""
    for dimension_item in DIMENSION_OBJECTS:
        db.insert_or_update(dimension_item)
def load_data(args):
    """End-to-end pipeline: load per-server raw data, cross it, and populate the DB."""
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()
    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()
    cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database)
    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        for database in (jp_database, na_database, kr_database):
            database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    # Static lookup data: dimensions, rank rewards, awakenings, skill tags.
    DimensionProcessor().process(db_wrapper)
    RankRewardProcessor().process(db_wrapper)
    AwakeningProcessor().process(db_wrapper)
    SkillTagProcessor().process(db_wrapper)

    # Enemy skills: static tables, parsed skills, then optional behavior files.
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)

    # Series loading brackets monster loading: basic data first, auto-assign after.
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)
    MonsterProcessor(cs_database).process(db_wrapper)
    series_processor.post_process(db_wrapper)

    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)

    # Dungeons, and (optionally) the expensive wave-derived content.
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)
    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)
        # Toggle any newly-available dungeons visible
        dungeon_processor.post_encounter_process(db_wrapper)

    # Events, exchanges, purchases, and finally the table timestamps.
    ScheduleProcessor(cs_database).process(db_wrapper)
    ExchangeProcessor(cs_database).process(db_wrapper)
    PurchaseProcessor(cs_database).process(db_wrapper)
    TimestampProcessor().process(db_wrapper)

    # Purge old schedule items and deleted_rows
    # This is dangerous, so we won't do it yet
    # PurgeDataProcessor().process(db_wrapper)

    print('done')
def post_encounter_process(self, db: DbWrapper):
    """Flip visibility on dungeons that now have encounter data."""
    logger.info('post-encounter processing')
    changed_count = db.update_item(_ENCOUNTER_VISIBILITY_SQL)
    logger.info('Updated visibility of %s dungeons', changed_count)
def _maybe_insert_encounters(self, db: DbWrapper, dungeon: CrossServerDungeon,
                             sub_dungeon: CrossServerSubDungeon, result_floor: ResultFloor):
    """Upsert Encounter and Drop rows for a sub-dungeon's computed wave result.

    Unlike the insert-once variant, this reuses a stored encounter_id when an
    equivalent row already exists, and afterwards reports (and deletes, via
    _print_bad_enemies) stored encounters that no longer appear in the fresh
    result.
    """
    for stage in result_floor.stages:
        seen_enemies = set()
        for slot in stage.slots:
            csc = self.data.card_by_monster_id(slot.monster_id)
            card = csc.cur_card.card
            enemy = card.enemy()
            seen_enemies.add(slot.monster_id)
            turns = card.enemy_turns
            # Technical dungeons use the alternate turn counter when one is set.
            if dungeon.cur_dungeon.full_dungeon_type == RawDungeonType.TECHNICAL and card.enemy_turns_alt:
                turns = card.enemy_turns_alt
            # Stats come from the enemy's level curves scaled by sub-dungeon multipliers.
            sd = sub_dungeon.cur_sub_dungeon
            hp = int(round(sd.hp_mult * enemy.hp.value_at(slot.monster_level)))
            atk = int(round(sd.atk_mult * enemy.atk.value_at(slot.monster_level)))
            defence = int(round(sd.def_mult * enemy.defense.value_at(slot.monster_level)))
            exp = int(round(enemy.xp.value_at(slot.monster_level)))
            # TODO: add comments based on slot data
            encounter = Encounter(
                dungeon_id=dungeon.dungeon_id,
                sub_dungeon_id=sub_dungeon.sub_dungeon_id,
                enemy_id=slot.monster_id,
                monster_id=slot.visible_monster_id(),
                stage=stage.stage_idx,
                comment_jp=None,
                comment_na=None,
                comment_kr=None,
                # amount is only meaningful when the spawn count is fixed.
                amount=slot.min_spawn if slot.min_spawn == slot.max_spawn else None,
                order_idx=slot.order,
                turns=turns,
                level=slot.monster_level,
                hp=hp,
                atk=atk,
                defence=defence,
                exp=exp)
            # Reuse an existing row for the same spawn so the upsert updates in place.
            sql = '''
            SELECT encounter_id FROM encounters
            WHERE dungeon_id={} AND sub_dungeon_id={} AND stage={} AND enemy_id={} AND level={}
            '''.format(dungeon.dungeon_id, sub_dungeon.sub_dungeon_id, stage.stage_idx,
                       slot.monster_id, slot.monster_level)
            stored_encounter_id = db.get_single_value(sql, int, fail_on_empty=False)
            if stored_encounter_id:
                encounter.encounter_id = stored_encounter_id
            db.insert_or_update(encounter)
            drops = Drop.from_slot(slot, encounter)
            for drop in drops:
                db.insert_or_update(drop)
        if seen_enemies:
            # Stored encounters for this stage with enemies we did not see are stale.
            sql = '''
            SELECT encounter_id, enemy_id FROM encounters
            WHERE dungeon_id={} AND sub_dungeon_id={} AND stage={} AND enemy_id not in ({})
            '''.format(dungeon.dungeon_id, sub_dungeon.sub_dungeon_id, stage.stage_idx,
                       ','.join(map(str, seen_enemies)))
            self._print_bad_enemies('in-stage', dungeon, sub_dungeon, db, sql)
    # In case there are missing stages (e.g. no more invades/commons)
    seen_stage_indexes = [stage.stage_idx for stage in result_floor.stages]
    sql = '''
    SELECT encounter_id, enemy_id FROM encounters
    WHERE dungeon_id={} AND sub_dungeon_id={} AND stage not in ({})
    '''.format(dungeon.dungeon_id, sub_dungeon.sub_dungeon_id,
               ','.join(map(str, seen_stage_indexes)))
    self._print_bad_enemies('out-stage', dungeon, sub_dungeon, db, sql)
def load_data(args):
    """Full pipeline driver: selectable server mode, optional processor filter.

    BUG FIX: server selection previously read the module-level `input_args`
    instead of the `args` parameter; it now uses the parameter, so callers
    control the server. Also raises ValueError with a message naming the bad
    server value instead of an empty ValueError().
    """
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()
    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    server = args.server.lower()
    if server == "combined":
        cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database, Server.jp)
    elif server == "jp":
        cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database, Server.jp)
    elif server == "na":
        cs_database = crossed_data.CrossServerDatabase(na_database, na_database, na_database, Server.na)
    elif server == "kr":
        cs_database = crossed_data.CrossServerDatabase(kr_database, kr_database, kr_database, Server.kr)
    else:
        raise ValueError('unexpected server: {}'.format(args.server))

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        # This is supported for https://pad.chesterip.cc/ and PadSpike, until we can support it better in the dg db
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        # kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    if args.processors:
        # Run only the explicitly requested processors, then stop.
        for processor_name in args.processors.split(","):
            processor_name = processor_name.strip()
            logger.info('Running specific processor {}'.format(processor_name))
            class_type = type_name_to_processor[processor_name]
            class_type(cs_database).process(db_wrapper)
        logger.info('done')
        return

    # Load dimension tables
    DimensionProcessor().process(db_wrapper)
    # Load rank data
    RankRewardProcessor().process(db_wrapper)
    # Ensure awakenings
    AwakeningProcessor().process(db_wrapper)
    # Ensure tags
    SkillTagProcessor().process(db_wrapper)
    # Load enemy skills
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)
    # Load basic series data
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)
    # Load monster data
    MonsterProcessor(cs_database).process(db_wrapper)
    # Auto-assign monster series
    series_processor.post_process(db_wrapper)
    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)
    # Load dungeon data
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)
    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)
        # Toggle any newly-available dungeons visible
        dungeon_processor.post_encounter_process(db_wrapper)
    # Load event data
    ScheduleProcessor(cs_database).process(db_wrapper)
    # Load exchange data
    ExchangeProcessor(cs_database).process(db_wrapper)
    # Load purchase data
    PurchaseProcessor(cs_database).process(db_wrapper)
    # Update timestamps
    TimestampProcessor().process(db_wrapper)
    # Purge old schedule items and deleted_rows
    PurgeDataProcessor().process(db_wrapper)
    logger.info('done')