def load_data(args):
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()
    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    if args.server.lower() == "combined":
        cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database, Server.jp)
    elif args.server.lower() == "jp":
        cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database, Server.jp)
    elif args.server.lower() == "na":
        cs_database = crossed_data.CrossServerDatabase(na_database, na_database, na_database, Server.na)
    elif args.server.lower() == "kr":
        cs_database = crossed_data.CrossServerDatabase(kr_database, kr_database, kr_database, Server.kr)
    else:
        raise ValueError('unexpected server: {}'.format(args.server))

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        # This is supported for https://pad.chesterip.cc/ and PadSpike, until we can support it better in the dg db
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        # kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    if args.processors:
        # Run only the explicitly requested processors, then bail out.
        for processor_name in args.processors.split(","):
            processor_name = processor_name.strip()
            logger.info('Running specific processor {}'.format(processor_name))
            class_type = type_name_to_processor[processor_name]
            processor = class_type(cs_database)
            processor.process(db_wrapper)
        logger.info('done')
        return

    # Load dimension tables
    DimensionProcessor().process(db_wrapper)

    # Load rank data
    RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    AwakeningProcessor().process(db_wrapper)

    # Ensure tags
    SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)

    # Load monster data
    MonsterProcessor(cs_database).process(db_wrapper)

    # Auto-assign monster series
    series_processor.post_process(db_wrapper)

    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)

    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    TimestampProcessor().process(db_wrapper)

    PurgeDataProcessor().process(db_wrapper)

    logger.info('done')
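# A minimal sketch of the type_name_to_processor registry that the --processors
# flag indexes into. The key names below are illustrative assumptions, not the
# project's actual table; each value is a processor class that the loop above
# constructs with cs_database. (The variant further below expects list values
# instead, e.g. 'monster': [MonsterProcessor].)
type_name_to_processor = {
    'monster': MonsterProcessor,
    'dungeon': DungeonProcessor,
    'schedule': ScheduleProcessor,
}
# With this mapping, `--processors monster,dungeon` would run only
# MonsterProcessor and DungeonProcessor and then return early.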
def load_data(args):
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()
    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database)

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    # Load dimension tables
    DimensionProcessor().process(db_wrapper)

    # Load rank data
    RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    AwakeningProcessor().process(db_wrapper)

    # Ensure tags
    SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)

    # Load monster data
    MonsterProcessor(cs_database).process(db_wrapper)

    # Auto-assign monster series
    series_processor.post_process(db_wrapper)

    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)

    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    TimestampProcessor().process(db_wrapper)

    # Purge old schedule items and deleted_rows
    # This is dangerous, so we won't do it yet
    # PurgeDataProcessor().process(db_wrapper)

    logger.info('done')
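# A hypothetical sketch of the dry-run pattern the functions above rely on:
# DbWrapper is built with dry_run = not args.doupdates, and --logsql raises the
# 'database' logger to DEBUG. This is NOT the project's actual DbWrapper; it
# only illustrates how such a wrapper can log SQL and skip writes on a dry run.
import logging

class DbWrapperSketch:
    def __init__(self, dry_run: bool):
        self.dry_run = dry_run
        self.logger = logging.getLogger('database')

    def connect(self, db_config: dict):
        # Illustrative only; the real wrapper presumably opens a connection
        # from the parsed db_config JSON.
        self.logger.debug('connecting with config keys: %s', sorted(db_config))

    def execute(self, sql: str):
        # Every statement is logged at DEBUG, so --logsql makes it visible.
        self.logger.debug(sql)
        if self.dry_run:
            return  # Without --doupdates, mutating statements are skipped.
        # ... otherwise run the statement against the live connection ...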
def load_data(args):
    # Passing the literal string "None" explicitly skips all processing.
    if args.processors == "None":
        return
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()
    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    if args.server.lower() == "combined":
        cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database, Server.jp)
    elif args.server.lower() == "jp":
        cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database, Server.jp)
    elif args.server.lower() == "na":
        cs_database = crossed_data.CrossServerDatabase(na_database, na_database, na_database, Server.na)
    elif args.server.lower() == "kr":
        cs_database = crossed_data.CrossServerDatabase(kr_database, kr_database, kr_database, Server.kr)
    else:
        raise ValueError('unexpected server: {}'.format(args.server))

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        # This is supported for https://pad.chesterip.cc/ and PadSpike, until we can support it better in the dg db
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        # kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    # Resolve the requested processor names into a list of processor classes.
    processors = []
    for proc in args.processors.split(","):
        proc = proc.strip()
        if proc in type_name_to_processor:
            processors.extend(type_name_to_processor[proc])
        else:
            logger.warning("Unknown processor: {}\nSkipping...".format(proc))

    # Load dimension tables
    if DimensionProcessor in processors:
        DimensionProcessor().process(db_wrapper)

    # Load rank data
    if RankRewardProcessor in processors:
        RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    if AwokenSkillProcessor in processors:
        AwokenSkillProcessor().process(db_wrapper)

    # Ensure tags
    if SkillTagProcessor in processors:
        SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    if EnemySkillProcessor in processors:
        es_processor = EnemySkillProcessor(db_wrapper, cs_database)
        es_processor.load_static()
        es_processor.load_enemy_skills()
        if args.es_dir:
            es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    if SeriesProcessor in processors:
        SeriesProcessor(cs_database).process(db_wrapper)

    # Load monster data
    if MonsterProcessor in processors:
        MonsterProcessor(cs_database).process(db_wrapper)

    # Egg machines
    if EggMachineProcessor in processors:
        EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = None
    if DungeonProcessor in processors:
        dungeon_processor = DungeonProcessor(cs_database)
        dungeon_processor.process(db_wrapper)

    if DungeonContentProcessor in processors and args.server.lower() == "combined":
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    if dungeon_processor is not None:
        dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    if ScheduleProcessor in processors:
        ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    if ExchangeProcessor in processors:
        ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    if PurchaseProcessor in processors:
        PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    if TimestampProcessor in processors:
        TimestampProcessor().process(db_wrapper)

    if PurgeDataProcessor in processors:
        PurgeDataProcessor().process(db_wrapper)

    logger.info('Done')
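# A minimal argparse sketch of the command-line interface that load_data()
# reads. The flag names mirror the args attributes accessed above; the types,
# defaults, and help strings are assumptions for illustration, not the
# project's actual parser.
import argparse

def parse_args():
    parser = argparse.ArgumentParser(description='Load PAD data into the database.')
    parser.add_argument('--input_dir', required=True, help='Directory with the raw per-server data')
    parser.add_argument('--output_dir', help='Directory for intermediate JSON output')
    parser.add_argument('--server', default='combined', help='One of: combined, jp, na, kr')
    parser.add_argument('--db_config', required=True, help='Path to a JSON database config')
    parser.add_argument('--media_dir', help='Optional directory with extra image info')
    parser.add_argument('--es_dir', help='Optional directory with enemy data')
    parser.add_argument('--processors', help='Comma-separated processor names to run')
    parser.add_argument('--doupdates', action='store_true', help='Apply changes; omit for a dry run')
    parser.add_argument('--logsql', action='store_true', help='Log SQL at DEBUG level')
    parser.add_argument('--pretty', action='store_true', help='Pretty-print intermediate JSON')
    parser.add_argument('--skipintermediate', action='store_true', help='Skip saving intermediate data')
    parser.add_argument('--skip_long', action='store_true', help='Skip slow wave-info processing')
    return parser.parse_args()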