Example #1
def dump_data(args):
    input_dir = args.input_dir
    output_dir = args.output_dir

    save_region_files(output_dir, Server.jp, padtools.regions.japan.server)
    save_region_files(output_dir, Server.na, padtools.regions.north_america.server)
    save_region_files(output_dir, Server.kr, padtools.regions.korea.server)

    if args.image_data_only:
        exit(0)

    print('Processing JP')
    jp_db = merged_database.Database(Server.jp, input_dir)
    jp_db.load_database(skip_extra=True)

    print('Processing NA')
    na_db = merged_database.Database(Server.na, input_dir)
    na_db.load_database(skip_extra=True)

    print('Processing KR')
    kr_db = merged_database.Database(Server.kr, input_dir)
    kr_db.load_database(skip_extra=True)

    print('Merging and saving')
    if args.server.lower() == "jp":
        server = Server.jp
    elif args.server.lower() == "na":
        server = Server.na
    elif args.server.lower() == "kr":
        server = Server.kr
    else:
        raise ValueError("Server must be JP, NA, or KR")

    cross_db = CrossServerDatabase(jp_db, na_db, kr_db, server)
    save_cross_database(output_dir, cross_db)
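The if/elif chain above reappears in several of these examples, while examples #6 and #9 call Server.from_str instead. A minimal sketch of such a helper, assuming Server is a plain Enum (the member values here are placeholders, not taken from the pipeline):

from enum import Enum

class Server(Enum):
    jp = 0
    na = 1
    kr = 2

    @classmethod
    def from_str(cls, name: str) -> 'Server':
        # Normalize the CLI argument and look up the enum member by name,
        # mirroring the if/elif chains used elsewhere in these examples.
        try:
            return cls[name.lower()]
        except KeyError:
            raise ValueError("Server must be JP, NA, or KR")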
Example #2
def dump_data(args):
    input_dir = args.input_dir
    output_dir = args.output_dir

    save_region_files(output_dir, Server.jp, padtools.regions.japan.server)
    save_region_files(output_dir, Server.na,
                      padtools.regions.north_america.server)
    save_region_files(output_dir, Server.kr, padtools.regions.korea.server)

    if args.image_data_only:
        exit(0)

    print('Processing JP')
    jp_db = merged_database.Database(Server.jp, input_dir)
    jp_db.load_database(skip_extra=True)

    print('Processing NA')
    na_db = merged_database.Database(Server.na, input_dir)
    na_db.load_database(skip_extra=True)

    print('Processing KR')
    kr_db = merged_database.Database(Server.kr, input_dir)
    kr_db.load_database(skip_extra=True)

    print('Merging and saving')
    cross_db = CrossServerDatabase(jp_db, na_db, kr_db)
    save_cross_database(output_dir, cross_db)
Example #3
def load_data(args):
    dry_run = not args.doupdates

    input_dir = args.input_dir

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, input_dir)
    kr_database.load_database()

    cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    do_migration(cs_database, db_wrapper)

    print('Not part of the migration, but consider running:')
    print('UPDATE dungeons SET visible = false WHERE dungeon_id NOT IN (SELECT dungeon_id FROM encounters GROUP BY 1)')
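The suggested cleanup is printed rather than executed; the DbWrapper API for raw statements is not shown in these examples. As a hedged sketch, the same UPDATE could be run over a plain PyMySQL connection built from the same db_config (the config key names below are assumptions):

import json
import pymysql

with open('db_config.json') as f:
    cfg = json.load(f)

# Assumed config keys; the actual db_config schema is not shown here.
connection = pymysql.connect(host=cfg['host'], user=cfg['user'],
                             password=cfg['password'], database=cfg['database'])
with connection.cursor() as cursor:
    # The cleanup statement suggested by the example above.
    cursor.execute(
        'UPDATE dungeons SET visible = false'
        ' WHERE dungeon_id NOT IN'
        ' (SELECT dungeon_id FROM encounters GROUP BY 1)')
connection.commit()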
Example #4
def run(args):
    behavior_data_dir = os.path.join(args.output_dir, 'behavior_data')
    os.makedirs(behavior_data_dir, exist_ok=True)
    behavior_text_dir = os.path.join(args.output_dir, 'behavior_text')
    os.makedirs(behavior_text_dir, exist_ok=True)

    jp_db = merged_database.Database(Server.jp, args.input_dir)
    na_db = merged_database.Database(Server.na, args.input_dir)

    jp_db.load_database(skip_skills=True, skip_bonus=True, skip_extra=True)
    na_db.load_database(skip_skills=True, skip_bonus=True, skip_extra=True)

    print('merging data')
    # Skipping KR database; we don't need it to compute ES
    cross_db = CrossServerDatabase(jp_db, na_db, na_db)

    combined_cards = cross_db.all_cards

    fixed_card_id = args.card_id
    if args.interactive:
        fixed_card_id = input("enter a card id:").strip()

    count = 0
    for csc in combined_cards:
        merged_card = csc.na_card
        card = merged_card.card
        if fixed_card_id and csc.monster_id != int(fixed_card_id):
            continue
        try:
            count += 1
            if count % 100 == 0:
                print('processing {} of {}'.format(count, len(combined_cards)))
            monster_behavior = process_card(csc)
            if monster_behavior is None:
                continue

            # Do some sanity cleanup on the behavior
            monster_behavior = clean_monster_behavior(monster_behavior)

            behavior_data_file = os.path.join(behavior_data_dir, '{}.textproto'.format(csc.monster_id))
            safe_save_to_file(behavior_data_file, monster_behavior)

            behavior_text_file = os.path.join(behavior_text_dir, '{}.txt'.format(csc.monster_id))
            save_monster_behavior(behavior_text_file, csc, monster_behavior)

            # TODO: Add raw behavior dump

        except Exception as ex:
            print('failed to process', csc.monster_id, card.name)
            print(ex)
            import traceback
            traceback.print_exc()
            exit(1)
Example #5
def main(args):
    input_dir = args.input_dir

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, input_dir)
    na_database.load_database()

    csd = crossed_data.CrossServerDatabase(jp_database, na_database, na_database)
    current_dungeons = active_dungeons.extract_active_dungeons(csd)
    for cd in current_dungeons:
        print(cd.na_dungeon.clean_name)
Example #6
def load_data(args):
    server = Server.from_str(args.server)

    if os.name != 'nt':
        fail_logger.addHandler(
            logging.FileHandler('/tmp/autodungeon_processor_issues.txt',
                                mode='w'))

    pad_db = merged_database.Database(server, args.input_dir)
    pad_db.load_database(skip_skills=True, skip_extra=True)

    with open(args.db_config) as f:
        db_config = json.load(f)

    dry_run = not args.doupdates
    db_wrapper = db_util.DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    dungeons = identify_dungeons(pad_db, args.group)

    if server == Server.na:
        endpoint = pad_api.ServerEndpoint.NA
    elif server == Server.jp:
        endpoint = pad_api.ServerEndpoint.JA
    else:
        raise Exception('unexpected server: ' + args.server)

    api_client = pad_api.PadApiClient(endpoint, args.user_uuid,
                                      args.user_intid)
    api_client.login()
    print('load_player_data')
    api_client.load_player_data()

    load_dungeons(args, db_wrapper, dungeons, api_client)
Example #7
def run(args):
    enemy_skillset_dump.set_data_dir(args.output_dir)

    raw_input_dir = os.path.join(args.input_dir, 'raw')
    jp_db = merged_database.Database(Server.jp, raw_input_dir)
    na_db = merged_database.Database(Server.na, raw_input_dir)

    jp_db.load_database(skip_skills=True, skip_bonus=True, skip_extra=True)
    na_db.load_database(skip_skills=True, skip_bonus=True, skip_extra=True)

    print('merging data')
    cross_db = CrossServerDatabase(jp_db, na_db, na_db)

    combined_cards = cross_db.all_cards

    fixed_card_id = args.card_id
    if args.interactive:
        fixed_card_id = input("enter a card id:").strip()

    count = 0
    for csc in combined_cards:
        merged_card = csc.na_card
        card = merged_card.card
        if fixed_card_id and csc.monster_id != int(fixed_card_id):
            continue
        try:
            count += 1
            if count % 100 == 0:
                print('processing {} of {}'.format(count, len(combined_cards)))
            process_card(csc)

        except Exception as ex:
            print('failed to process', card.name)
            print(ex)
            import traceback
            traceback.print_exc()
            exit(1)
Example #8
def load_es_quick_and_die(args):
    with open(args.db_config) as f:
        db_config = json.load(f)

    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database)

    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)

    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_enemy_data(args.es_dir)

    print('done loading ES')
    exit(0)
Example #9
def load_data(args):
    server = Server.from_str(args.server)

    pad_db = merged_database.Database(server, args.input_dir)
    pad_db.load_database(skip_skills=True, skip_extra=True)

    with open(args.db_config) as f:
        db_config = json.load(f)

    dry_run = not args.doupdates
    db_wrapper = db_util.DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    dungeons = identify_dungeons(pad_db, args.group)

    load_dungeons(args, db_wrapper, dungeons)
Example #10
def load_data(args):
    if args.server == 'jp':
        server = Server.jp
    elif args.server == 'na':
        server = Server.na
    elif args.server == 'kr':
        server = Server.kr
    else:
        raise ValueError('unexpected argument: ' + args.server)

    pad_db = merged_database.Database(server, args.input_dir)
    pad_db.load_database(skip_skills=True, skip_extra=True)

    with open(args.db_config) as f:
        db_config = json.load(f)

    dry_run = not args.doupdates
    db_wrapper = db_util.DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    dungeons = identify_dungeons(pad_db, args.group)

    load_dungeons(args, db_wrapper, dungeons)
Example #11
def run_test(args):
    esd.set_data_dir(args.es_input_dir)

    raw_input_dir = os.path.join(args.input_dir, 'raw')
    processed_input_dir = os.path.join(args.input_dir, 'processed')

    output_dir = args.output_dir
    new_output_dir = os.path.join(output_dir, 'new')
    pathlib.Path(new_output_dir).mkdir(parents=True, exist_ok=True)
    golden_output_dir = os.path.join(output_dir, 'golden')
    pathlib.Path(golden_output_dir).mkdir(parents=True, exist_ok=True)

    db = merged_database.Database(Server.na, raw_input_dir)

    print('loading')
    db.load_database(skip_skills=True, skip_bonus=True, skip_extra=True)

    dungeon_id_to_wavedata = defaultdict(set)
    wave_summary_data = wave.load_wave_summary(processed_input_dir)
    for w in wave_summary_data:
        dungeon_id_to_wavedata[w.dungeon_id].add(w)

    split_dungeons = [
        # Marks dungeons which are enormous and should be broken into subfiles
        110,  # Endless Corridors
    ]

    golden_dungeons = [
        116,  # Gunma
        158,  # Goemon
        172,  # Taiko
        176,  # Valkyrie
        307,  # Hera-Is
        308,  # Gung-ho
        318,  # Zeus-Dios
        317,  # ECO
        331,  # Hera-Ur
        337,  # Dragon Zombie
        354,  # Takeminakata
    ]

    for dungeon_id, wave_data in dungeon_id_to_wavedata.items():
        dungeon = db.dungeon_by_id(dungeon_id)
        if not dungeon:
            print('skipping', dungeon_id)
            continue

        print('processing', dungeon_id, dungeon.clean_name)
        file_output_dir = golden_output_dir if dungeon_id in golden_dungeons else new_output_dir

        if dungeon_id in split_dungeons:
            # Disabled for now; Endless Corridors takes a long time to run
            continue
            # for floor in dungeon.floors:
            #     floor_id = floor.floor_number
            #     file_name = '{}_{}.txt'.format(dungeon_id, floor_id)
            #     with open(os.path.join(file_output_dir, file_name), encoding='utf-8', mode='w') as f:
            #         f.write(flatten_data(wave_data, dungeon, db, limit_floor_id=floor_id))
        else:
            file_name = '{}.txt'.format(dungeon_id)
            with open(os.path.join(file_output_dir, file_name),
                      encoding='utf-8',
                      mode='w') as f:
                f.write(flatten_data(wave_data, dungeon, db))
Example #12
def run(args):
    behavior_data_dir = os.path.join(args.output_dir, 'behavior_data')
    os.makedirs(behavior_data_dir, exist_ok=True)
    behavior_text_dir = os.path.join(args.output_dir, 'behavior_text')
    os.makedirs(behavior_text_dir, exist_ok=True)
    behavior_plain_dir = os.path.join(args.output_dir, 'behavior_plain')
    os.makedirs(behavior_plain_dir, exist_ok=True)

    jp_db = merged_database.Database(Server.jp, args.input_dir)
    na_db = merged_database.Database(Server.na, args.input_dir)

    jp_db.load_database(skip_bonus=True, skip_extra=True)
    na_db.load_database(skip_bonus=True, skip_extra=True)

    print('merging data')
    if args.server.lower() == "jp":
        server = Server.jp
    elif args.server.lower() == "na":
        server = Server.na
    elif args.server.lower() == "kr":
        server = Server.kr
    else:
        raise ValueError("Server must be JP, NA, or KR")
    # Skipping KR database; we don't need it to compute ES
    cross_db = CrossServerDatabase(jp_db, na_db, na_db, server)

    combined_cards = cross_db.all_cards

    fixed_card_id = args.card_id
    if args.interactive:
        fixed_card_id = input("enter a card id:").strip()

    count = 0
    for csc in combined_cards[count:]:
        merged_card = csc.na_card
        card = merged_card.card
        if fixed_card_id and csc.monster_id != int(fixed_card_id):
            continue
        try:
            count += 1
            if count % 100 == 0:
                print('processing {:4d} of {}'.format(count,
                                                      len(combined_cards)))
            monster_behavior = process_card(csc)
            if monster_behavior is None:
                continue

            # Do some sanity cleanup on the behavior
            monster_behavior = clean_monster_behavior(monster_behavior)

            behavior_data_file = os.path.join(
                behavior_data_dir, '{}.textproto'.format(csc.monster_id))
            safe_save_to_file(behavior_data_file, monster_behavior)

            behavior_text_file = os.path.join(behavior_text_dir,
                                              '{}.txt'.format(csc.monster_id))
            save_monster_behavior(behavior_text_file, csc, monster_behavior)

            enemy_behavior = [x.na_skill for x in csc.enemy_behavior]
            behavior_plain_file = os.path.join(behavior_plain_dir,
                                               '{}.txt'.format(csc.monster_id))
            save_behavior_plain(behavior_plain_file, csc, enemy_behavior)

        except Exception as ex:
            print('failed to process', csc.monster_id, card.name)
            print(ex)
            import traceback
            traceback.print_exc()
            exit(1)
Example #13
    sattr_imgs[t] = templates_img.crop(box=(xstart, ystart, xend, yend))

card_types = []

attr_map = {
    -1: '',
    0: 'r',
    1: 'b',
    2: 'g',
    3: 'l',
    4: 'd',
}

server = Server.from_str(args.server)
pad_db = merged_database.Database(server, args.data_dir)
pad_db.load_database(skip_skills=True, skip_extra=True)

for merged_card in pad_db.cards:
    card = merged_card.card
    card_id = card.monster_no
    released = card.released_status

    # Prevent loading junk entries (fake enemies) and also limit to data which has
    # been officially released.
    if card_id > 9999 or not released:
        continue

    card_types.append(
        [card_id, attr_map[card.attr_id], attr_map[card.sub_attr_id]])
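The fragment ends without showing where card_types goes. A minimal sketch of persisting the [card_id, attr, sub_attr] rows, assuming a JSON output file (the destination path is hypothetical):

import json
import os

# Hypothetical destination; the fragment above never writes card_types out.
output_file = os.path.join(args.data_dir, 'card_types.json')
with open(output_file, 'w', encoding='utf-8') as f:
    json.dump(card_types, f)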
Example #14
def run_test(args):
    input_dir = args.input_dir
    output_dir = args.output_dir

    new_output_dir = os.path.join(output_dir, 'new')
    pathlib.Path(new_output_dir).mkdir(parents=True, exist_ok=True)
    golden_output_dir = os.path.join(output_dir, 'golden')
    pathlib.Path(golden_output_dir).mkdir(parents=True, exist_ok=True)

    jp_db = merged_database.Database(Server.jp, input_dir)
    na_db = merged_database.Database(Server.na, input_dir)
    kr_db = merged_database.Database(Server.kr, input_dir)

    print('loading JP')
    jp_db.load_database(skip_extra=True)
    print('loading NA')
    na_db.load_database(skip_extra=True)
    print('loading KR')
    kr_db.load_database(skip_extra=True)

    print('merging data')
    cross_db = CrossServerDatabase(jp_db, na_db, kr_db)

    print('saving merged data')
    cross_db.save_all(new_output_dir, True)

    # TODO: sort by something; bonuses seem to be non-deterministically ordered.
    files = {
        'all_cards.json': cross_db.all_cards,
        'dungeons.json': cross_db.dungeons,
        'active_skills.json': cross_db.active_skills,
        'leader_skills.json': cross_db.leader_skills,
        'enemy_skills.json': cross_db.enemy_skills,
        # 'jp_bonuses.json': cross_db.jp_bonuses,
        # 'na_bonuses.json': cross_db.na_bonuses,
        # 'kr_bonuses.json': cross_db.kr_bonuses,
    }

    failed_comparisons = 0
    bad_records = 0

    print('starting diff')
    for file, data in files.items():
        new_file = os.path.join(new_output_dir, file)
        golden_file = os.path.join(golden_output_dir, file)
        if not os.path.exists(golden_file):
            print('golden file does not exist, creating', golden_file)
            shutil.copy(new_file, golden_file)
            continue

        print('diffing', golden_file, 'against', new_file)
        with open(golden_file, encoding="utf-8") as f:
            golden_data = json.load(f)

        if len(golden_data) != len(data):
            print('ERROR')
            print(
                'ERROR: file lengths differed ({} vs {}), indicates old golden data for {}'
                .format(len(golden_data), len(data), file))
            print('ERROR')
            failed_comparisons += 1
            continue

        failures = []
        for i in range(len(golden_data)):
            gold_row = golden_data[i]
            new_row = data[i]

            gold_str = json_string_dump(gold_row, pretty=True)
            new_str = json_string_dump(new_row, pretty=True)

            if gold_str != new_str:
                failures.append([gold_str, new_str])

        if not failures:
            continue

        fail_count = len(failures)
        disp_count = min(fail_count, 6)
        print('encountered', fail_count, 'errors, displaying the first',
              disp_count)

        failed_comparisons += 1
        bad_records += fail_count

        failure_ids = []
        for failure in failures:
            gold_str = failure[0]
            failure_ids.append(find_ids(gold_str))

        print('All failing ids:\n' + '\n'.join(failure_ids))

        for i in range(disp_count):
            gold_str = failures[i][0]
            new_str = failures[i][1]

            print('row identifiers:\n{}\n'.format(find_ids(gold_str)))
            diff_lines = difflib.context_diff(gold_str.split('\n'),
                                              new_str.split('\n'),
                                              fromfile='golden',
                                              tofile='new',
                                              n=1)
            print('\n'.join(diff_lines))

    if failed_comparisons:
        print('Bad files:', failed_comparisons)
        print('Bad records:', bad_records)
        exit(1)
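find_ids is not defined in this example. A hedged reimplementation, assuming it scrapes *_id fields out of the pretty-printed JSON rows so failing records can be identified at a glance:

import re

def find_ids(json_str: str) -> str:
    # Hypothetical sketch: collect "<name>_id": <number> pairs from the
    # pretty-printed JSON and render them as a single display line.
    matches = re.findall(r'"(\w+_id)":\s*(\d+)', json_str)
    return ', '.join('{}={}'.format(key, value) for key, value in matches)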
Example #15
def load_data(args):
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    if args.server.lower() == "combined":
        cs_database = crossed_data.CrossServerDatabase(jp_database,
                                                       na_database,
                                                       kr_database, Server.jp)
    elif args.server.lower() == "jp":
        cs_database = crossed_data.CrossServerDatabase(jp_database,
                                                       jp_database,
                                                       jp_database, Server.jp)
    elif args.server.lower() == "na":
        cs_database = crossed_data.CrossServerDatabase(na_database,
                                                       na_database,
                                                       na_database, Server.na)
    elif args.server.lower() == "kr":
        cs_database = crossed_data.CrossServerDatabase(kr_database,
                                                       kr_database,
                                                       kr_database, Server.kr)
    else:
        raise ValueError("Server must be COMBINED, JP, NA, or KR")

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        # This is supported for https://pad.chesterip.cc/ and PadSpike, until we can support it better in the dg db
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        # kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    if args.processors:
        for processor in args.processors.split(","):
            processor = processor.strip()
            logger.info('Running specific processor {}'.format(processor))
            class_type = type_name_to_processor[processor]
            processor = class_type(cs_database)
            processor.process(db_wrapper)
        logger.info('done')
        return

    # Load dimension tables
    DimensionProcessor().process(db_wrapper)

    # Load rank data
    RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    AwakeningProcessor().process(db_wrapper)

    # Ensure tags
    SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)

    # Load monster data
    MonsterProcessor(cs_database).process(db_wrapper)

    # Auto-assign monster series
    series_processor.post_process(db_wrapper)

    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)
    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    TimestampProcessor().process(db_wrapper)

    PurgeDataProcessor().process(db_wrapper)

    logger.info('done')
Example #16
def load_data(args):
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    cs_database = crossed_data.CrossServerDatabase(jp_database, na_database,
                                                   kr_database)

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    # Load dimension tables
    DimensionProcessor().process(db_wrapper)

    # Load rank data
    RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    AwakeningProcessor().process(db_wrapper)

    # Ensure tags
    SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)

    # Load monster data
    MonsterProcessor(cs_database).process(db_wrapper)

    # Auto-assign monster series
    series_processor.post_process(db_wrapper)

    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)
    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    TimestampProcessor().process(db_wrapper)

    # Purge old schedule items and deleted_rows
    # This is dangerous, so we won't do it yet
    # PurgeDataProcessor().process(db_wrapper)

    logger.info('done')
Example #17
def load_data(args):
    if args.processors == "None":
        return
    
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    if args.server.lower() == "combined":
        cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database, Server.jp)
    elif args.server.lower() == "jp":
        cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database, Server.jp)
    elif args.server.lower() == "na":
        cs_database = crossed_data.CrossServerDatabase(na_database, na_database, na_database, Server.na)
    elif args.server.lower() == "kr":
        cs_database = crossed_data.CrossServerDatabase(kr_database, kr_database, kr_database, Server.kr)
    else:
        raise ValueError("Server must be COMBINED, JP, NA, or KR")

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        # This is supported for https://pad.chesterip.cc/ and PadSpike, until we can support it better in the dg db
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        # kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    processors = []
    for proc in args.processors.split(","):
        proc = proc.strip()
        if proc in type_name_to_processor:
            processors.extend(type_name_to_processor[proc])
        else:
            logger.warning("Unknown processor: {}; skipping".format(proc))

    # Load dimension tables
    if DimensionProcessor in processors:
        DimensionProcessor().process(db_wrapper)

    # Load rank data
    if RankRewardProcessor in processors:
        RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    if AwokenSkillProcessor in processors:
        AwokenSkillProcessor().process(db_wrapper)

    # Ensure tags
    if SkillTagProcessor in processors:
        SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    if EnemySkillProcessor in processors:
        es_processor = EnemySkillProcessor(db_wrapper, cs_database)
        es_processor.load_static()
        es_processor.load_enemy_skills()
        if args.es_dir:
            es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    if SeriesProcessor in processors:
        SeriesProcessor(cs_database).process(db_wrapper)

    # Load monster data
    if MonsterProcessor in processors:
        MonsterProcessor(cs_database).process(db_wrapper)

    # Egg machines
    if EggMachineProcessor in processors:
        EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = None
    if DungeonProcessor in processors:
        dungeon_processor = DungeonProcessor(cs_database)
        dungeon_processor.process(db_wrapper)

    if DungeonContentProcessor in processors and args.server.lower() == "combined":
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    if dungeon_processor is not None:
        dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    if ScheduleProcessor in processors:
        ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    if ExchangeProcessor in processors:
        ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    if PurchaseProcessor in processors:
        PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    if TimestampProcessor in processors:
        TimestampProcessor().process(db_wrapper)

    if PurgeDataProcessor in processors:
        PurgeDataProcessor().process(db_wrapper)

    logger.info('Done')