Example #1
def load_data(args):
    dry_run = not args.doupdates

    input_dir = args.input_dir

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, input_dir)
    kr_database.load_database()

    cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    do_migration(cs_database, db_wrapper)

    print('Not a migration, but consider running:')
    print('UPDATE dungeons SET visible = false WHERE dungeon_id NOT IN (SELECT dungeon_id FROM encounters GROUP BY 1)')
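
All of these examples read flags off an argparse-style args object. A minimal sketch of the parser Example #1 assumes (the flag names come from the attribute accesses above; the parser itself is hypothetical):

import argparse

def parse_args():
    # Hypothetical parser covering only the flags Example #1 reads;
    # the real pipeline defines many more.
    parser = argparse.ArgumentParser(description='Load PAD data into the database.')
    parser.add_argument('--input_dir', required=True, help='directory containing the raw server data')
    parser.add_argument('--db_config', required=True, help='JSON file with database credentials')
    parser.add_argument('--doupdates', action='store_true', help='apply changes; omit for a dry run')
    return parser.parse_args()
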
Example #2
def main(args):
    with open(args.db_config) as f:
        global db_config
        db_config = base_json.load(f)

    global connection
    connection = pymysql.connect(host=db_config['host'],
                                 user=db_config['user'],
                                 password=db_config['password'],
                                 db=db_config['db'],
                                 charset=db_config['charset'],
                                 cursorclass=pymysql.cursors.DictCursor)

    global db_wrapper
    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)

    global es_dir
    es_dir = args.es_dir

    if args.web_dir:
        app.static('/', os.path.join(args.web_dir, 'index.html'))
        app.static('', args.web_dir)

    app.run(host='0.0.0.0', port=8000)
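
db_config is plain JSON handed straight to pymysql.connect, so a matching file presumably looks like this (all values are placeholders; the keys are taken from the connect call above):

{
    "host": "localhost",
    "user": "pad",
    "password": "change-me",
    "db": "dadguide",
    "charset": "utf8mb4"
}

The DictCursor cursorclass makes every fetched row a dict keyed by column name, which is what the row['...'] lookups in later examples rely on.
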
Example #3
def main(args):
    with open(args.db_config) as f:
        global db_config
        db_config = base_json.load(f)

    global connection
    connection = utils.connect(db_config)

    global db_wrapper
    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)

    app.run(host='0.0.0.0', port=int(args.port))
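
utils.connect is not shown in this listing; given how Example #2 connects directly, it is presumably a thin wrapper along these lines (a sketch, not the real helper):

import pymysql.cursors

def connect(db_config):
    # Hypothetical body for utils.connect, mirroring the direct
    # pymysql.connect call in Example #2.
    return pymysql.connect(host=db_config['host'],
                           user=db_config['user'],
                           password=db_config['password'],
                           db=db_config['db'],
                           charset=db_config['charset'],
                           cursorclass=pymysql.cursors.DictCursor)
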
Example #4
def load_es_quick_and_die(args):
    with open(args.db_config) as f:
        db_config = json.load(f)

    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()
    cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database)

    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)

    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_enemy_data(args.es_dir)

    print('done loading ES')
    exit(0)
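
Note the constructor call: CrossServerDatabase expects one database per server, so passing jp_database three times yields a JP-only view. Examples #10 and #11 wrap the same trick behind a --server flag; that reading is inferred from the call sites, not from the class itself.
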
Example #5
def load_data(args):
    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    dry_run = not args.doupdates
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)
    data = db_wrapper.fetch_data("SELECT * FROM egg_machines")
    for machine_sql in data:
        egg_machine_id = machine_sql['egg_machine_id']
        contents = ast.literal_eval(machine_sql['contents'])
        for monster_id in contents.keys():
            real_monster_id = int(monster_id.strip("()"))
            emm = EggMachinesMonster(egg_machine_monster_id=None, monster_id=real_monster_id,
                                     roll_chance=contents.get(monster_id),
                                     egg_machine_id=egg_machine_id)
            db_wrapper.insert_or_update(emm)
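
The contents column evidently stores a Python-literal dict whose keys are parenthesized monster IDs, which is presumably why the code uses ast.literal_eval plus strip('()'). A small self-contained illustration of the assumed format (IDs and chances are made up):

import ast

# Illustrative value for machine_sql['contents']; real rows come from egg_machines.
contents = ast.literal_eval("{'(4401)': 0.25, '(4402)': 0.75}")
for monster_id, roll_chance in contents.items():
    real_monster_id = int(monster_id.strip('()'))  # '(4401)' -> 4401
    print(real_monster_id, roll_chance)
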
Example #6
def main(args):
    with open(args.db_config) as f:
        global db_config
        db_config = base_json.load(f)

    global connection
    connection = utils.connect(db_config)

    global db_wrapper
    db_wrapper = DbWrapper(False)
    db_wrapper.connect(db_config)

    global es_dir
    es_dir = args.es_dir

    if args.web_dir:
        app.static('/', os.path.join(args.web_dir, 'index.html'))
        app.static('', args.web_dir)

    app.run(host='0.0.0.0', port=int(args.port))
Example #7
def load_data(args):
    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)
    db_wrapper = DbWrapper()
    db_wrapper.connect(db_config)
    data = db_wrapper.fetch_data(ENCOUNTER_QUERY)
    output = {}
    for encounter in data:
        sdgid = encounter['sdgid']
        floor = encounter['floor']
        spawn = {'id': encounter['enemy_id'], 'lv': encounter['level']}
        if sdgid not in output:
            output[sdgid] = {'name': encounter['stage_name'], 'floors': {}}
        if floor not in output[sdgid]['floors']:
            output[sdgid]['floors'][floor] = {'spawns': []}
        if spawn not in output[sdgid]['floors'][floor]['spawns']:
            output[sdgid]['floors'][floor]['spawns'].append(spawn)
    with open(os.path.join(args.output_dir, "encounter_data.json"), 'w+') as f:
        json.dump(output, f)
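
Based on the grouping loop above, the emitted encounter_data.json presumably has this shape (IDs, names, and levels are illustrative; note that json.dump renders the integer sdgid and floor keys as strings):

{
    "1001": {
        "name": "Some Dungeon Stage",
        "floors": {
            "1": {"spawns": [{"id": 4401, "lv": 10}, {"id": 4402, "lv": 10}]},
            "2": {"spawns": [{"id": 4403, "lv": 12}]}
        }
    }
}
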
Example #8
def pull_data(args):
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)

    server = args.server.upper()
    endpoint = None
    if server == 'NA':
        endpoint = pad_api.ServerEndpoint.NA
    elif server == 'JP':
        endpoint = pad_api.ServerEndpoint.JA
    else:
        raise Exception('unexpected server: ' + args.server)

    api_client = pad_api.PadApiClient(endpoint, args.user_uuid,
                                      args.user_intid)

    print('login')
    api_client.login()

    print('load_player_data')
    api_client.load_player_data()

    friend_card = api_client.get_any_card_except_in_cur_deck()
    dungeon_id = args.dungeon_id
    floor_id = args.floor_id
    loop_count = args.loop_count
    pull_id = int(time.time())

    print('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    dry_run = False
    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)
    entry_id = int(
        db_wrapper.get_single_value("SELECT MAX(entry_id) FROM wave_data;"))

    print('entering dungeon', dungeon_id, 'floor', floor_id, loop_count,
          'times')
    for e_idx in range(loop_count):
        print('entering', e_idx)
        entry_id += 1
        entry_json = api_client.enter_dungeon(dungeon_id,
                                              floor_id,
                                              self_card=friend_card)
        wave_response = pad_api.extract_wave_response_from_entry(entry_json)
        leaders = entry_json['entry_leads']

        for stage_idx, floor in enumerate(wave_response.floors):
            for monster_idx, monster in enumerate(floor.monsters):
                wave_item = WaveItem(pull_id=pull_id,
                                     entry_id=entry_id,
                                     server=server,
                                     dungeon_id=dungeon_id,
                                     floor_id=floor_id,
                                     stage=stage_idx,
                                     slot=monster_idx,
                                     monster=monster,
                                     leader_id=leaders[0],
                                     friend_id=leaders[1])
                db_wrapper.insert_item(wave_item.insert_sql())

        if server != 'NA':
            time.sleep(.5)
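
Every example funnels its database access through DbWrapper, whose implementation is not part of this listing. The calls made across the examples imply roughly this interface (a sketch inferred from usage; dry_run presumably logs statements instead of executing them):

class DbWrapper:
    # Sketch inferred from the call sites in these examples, not the real class.
    def __init__(self, dry_run: bool = False):
        self.dry_run = dry_run
    def connect(self, db_config: dict): ...       # Examples #1, #2, #8
    def fetch_data(self, sql: str) -> list: ...   # Examples #5, #7
    def get_single_value(self, sql: str): ...     # Example #8
    def insert_item(self, sql: str): ...          # Example #8
    def insert_or_update(self, item): ...         # Example #5
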
Example #9
def load_data(args):
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    cs_database = crossed_data.CrossServerDatabase(jp_database, na_database,
                                                   kr_database)

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    # Load dimension tables
    DimensionProcessor().process(db_wrapper)

    # Load rank data
    RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    AwakeningProcessor().process(db_wrapper)

    # Ensure tags
    SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)

    # Load monster data
    MonsterProcessor(cs_database).process(db_wrapper)

    # Auto-assign monster series
    series_processor.post_process(db_wrapper)

    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)
    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    TimestampProcessor().process(db_wrapper)

    # Purge old schedule items and deleted_rows
    # This is dangerous, so we won't do it yet
    # PurgeDataProcessor().process(db_wrapper)

    print('done')
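
Each pipeline step above is a processor exposing a process(db_wrapper) entry point; some are constructed bare, others take the merged cs_database. A hedged skeleton of the two shapes, inferred from the calls rather than the real code:

class StaticProcessor:
    # Shape of processors like DimensionProcessor, which need no game data.
    def process(self, db_wrapper):
        pass  # write fixed rows through db_wrapper

class CrossServerProcessor:
    # Shape of processors like MonsterProcessor, which consume the merged data.
    def __init__(self, cs_database):
        self.data = cs_database

    def process(self, db_wrapper):
        pass  # upsert rows derived from self.data
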
Example #10
def load_data(args):
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    if args.server.lower() == "combined":
        cs_database = crossed_data.CrossServerDatabase(jp_database,
                                                       na_database,
                                                       kr_database, Server.jp)
    elif args.server.lower() == "jp":
        cs_database = crossed_data.CrossServerDatabase(jp_database,
                                                       jp_database,
                                                       jp_database, Server.jp)
    elif args.server.lower() == "na":
        cs_database = crossed_data.CrossServerDatabase(na_database,
                                                       na_database,
                                                       na_database, Server.na)
    elif args.server.lower() == "kr":
        cs_database = crossed_data.CrossServerDatabase(kr_database,
                                                       kr_database,
                                                       kr_database, Server.kr)
    else:
        raise ValueError('unexpected server: ' + args.server)

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        # This is supported for https://pad.chesterip.cc/ and PadSpike, until we can support it better in the dg db
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        # kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    if args.processors:
        for processor in args.processors.split(","):
            processor = processor.strip()
            logger.info('Running specific processor {}'.format(processor))
            class_type = type_name_to_processor[processor]
            processor = class_type(cs_database)
            processor.process(db_wrapper)
        logger.info('done')
        return

    # Load dimension tables
    DimensionProcessor().process(db_wrapper)

    # Load rank data
    RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    AwakeningProcessor().process(db_wrapper)

    # Ensure tags
    SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    es_processor = EnemySkillProcessor(db_wrapper, cs_database)
    es_processor.load_static()
    es_processor.load_enemy_skills()
    if args.es_dir:
        es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    series_processor = SeriesProcessor(cs_database)
    series_processor.pre_process(db_wrapper)

    # Load monster data
    MonsterProcessor(cs_database).process(db_wrapper)

    # Auto-assign monster series
    series_processor.post_process(db_wrapper)

    # Egg machines
    EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = DungeonProcessor(cs_database)
    dungeon_processor.process(db_wrapper)
    if not args.skip_long:
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    TimestampProcessor().process(db_wrapper)

    PurgeDataProcessor().process(db_wrapper)

    logger.info('done')
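
The --processors flag resolves names through type_name_to_processor, which is not shown here. In this example each name yields a single class, while Example #11 calls extend() on the lookup result, so there the values are presumably lists of classes. A hypothetical shape for the single-class variant (entries invented for illustration):

# Hypothetical lookup table; the real one is defined alongside the processors.
type_name_to_processor = {
    'dimension': DimensionProcessor,
    'monster': MonsterProcessor,
    'dungeon': DungeonProcessor,
}
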
Example #11
def load_data(args):
    if args.processors == "None":
        return
    
    if args.logsql:
        logging.getLogger('database').setLevel(logging.DEBUG)
    dry_run = not args.doupdates

    logger.info('Loading data')
    jp_database = merged_database.Database(Server.jp, args.input_dir)
    jp_database.load_database()

    na_database = merged_database.Database(Server.na, args.input_dir)
    na_database.load_database()

    kr_database = merged_database.Database(Server.kr, args.input_dir)
    kr_database.load_database()

    if args.server.lower() == "combined":
        cs_database = crossed_data.CrossServerDatabase(jp_database, na_database, kr_database, Server.jp)
    elif args.server.lower() == "jp":
        cs_database = crossed_data.CrossServerDatabase(jp_database, jp_database, jp_database, Server.jp)
    elif args.server.lower() == "na":
        cs_database = crossed_data.CrossServerDatabase(na_database, na_database, na_database, Server.na)
    elif args.server.lower() == "kr":
        cs_database = crossed_data.CrossServerDatabase(kr_database, kr_database, kr_database, Server.kr)
    else:
        raise ValueError('unexpected server: ' + args.server)

    if args.media_dir:
        cs_database.load_extra_image_info(args.media_dir)

    if not args.skipintermediate:
        logger.info('Storing intermediate data')
        # This is supported for https://pad.chesterip.cc/ and PadSpike, until we can support it better in the dg db
        jp_database.save_all(args.output_dir, args.pretty)
        na_database.save_all(args.output_dir, args.pretty)
        # kr_database.save_all(args.output_dir, args.pretty)

    logger.info('Connecting to database')
    with open(args.db_config) as f:
        db_config = json.load(f)

    db_wrapper = DbWrapper(dry_run)
    db_wrapper.connect(db_config)

    processors = []
    for proc in args.processors.split(","):
        proc = proc.strip()
        if proc in type_name_to_processor:
            processors.extend(type_name_to_processor[proc])
        else:
            logger.warning("Unknown processor: {}\nSkipping...".format(proc))

    # Load dimension tables
    if DimensionProcessor in processors:
        DimensionProcessor().process(db_wrapper)

    # Load rank data
    if RankRewardProcessor in processors:
        RankRewardProcessor().process(db_wrapper)

    # Ensure awakenings
    if AwokenSkillProcessor in processors:
        AwokenSkillProcessor().process(db_wrapper)

    # Ensure tags
    if SkillTagProcessor in processors:
        SkillTagProcessor().process(db_wrapper)

    # Load enemy skills
    if EnemySkillProcessor in processors:
        es_processor = EnemySkillProcessor(db_wrapper, cs_database)
        es_processor.load_static()
        es_processor.load_enemy_skills()
        if args.es_dir:
            es_processor.load_enemy_data(args.es_dir)

    # Load basic series data
    if SeriesProcessor in processors:
        SeriesProcessor(cs_database).process(db_wrapper)

    # Load monster data
    if MonsterProcessor in processors:
        MonsterProcessor(cs_database).process(db_wrapper)

    # Egg machines
    if EggMachineProcessor in processors:
        EggMachineProcessor(cs_database).process(db_wrapper)

    # Load dungeon data
    dungeon_processor = None
    if DungeonProcessor in processors:
        dungeon_processor = DungeonProcessor(cs_database)
        dungeon_processor.process(db_wrapper)

    if DungeonContentProcessor in processors and args.server.lower() == "combined":
        # Load dungeon data derived from wave info
        DungeonContentProcessor(cs_database).process(db_wrapper)

    # Toggle any newly-available dungeons visible
    if dungeon_processor is not None:
        dungeon_processor.post_encounter_process(db_wrapper)

    # Load event data
    if ScheduleProcessor in processors:
        ScheduleProcessor(cs_database).process(db_wrapper)

    # Load exchange data
    if ExchangeProcessor in processors:
        ExchangeProcessor(cs_database).process(db_wrapper)

    # Load purchase data
    if PurchaseProcessor in processors:
        PurchaseProcessor(cs_database).process(db_wrapper)

    # Update timestamps
    if TimestampProcessor in processors:
        TimestampProcessor().process(db_wrapper)

    if PurgeDataProcessor in processors:
        PurgeDataProcessor().process(db_wrapper)

    logger.info('Done')
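
A hedged invocation sketch for this variant (the script name is hypothetical; the flag spellings come from the attribute accesses above):

python data_processor.py --input_dir ./raw --output_dir ./out \
    --db_config ./db_config.json --server combined \
    --processors "monster,dungeon" --doupdates
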