Example #1
    def main(self, parsed_args):
        self.rr['PantheonSouls.dat'].build_index('PantheonPanelLayoutKey')

        pantheon = []
        pantheon_souls = []
        pantheon_stats = []

        for row in self.rr['PantheonPanelLayout.dat']:
            if row['IsDisabled']:
                continue

            self._copy_from_keys(row, self._COPY_KEYS_PANTHEON, pantheon)
            for i in range(1, 5):
                values = row['Effect%s_Values' % i]
                if not values:
                    continue
                stats = [s['Id'] for s in row['Effect%s_StatsKeys' % i]]
                tr = self.tc['stat_descriptions.txt'].get_translation(
                    tags=stats, values=values, full_result=True)

                od = OrderedDict()
                od['id'] = row['Id']
                od['ordinal'] = i
                od['name'] = row['GodName%s' % i]
                od['stat_text'] = '<br>'.join(tr.lines).replace('\n', '<br>')

                # The first entry is the god itself
                if i > 1:
                    souls = self.rr['PantheonSouls.dat'].index[
                        'PantheonPanelLayoutKey'][row][i - 2]

                    od.update(
                        self._copy_from_keys(souls,
                                             self._COPY_KEYS_PANTHEON_SOULS,
                                             rtr=True))
                pantheon_souls.append(od)

                for j, (stat, value) in enumerate(zip(stats, values), start=1):
                    pantheon_stats.append(
                        OrderedDict((
                            ('pantheon_id', row['Id']),
                            ('pantheon_ordinal', i),
                            ('ordinal', j),
                            ('stat', stat),
                            ('value', value),
                        )))

        r = ExporterResult()
        for k in ('', '_souls', '_stats'):
            r.add_result(text=lua_formatter(locals()['pantheon' + k]),
                         out_file='pantheon%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Pantheon/pantheon%s' % k,
                             'condition': None,
                         }])

        return r
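
A note on _copy_from_keys: every example in this listing calls a _copy_from_keys helper that is not included in the snippets. The sketch below is a hypothetical, simplified stand-in written only to make the call sites easier to follow; the argument names, the positional index argument (used in Example #2) and the rtr=True return value (used above) are inferred from how the examples call it, and the key-spec shape mirrors the _COPY_KEYS dictionaries that the export() methods in Examples #7 and #17-#19 iterate directly. It is not the project's real implementation.

from collections import OrderedDict


def copy_from_keys(row, key_spec, out_list=None, index=None, rtr=False):
    """Hypothetical stand-in for the exporters' self._copy_from_keys.

    Copies selected columns of a .dat row into an OrderedDict, optionally
    appending it to out_list and/or returning it (rtr=True).
    """
    out = OrderedDict()
    for row_key, copy_data in key_spec.items():
        condition = copy_data.get('condition')
        if condition is not None and not condition(row):
            continue
        value = row[row_key]
        # Skip values equal to the declared default to keep tables small
        if value == copy_data.get('default'):
            continue
        fmt = copy_data.get('format')
        if fmt:
            value = fmt(value)
        out[copy_data['template']] = value
    if index is not None:
        # Example #2 passes the row's position for enum-like tables
        out['ordinal'] = index
    if out_list is not None:
        out_list.append(out)
    if rtr:
        return out


# Toy usage with plain dicts standing in for .dat rows:
spec = OrderedDict((('Id', {'template': 'id'}),
                    ('Name', {'template': 'name'})))
rows = []
copy_from_keys({'Id': 'ExampleId', 'Name': 'Example name'}, spec, rows)
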
Example #2
    def main(self, parsed_args):
        data = {}
        for definition in self._DATA:
            data[definition['key']] = []
            for row in self.rr[definition['file']]:
                self._copy_from_keys(row, definition['data'],
                                     data[definition['key']])

        for key, data_map in self._ENUM_DATA.items():
            map_multi = []
            for file_name, definition in data_map.items():
                for i, row in enumerate(self.rr[file_name]):
                    self._copy_from_keys(row, definition, map_multi, i)

            data[key] = map_multi

        r = ExporterResult()
        for key, v in data.items():
            r.add_result(text=lua_formatter(v),
                         out_file='%s.lua' % key,
                         wiki_page=[{
                             'page': 'Module:Monster/%s' % key,
                             'condition': None,
                         }])

        return r
Example #3
    def main(self, parsed_args):
        data = {
            'crafting_bench_options': [],
            'crafting_bench_options_costs': [],
        }
        for row in self.rr['CraftingBenchOptions.dat']:
            self._copy_from_keys(row, self._DATA,
                                 data['crafting_bench_options'])
            data['crafting_bench_options'][-1]['id'] = row.rowid

            for i, base_item in enumerate(row['Cost_BaseItemTypesKeys']):
                data['crafting_bench_options_costs'].append(
                    OrderedDict(
                        (('option_id', row.rowid), ('name', base_item['Name']),
                         ('amount', row['Cost_Values'][i]))))

        r = ExporterResult()
        for key, v in data.items():
            r.add_result(text=lua_formatter(v),
                         out_file='%s.lua' % key,
                         wiki_page=[{
                             'page': 'Module:Crafting bench/%s' % key,
                             'condition': None,
                         }])

        return r
Example #4
    def main(self, parsed_args):
        atlas_regions = []
        atlas_base_item_types = []

        for row in self.rr['AtlasRegions.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_ATLAS_REGIONS,
                                 atlas_regions)

        for row in self.rr['AtlasBaseTypeDrops.dat']:
            for i, tag in enumerate(row['SpawnWeight_TagsKeys']):
                self._copy_from_keys(row,
                                     self._COPY_KEYS_ATLAS_BASE_TYPE_DROPS,
                                     atlas_base_item_types)
                atlas_base_item_types[-1]['tag'] = tag['Id']
                atlas_base_item_types[-1]['weight'] = \
                    row['SpawnWeight_Values'][i]

        r = ExporterResult()
        for k in ('atlas_regions', 'atlas_base_item_types'):
            r.add_result(text=lua_formatter(locals()[k]),
                         out_file='%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Atlas/%s' % k,
                             'condition': None,
                         }])

        return r
Example #5
    def main(self, parsed_args):
        data = {}
        for definition in self._DATA:
            data[definition['key']] = []
            for row in self.rr[definition['file']]:
                self._copy_from_keys(row, definition['data'],
                                     data[definition['key']])

        for row in data['synthesis_mods']:
            row['stat_text'] = \
                '<br>'.join(self.tc['stat_descriptions.txt'].get_translation(
                    tags=(row['stat_id'], ),
                    values=(row['stat_value'], ),
                    lang=self.lang,
                )).replace('\n', '')

        r = ExporterResult()
        for definition in self._DATA:
            key = definition['key']
            r.add_result(text=lua_formatter(data[key]),
                         out_file='%s.lua' % key,
                         wiki_page=[{
                             'page': 'Module:Synthesis/%s' % key,
                             'condition': None,
                         }])

        return r
Example #6
    def main(self, parsed_args):
        delve_level_scaling = []
        delve_resources_per_level = []
        delve_upgrades = []
        delve_upgrade_stats = []
        fossils = []
        fossil_weights = []

        for row in self.rr['DelveLevelScaling.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_DELVE_LEVEL_SCALING,
                                 delve_level_scaling)

        for row in self.rr['DelveResourcePerLevel.dat']:
            self._copy_from_keys(row,
                                 self._COPY_KEYS_DELVE_RESOURCES_PER_LEVEL,
                                 delve_resources_per_level)

        for row in self.rr['DelveUpgrades.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_DELVE_UPGRADES,
                                 delve_upgrades)
            delve_upgrades[-1]['cost'] = row['Cost']

            for stat, value in row['Stats']:
                self._copy_from_keys(row, self._COPY_KEYS_DELVE_UPGRADES,
                                     delve_upgrade_stats)
                delve_upgrade_stats[-1]['id'] = stat['Id']
                delve_upgrade_stats[-1]['value'] = value

        for row in self.rr['DelveCraftingModifiers.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_DELVE_CRAFTING_MODIFIERS,
                                 fossils)

            for data_prefix, data_type in (
                ('NegativeWeight', 'override'),
                ('Weight', 'added'),
            ):
                for i, tag in enumerate(row['%s_TagsKeys' % data_prefix]):
                    entry = OrderedDict()
                    entry['base_item_id'] = row['BaseItemTypesKey']['Id']
                    entry['type'] = data_type
                    entry['ordinal'] = i
                    entry['tag'] = tag['Id']
                    entry['weight'] = row['%s_Values' % data_prefix][i]
                    fossil_weights.append(entry)

        r = ExporterResult()
        for k in ('delve_level_scaling', 'delve_resources_per_level',
                  'delve_upgrades', 'delve_upgrade_stats', 'fossils',
                  'fossil_weights'):
            r.add_result(text=lua_formatter(locals()[k]),
                         out_file='%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Delve/%s' % k,
                             'condition': None,
                         }])

        return r
Example #7
    def export(self, parsed_args, monsters):
        r = ExporterResult()

        if not monsters:
            console(
                'No monsters found for the specified parameters. Quitting.',
                msg=Msg.warning,
            )
            return r

        console('Found %s monsters, parsing...' % len(monsters))

        console('Accessing additional data...')

        for monster in monsters:
            data = OrderedDict()

            for row_key, copy_data in self._COPY_KEYS.items():
                value = monster[row_key]

                condition = copy_data.get('condition')
                if condition is not None and not condition(monster):
                    continue

                fmt = copy_data.get('format')
                if fmt:
                    value = fmt(value)

                if value:
                    data[copy_data['template']] = value

            cond = MonsterWikiCondition(
                data=data,
                cmdargs=parsed_args,
            )

            r.add_result(
                text=cond,
                out_file='monster_%s.txt' % data['metadata_id'].replace(
                    '/', '_'),
                wiki_page=[
                    {
                        'page': 'Monster:' +
                                self._format_wiki_title(data['metadata_id']),
                        'condition': cond,
                    },
                ],
                wiki_message='Monster updater',
            )

        return r
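
The export() loop above reads its column mapping from self._COPY_KEYS, which is not shown in the snippet. Judging from the keys the loop accesses ('template', 'condition', 'format', and in Examples #17-#19 also 'default'), the mapping presumably looks roughly like the hypothetical entries below; the column names and lambdas are illustrative assumptions, not the exporter's real definitions.

from collections import OrderedDict

# Hypothetical _COPY_KEYS entries matching the shape the loop above expects.
_COPY_KEYS = OrderedDict((
    ('Id', {
        'template': 'metadata_id',
    }),
    ('Name', {
        'template': 'name',
        # only copy the value if the row passes this check
        'condition': lambda row: bool(row['Name']),
    }),
    ('ExperienceMultiplier', {
        'template': 'experience_multiplier',
        # transform the raw value before writing it to the template
        'format': lambda value: value / 100,
        # rows matching the default are skipped entirely (Examples #17-#19)
        'default': 100,
    }),
))
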
Example #8
    def main(self, parsed_args):
        heist_areas = []
        for row in self.rr['HeistAreas.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_HEIST_AREAS, heist_areas)

        heist_jobs = []
        for row in self.rr['HeistJobs.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_HEIST_JOBS, heist_jobs)

        heist_npcs = []
        heist_npc_skills = []
        heist_npc_stats = []
        for row in self.rr['HeistNPCs.dat']:
            mid = row['MonsterVarietiesKey']['Id']
            self._copy_from_keys(row, self._COPY_KEYS_HEIST_NPCS, heist_npcs)

            skills = [r['Id'] for r in row['SkillLevel_HeistJobsKeys']]
            for i, job_id in enumerate(skills):
                entry = OrderedDict()
                entry['npc_id'] = mid
                entry['job_id'] = job_id
                entry['level'] = row['SkillLevel_Values'][i]
                # StatValues2?
                heist_npc_skills.append(entry)

            stats = [r['StatsKey']['Id'] for r in row['HeistNPCStatsKeys']]
            for i, stat_id in enumerate(stats):
                entry = OrderedDict()
                entry['npc_id'] = mid
                entry['stat_id'] = stat_id
                entry['value'] = row['StatValues'][i]
                # StatValues2?
                heist_npc_stats.append(entry)

            heist_npcs[-1]['stat_text'] = self._format_tr(
                self.tc['stat_descriptions.txt'].get_translation(
                    stats, [int(v) for v in row['StatValues']],
                    full_result=True))

        r = ExporterResult()
        for k in ('heist_areas', 'heist_jobs', 'heist_npcs',
                  'heist_npc_skills', 'heist_npc_stats'):
            r.add_result(text=LuaFormatter.format_module(locals()[k]),
                         out_file='%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Heist/%s' % k,
                             'condition': None,
                         }])

        return r
Example #9
    def _write_lua(self, outdata, data_type):
        # Pre-sort
        outdata.sort(key=lambda x: x['reward'])
        outdata.sort(key=lambda x: x['quest_id'])
        outdata.sort(key=lambda x: x['act'])

        r = ExporterResult()
        r.add_result(text=lua_formatter(outdata),
                     out_file='%s_rewards.txt' % data_type,
                     wiki_page=[{
                         'page':
                         'Module:Quest reward/data/%s_rewards' % data_type,
                         'condition':
                         None,
                     }])

        return r
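
The text handed to add_result comes from lua_formatter (LuaFormatter.format_module in other examples), which serializes the collected list of OrderedDicts into a Lua data module for the wiki. The toy function below is only a rough illustration of that target format, not the project's actual formatter; real output needs string escaping, nested tables, and numeric formatting that this sketch ignores.

from collections import OrderedDict


def lua_formatter_sketch(rows):
    """Illustrative stand-in: emit a Lua module returning a table of rows."""
    lines = ['local data = {']
    for row in rows:
        fields = ', '.join(
            '%s = %s' % (key,
                         '"%s"' % value if isinstance(value, str) else value)
            for key, value in row.items())
        lines.append('    {%s},' % fields)
    lines.append('}')
    lines.append('')
    lines.append('return data')
    return '\n'.join(lines)


print(lua_formatter_sketch([
    OrderedDict((('act', 1), ('quest_id', 'a1q1'), ('reward', 'Example'))),
]))
# local data = {
#     {act = 1, quest_id = "a1q1", reward = "Example"},
# }
#
# return data
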
Example #10
    def main(self, parsed_args):
        tag_handler = HarvestTagHandler(rr=self.rr)
        harvest_craft_options = []

        for row in self.rr['HarvestCraftOptions.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_HARVEST_CRAFT_OPTIONS,
                                 harvest_craft_options)
            harvest_craft_options[-1]['text'] = parse_description_tags(
                harvest_craft_options[-1]['text']).handle_tags(
                    tag_handler.tag_handlers)

        r = ExporterResult()
        for k in ('harvest_craft_options', ):
            r.add_result(text=LuaFormatter.format_module(locals()[k]),
                         out_file='%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Harvest/%s' % k,
                             'condition': None,
                         }])

        return r
Example #11
    def main(self, parsed_args):
        minimap_icons = []
        minimap_icons_lookup = OrderedDict()

        for row in self.rr['MinimapIcons.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_MINIMAP_ICONS,
                                 minimap_icons)

            # Lua starts offsets at 1
            minimap_icons_lookup[row['Id']] = row.rowid + 1

        r = ExporterResult()
        for k in ('minimap_icons', 'minimap_icons_lookup'):
            r.add_result(text=LuaFormatter.format_module(locals()[k]),
                         out_file='%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Minimap/%s' % k,
                             'condition': None,
                         }])

        return r
Example #12
    def main(self, parsed_args):
        blight_crafting_recipes = []
        blight_crafting_recipes_items = []
        blight_towers = []

        self.rr['BlightTowersPerLevel.dat'].build_index('BlightTowersKey')

        for row in self.rr['BlightCraftingRecipes.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_CRAFTING_RECIPES,
                                 blight_crafting_recipes)

            for i, blight_crafting_item in enumerate(
                    row['BlightCraftingItemsKeys'], start=1):
                blight_crafting_recipes_items.append(
                    OrderedDict((
                        ('ordinal', i),
                        ('recipe_id', row['Id']),
                        ('item_id',
                         blight_crafting_item['BaseItemTypesKey']['Id']),
                    )))

        for row in self.rr['BlightTowers.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_BLIGHT_TOWERS,
                                 blight_towers)
            blight_towers[-1]['cost'] = self.rr[
                'BlightTowersPerLevel.dat'].index['BlightTowersKey'][row][0][
                    'Cost']

        r = ExporterResult()
        for k in ('crafting_recipes', 'crafting_recipes_items', 'towers'):
            r.add_result(text=LuaFormatter.format_module(locals()['blight_' +
                                                                  k]),
                         out_file='blight_%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Blight/blight_%s' % k,
                             'condition': None,
                         }])

        return r
Example #13
    def main(self, parsed_args):
        r = ExporterResult()
        for data in self._DATA:
            stats = []

            ot = self.ot[data['src']]

            for stat, value in ot['Stats'].items():
                # Stats that are zero effectively do not exist, so might as well
                # skip them
                if value == 0:
                    continue

                txt = self._format_tr(
                    self.tc['stat_descriptions.txt'].get_translation(
                        tags=[stat], values=[value], full_result=True))

                stats.append(
                    OrderedDict((
                        ('name', data['fn']),
                        ('id', stat),
                        ('value', value),
                        ('stat_text', txt or ''),
                    )))

            r.add_result(text=LuaFormatter.format_module(stats),
                         out_file='%s_stats.lua' % data['fn'],
                         wiki_page=[{
                             'page':
                             'Module:Data tables/%s_stats' % data['fn'],
                             'condition':
                             None,
                         }])

        return r
Example #14
    def main(self, parsed_args):
        recipes = []
        components = []
        recipe_components_temp = defaultdict(lambda: defaultdict(int))

        for row in self.rr['BestiaryRecipes.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_BESTIARY, recipes)
            for value in row['BestiaryRecipeComponentKeys']:
                recipe_components_temp[row['Id']][value['Id']] += 1

        for row in self.rr['BestiaryRecipeComponent.dat']:
            self._copy_from_keys(row, self._COPY_KEYS_BESTIARY_COMPONENTS,
                                 components)
            if row['RarityKey'] != RARITY.ANY:
                components[-1]['rarity'] = self.rr['ClientStrings.dat'].index[
                    'Id']['ItemDisplayString' +
                          row['RarityKey'].name_upper]['Text']

        recipe_components = []
        for recipe_id, data in recipe_components_temp.items():
            for component_id, amount in data.items():
                recipe_components.append(
                    OrderedDict(
                        (('recipe_id', recipe_id),
                         ('component_id', component_id), ('amount', amount))))

        r = ExporterResult()
        for k in ('recipes', 'components', 'recipe_components'):
            r.add_result(text=lua_formatter(locals()[k]),
                         out_file='bestiary_%s.lua' % k,
                         wiki_page=[{
                             'page': 'Module:Bestiary/%s' % k,
                             'condition': None,
                         }])

        return r
Example #15
    def tempest(self, parsed_args):
        tf = self.tc['map_stat_descriptions.txt']
        data = []
        for mod in self.rr['Mods.dat']:
            # Is it a tempest mod?
            if mod['CorrectGroup'] != 'MapEclipse':
                continue

            # Doesn't have a name - probably not implemented
            if not mod['Name']:
                continue

            stats = []
            for i in MOD_STATS_RANGE:
                stat = mod['StatsKey%s' % i]
                if stat:
                    stats.append(stat)

            info = {}
            info['name'] = mod['Name']
            effects = []

            stat_ids = [st['Id'] for st in stats]
            stat_values = []

            for i, stat in enumerate(stats):
                j = i + 1
                values = [mod['Stat%sMin' % j], mod['Stat%sMax' % j]]
                if values[0] == values[1]:
                    values = values[0]
                stat_values.append(values)

            try:
                index = stat_ids.index('map_summon_exploding_buff_storms')
            except ValueError:
                pass
            else:
                # Value is incremented by 1 for some reason
                tempest = self.rr['ExplodingStormBuffs.dat'][
                    stat_values[index] - 1]

                stat_ids.pop(index)
                stat_values.pop(index)

                if tempest['BuffDefinitionsKey']:
                    tempest_stats = tempest['BuffDefinitionsKey']['StatKeys']
                    tempest_values = tempest['StatValues']
                    tempest_stat_ids = [st['Id'] for st in tempest_stats]
                    t = tf.get_translation(tempest_stat_ids,
                                           tempest_values,
                                           full_result=True,
                                           lang=config.get_option('language'))
                    self._append_effect(
                        t, effects,
                        'The tempest buff provides the following effects:')
                #if tempest['MonsterVarietiesKey']:
                #    print(tempest['MonsterVarietiesKey'])
                #    break

            t = tf.get_translation(stat_ids,
                                   stat_values,
                                   full_result=True,
                                   lang=config.get_option('language'))
            self._append_effect(t, effects,
                                'The area gets the following modifiers:')

            info['effect'] = '\n'.join(effects)
            data.append(info)

        data.sort(key=lambda info: info['name'])

        out = []
        for info in data:
            out.append('|-\n')
            out.append('| %s\n' % info['name'])
            out.append('| %s\n' % info['effect'])
            out.append('| \n')

        r = ExporterResult()
        r.add_result(lines=out, out_file='tempest_mods.txt')

        return r
Example #16
    def _export(self, parsed_args, mods):
        r = ExporterResult()

        if mods:
            console('Found %s mods. Processing...' % len(mods))
        else:
            console('No mods found for the specified parameters. Quitting.',
                    msg=Msg.warning)
            return r

        # Needed for localizing sell prices
        self.rr['BaseItemTypes.dat'].build_index('Id')

        for mod in mods:
            data = OrderedDict()

            for k in (
                ('Id', 'id'),
                ('CorrectGroup', 'mod_group'),
                ('Domain', 'domain'),
                ('GenerationType', 'generation_type'),
                ('Level', 'required_level'),
            ):
                v = mod[k[0]]
                if v:
                    data[k[1]] = v

            if mod['Name']:
                root = text.parse_description_tags(mod['Name'])

                def handler(hstr, parameter):
                    return hstr if parameter == 'MS' else ''

                data['name'] = root.handle_tags({
                    'if': handler,
                    'elif': handler
                })

            if mod['BuffDefinitionsKey']:
                data['granted_buff_id'] = mod['BuffDefinitionsKey']['Id']
                data['granted_buff_value'] = mod['BuffValue']
            # todo ID for GEPL
            if mod['GrantedEffectsPerLevelKeys']:
                data['granted_skill'] = ', '.join([
                    k['GrantedEffectsKey']['Id']
                    for k in mod['GrantedEffectsPerLevelKeys']
                ])
            data['mod_type'] = mod['ModTypeKey']['Name']

            stats = []
            values = []
            for i in MOD_STATS_RANGE:
                k = mod['StatsKey%s' % i]
                if k is None:
                    continue

                stat = k['Id']
                value = mod['Stat%sMin' % i], mod['Stat%sMax' % i]

                if value[0] == 0 and value[1] == 0:
                    continue

                stats.append(stat)
                values.append(value)

            data['stat_text'] = '<br>'.join(self._get_stats(
                stats, values, mod))

            for i, (sid, (vmin, vmax)) in enumerate(zip(stats, values),
                                                    start=1):
                data['stat%s_id' % i] = sid
                data['stat%s_min' % i] = vmin
                data['stat%s_max' % i] = vmax

            for i, tag in enumerate(mod['SpawnWeight_TagsKeys']):
                j = i + 1
                data['spawn_weight%s_tag' % j] = tag['Id']
                data['spawn_weight%s_value' % j] = mod['SpawnWeight_Values'][i]

            for i, tag in enumerate(mod['GenerationWeight_TagsKeys']):
                j = i + 1
                data['generation_weight%s_tag' % j] = tag['Id']
                data['generation_weight%s_value' % j] = \
                    mod['GenerationWeight_Values'][i]

            tags = ', '.join([t['Id'] for t in mod['ModTypeKey']['TagsKeys']] +
                             [t['Id'] for t in mod['TagsKeys']])
            if tags:
                data['tags'] = tags

            if mod['ModTypeKey']:
                # Accumulate amounts per item so repeated entries are merged
                sell_price = defaultdict(int)
                for msp in mod['ModTypeKey']['ModSellPriceTypesKeys']:
                    for item_id, amount in MOD_SELL_PRICES[msp['Id']].items():
                        item_name = self.rr[
                            'BaseItemTypes.dat'].index['Id'][item_id]['Name']
                        sell_price[item_name] += amount

                # Make sure this is always the same order
                sell_price = sorted(sell_price.items(), key=lambda x: x[0])

                for i, (item_name, amount) in enumerate(sell_price, start=1):
                    data['sell_price%s_name' % i] = item_name
                    data['sell_price%s_amount' % i] = amount

            # 3+ tildes not allowed
            page_name = 'Modifier:' + self._format_wiki_title(mod['Id'])
            cond = ModWikiCondition(data, parsed_args)

            r.add_result(
                text=cond,
                out_file='mod_%s.txt' % data['id'],
                wiki_page=[
                    {
                        'page': page_name,
                        'condition': cond
                    },
                ],
                wiki_message='Mod updater',
            )

        return r
Example #17
    def export(self, parsed_args, areas):
        r = ExporterResult()

        if not areas:
            console(
                'No areas found for the specified parameters. Quitting.',
                msg=Msg.warning,
            )
            return r

        console('Accessing additional data...')

        self.rr['MapPins.dat'].build_index('WorldAreasKeys')
        self.rr['AtlasNode.dat'].build_index('WorldAreasKey')
        self.rr['MapSeries.dat'].build_index('Id')
        if not parsed_args.skip_main_page:
            self.rr['Maps.dat'].build_index('Regular_WorldAreasKey')
            self.rr['UniqueMaps.dat'].build_index('WorldAreasKey')

        console('Found %s areas. Processing...' % len(areas))

        lang = self._LANG[config.get_option('language')]

        for area in areas:
            data = OrderedDict()

            for row_key, copy_data in self._COPY_KEYS.items():
                value = area[row_key]

                condition = copy_data.get('condition')
                if condition is not None and not condition(area):
                    continue

                # Skip default values to reduce size of template
                if value == copy_data.get('default'):
                    continue
                '''default = copy_data.get('default')
                if default is not None and value == default:
                        continue'''

                fmt = copy_data.get('format')
                if fmt:
                    value = fmt(value)
                data[copy_data['template']] = value

            for i, (tag, value) in enumerate(zip(area['SpawnWeight_TagsKeys'],
                                                 area['SpawnWeight_Values']),
                                             start=1):
                data['spawn_weight%s_tag' % i] = tag['Id']
                data['spawn_weight%s_value' % i] = value

            map_pin = self.rr['MapPins.dat'].index['WorldAreasKeys'].get(area)
            if map_pin:
                data['flavour_text'] = map_pin[0]['FlavourText']

            atlas_node = self.rr['AtlasNode.dat'].index['WorldAreasKey'].get(
                area)
            if atlas_node:
                data['flavour_text'] = atlas_node[0]['FlavourTextKey']['Text']

            #
            # Add main-page if possible
            #
            if not parsed_args.skip_main_page:
                map = self.rr['Maps.dat'].index['Regular_WorldAreasKey'].get(
                    area)
                if map:
                    map = map[0]
                    if map['MapSeriesKey']['Id'] == 'MapWorlds':
                        data['main_page'] = map['BaseItemTypesKey']['Name']
                    else:
                        data['main_page'] = '%s (%s)' % (
                            map['BaseItemTypesKey']['Name'],
                            map['MapSeriesKey']['Name'])
                elif data.get('tags') and 'map' in data['tags']:
                    map_version = None
                    for row in self.rr['MapSeries.dat']:
                        if not area['Id'].startswith(row['Id']):
                            continue
                        map_version = row['Name']

                    if map_version:
                        if map_version == self.rr['MapSeries.dat'].index['Id'][
                                'MapWorlds']['Name']:
                            map_version = None

                        if 'Unique' in area['Id'] or 'BreachBoss' in area['Id']\
                                or area['Id'].endswith('ShapersRealm'):
                            if map_version is None:
                                data['main_page'] = area['Name']
                            else:
                                data['main_page'] = '%s (%s)' % (area['Name'],
                                                                 map_version)
                        elif 'Harbinger' in area['Id']:
                            tier = re.sub('^.*Harbinger', '', area['Id'])
                            if tier:
                                if map_version is None:
                                    data['main_page'] = '%s (%s)' % (
                                        area['Name'],
                                        lang[tier],
                                    )
                                else:
                                    data['main_page'] = '%s (%s) (%s)' % (
                                        area['Name'],
                                        lang[tier],
                                        map_version,
                                    )
                            else:
                                if map_version is None:
                                    data['main_page'] = area['Name']
                                else:
                                    data['main_page'] = '%s (%s)' % (
                                        area['Name'],
                                        map_version,
                                    )

            cond = WikiCondition(
                data=data,
                cmdargs=parsed_args,
            )

            r.add_result(
                text=cond,
                out_file='area_%s.txt' % data['id'],
                wiki_page=[
                    {
                        'page': 'Area:' + self._format_wiki_title(data['id']),
                        'condition': cond,
                    },
                ],
                wiki_message='Area updater',
            )

        return r
Example #18
    def export(self, parsed_args, incursion_rooms):
        r = ExporterResult()

        if not incursion_rooms:
            console(
                'No incursion rooms found for the specified parameters. '
                'Quitting.',
                msg=Msg.warning,
            )
            return r
        console('Found %s rooms...' % len(incursion_rooms))

        console(
            'Additional files may be loaded. Processing information - this '
            'may take a while...')
        self._image_init(parsed_args)
        idl_sources = set()
        if parsed_args.store_images:
            idl = IDLFile()
            idl.read(file_path_or_raw=self.file_system.get_file(
                'Art/UIImages1.txt'))
            idl_lookup = idl.as_dict()

        console('Parsing data into templates...')
        for incursion_room in incursion_rooms:
            if 'TEMPLATE' in incursion_room['Id']:
                console('Skipping template room "%s"' % incursion_room['Id'],
                        msg=Msg.warning)
                continue
            elif not incursion_room['Name']:
                console('Skipping incursion room "%s" without a name' %
                        incursion_room['Id'],
                        msg=Msg.warning)
                continue
            data = OrderedDict()

            for row_key, copy_data in self._COPY_KEYS.items():
                value = incursion_room[row_key]

                condition = copy_data.get('condition')
                if condition is not None and not condition(incursion_room):
                    continue

                # Skip default values to reduce size of template
                if value == copy_data.get('default'):
                    continue

                fmt = copy_data.get('format')
                if fmt:
                    value = fmt(value)
                data[copy_data['template']] = value

            if incursion_room['IncursionArchitectKey']:
                mv = incursion_room['IncursionArchitectKey'][
                    'MonsterVarietiesKey']
                data['architect_metadata_id'] = mv['Id']
                data['architect_name'] = mv['Name']

            cond = IncursionRoomWikiCondition(
                data=data,
                cmdargs=parsed_args,
            )

            if parsed_args.store_images and incursion_room['UIIcon']:
                idl_record = idl_lookup[incursion_room['UIIcon']]
                src = os.path.join(self._img_path,
                                   os.path.split(idl_record.source)[-1])
                if src not in idl_sources:
                    console('Writing source file "%s" to images' % src)
                    with open(src, 'wb') as f:
                        img_data = self.file_system.extract_dds(
                            self.file_system.get_file(idl_record.source))
                        f.write(img_data[:84])
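                        # Bytes 84:88 hold the DDS fourCC; DXT4 is the
                        # premultiplied-alpha variant of DXT5, so it is
                        # relabelled as DXT5 here (presumably so the
                        # converter invoked below decodes it reliably)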
                        if img_data[84:88].decode('ascii') == 'DXT4':
                            f.write('DXT5'.encode('ascii'))
                        else:
                            f.write(img_data[84:88])
                        f.write(img_data[88:])
                    idl_sources.add(src)

                os.system(
                    'magick "%(src)s" -crop %(w)sx%(h)s+%(x)s+%(y)s '
                    '"%(dst)s incursion room icon.png"' % {
                        'src': src,
                        'dst': os.path.join(self._img_path, data['icon']),
                        'h': idl_record.h,
                        'w': idl_record.w,
                        'x': idl_record.x1,
                        'y': idl_record.y1,
                    })

            r.add_result(
                text=cond,
                out_file='incursion_room_%s.txt' % data['name'],
                wiki_page=[{
                    'page': data['name'],
                    'condition': cond,
                }, {
                    'page':
                    data['name'] + ' (%s)' % (self._incursion_room_page_name[
                        config.get_option('language')]),
                    'condition':
                    cond,
                }],
                wiki_message='Incursion room updater',
            )

        if idl_sources:
            console('Cleaning up image files that are no longer necessary')
            for src in idl_sources:
                os.remove(src)  # src already includes self._img_path

        return r
Example #19
    def export(self, parsed_args, passives):
        r = ExporterResult()

        passives = self._apply_filter(parsed_args, passives)

        if not passives:
            console(
                'No passives found for the specified parameters. Quitting.',
                msg=Msg.warning,
            )
            return r

        console('Accessing additional data...')

        psg = PSGFile()
        psg.read(file_path_or_raw=self.file_system.get_file(
            'Metadata/PassiveSkillGraph.psg'), )

        node_index = {}
        for group in psg.groups:
            for node in group.nodes:
                node_index[node.passive_skill] = node
        # Connections are one-way, make them two way
        for psg_id, node in node_index.items():
            for other_psg_id in node.connections:
                node_index[other_psg_id].connections.append(psg_id)

        self.rr['PassiveSkills.dat'].build_index('PassiveSkillGraphId')

        self._image_init(parsed_args)

        console('Found %s, parsing...' % len(passives))

        for passive in passives:
            data = OrderedDict()

            for row_key, copy_data in self._COPY_KEYS.items():
                value = passive[row_key]

                condition = copy_data.get('condition')
                if condition is not None and not condition(passive):
                    continue

                # Skip default values to reduce size of template
                if value == copy_data.get('default'):
                    continue

                fmt = copy_data.get('format')
                if fmt:
                    value = fmt(value)
                data[copy_data['template']] = value

            if passive['Icon_DDSFile']:
                icon = passive['Icon_DDSFile'].split('/')
                if passive['Icon_DDSFile'].startswith(
                        'Art/2DArt/SkillIcons/passives/'):
                    if icon[-2] == 'passives':
                        data['icon'] = icon[-1]
                    else:
                        data['icon'] = '%s (%s)' % (icon[-1], icon[-2])
                else:
                    data['icon'] = icon[-1]

                data['icon'] = data['icon'].replace('.dds', '')

            stat_ids = []
            values = []

            j = 0
            for i in range(0, self._MAX_STAT_ID):
                try:
                    stat = passive['StatsKeys'][i]
                except IndexError:
                    break
                j = i + 1
                stat_ids.append(stat['Id'])
                data['stat%s_id' % j] = stat['Id']
                values.append(passive['Stat%sValue' % j])
                data['stat%s_value' % j] = passive['Stat%sValue' % j]

            data['stat_text'] = '<br>'.join(
                self._get_stats(
                    stat_ids,
                    values,
                    translation_file='passive_skill_stat_descriptions.txt'))

            # For now this is being added to the stat text
            for ps_buff in passive['PassiveSkillBuffsKeys']:
                stat_ids = [
                    stat['Id']
                    for stat in ps_buff['BuffDefinitionsKey']['StatsKeys']
                ]
                values = ps_buff['Buff_StatValues']
                #if passive['Id'] == 'AscendancyChampion7':
                #    index = stat_ids.index('damage_taken_+%_from_hits')
                #    del stat_ids[index]
                #    del values[index]
                for i, (sid, val) in enumerate(zip(stat_ids, values)):
                    j += 1
                    data['stat%s_id' % j] = sid
                    data['stat%s_value' % j] = val

                text = '<br>'.join(
                    self._get_stats(
                        stat_ids,
                        values,
                        translation_file=
                        'passive_skill_aura_stat_descriptions.txt'))

                if data['stat_text']:
                    data['stat_text'] += '<br>' + text
                else:
                    data['stat_text'] = text

            node = node_index.get(passive['PassiveSkillGraphId'])
            if node and node.connections:
                data['connections'] = ','.join([
                    self.rr['PassiveSkills.dat'].index['PassiveSkillGraphId']
                    [psg_id]['Id'] for psg_id in node.connections
                ])

            # extract icons if specified
            if parsed_args.store_images:
                fn = data['icon'] + ' passive skill icon'
                dds = os.path.join(self._img_path, fn + '.dds')
                png = os.path.join(self._img_path, fn + '.png')
                if not (os.path.exists(dds) or os.path.exists(png)):
                    self._write_dds(
                        data=self.file_system.get_file(
                            passive['Icon_DDSFile']),
                        out_path=dds,
                        parsed_args=parsed_args,
                    )

            cond = WikiCondition(
                data=data,
                cmdargs=parsed_args,
            )

            r.add_result(
                text=cond,
                out_file='passive_skill_%s.txt' % data['id'],
                wiki_page=[
                    {
                        'page':
                        'Passive Skill:' + self._format_wiki_title(data['id']),
                        'condition':
                        cond,
                    },
                ],
                wiki_message='Passive skill updater',
            )

        return r