def _setup(self, args):
    """
    Perform the one-time setup step: extract the Data and Metadata
    sections of the content GGPK into the configured temporary directory
    and record the content hash so that later runs can detect outdated
    extractions (compare the "Game file hash mismatch" check elsewhere
    in this file).

    :param args: argparse args passed on
    :return:
    """
    # safe=False presumably allows reading the option before setup has
    # completed — TODO confirm against config implementation
    temp_dir = config.get_option('temp_dir', safe=False)
    content_ggpk = get_content_ggpk_path()
    console('Reading "%s"...' % content_ggpk)
    ggpk = GGPKFile()
    ggpk.read(content_ggpk)
    console('Building directory...')
    # Build the internal directory tree so records can be addressed by
    # path (ggpk['Data'], ggpk['Metadata'] below)
    ggpk.directory_build()
    console('Extracting data files to "%s"...' % temp_dir)
    ggpk['Data'].extract_to(temp_dir)
    ggpk['Metadata'].extract_to(temp_dir)
    console('Hashing...')
    # Persist the hash of the ggpk that was just extracted
    config.set_setup_variable('temp_dir', 'hash', get_content_ggpk_hash())
    console('Done.')
def handle(self, args):
    """
    Resolve the .dat specification and normalize the requested file list.

    Loads the specification for the configured (non-default) game
    version, then fills in ``args.files``: if no files were requested,
    every file from the specification is used; otherwise each name is
    validated against the specification (a missing ``.dat`` suffix is
    appended automatically) and unknown files are removed with an error
    message.  Also sets ``args.spec``.

    :param args: argparse namespace with a ``files`` attribute
    :return: None
    """
    ver = config.get_option('version')
    if ver != VERSION.DEFAULT:
        console('Loading specification for %s' % ver)
        dat.set_default_spec(version=ver)
    spec = dat._default_spec
    if args.files is None:
        # No explicit selection -> process everything in the spec
        args.files = list(spec)
    else:
        files = set()
        for file_name in args.files:
            if file_name in spec:
                files.add(file_name)
                continue
            # Accept names given without the .dat suffix
            if not file_name.endswith('.dat'):
                file_name += '.dat'
            if file_name in spec:
                files.add(file_name)
            else:
                # BUGFIX: names that already ended in ".dat" but were not
                # in the specification used to be dropped silently; now
                # they are reported like any other unknown file.
                console(
                    '.dat file "%s" is not in specification. Removing.' %
                    file_name,
                    msg=Msg.error)
        files = list(files)
        files.sort()
        args.files = files
    args.spec = spec
def _read_dat_files(self, args, prefix=''):
    """
    Load the requested .dat files out of the content GGPK.

    Files that cannot be found are reported as warnings, dropped from
    ``args.files`` and omitted from the result.

    :param args: argparse namespace (uses ``files`` and ``language``)
    :param prefix: text prepended to console status messages
    :return: dict mapping file name -> parsed dat.DatFile
    """
    ggpk_path = get_content_ggpk_path()
    console(prefix + 'Reading "%s"...' % ggpk_path)
    ggpk = get_content_ggpk(ggpk_path)
    console(prefix + 'Reading .dat files')

    language = args.language or config.get_option('language')
    # English data lives directly under "Data"; other languages have a
    # per-language sub folder.
    data_node = ggpk['Data']
    if language != 'English':
        data_node = data_node[language]

    loaded = {}
    missing = []
    for name in tqdm(args.files):
        try:
            entry = data_node[name]
        except FileNotFoundError:
            console('Skipping "%s" (missing)' % name, msg=Msg.warning)
            missing.append(name)
        else:
            parsed = dat.DatFile(name)
            parsed.read(file_path_or_raw=entry.record.extract(),
                        use_dat_value=False)
            loaded[name] = parsed

    # Keep args.files in sync with what was actually read
    for name in missing:
        args.files.remove(name)

    return loaded
def get_content_path():
    """
    Return the path to the current content.ggpk.

    An explicitly configured ``ggpk_path`` takes precedence; otherwise
    the installation is auto-detected from the configured version and
    distributor.

    :return: Path of the content ggpk
    :rtype: str

    :raises SetupError: if no valid path was found.
    """
    configured = config.get_option('ggpk_path')
    if configured != '':
        return configured

    finder = PoEPath(
        config.get_option('version'),
        config.get_option('distributor'),
    )
    candidates = finder.get_installation_paths()
    if not candidates:
        raise SetupError('No PoE Installation found.')
    return candidates[0]
def handle(self, *a, mwclient, result, cmdargs, parser):
    """
    Log into the language-specific wiki and push every result row,
    either sequentially (with a sleep between edits) or through a
    thread pool.

    :param mwclient: the imported mwclient module/wrapper used to build
                     the Site
    :param result: iterable of rows to hand to self._error_catcher
    :param cmdargs: parsed command line arguments (user, password,
                    wiki_threads, wiki_sleep, ...)
    :param parser: the exporter/parser instance, stored for later use
    """
    # NOTE(review): credentials fall back to interactive input() prompts;
    # the password is echoed to the console as warned below.
    url = WIKIS.get(config.get_option('language'))
    if url is None:
        console(
            'There is no wiki defined for language "%s"' % cmdargs.language,
            msg=Msg.error
        )
        return
    self.site = mwclient.Site(
        url, path='/', scheme='https'
    )
    self.site.login(
        username=cmdargs.user or input('Enter your gamepedia user name:\n'),
        password=cmdargs.password or input(
            'Please enter your password for the specified user\n'
            'WARNING: Password will be visible in console\n'
        ),
    )
    self.mwclient = mwclient
    self.cmdargs = cmdargs
    self.parser = parser
    if cmdargs.wiki_threads > 1:
        console('Starting thread pool...')
        tp = ThreadPoolExecutor(max_workers=cmdargs.wiki_threads)
        for row in result:
            tp.submit(
                self._error_catcher,
                row=row,
            )
        # Block until every submitted edit has finished
        tp.shutdown(wait=True)
    else:
        console('Editing pages...')
        for row in result:
            self._error_catcher(row=row)
            # Throttle sequential edits to be kind to the wiki API
            time.sleep(cmdargs.wiki_sleep)
def _read_dat_files(self, args, prefix=''):
    """
    Read the requested .dat files through the file system abstraction.

    Files that are missing or fail to parse are reported, removed from
    ``args.files`` and excluded from the returned mapping.

    :param args: argparse namespace (uses ``files`` and ``language``)
    :param prefix: text prepended to console status messages
    :return: dict mapping file name -> parsed dat.DatFile
    """
    path = get_content_path()
    console(prefix + 'Loading file system...')
    file_system = FileSystem(root_path=path)
    console(prefix + 'Reading .dat files')
    dat_files = {}
    lang = args.language or config.get_option('language')
    dir_path = "Data/"
    if lang != 'English':
        # Non-English data is stored in a per-language sub folder
        dir_path = "Data/%s/" % lang
    remove = []
    for name in tqdm(args.files):
        file_path = dir_path + name
        try:
            data = file_system.get_file(file_path)
        except FileNotFoundError:
            console('Skipping "%s" (missing)' % file_path, msg=Msg.warning)
            remove.append(name)
            continue
        df = dat.DatFile(name)
        try:
            df.read(file_path_or_raw=data, use_dat_value=False)
        except Exception:
            # Report the parse failure through the standard console
            # channel instead of a bare print
            console('%s %s' % (name, traceback.format_exc()), msg=Msg.error)
            remove.append(name)
            # BUGFIX: previously fell through and stored the broken
            # DatFile in dat_files despite the read failure.
            continue
        dat_files[name] = df
    for file_name in remove:
        args.files.remove(file_name)
    return dat_files
def wrapper(pargs, *args, **kwargs):
    """
    Decorator body wrapping an exporter function: validates the setup
    hash and output directories, instantiates the parser, runs either
    the custom handler or the wrapped function, and then dispatches the
    results to console / file / wiki as requested by the CLI flags.

    Closes over ``cls``, ``func``, ``handler``, ``wiki_handler`` and
    ``mwclient`` from the enclosing scope (not visible here).

    :param pargs: parsed command line arguments
    :return: -1 on validation failure, otherwise 0 or the handler result
    """
    # Check Hash
    if not check_hash():
        console('Game file hash mismatch. Please rerun setup.', msg=Msg.error)
        return -1
    # Check outdir, if specified:
    if hasattr(pargs, 'outdir') and pargs.outdir:
        out_dir = pargs.outdir
    else:
        out_dir = config.get_option('out_dir')
    temp_dir = config.get_option('temp_dir')
    for item in (out_dir, temp_dir):
        if not os.path.exists(item):
            console('Path "%s" does not exist' % item, msg=Msg.error)
            return -1
    console('Reading .dat files...')
    parser = cls(base_path=temp_dir, parsed_args=pargs)
    console('Parsing...')
    if handler:
        # Custom handler takes over completely
        return handler(parser, pargs, out_dir=out_dir)
    else:
        result = func(parser, pargs, *args, **kwargs)
        for item in result:
            # 'text' may be a deferred callable to avoid computing
            # output that is never printed/written
            if callable(item['text']):
                text = item['text']()
            else:
                text = item['text']
            if pargs.print:
                console(text)
            if pargs.write:
                out_path = os.path.join(out_dir, item['out_file'])
                console('Writing data to "%s"...' % out_path)
                with open(out_path, 'w', encoding='utf-8') as f:
                    f.write(text)
        if pargs.wiki:
            if mwclient is None:
                try:
                    # Will raise the exception appropriately
                    # NOTE(review): __import__('') raises ValueError, not
                    # ImportError, so the except ImportError branch below
                    # is unreachable and the bare raise fires instead —
                    # the intended module name appears to be missing here;
                    # confirm against upstream.
                    __import__('')
                except ImportError:
                    console('Run pip install -e cli', msg=Msg.error)
                except Exception:
                    raise
            if wiki_handler is None:
                console('No wiki-handler defined for this function',
                        msg=Msg.error)
                return 0
            console('Running wikibot...')
            console('-'*80)
            wiki_handler.handle(mwclient=mwclient, result=result,
                                cmdargs=pargs, parser=parser)
            console('-'*80)
            console('Completed wikibot execution.')
        console('Done.')
        return 0
def tempest(self, parsed_args):
    """
    Export the tempest map modifiers (mods whose CorrectGroup is
    "MapEclipse") as wiki table rows, resolving their stat translations
    and any linked exploding-storm buff effects.

    :param parsed_args: parsed command line arguments (unused directly)
    :return: ExporterResult containing the tempest_mods.txt output
    """
    tf = self.tc['map_stat_descriptions.txt']
    data = []
    for mod in self.rr['Mods.dat']:
        # Is it a tempest mod?
        if mod['CorrectGroup'] != 'MapEclipse':
            continue
        # Doesn't have a name - probably not implemented
        if not mod['Name']:
            continue
        # Collect the non-empty stat keys for this mod
        stats = []
        for i in MOD_STATS_RANGE:
            stat = mod['StatsKey%s' % i]
            if stat:
                stats.append(stat)
        info = {}
        info['name'] = mod['Name']
        effects = []
        stat_ids = [st['Id'] for st in stats]
        stat_values = []
        for i, stat in enumerate(stats):
            # Stat columns are 1-based (Stat1Min/Stat1Max, ...)
            j = i + 1
            values = [mod['Stat%sMin' % j], mod['Stat%sMax' % j]]
            # Collapse fixed values (min == max) to a scalar
            if values[0] == values[1]:
                values = values[0]
            stat_values.append(values)
        try:
            index = stat_ids.index('map_summon_exploding_buff_storms')
        except ValueError:
            pass
        else:
            # Value is incremented by 1 for some reason
            tempest = self.rr['ExplodingStormBuffs.dat'][
                stat_values[index] - 1]
            # The storm stat is handled via the buff, so remove it from
            # the generic translation below
            stat_ids.pop(index)
            stat_values.pop(index)
            if tempest['BuffDefinitionsKey']:
                tempest_stats = tempest['BuffDefinitionsKey']['StatKeys']
                tempest_values = tempest['StatValues']
                tempest_stat_ids = [st['Id'] for st in tempest_stats]
                t = tf.get_translation(tempest_stat_ids, tempest_values,
                                       full_result=True,
                                       lang=config.get_option('language'))
                self._append_effect(
                    t, effects,
                    'The tempest buff provides the following effects:')
            #if tempest['MonsterVarietiesKey']:
            #    print(tempest['MonsterVarietiesKey'])
            #    break
        t = tf.get_translation(stat_ids, stat_values, full_result=True,
                               lang=config.get_option('language'))
        self._append_effect(t, effects,
                            'The area gets the following modifiers:')
        info['effect'] = '\n'.join(effects)
        data.append(info)
    data.sort(key=lambda info: info['name'])
    # Emit MediaWiki table rows
    out = []
    for info in data:
        out.append('|-\n')
        out.append('| %s\n' % info['name'])
        out.append('| %s\n' % info['effect'])
        out.append('| \n')
    r = ExporterResult()
    r.add_result(lines=out, out_file='tempest_mods.txt')
    return r
def export(self, parsed_args, areas):
    """
    Export world areas as wiki Area templates.

    For each area the configured _COPY_KEYS are copied (skipping
    defaults), spawn weights and flavour text are attached, and — unless
    skipped — a best-effort "main_page" link is derived for map areas.

    :param parsed_args: parsed command line arguments
    :param areas: list of WorldAreas rows to export
    :return: ExporterResult with one entry per area
    """
    console('Found %s areas, parsing...' % len(areas))
    r = ExporterResult()
    if not areas:
        console(
            'No areas found for the specified parameters. Quitting.',
            msg=Msg.warning,
        )
        return r
    console('Accessing additional data...')
    self.rr['MapPins.dat'].build_index('WorldAreasKeys')
    self.rr['AtlasNode.dat'].build_index('WorldAreasKey')
    self.rr['MapSeries.dat'].build_index('Id')
    if not parsed_args.skip_main_page:
        self.rr['Maps.dat'].build_index('Regular_WorldAreasKey')
        # NOTE(review): this index is built but not read in this method —
        # presumably consumed elsewhere; confirm before removing.
        self.rr['UniqueMaps.dat'].build_index('WorldAreasKey')
    console('Found %s areas. Processing...' % len(areas))
    lang = self._LANG[config.get_option('language')]
    for area in areas:
        data = OrderedDict()
        for row_key, copy_data in self._COPY_KEYS.items():
            value = area[row_key]
            condition = copy_data.get('condition')
            if condition is not None and not condition(area):
                continue
            # Skip default values to reduce size of template
            if value == copy_data.get('default'):
                continue
            '''default = copy_data.get('default')
            if default is not None and value == default:
                continue'''
            fmt = copy_data.get('format')
            if fmt:
                value = fmt(value)
            data[copy_data['template']] = value
        for i, (tag, value) in enumerate(zip(area['SpawnWeight_TagsKeys'],
                                             area['SpawnWeight_Values']),
                                         start=1):
            data['spawn_weight%s_tag' % i] = tag['Id']
            data['spawn_weight%s_value' % i] = value
        map_pin = self.rr['MapPins.dat'].index['WorldAreasKeys'].get(area)
        if map_pin:
            data['flavour_text'] = map_pin[0]['FlavourText']
        # Atlas flavour text (if any) overrides the map pin text
        atlas_node = self.rr['AtlasNode.dat'].index['WorldAreasKey'].get(
            area)
        if atlas_node:
            data['flavour_text'] = atlas_node[0]['FlavourTextKey']['Text']
        #
        # Add main-page if possible
        #
        if not parsed_args.skip_main_page:
            # NOTE(review): "map" shadows the builtin of the same name
            map = self.rr['Maps.dat'].index['Regular_WorldAreasKey'].get(
                area)
            if map:
                map = map[0]
                if map['MapSeriesKey']['Id'] == 'MapWorlds':
                    data['main_page'] = map['BaseItemTypesKey']['Name']
                else:
                    data['main_page'] = '%s (%s)' % (
                        map['BaseItemTypesKey']['Name'],
                        map['MapSeriesKey']['Name'])
            elif data.get('tags') and 'map' in data['tags']:
                # Work out which map series this area's Id belongs to
                map_version = None
                for row in self.rr['MapSeries.dat']:
                    if not area['Id'].startswith(row['Id']):
                        continue
                    map_version = row['Name']
                if map_version:
                    # The current series is the implicit default, so it
                    # is not included in the page name
                    if map_version == self.rr['MapSeries.dat'].index['Id'][
                            'MapWorlds']['Name']:
                        map_version = None
                if 'Unique' in area['Id'] or 'BreachBoss' in area['Id']\
                        or area['Id'].endswith('ShapersRealm'):
                    if map_version is None:
                        data['main_page'] = area['Name']
                    else:
                        data['main_page'] = '%s (%s)' % (area['Name'],
                                                         map_version)
                elif 'Harbinger' in area['Id']:
                    # Tier suffix (if any) follows "Harbinger" in the Id
                    tier = re.sub('^.*Harbinger', '', area['Id'])
                    if tier:
                        if map_version is None:
                            data['main_page'] = '%s (%s)' % (
                                area['Name'],
                                lang[tier],
                            )
                        else:
                            data['main_page'] = '%s (%s) (%s)' % (
                                area['Name'],
                                lang[tier],
                                map_version,
                            )
                    else:
                        if map_version is None:
                            data['main_page'] = area['Name']
                        else:
                            data['main_page'] = '%s (%s)' % (
                                area['Name'],
                                map_version,
                            )
        cond = WikiCondition(
            data=data,
            cmdargs=parsed_args,
        )
        r.add_result(
            text=cond,
            out_file='area_%s.txt' % data['id'],
            wiki_page=[
                {
                    'page': 'Area:' + self._format_wiki_title(data['id']),
                    'condition': cond,
                },
            ],
            wiki_message='Area updater',
        )
    return r
def export(self, parsed_args, incursion_rooms):
    """
    Export incursion rooms as wiki templates, optionally extracting and
    cropping their UI icons from the game art files.

    :param parsed_args: parsed command line arguments (uses store_images)
    :param incursion_rooms: list of IncursionRooms rows to export
    :return: ExporterResult with one entry per named, non-template room
    """
    r = ExporterResult()
    if not incursion_rooms:
        console(
            'No incursion rooms found for the specified parameters. '
            'Quitting.',
            msg=Msg.warning,
        )
        return r
    console('Found %s rooms...' % len(incursion_rooms))
    console(
        'Additional files may be loaded. Processing information - this '
        'may take a while...')
    self._image_init(parsed_args)
    idl_sources = set()
    if parsed_args.store_images:
        # Index the UI image layout so icons can be located by name
        idl = IDLFile()
        idl.read(file_path_or_raw=self.file_system.get_file(
            'Art/UIImages1.txt'))
        idl_lookup = idl.as_dict()
    console('Parsing data into templates...')
    for incursion_room in incursion_rooms:
        if 'TEMPLATE' in incursion_room['Id']:
            console('Skipping template room "%s"' % incursion_room['Id'],
                    msg=Msg.warning)
            continue
        elif not incursion_room['Name']:
            console('Skipping incursion room "%s" without a name' %
                    incursion_room['Id'], msg=Msg.warning)
            continue
        data = OrderedDict()
        for row_key, copy_data in self._COPY_KEYS.items():
            value = incursion_room[row_key]
            condition = copy_data.get('condition')
            if condition is not None and not condition(incursion_room):
                continue
            # Skip default values to reduce size of template
            if value == copy_data.get('default'):
                continue
            fmt = copy_data.get('format')
            if fmt:
                value = fmt(value)
            data[copy_data['template']] = value
        if incursion_room['IncursionArchitectKey']:
            mv = incursion_room['IncursionArchitectKey'][
                'MonsterVarietiesKey']
            data['architect_metadata_id'] = mv['Id']
            data['architect_name'] = mv['Name']
        cond = IncursionRoomWikiCondition(
            data=data,
            cmdargs=parsed_args,
        )
        if parsed_args.store_images and incursion_room['UIIcon']:
            idl_record = idl_lookup[incursion_room['UIIcon']]
            src = os.path.join(self._img_path,
                               os.path.split(idl_record.source)[-1])
            if src not in idl_sources:
                console('Writing source file "%s" to images' % src)
                with open(src, 'wb') as f:
                    img_data = self.file_system.extract_dds(
                        self.file_system.get_file(idl_record.source))
                    # Copy the 84-byte DDS header verbatim
                    f.write(img_data[:84])
                    # Rewrite the FourCC: DXT4 is not widely supported,
                    # DXT5 differs only in premultiplied alpha
                    if img_data[84:88].decode('ascii') == 'DXT4':
                        f.write('DXT5'.encode('ascii'))
                    else:
                        f.write(img_data[84:88])
                    f.write(img_data[88:])
                idl_sources.add(src)
            # NOTE(review): command string built from file names — if a
            # name ever contains quotes/shell metacharacters this breaks;
            # consider subprocess.run with a list.
            os.system(
                'magick "%(src)s" -crop %(w)sx%(h)s+%(x)s+%(y)s '
                '"%(dst)s incursion room icon.png"' % {
                    'src': src,
                    'dst': os.path.join(self._img_path, data['icon']),
                    'h': idl_record.h,
                    'w': idl_record.w,
                    'x': idl_record.x1,
                    'y': idl_record.y1,
                })
        r.add_result(
            text=cond,
            out_file='incursion_room_%s.txt' % data['name'],
            wiki_page=[{
                'page': data['name'],
                'condition': cond,
            }, {
                'page': data['name'] + ' (%s)' % (
                    self._incursion_room_page_name[
                        config.get_option('language')]),
                'condition': cond,
            }],
            wiki_message='Incursion room updater',
        )
    if idl_sources:
        console('Cleaning up image files that are no longer necessary')
        for src in idl_sources:
            # NOTE(review): src was already joined with self._img_path
            # above — joining again double-prefixes the path unless
            # _img_path is absolute; verify.
            os.remove(os.path.join(self._img_path, src))
    return r
def read_quest_rewards(self, args):
    """
    Collect quest reward rows and write them out via _write_lua.

    Rows describing the same reward offered to multiple character
    classes are compressed into a single entry whose classes are joined
    with the unit separator.

    :param args: parsed command line arguments (unused directly)
    :return: result of self._write_lua(..., 'quest')
    """
    compress = {}
    for row in self.rr['QuestRewards.dat']:
        # Find the corresponding keys
        item = row['BaseItemTypesKey']
        # TODO: Skipping random map reward with zana mod here
        if item is None:
            continue
        quest = row['QuestRewardOffersKey']['QuestKey']
        character = row['CharactersKey']
        itemcls = item['ItemClassesKey']['Id']

        # Format the data
        data = OrderedDict()
        data['quest'] = quest['Name']
        data['quest_id'] = quest['Id']
        # Quest not implemented or buggy or master stuff
        if not data['quest']:
            continue
        data['act'] = quest['Act']
        if character is not None:
            data['classes'] = character['Name']

        # BUGFIX: rarity used to be assigned only when the rarity was not
        # RARITY.ANY, but read unconditionally below — NameError on the
        # first ANY row, or a stale value from a previous row otherwise.
        rarity = None
        if row['RarityKey'] != RARITY.ANY:
            rarity = self.rr['ClientStrings.dat'].index['Id'][
                'ItemDisplayString' + row['RarityKey'].name_upper]['Text']

        sockets = row['SocketGems']
        if sockets:
            data['sockets'] = sockets

        name = item['Name']
        # Some of unique items follow special rules
        if itemcls == 'QuestItem' and 'Book' in item['Id']:
            name = '%s (%s)' % (name, data['quest'])
        elif itemcls == 'Map':
            name = '%s (%s)' % (
                name,
                self.rr['MapSeries.dat'].index['Id']['MapWorlds']['Name'])

        # Non non quest items or skill gems have their rarity added
        if itemcls not in {'Active Skill Gem', 'Support Skill Gem',
                           'QuestItem', 'StackableCurrency'}:
            data['item_level'] = row['ItemLevel']
            data['rarity'] = rarity
            # Unique and not a quest item or gem
            if row['RarityKey'] == RARITY.ANY:
                uid = row['Key0']
                item_map = self._ITEM_MAP.get(
                    config.get_option('language'))
                if item_map is None:
                    warnings.warn(
                        'No unique item mapping defined for the current '
                        'language')
                elif uid in item_map:
                    name = item_map[uid]
                    data['rarity'] = self.rr['ClientStrings.dat'].index[
                        'Id']['ItemDisplayStringUnique']['Text']
                else:
                    warnings.warn('Uncaptured unique item. %s %s %s' % (
                        uid, data['quest'], name))

        # Two stone rings
        two_stone_map = self._TWO_STONE_MAP.get(
            config.get_option('language'))
        if two_stone_map is None:
            warnings.warn(
                'No two stone ring mapping for the current language')
        elif item['Id'] in two_stone_map:
            name = two_stone_map[item['Id']]

        data['reward'] = name

        # Add to formatting list
        key = quest['Id'] + item['Id'] + str(row['Key0'])
        if key in compress:
            compress[key]['classes'] += self._UNIT_SEP + character['Name']
        else:
            compress[key] = data

    outdata = list(compress.values())
    return self._write_lua(outdata, 'quest')