def parseDataFile(fileName, filePath=r'sharecfg', mode=0):
    if mode == 0:
        filePath = os.path.join(DataDirectory, filePath, fileName + '.lua')
        with open(filePath, 'r', encoding='utf-8') as f:
            content = f.read()
        content = re.match(r".*" + fileName + r" = (\{.*\})", content, flags=re.DOTALL)[1]
        output = slpp.decode(content)
        if isinstance(output, dict) and 'all' in output.keys():
            del output['all']
        return output
    elif mode == 1:
        filePath = os.path.join(DataDirectory, filePath)
        output = {}
        templateFileNames = os.listdir(filePath)
        for fNames in templateFileNames:
            with open(os.path.join(filePath, fNames), 'r', encoding='utf-8') as f:
                content = f.read()
            content = re.match(r".*" + fileName + r"_\d+ = (\{.*\})", content, flags=re.DOTALL)[1]
            o = slpp.decode(content)
            for k, v in o.items():
                output[k] = v
        if isinstance(output, dict) and 'all' in output.keys():
            del output['all']
        return output
def load_stats():
    if not g_blockstats:
        if os.path.exists(BLOCKSTATS):
            print "Using local blockstats.lua"
            stats = lua.decode(open(BLOCKSTATS).read())
        else:
            print "Fetching blockstats.lua from server"
            stats = lua.decode(urllib2.urlopen(BLOCKSTATS_URL).read())
        for bl in stats:
            g_blockstats[bl["ident"]] = bl
    return g_blockstats
def test_table(self):
    # Bracketed string key:
    self.assertEqual(slpp.decode('{[10] = 1}'), {10: 1})
    # Void table:
    self.assertEqual(slpp.decode('{nil}'), [])
    # Values-only table:
    self.assertEqual(slpp.decode('{"10"}'), ["10"])
    # Last zero
    self.assertEqual(slpp.decode('{0, 1, 0}'), [0, 1, 0])
def test_string(self):
    # Escape test:
    self.assertEqual(slpp.decode(r"'test\'s string'"), "test's string")
    # Add escaping on encode:
    self.assertEqual(slpp.encode({'a': 'func("call()");'}),
                     '{\n\t["a"] = "func(\\"call()\\");"\n}')
    # Strings inside double brackets
    longstr = ' ("word") . [ ["word"] . ["word"] . ("word" | "word" | "word" | "word") . ["word"] ] '
    self.assertEqual(slpp.decode('[[' + longstr + ']]'), longstr)
    self.assertEqual(
        slpp.decode('{ [0] = [[' + longstr + ']], [1] = "a"}'),
        [longstr, "a"])
def test_string(self):
    # Escape test:
    self.assertEqual(slpp.decode(r"'test\'s string'"), "test's string")
    # Add escaping on encode:
    self.assertEqual(slpp.encode({'a': 'func("call()");'}),
                     '{\n\ta = "func(\\"call()\\");"\n}')
def load_saveindex(root, use_cache=False):
    current = None
    if use_cache:
        current = load_cache(root)
        if current:
            return current
    s = read_file(root + '/saveindex')
    data = lua.decode(s[18:])
    if data:
        index = data['last_used_slot'] - 1
        slot = data['slots'][index]
        if slot.get('session_id'):
            session_dir = os.path.join(root, 'session', slot['session_id'])
            tmp = load_session(session_dir)
            if tmp:
                ot = tmp['summary']['title']
                tmp['summary']['title'] = '{}\t{}'.format(ot, slot['server']['name'])
                if not current:
                    current = tmp
                elif current['time'] < tmp['time']:
                    current = tmp
    dump_cache(current, root)
    return current
def _get_skins(self):
    url = "https://leagueoflegends.fandom.com/wiki/Module:SkinData/data"
    html = download_soup(url, False)
    soup = BeautifulSoup(html, "lxml")
    # Pull the relevant champData from the html tags
    spans = soup.find("pre", {"class": "mw-code mw-script"})
    start = None
    spans = spans.text.split("\n")
    for i, span in enumerate(spans):
        if str(span) == "return {":
            start = i
            spans[i] = "{"
    spans = spans[start:]
    test1 = re.compile(r"\w -- \w|.\w--\w|\w --\w|.\w--\s")
    for i, span in enumerate(spans):
        if span in ["-- </pre>", "-- [[Category:Lua]]"]:
            spans[i] = ""
        if re.search(test1, span):
            test2 = re.search(test1, span)
            spans[i] = span.replace(test2.group()[2] + test2.group()[3], " ")
            span = spans[i]
        comment_start = span.find("--")
        # text = text.replace("-", " ")
        if comment_start > -1:
            spans[i] = span[:comment_start]
    spans = "".join(spans)
    skin_data = lua.decode(spans)
    return skin_data
def run(geojson: TextIO, domains: str):
    domains_site = []
    domains_polygons = []
    polygons = json.loads(geojson.read())["features"]

    # Read domain names from site
    for filename in os.listdir(domains):
        filepath = os.path.join(domains, filename)
        with open(filepath, 'r') as domainfile:
            for dom in lua.decode(domainfile.read())["domain_names"].keys():
                domains_site.append(dom)

    # Read domain names from polygons
    for polygon in polygons:
        print(polygon["properties"]["name"])
        domains_polygons.append(polygon["properties"]["name"])

    print("Domains contained in geojson and site:")
    print_list(sorted(list(set(domains_polygons).intersection(domains_site))))

    print("Domains contained exclusively in geojson:")
    print_list(sorted(list(set(domains_polygons) - set(domains_site))))

    print("Domains contained exclusively in domains:")
    print_list(sorted(list(set(domains_site) - set(domains_polygons))))

    print("Duplicate names in geojson:")
    print_list(sorted([
        item for item, count in collections.Counter(domains_polygons).items()
        if count > 1
    ]))
async def __append_shinkai_item(self, title):
    resp = await self.session.get(self.WIKIA_RAW_URL.format('Module:' + title))
    item_info_text = await resp.text()
    while item_info_text.find('REDIRECT') != -1:
        title = REDIRECT_PATTERN.search(item_info_text).group(1).strip()
        resp = await self.session.get(self.WIKIA_RAW_URL.format('Module:' + title))
        item_info_text = await resp.text()
    _luatable = re.search(LUATABLE_PATTERN, item_info_text)
    if not _luatable:
        return
    _luatable = _luatable.group(0)
    item_info = lua.decode(_luatable)
    item_id = item_info['_id']
    chinese_name = self.SLOTITEMS_KCDATA[item_id]['chinese_name']
    chinese_name = chinese_name if chinese_name else ''
    self.items_data[item_id] = {
        '日文名': item_info['_japanese_name'],        # Japanese name
        '中文名': chinese_name,                       # Chinese name
        '类型': self.SLOTITEMS_KCDATA[item_id]['type'],  # type
        '稀有度': item_info['_rarity']                # rarity
    }
    self.items_id_map[item_info['_name']] = item_id
    for key, val in item_info.items():
        if key not in STATS:
            continue
        if val == False:
            continue
        if key == '_range':
            val = RANGE[val]
        self.items_data[item_id].update({STATS[key]: val})
    if item_id in REMARKS:
        self.items_data[item_id].update({'备注': REMARKS[item_id]})  # remarks
def getRawRecipes(game_dir='F:\\Tower Program Files\\Program Files (x86)\\SteamLibrary\\steamapps\\common\\Factorio',
                  update_from_game_files=False):
    # recipe_dir = os.path.join(game_dir, 'data\\base\\prototypes\\recipe')  # Changed in 1.1
    # recipe_paths = glob(os.path.join(recipe_dir, '*'))
    if update_from_game_files:
        recipe_dir = os.path.join(game_dir, 'data\\base\\prototypes')
        recipe_lua_path = os.path.join(recipe_dir, 'recipe.lua')  # path to the actual game file. Only used for updated JSON.
    recipe_json_path = 'recipe.json'
    raw_recipes = []
    recipe_names = set()

    if update_from_game_files:
        with open(recipe_lua_path, 'r') as lua_file:
            recipe_string = lua_file.read().strip().lstrip('data:extend(').rstrip(')')
        recipe_list = lua.decode(recipe_string)
        with open(recipe_json_path, 'w') as json_file:
            json.dump(recipe_list, json_file)
    else:
        with open(recipe_json_path, 'r') as json_file:
            recipe_list = json.load(json_file)

    raw_recipes.extend(recipe_list)
    for recipe in recipe_list:
        if 'name' not in recipe:
            print('Warning: Malformed recipe:', recipe)
        else:
            if recipe['name'] not in recipe_names:
                recipe_names.add(recipe['name'])
            else:
                if recipe['name'] != recipe:
                    # print('Warning: Multiple entries for recipe:', recipe['name'],
                    #       '; first duplicate found in file:', os.path.basename(recipe_path))
                    pass
    return raw_recipes
def _get_loot_dict_from_lua_string(self, s):
    lua_table_dict = lua.decode(s)
    loot_dict = lua_table_dict['Default'][
        '@%s' % self._eso_at_player_name]['$AccountWide'][_ESO_ADDON_LOOT_VAR_NAME]
    loot_tuple_list = sorted(loot_dict.items(), key=lambda kv: kv[0])
    loot_dict = collections.OrderedDict(loot_tuple_list)
    return loot_dict
def get_items_dictionary():
    if platform.system() == 'Windows':
        path = 'C:\\Program Files (x86)\\Windower4\\res\\items.lua'
    else:
        path = os.path.join(os.path.expanduser("~"), 'Resources/lua/items.lua')
    with open(path) as fd:
        data = fd.read().replace('return', '', 1)
    return lua.decode(data)
def parse_lua(luadb, gu_toon_name):
    """Parse the lua data."""
    inventorydata = luadb.read()
    inventorydata = "{ " + inventorydata + " }"
    inventorydataparsed = lua.decode(inventorydata)
    itemid_list, itemname_list = iter_luadb(inventorydataparsed, gu_toon_name)
    qty_list = get_item_qty(inventorydataparsed, gu_toon_name, itemid_list)
    return itemid_list, itemname_list, qty_list
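# A minimal sketch (not from the original addon project; names are invented) of why
# parse_lua wraps the raw SavedVariables text in "{ ... }": a WoW-style dump is a bare
# `Name = { ... }` assignment, and wrapping it turns the file into one decodable Lua
# table. Assumes the standard SLPP import.
from slpp import slpp

raw_saved_variables = 'GU_Inventory = { ["itemIDs"] = { 2589, 2592 }, ["names"] = { "Linen Cloth", "Wool Cloth" } }'
wrapped = "{ " + raw_saved_variables + " }"
decoded = slpp.decode(wrapped)
# decoded should now be a dict keyed by the saved-variable name, e.g.
# decoded["GU_Inventory"]["itemIDs"] -> [2589, 2592]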
def test_numbers(self):
    # Integer and float:
    self.assertEqual(slpp.decode('3'), 3)
    self.assertEqual(slpp.decode('4.1'), 4.1)
    self.assertEqual(slpp.encode(3), '3')
    self.assertEqual(slpp.encode(4.1), '4.1')
    # Negative float:
    self.assertEqual(slpp.decode('-0.45'), -0.45)
    self.assertEqual(slpp.encode(-0.45), '-0.45')
    # Scientific:
    self.assertEqual(slpp.decode('3e-7'), 3e-7)
    self.assertEqual(slpp.decode('-3.23e+17'), -3.23e+17)
    self.assertEqual(slpp.encode(3e-7), '3e-07')
    self.assertEqual(slpp.encode(-3.23e+17), '-3.23e+17')
    # Hex:
    self.assertEqual(slpp.decode('0x3a'), 0x3a)
    differ(
        slpp.decode('''{
            ID = 0x74fa4cae,
            Version = 0x07c2,
            Manufacturer = 0x21544948
        }'''),
        {
            'ID': 0x74fa4cae,
            'Version': 0x07c2,
            'Manufacturer': 0x21544948
        })
def read_dump(source=input_dump):
    with open(source, 'r') as f:
        dump = f.read()
    # Add quotes around hyphenated strings
    dump = re.sub(r'\w+(?:-\w+)+', r'"\g<0>"', dump)
    # Parse the dump as a Lua data structure
    return slpp.decode(dump)
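# A short illustration (an assumption, not part of the original script) of what the
# hyphen-quoting substitution in read_dump does: bare hyphenated identifiers are not
# valid Lua values, so they are wrapped in quotes before decoding.
import re
from slpp import slpp

sample = '{ item = some-hyphenated-id, count = 2 }'
quoted = re.sub(r'\w+(?:-\w+)+', r'"\g<0>"', sample)
# quoted == '{ item = "some-hyphenated-id", count = 2 }'
print(slpp.decode(quoted))  # expected: {'item': 'some-hyphenated-id', 'count': 2}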
def _get_data(local_path: str, path: str):
    with zipfile.ZipFile(path, mode='r') as archive:
        with archive.open(local_path) as msnfile:
            raw_data = msnfile.read().decode('utf-8')
    match = re.search(rf'{local_path.split("/")[-1]}\s?=', raw_data)
    data_dict = raw_data[match.end() + 1:]
    data = lua.decode(data_dict)
    return data
def main():
    if len(sys.argv) != 2:
        print "To use this script, please provide the filename of an MDT lua object"
        print "These files need a bit of formatting; refer to the README to know how to create them"
        print "eg: './MdtToJson.py freeholdDungeonEnemies.lua'"
    f = open(sys.argv[1], "r")
    enemies = lua.decode(f.read())
    dungeonEnemies(enemies)
def test_table(self):
    # Bracketed string key:
    self.assertEqual(slpp.decode('{[10] = 1}'), {10: 1})
    # Void table:
    self.assertEqual(slpp.decode('{nil}'), {})
    # Values-only table:
    self.assertEqual(slpp.decode('{"10"}'), ["10"])
    # Last zero
    self.assertEqual(slpp.decode('{0, 1, 0}'), [0, 1, 0])
    # Mixed encode
    self.assertEqual(slpp.encode({'0': 0, 'name': 'john'}),
                     '{\n\t["0"] = 0,\n\t["name"] = "john"\n}')
def table_to_dict(path):
    """Decodes lua SavedVariables table as dict."""
    if not os.path.exists(path):
        return {}
    data = {}
    with open(path, encoding="utf-8") as file:
        if file:
            data = lua.decode(file.read().replace(STRING_TO_REMOVE, ''))
    return data
def add_event():
    lua_events = request.data
    kills_table_name = 'NewDataPoints3'
    position_table_name = 'NewPositionPoints'

    # The text file has 2 Lua arrays in it, one for kill events and one for positional events.
    # The order isn't guaranteed, so we need to find the line number that they are on and divide them up.
    first_table_name_index = lua_events.find(' =')
    if first_table_name_index == -1:
        print 'Data was sent, but contained no table!'
        return '', 200
    first_table_name = lua_events[:first_table_name_index].strip()

    # noinspection RegExpDuplicateAlternationBranch
    parts = re.split('{} = |{} = '.format(kills_table_name, position_table_name), lua_events)

    if len(parts) == 3:
        kill_events_index = 1 if first_table_name == kills_table_name else 2
        position_events_index = 2 if kill_events_index == 1 else 1
        kill_events = lua.decode(parts[kill_events_index])
        position_events = lua.decode(parts[position_events_index])
        if kill_events is not None:
            ingest_kill_data(kill_events)
        if position_events is not None:
            ingest_position_data(position_events)

    if len(parts) == 2:
        if first_table_name == kills_table_name:
            kill_data = lua.decode(parts[1])
            if kill_data is not None:
                ingest_kill_data(kill_data)
        else:
            position_data = lua.decode(parts[1])
            if position_data is not None:
                ingest_position_data(position_data)

    return '', 200
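# A hedged sketch (payload invented, not from the original service) of why add_event
# checks len(parts): re.split on the two "<table name> = " markers leaves an empty
# leading element plus one chunk per table when both tables are present in the upload.
import re

payload = 'NewDataPoints3 = { 1, 2 }\nNewPositionPoints = { 3, 4 }'
parts = re.split('NewDataPoints3 = |NewPositionPoints = ', payload)
print(len(parts), parts)  # 3 ['', '{ 1, 2 }\n', '{ 3, 4 }']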
def get_wl_profile(path: Path) -> dict:
    """
    Reads the +Wowhead_Looter.lua file for the value of the wlProfile variable.

    :param path: path to +Wowhead_Looter.lua
    :return: content of wlProfile; see +Wowhead_Looter.lua for its structure
    """
    content = path.read_text()
    match = re.search(r"wlProfile\s=\s({.*},\n})\n", content, re.DOTALL)
    if match:
        var = lua.decode(match.groups()[0])
        return var
    raise RuntimeError("Could not find variable wlProfile in given lua file.")
def __init__(self, editor, software_version):
    self.logger = get_logger("gui")
    self.editor = editor
    self.captured_map_coords = None
    self.profile = Profile('')
    self.profile.aircraft = "hornet"
    self.exit_quick_capture = False
    self.values = None
    self.capturing = False
    self.capture_key = try_get_setting(self.editor.settings, "capture_key", "ctrl+t")
    self.quick_capture_hotkey = try_get_setting(self.editor.settings, "quick_capture_hotkey", "ctrl+alt+t")
    self.enter_aircraft_hotkey = try_get_setting(self.editor.settings, "enter_aircraft_hotkey", "ctrl+shift+t")
    self.software_version = software_version
    self.is_focused = True
    self.scaled_dcs_gui = False
    self.selected_wp_type = "WP"

    try:
        with open(f"{self.editor.settings.get('PREFERENCES', 'dcs_path')}\\Config\\options.lua", "r") as f:
            dcs_settings = lua.decode(f.read().replace("options = ", ""))
            self.scaled_dcs_gui = dcs_settings["graphics"]["scaleGui"]
    except (FileNotFoundError, ValueError, TypeError):
        self.logger.error("Failed to decode DCS settings", exc_info=True)

    tesseract_path = self.editor.settings['PREFERENCES'].get('tesseract_path', "tesseract")
    self.logger.info(f"Tesseract path is: {tesseract_path}")
    pytesseract.pytesseract.tesseract_cmd = tesseract_path

    try:
        self.tesseract_version = pytesseract.get_tesseract_version()
        self.capture_status = "Status: Not capturing"
        self.capture_button_disabled = False
    except pytesseract.pytesseract.TesseractNotFoundError:
        self.tesseract_version = None
        self.capture_status = "Status: Tesseract not found"
        self.capture_button_disabled = True

    self.logger.info(f"Tesseract version is: {self.tesseract_version}")
    self.window = self.create_gui()
    keyboard.add_hotkey(self.quick_capture_hotkey, self.toggle_quick_capture)
    if self.enter_aircraft_hotkey != '':
        keyboard.add_hotkey(self.enter_aircraft_hotkey, self.enter_coords_to_aircraft)
def convert_themes(dir_themes, dir_out):
    logging.info("walking through directory: %s", dir_themes)
    for path_directory, _, filenames in os.walk(dir_themes):
        for filename in filenames:
            path = os.path.join(path_directory, filename)
            name_theme = filename.lower()
            data = None

            if not name_theme.endswith(".theme"):
                logging.info("File doesn't end with the .theme extension, skipping: %s", filename)
                continue
            name_theme = name_theme[:-len(".theme")]

            try:
                logging.info("reading file: %s", path)
                with open(path, "r") as fin:
                    data = fin.read()
                if data:
                    logging.debug("raw LUA: %s", data)
                    ## XXX: re.DOTALL doesn't do anything, pass the regex flag (?s) instead
                    data = re.sub(r"(?s)--\[\[.*?\]\]", "", data)
                    # strip whole-line comments (horizontal whitespace, then "--")
                    data = re.sub(r"^[ \t]*--.*", "", data)
                    logging.debug("filtered LUA: %s", data)
                    data = lua.decode("{ %s }" % data)
            except IOError as e:
                logging.error("Unable to open file: %s", e)
                continue
            except lua.ParseError as e:
                logging.error("Unable to decode LUA: %s", e)
                continue

            if not data:
                logging.error("No data loaded, skipping")
                continue

            logging.debug("resulting Python data: %s", data)
            data = collections.defaultdict(dict, data)
            write_theme(os.path.join(dir_out, "{0}.kak".format(name_theme)), name_theme, data)
def listToFile(data, filename):
    oldsize = len(data)
    data = re.sub(r'c\.StartList\(".*?"\)', '{', data)
    if len(data) == oldsize:
        # StartList not found
        return False
    data = re.sub(r'c\.EndList\(\)', '}', data)
    data = re.sub(r'L\["', '["', data)
    decoded = lua.decode(data)
    with open(filename, 'wb') as file:
        json.dump(decoded, file, encoding='ISO-8859-2')  # hax
    return True
def getStory(filename, type=1):
    if type == 1:
        with open(os.path.join(util.DataDirectory, 'gamecfg', 'story', filename), 'r', encoding='utf-8') as f:
            content = f.read()
        content = re.match(r".*?(\{.*\})", content, flags=re.DOTALL)[1]
        output = slpp.decode(content)
        return output
    elif type == 2:
        with open(os.path.join(util.DataDirectory, 'gamecfg', 'dungeon', filename), 'r', encoding='utf-8') as f:
            content = f.read()
        content = re.match(r".*?(\{.*\})", content, flags=re.DOTALL)[1]
        dungeon = slpp.decode(content)
        storylist = []
        if 'beginStoy' in dungeon.keys():
            storylist.append(dungeon['beginStoy'])
        stage = dungeon['stages']
        for wave in stage[0]['waves']:
            if wave['triggerType'] == 3:
                storylist.append(wave['triggerParams']['id'])
        output = []
        for story in storylist:
            s = getStory(story.lower() + '.lua')
            output.append(s)
        return output
def get_champions(self) -> Iterator[Champion]:
    # Download the page source
    url = "https://leagueoflegends.fandom.com/wiki/Module:ChampionData/data"
    html = download_soup(url, self.use_cache)
    soup = BeautifulSoup(html, "lxml")

    # Pull the relevant champData from the html tags
    spans = soup.find("pre", {"class": "mw-code mw-script"})
    start = None
    spans = spans.text.split("\n")
    for i, span in enumerate(spans):
        if str(span) == "return {":
            start = i
            spans[i] = "{"
    split_stuff = re.compile("({)|(})")
    spans = spans[start:]
    for i, span in enumerate(spans):
        if span in ["-- </pre>", "-- [[Category:Lua]]"]:
            spans[i] = ""
    spans = "".join(spans)
    data = lua.decode(spans)

    # Return the champData as a list of Champions
    self.skin_data = self._get_skins()
    for name, d in data.items():
        if name in [
            "Kled & Skaarl",
            "GnarBig",
            "Mega Gnar",
        ]:
            continue
        if name in ["Kled"]:
            # champion = self._render_champion_data(name, d)
            d["skill_i"] = {1: d["skills"][1], 2: d["skills"][2]}
            d["skill_q"] = {1: d["skills"][3], 2: d["skills"][4]}
            d["skill_e"] = {1: d["skills"][6], 2: d["skills"][7]}
            d["skill_r"] = {1: d["skills"][8], 2: d["skills"][9]}
        if (
            d["id"] == 9999
            or d["date"] == "Upcoming"
            or datetime.strptime(d["date"], "%Y-%m-%d") > datetime.today()
        ):
            # Champion not released yet
            continue
        champion = self._render_champion_data(name, d)
        yield champion
def process_lua_file(self, zone_file):
    v_str = ""
    for item in zone_file.readlines():
        clean_str = item.strip()
        if not item.strip().startswith("--"):
            if item.startswith("return"):
                v_str += "{"
            elif clean_str.startswith("min_material_level"):
                v_str += clean_str[:21] + str(self.get_lowest_num_in_str(clean_str)) + ", "
            elif clean_str.startswith("max_material_level"):
                v_str += clean_str[:21] + str(self.get_highest_num_in_str(clean_str)) + ", "
            else:
                v_str += str(item)
    v_str = " ".join(v_str.split())
    data = slpp.decode(v_str)
    self.process_dict(data, 0)
def read_lua(datasource: str, merge_account_sources=True, accounts=["BLUEM", "396255466#1"]):
    """Attempts to read lua from the given locations."""
    account_data = {key: None for key in accounts}
    for account_name in account_data.keys():
        path_live = f"/Applications/World of Warcraft/_classic_/WTF/Account/{account_name}/SavedVariables/{datasource}.lua"
        with open(path_live, "r") as f:
            account_data[account_name] = lua.decode("{" + f.read() + "}")
    if merge_account_sources and len(accounts) > 1:
        return source_merge(account_data["BLUEM"], account_data["396255466#1"])
    else:
        return account_data
def decode_agent(data, ptrace=False):
    stats = load_stats()
    ag = agent_summary()
    try:
        if isinstance(data, str):
            data = data.decode("utf-8")
        save = lua.decode(data)
        if isinstance(save, basestring):
            raise TypeError("decode failed, got str#%d" % len(save))
        elif not isinstance(save, dict):
            raise TypeError("decode failed, got '%s'" % save.__class__.__name__)
        ag.name = save["name"]
        for bp in get_all_ships(save):
            bp["data"]
            points = 0
            count = 0
            for bl in bp["blocks"]:
                ident = bl if isinstance(bl, int) else bl[0]
                if ident >= 0xffff:
                    continue
                if ident == 833:
                    ag.error = "contains Station Command block 833"
                    return ag
                points += stats[ident].get("deadliness", 0)
                count += 1
            ag.points = max(ag.points, points)
            if count > kConstructorBlockLimit:
                ag.error = "too many blocks (%d is over max)" % count
                return ag
            if points > kPointMax:
                ag.mods = True
        if ag.points == 0 or len(list(saved_blueprints(save))) == 0:
            ag.error = "empty fleet"
        if "mods" in save:
            ag.mods = True
    # except UnicodeDecodeError:
    #     raise
    except Exception as e:
        if ptrace:
            ag.error = traceback.format_exc()
        else:
            ag.error = "%s(%s)" % (e.__class__.__name__, str(e))
    return ag
def __init__(self, site_conf, ipv6_local_network=None, ipv6_uplink_network=None,
             icvpn_ipv4_network=None, icvpn_ipv6_network=None):
    # read and parse site.conf
    with open(site_conf, 'r') as f:
        self.site = lua.decode(f.read())
    if not isinstance(self.site, dict):
        raise TypeError("Unable to parse site.conf")

    self.ipv4_network = ipcalc.Network(self.site["prefix4"])
    self.icvpn_ipv4_network = ipcalc.Network(icvpn_ipv4_network)
    self.icvpn_ipv6_network = ipcalc.Network(icvpn_ipv6_network)
    self.ipv6_uplink_network = ipcalc.Network(ipv6_uplink_network)
    self.ipv6_local_network = ipcalc.Network(ipv6_local_network)
    if "prefix6" in self.site:
        self.ipv6_global_network = ipcalc.Network(self.site["prefix6"])
def write_make_policy() -> None:
    """Writes the make policy to all accounts."""
    make_policy = io.reader("outputs", "make_policy", "parquet")
    new_craft_queue, item_groups = encode_make_policy(make_policy)

    ahm = utils.get_ahm()
    path = utils.make_lua_path(account_name=ahm["account"], datasource="TradeSkillMaster")
    content = io.reader(name=path, ftype="lua", custom="rb")

    craft_mark = (
        f'f@Alliance - {cfg.wow["booty_server"]["server_name"]}@internalData@crafts'
    )
    start, end = utils.find_tsm_marker(content, f'["{craft_mark}"]'.encode("ascii"))

    crafting_dict = lua.decode("{" + content[start:end].decode("ascii") + "}")
    for _, item_data in crafting_dict[craft_mark].items():
        item_name = item_data.get("name", "_no_name")
        queued = new_craft_queue.get(item_name, 0)
        if "queued" in item_data:
            item_data["queued"] = queued

    new_craft = utils.dict_to_lua(crafting_dict).encode("ascii")
    new_craft = new_craft.replace(
        f"\n{craft_mark}".encode("ascii"),
        f'\n["{craft_mark}"]'.encode("ascii"),
    )
    content = content[:start] + new_craft + content[end:]

    # Update item groups
    groups_mark = '["p@Default@userData@items"]'
    item_text = f"{groups_mark} = " + "{"
    for item_code, group in item_groups.items():
        item_text += f'["i:{item_code}"] = "{group}", '
    item_text += "}"

    start, end = utils.find_tsm_marker(content, groups_mark.encode("ascii"))
    content = content[:start] + item_text.encode("ascii") + content[end:]

    io.writer(content, name=path, ftype="lua", custom="wb")
def get_item_urls(use_cache: bool) -> List[str]:
    url = "https://leagueoflegends.fandom.com/wiki/Module:ItemData/data"
    html = download_soup(url, False)
    soup = BeautifulSoup(html, "lxml")

    spans = soup.find("pre", {"class": "mw-code mw-script"})
    start = None
    spans = spans.text.split("\n")
    for i, span in enumerate(spans):
        if str(span) == "return {":
            start = i
            spans[i] = "{"
    split_stuff = re.compile("({)|(})")
    spans = spans[start:]
    for i, span in enumerate(spans):
        if span in ["-- </pre>", "-- [[Category:Lua]]"]:
            spans[i] = ""
    spans = "".join(spans)
    data = lua.decode(spans)
    menus = []
    return data
def read_object(self, object_file):
    v_str = ""
    for item in object_file.readlines():
        clean_str = item.strip()
        if not item.strip().startswith("--"):
            if item.startswith("load") or item.startswith("for") or item.startswith("local"):
                pass
            elif item.startswith("return"):
                v_str += "{"
            else:
                v_str += str(item).replace("newEntity", "|")
    v_str = " ".join(v_str.split())
    obj_list = v_str.split("|")
    artifact_list = []
    for object in obj_list:
        data = slpp.decode(object)
        # print data
        if data:
            if "base" in data:
                if data["base"] != "BASE_LORE":
                    self.artifact_list.append(str(data["name"]))
            else:
                self.artifact_list.append(str(data["name"]))
def siteConf(self, site_conf):
    with open(site_conf, 'r') as f:
        return lua.decode(f.read())
def get_items_dictionary():
    path = 'C:\\Program Files (x86)\\Windower4\\res\\items.lua'
    with open(path) as fd:
        data = fd.read().replace('return', '', 1)
    return lua.decode(data)
def read_lua(path):
    fil = io.open(path, encoding="utf-8")
    text = lua.decode(fil.read())
    fil.close()
    return text
def generic_get_module_data(module, REGEX):
    data = REGEX.search(module)
    return lua.decode(data.groupdict()['data']) if data else {}
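# A hypothetical usage sketch for generic_get_module_data (the regex and module text
# below are invented for illustration): the compiled pattern is expected to expose a
# named group called 'data' that captures the Lua table to decode.
import re
from slpp import slpp as lua

ITEM_REGEX = re.compile(r"return\s+(?P<data>\{.*\})", re.DOTALL)
module_text = 'return { ["id"] = 42, ["name"] = "Example" }'
print(generic_get_module_data(module_text, ITEM_REGEX))
# expected: {'id': 42, 'name': 'Example'}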
if __name__ == '__main__':
    options, args = getopt.getopt(sys.argv[1:], "ha")
    agents = False
    for (opt, val) in options:
        if opt == "-h":
            help()
        if opt == "-a":
            agents = True
    if len(args) == 0:
        help()
    for path in args:
        dat = read_file(path)
        if agents:
            ag = decode_agent(dat)
            if not ag.error:
                print "OK %dP: %s" % (ag.points, path)
            else:
                print "FAILED %s: %s" % (ag.error, path)
        else:
            dec = None
            error = ""
            try:
                dec = lua.decode(dat)
            except Exception as e:
                error = str(e)
            if dec:
                print "OK: %s" % path
            else:
                print "FAILED (%s): %s" % (error, path)