def parse_links_items_rewards():
    """Link maps to their exploration-rating reward items.

    Reads ies.ipf/map.ies; every map with a non-empty
    MapRatingRewardItem1 and a non-zero MapRatingRewardCount1 gets a
    bidirectional Link_Items_Exploration / Link_Maps_Exploration link.
    """
    logging.debug('Parsing Maps <> Items (Rewards)...')

    ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies.ipf', 'map.ies')

    with open(ies_path, 'rb') as ies_file:
        for row in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
            # Skip maps that grant no exploration reward
            if int(row['MapRatingRewardCount1']) == 0 or len(row['MapRatingRewardItem1']) == 0:
                continue

            # Hoisted: the original resolved the item link twice and
            # converted the reward count four times.
            reward_count = int(row['MapRatingRewardCount1'])
            item_link_obj = globals.get_item_link(row['MapRatingRewardItem1'])
            item = item_link_obj.entity

            item_link = {
                'Chance': 100,
                'Item': item_link_obj,
                'Quantity_MAX': reward_count,
                'Quantity_MIN': reward_count,
            }

            map = globals.maps_by_name[row['ClassName']]
            map_link = {
                'Chance': 100,
                'Map': globals.get_map_link(map['$ID_NAME']),
                'Quantity_MAX': reward_count,
                'Quantity_MIN': reward_count,
            }

            globals.link(
                map, 'Link_Items_Exploration', map_link,
                item, 'Link_Maps_Exploration', item_link
            )
def parse_links_jobs():
    # Link attributes <> jobs: for each job in job.ies, read its
    # per-job ability file (ies_ability.ipf/ability_<engname>.ies) and,
    # per attribute row, update description/level-cap/upgrade-cost/unlock
    # data and create attribute<>skill and attribute<>job links.
    logging.debug("Parsing attributes <> jobs...")

    # Lua scripts evaluated per attribute row: price formula and unlock check
    LUA = luautil.load_script('ability_price.lua', '*')
    LUA_UNLOCK = luautil.load_script('ability_unlock.lua', '*', False)

    # Parse level, unlock and formula
    ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies.ipf', 'job.ies')

    with open(ies_path, 'rb') as ies_file:
        for row in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
            job = globals.jobs_by_name[row['ClassName']]
            ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies_ability.ipf', 'ability_' + row['EngName'].lower() + '.ies')

            # If this job is still under development, skip
            if not os.path.isfile(ies_path):
                continue

            # NOTE(review): the inner loop reuses the names 'ies_file' and
            # 'row', shadowing the outer loop's variables. Safe only because
            # the outer 'row' is never read again after this point.
            with open(ies_path, 'rb') as ies_file:
                for row in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
                    attribute = globals.attributes_by_name[row['ClassName']]
                    # Normalize None to '' before appending the unlock text
                    attribute['DescriptionRequired'] = attribute['DescriptionRequired'] if attribute['DescriptionRequired'] else ''
                    # '{nl}'/'{b}' are in-game markup tokens, not Python formatting
                    attribute['DescriptionRequired'] = attribute['DescriptionRequired'] + '{nl}{b}' + parser_translations.translate(row['UnlockDesc']) + '{b}'
                    attribute['LevelMax'] = int(row['MaxLevel'])

                    # Parse attribute cost: evaluate the Lua price formula for
                    # each level 1..MaxLevel, then drop non-positive prices
                    if row['ScrCalcPrice']:
                        for lv in range(int(row['MaxLevel'])):
                            attribute['UpgradePrice'].append(LUA[row['ScrCalcPrice']](None, row['ClassName'], lv + 1, attribute['LevelMax'])[0])

                        attribute['UpgradePrice'] = [value for value in attribute['UpgradePrice'] if value > 0]

                    # Parse attribute skill (in case it is missing in the ability.ies)
                    if not attribute['Link_Skills'] and row['UnlockArgStr'] in globals.skills_by_name:
                        logging.debug('adding missing skill %s', row['UnlockArgStr'])
                        skill = globals.skills_by_name[row['UnlockArgStr']]
                        globals.link(
                            attribute, 'Link_Skills', globals.get_attribute_link(attribute),
                            skill, 'Link_Attributes', globals.get_skill_link(skill)
                        )

                    # Parse attribute job: attributes with no skill link (or
                    # linked to 'All' skills) are linked to the job itself
                    if not attribute['Link_Skills'] or 'All' in attribute['Link_Skills']:
                        globals.link(
                            attribute, 'Link_Jobs', globals.get_attribute_link(attribute),
                            job, 'Link_Attributes',
                            globals.get_job_link(job)
                        )

                    # Parse attribute unlock: translate the Lua unlock function
                    # body to JavaScript, but only the first time (keep any
                    # previously stored value)
                    attribute['Unlock'] = luautil.lua_function_source_to_javascript(
                        luautil.lua_function_source(LUA_UNLOCK[row['UnlockScr']])[1:-1]  # remove 'function' and 'end'
                    ) if not attribute['Unlock'] and row['UnlockScr'] else attribute['Unlock']

                    # Per-job unlock arguments, keyed by the job's numeric ID
                    attribute['UnlockArgs'][job['$ID']] = {
                        'UnlockArgStr': row['UnlockArgStr'],
                        'UnlockArgNum': row['UnlockArgNum'],
                    }
def parse_links_skills():
    """Resolve pending attribute->skill name references into real links.

    Attributes may still hold raw skill class-name strings in
    'Link_Skills'; each resolvable name (not 'All', known skill) is
    turned into a bidirectional attribute<>skill link, after which the
    leftover name strings are filtered out of the list.
    """
    logging.debug("Parsing attributes <> skills...")

    for attribute in globals.attributes.values():
        for entry in attribute['Link_Skills']:
            # Only unresolved string entries need work
            if not isinstance(entry, basestring):
                continue
            if entry == 'All' or entry not in globals.skills_by_name:
                continue

            resolved = globals.skills_by_name[entry]
            globals.link(
                attribute, 'Link_Skills', globals.get_attribute_link(attribute),
                resolved, 'Link_Attributes', globals.get_skill_link(resolved)
            )

        # Keep only the non-string (already linked) entries
        attribute['Link_Skills'] = [
            entry for entry in attribute['Link_Skills']
            if not isinstance(entry, basestring)
        ]
def parse_links_items():
    """Link monsters to their item drops.

    Reads ies_drop.ipf/<monster-id>.ies per monster; each valid row
    creates a bidirectional Link_Items / Link_Monsters link carrying the
    drop chance (DropRatio / 100) and quantity range.

    FIXME(review): another 'parse_links_items' (Maps <> Items) is defined
    later in this module and shadows this definition at import time —
    one of the two should be renamed.
    """
    logging.debug('Parsing Monsters <> Items...')

    for monster in globals.monsters.values():
        ies_name = monster['$ID_NAME'] + '.ies'
        ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies_drop.ipf', ies_name.lower())

        try:
            with open(ies_path, 'rb') as ies_file:
                for row in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
                    link = globals.get_item_link(row['ItemClassName'])
                    if not row['ItemClassName'] or link is None:
                        continue

                    item = link.entity
                    # Hoisted; /100.0 keeps float division under Python 2
                    chance = int(row['DropRatio']) / 100.0

                    # Fix: the original re-resolved the link with
                    # globals.get_item_link(item), passing the entity dict
                    # where sibling call sites pass a class name. Reuse the
                    # link already resolved from ItemClassName instead.
                    item_link = {
                        'Chance': chance,
                        'Item': link,
                        'Quantity_MAX': int(row['Money_Max']),
                        'Quantity_MIN': int(row['Money_Min']),
                    }
                    monster_link = {
                        'Chance': chance,
                        'Monster': globals.get_monster_link(monster),
                        'Quantity_MAX': int(row['Money_Max']),
                        'Quantity_MIN': int(row['Money_Min']),
                    }

                    globals.link(monster, 'Link_Items', monster_link, item, 'Link_Monsters', item_link)
        except IOError:
            # Monster simply has no drop table file — best-effort skip
            continue
def parse_links_npcs():
    # Link maps <> NPCs (and maps <> gatherable items) from the spawn
    # tables: anchor_*.ies gives spawn positions, gentype_*.ies gives
    # what spawns there; anchors are grouped per NPC/item before linking.
    logging.debug('Parsing Maps <> NPCs...')
    ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies.ipf', 'map.ies')

    with open(ies_path, 'rb') as ies_file:
        for row in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
            map = globals.maps_by_name[row['ClassName']]
            # World coordinates are centered; shift by half width/height so
            # anchor positions become non-negative image coordinates
            map_offset_x = int(round(int(row['Width']) / 2.0))
            map_offset_y = int(round(int(row['Height']) / 2.0))
            anchors = {}

            # Spawn Positions (aka Anchors), keyed by GenType id.
            # NOTE(review): the readers below reuse 'ies_file'/'row',
            # shadowing the outer loop's variables — safe only because the
            # outer 'row' is not read again within this iteration.
            ies_file = 'anchor_' + map['$ID_NAME'] + '.ies'
            ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies_mongen.ipf', ies_file.lower())

            try:
                with open(ies_path, 'rb') as ies_file:
                    for row in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
                        obj = anchors[row['GenType']] if row['GenType'] in anchors else {
                            'Anchors': [],
                            'GenType': {}
                        }
                        # PosZ is subtracted: image Y axis grows downward
                        obj['Anchors'].append([
                            int((map_offset_x + float(row['PosX'])) * MAP_SCALE),
                            int((map_offset_y - float(row['PosZ'])) * MAP_SCALE),
                        ])
                        anchors[row['GenType']] = obj
            except IOError:
                # No anchor file for this map — skip the entire map
                continue

            # Spawn NPCs: attach the gentype row to its anchor group
            ies_file = 'gentype_' + map['$ID_NAME'] + '.ies'
            ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies_mongen.ipf', ies_file.lower())

            try:
                with open(ies_path, 'rb') as ies_file:
                    for row in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
                        if globals.get_npc_link(row['ClassType']) is None:
                            continue
                        if row['GenType'] not in anchors:
                            continue

                        obj = anchors[row['GenType']]
                        obj['GenType'] = row
            except IOError:
                continue

            # Group by Item/NPC and join anchors
            anchors_by_npc = {}

            for anchor in anchors.values():
                # Anchor groups that never got a gentype row are dropped
                if len(anchor['GenType'].keys()) == 0:
                    continue

                # ArgStr2 like 'prefix:ItemName:suffix' marks an item spawn;
                # otherwise fall back to the NPC ClassType.
                # NOTE(review): pattern should be a raw string (r'\w+:(\w+):\w+')
                item_name = re.search('\w+:(\w+):\w+', anchor['GenType']['ArgStr2'])
                npc_name = item_name.group(1) if item_name else anchor['GenType']['ClassType']

                if npc_name in anchors_by_npc:
                    # Merge positions and sum populations for the same spawn
                    anchors_by_npc[npc_name]['Anchors'] += anchor['Anchors']
                    anchors_by_npc[npc_name]['GenType']['MaxPop'] = int(anchors_by_npc[npc_name]['GenType']['MaxPop']) + int(anchor['GenType']['MaxPop'])
                else:
                    anchors_by_npc[npc_name] = anchor

            # Link everyone
            for anchor_name in anchors_by_npc.keys():
                anchor = anchors_by_npc[anchor_name]

                if globals.get_item_link(anchor_name):
                    # Item spawn (e.g. gatherable): map <> item link
                    item = globals.get_item_link(anchor_name).entity
                    item_link = globals.get_item_link(item['$ID_NAME'])
                    item_link = {
                        'Item': item_link,
                        'Population': int(anchor['GenType']['MaxPop']),
                        'Positions': anchor['Anchors'],
                        # RespawnTime is in milliseconds; store seconds
                        'TimeRespawn': int(anchor['GenType']['RespawnTime']) / 1000.0,
                    }
                    map_link = globals.get_map_link(map['$ID_NAME'])
                    map_link = {
                        'Chance': 100,
                        'Map': map_link,
                        'Quantity_MAX': 1,
                        'Quantity_MIN': 1,
                    }
                    globals.link(
                        map, 'Link_NPCs', map_link,
                        item, 'Link_Maps', item_link
                    )
                elif globals.get_npc_link(anchor_name):
                    # NPC spawn: map <> NPC link with population and positions
                    map_link = globals.get_map_link(map['$ID_NAME'])
                    map_link = {
                        'Map': map_link,
                        'Population': int(anchor['GenType']['MaxPop']),
                        'TimeRespawn': int(anchor['GenType']['RespawnTime']) / 1000.0,
                    }
                    npc = globals.get_npc_link(anchor_name).entity
                    npc_link = globals.get_npc_link(npc['$ID_NAME'])
                    npc_link = {
                        'NPC': npc_link,
                        'Population': int(anchor['GenType']['MaxPop']),
                        'Positions': anchor['Anchors'],
                        'TimeRespawn': int(anchor['GenType']['RespawnTime']) / 1000.0,
                    }
                    globals.link(
                        map, 'Link_NPCs', map_link,
                        npc, 'Link_Maps', npc_link
                    )
def parse_links_items():
    # Link maps <> zone item drops from zonedropitemlist_*.ies, expanding
    # DropGroup references (loot tables) into individual weighted drops.
    # NOTE(review): this shadows the earlier 'parse_links_items'
    # (Monsters <> Items) defined above in this module — rename one.
    logging.debug('Parsing Maps <> Items...')

    for map in globals.maps.values():
        ies_file = 'zonedropitemlist_' + map['$ID_NAME'] + '.ies'
        ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies_drop.ipf', 'zonedrop', ies_file.lower())

        # For some reason IMC uses these 2 types of name formats...
        if not os.path.isfile(ies_path):
            ies_file = 'zonedropitemlist_f_' + map['$ID_NAME'] + '.ies'
            ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies_drop.ipf', 'zonedrop', ies_file.lower())

        try:
            drops = []

            # NOTE(review): 'ies_file' and 'ies_path' are reused below for
            # the nested dropgroup file while the zone file is still open —
            # the outer reader keeps its own handle, so this works, but it
            # is fragile.
            with open(ies_path, 'rb') as ies_file:
                for zone_drop in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
                    # Direct item drop on this row
                    if len(zone_drop['ItemClassName']) > 0:
                        drops.append({
                            'ItemClassName': zone_drop['ItemClassName'],
                            'DropRatio': int(zone_drop['DropRatio']) / 100.0,
                            'Money_Max': int(zone_drop['Money_Max']),
                            'Money_Min': int(zone_drop['Money_Min']),
                        })

                    # Note: drop groups work like a loot table
                    # Therefore we need to sum the DropRatio of the entire group before calculating the actual one
                    if len(zone_drop['DropGroup']) > 0:
                        ies_file = zone_drop['DropGroup'] + '.ies'
                        ies_path = os.path.join(constants.PATH_INPUT_DATA, 'ies_drop.ipf', 'dropgroup', ies_file.lower())
                        group_drop_ratio = 0
                        group_drops = []

                        with open(ies_path, 'rb') as ies_file:
                            for group_drop in csv.DictReader(ies_file, delimiter=',', quotechar='"'):
                                group_drop_ratio += int(group_drop['DropRatio'])
                                group_drops.append({
                                    'ItemClassName': group_drop['ItemClassName'],
                                    'DropRatio': int(group_drop['DropRatio']),
                                    'Money_Max': 0,
                                    'Money_Min': 0,
                                })

                        # Normalize: group entry chance = zone chance * weight
                        # within the group.
                        # NOTE(review): raises ZeroDivisionError if every
                        # DropRatio in the group is 0 — confirm data never
                        # contains an all-zero group.
                        for group_drop in group_drops:
                            group_drop['DropRatio'] = int(zone_drop['DropRatio']) / 100.0 * group_drop['DropRatio'] / group_drop_ratio
                            drops.append(group_drop)

            # Create the map <> item links from the flattened drop list
            for drop in drops:
                item_link = {
                    'Chance': drop['DropRatio'],
                    'Item': globals.get_item_link(drop['ItemClassName']),
                    'Quantity_MAX': drop['Money_Max'],
                    'Quantity_MIN': drop['Money_Min'],
                }
                map_link = {
                    'Chance': drop['DropRatio'],
                    'Map': globals.get_map_link(map['$ID_NAME']),
                    'Quantity_MAX': drop['Money_Max'],
                    'Quantity_MIN': drop['Money_Min'],
                }
                globals.link(
                    map_link['Map'].entity, 'Link_Items', map_link,
                    item_link['Item'].entity, 'Link_Maps', item_link
                )
        except IOError:
            # Map has no zone drop file — best-effort skip
            continue