def structure_import_orders(structure_id):
    """Download every market order from a player-owned structure.

    Structure markets require authentication, so the stored OAuth tokens are
    refreshed first and the (possibly rotated) set is written back to
    config.json. Then /v1/markets/structures/{structure_id} is paged until
    the X-Pages header is exhausted.

    Returns a list of raw order dicts.
    """
    print('importin page 1')
    all_orders = []
    # Refresh tokens and persist them immediately so a later crash does not
    # lose a rotated refresh token. (The original also built an unused
    # `market_url` string here; removed.)
    tokens = esi_calling.check_tokens(tokens=config['tokens'],
                                      client_secret=client_secret,
                                      client_id=client_id)
    config['tokens'] = tokens
    with open('config.json', 'w') as outfile:
        json.dump(config, outfile, indent=4)
    # The first page also reports the total page count via X-Pages.
    response = esi_calling.call_esi(scope='/v1/markets/structures/{par}',
                                    url_parameter=structure_id,
                                    tokens=tokens,
                                    job='get citadel orders')
    all_orders.extend(response.json())
    total_pages = int(response.headers['X-Pages'])
    print('total number of pages:' + str(total_pages))
    responses = []
    for page in range(2, total_pages + 1):
        print('\rimportin page: ' + str(page) + '/' + str(total_pages), end="")
        responses.append(
            esi_calling.call_esi(scope='/v1/markets/structures/{par}',
                                 url_parameter=structure_id,
                                 parameters={'page': page},
                                 tokens=tokens,
                                 job='get citadel orders'))
    for response in responses:
        all_orders.extend(response.json())
    print('Got {:,d} orders.'.format(len(all_orders)))
    return all_orders
def import_regions():
    """Fetch all region IDs from ESI, resolve each region's name, cache the
    name -> id mapping in regions.json and return it."""
    print_time('Importing regions')
    listing = esi_calling.call_esi(scope='/v1/universe/regions/',
                                   job='get regions')
    regions = {}
    for region_id in listing.json():
        print('importing a region name...')
        detail = esi_calling.call_esi(scope='/v1/universe/regions/{par}/',
                                      url_parameters=[region_id],
                                      job='get region name')
        regions[detail.json()["name"]] = region_id
    # Persist the mapping for later runs.
    with open('regions.json', 'w') as outfile:
        json.dump(regions, outfile, indent=4)
    return regions
def print_dogma_attributes(esi_response):
    """Pretty-print the dogma attributes of a type.

    esi_response: ESI response for /universe/types/{type_id}.
    Side effects: unknown attribute IDs are bulk-fetched, added to the
    module-level dogma_attributes cache and persisted to dogma_attributes.gz.
    """
    type_info = esi_response.json()
    if 'dogma_attributes' not in type_info:
        print(type_info['name'], ' has no dogma attributes')
        return
    print('\ndogma attributes:\n')
    attributes = type_info['dogma_attributes']
    # Collect attribute IDs with no cached metadata yet.
    new_attributes = [
        attr['attribute_id'] for attr in attributes
        if str(attr['attribute_id']) not in dogma_attributes
    ]
    if new_attributes:
        esi_response_arrays = esi_calling.call_esi(
            scope='/v1/dogma/attributes/{par}',
            url_parameters=new_attributes,
            datasource=datasource,
            job='get info on dogma attribute')
        for array in esi_response_arrays:
            response_json = array[0].json()
            if 'attribute_id' in response_json:
                dogma_attributes[str(
                    response_json['attribute_id'])] = response_json
        #Save the ID list
        with gzip.GzipFile('dogma_attributes.gz', 'w') as outfile:
            outfile.write(
                json.dumps(dogma_attributes, indent=2).encode('utf-8'))
    # Print each attribute; hoist the cache entry instead of re-indexing it.
    for attr in attributes:
        dogma_id = attr['attribute_id']
        value = attr['value']
        info = dogma_attributes.get(str(dogma_id))
        if info is None:
            print("Unknown dogma ID ", str(dogma_id))
            continue
        name = info['name']
        display_name = info['display_name']
        description = info.get('description', "")
        print('    {:<30s} {:<10s} {:<}{:<}'.format(
            name, str(value), '( ' + display_name,
            ', ' + description + ' )'))
def import_orders(region_id):  #'10000044' Solitude
    """Download all market orders of a region, paging through
    /v1/markets/{region_id}/orders/ until X-Pages is exhausted."""
    print('importin page 1')
    first = esi_calling.call_esi(scope='/v1/markets/{par}/orders/',
                                 url_parameter=region_id,
                                 job='get market orders')
    all_orders = []
    all_orders.extend(first.json())
    total_pages = int(first.headers['X-Pages'])
    print('total number of pages:' + str(total_pages))
    # Queue the remaining page requests, then drain their JSON bodies.
    pending = []
    for page_number in range(2, total_pages + 1):
        print('\rimportin page: ' + str(page_number) + '/' + str(total_pages),
              end="")
        pending.append(
            esi_calling.call_esi(scope='/v1/markets/{par}/orders/',
                                 url_parameter=region_id,
                                 parameters={'page': page_number},
                                 job='get market orders'))
    for reply in pending:
        all_orders.extend(reply.json())
    print('. Got {:,d} orders.'.format(len(all_orders)))
    return all_orders
def import_orders(region_id):  #'10000044' Solitude #10000002 = Jita
    """Fetch every market-order page for a region (esi_calling handles the
    paging) and return all orders as one list."""
    print_time('fetching market for region ID' + str(region_id))
    page_responses = esi_calling.call_esi(scope='/v1/markets/{par}/orders/',
                                          url_parameters=[region_id],
                                          job='get market orders')[0]
    all_orders = []
    for page_response in page_responses:
        all_orders += page_response.json()
    print_time('Got {:,d} orders.'.format(len(all_orders)))
    return all_orders
def get_group_info(group_ids):
    """Fetch /v1/universe/groups/{group_id}/ for every ID and store the
    results in the module-level group_cache (keyed by str group_id), then
    persist the cache to group_cache.gz. No-op on an empty ID list."""
    if len(group_ids) == 0:
        return
    replies = esi_calling.call_esi(scope='/v1/universe/groups/{par}/',
                                   url_parameters=group_ids,
                                   job='get group info')
    for reply in replies:
        info = reply[0].json()
        group_cache[str(info['group_id'])] = info
    # Persist the updated cache.
    with gzip.GzipFile('group_cache.gz', 'w') as outfile:
        outfile.write(json.dumps(group_cache).encode('utf-8'))
def fetch_contracts(region_id):  #10000044 = Solitude
    """Return a list with every public contract of a region.

    Pages through /v1/contracts/public/{region_id}/ (esi_calling handles the
    paging) and concatenates the JSON bodies of all page responses.
    """
    print_time('fetching contracts for region ID' + str(region_id))
    all_contracts = []
    response_array = esi_calling.call_esi(scope='/v1/contracts/public/{par}/',
                                          url_parameters=[region_id],
                                          job='get region contracts')[0]
    # NOTE: the original read response_array[0].headers['expires'] into an
    # unused local; dropped — it only risked an IndexError on an empty array.
    for response in response_array:
        all_contracts.extend(response.json())
    print_time('Got {:,d} contracts.'.format(len(all_contracts)))
    return all_contracts
def get_item_info(item_ids):
    """Fetch /v3/universe/types/{type_id}/ for every listed ID and record
    each result in the module-level item_cache, keyed by str(type_id).
    No-op on an empty ID list."""
    if len(item_ids) == 0:
        return
    reply_arrays = esi_calling.call_esi(scope='/v3/universe/types/{par}/',
                                        url_parameters=item_ids,
                                        job='get item infos')
    for reply in reply_arrays:
        type_info = reply[0].json()
        item_cache[str(type_info['type_id'])] = type_info
def get_item_attributes(type_id):
    """Fetch a type's attributes and record them in type_id_list.

    type_id is a string key. Stores name, group_id, market_group_id and the
    interesting dogma attributes (633 = meta level, 422 = tech level,
    1692 = meta group id), then rewrites type_id_list.json.
    """
    esi_response = esi_calling.call_esi(scope='/v3/universe/types/{par}',
                                        url_parameter=type_id,
                                        job='get item attributes')
    # Hoisted: the original called esi_response.json() on every access,
    # re-parsing the body over a dozen times.
    data = esi_response.json()
    entry = {'name': data['name']}
    if 'group_id' in data:
        entry['group_id'] = data['group_id']
    if 'market_group_id' in data:
        entry['market_group_id'] = data['market_group_id']
    # Map the dogma attribute IDs we care about onto our field names.
    interesting = {633: 'meta_level', 422: 'tech_level', 1692: 'meta_group_id'}
    for attribute in data.get('dogma_attributes', []):
        field = interesting.get(attribute['attribute_id'])
        if field is not None:
            entry[field] = attribute['value']
    type_id_list[type_id] = entry
    #Save the item info list
    with open('type_id_list.json', 'w') as outfile:
        json.dump(type_id_list, outfile, indent=4)
def import_characters():
    """Export each configured character's skills and implants to <name>.chr.

    For every character in config['characters']: refresh its tokens, pull
    skills (/v4/characters/{id}/skills) and implants
    (/v1/characters/{id}/implants), resolve type IDs to names (cached in
    item_id.json), and write a "Skill=level" / "Implant=name" text file.
    Clears the module-level `run` flag so the caller's loop stops.
    """
    global run
    if len(config['characters']) == 0:
        print('No characters to import')
    else:
        print('importing characters')
        #Load saved skill names and implant names
        try:
            with open('item_id.json') as infile:
                item_id = json.load(infile)
        except (IOError, json.decoder.JSONDecodeError):
            #Some error. Just start the id list from scratch
            item_id = {}
            with open('item_id.json', 'w') as outfile:
                json.dump(item_id, outfile, indent=4)
        #Import characters one by one
        for name in config['characters']:
            tokens = esi_calling.check_tokens(
                config['characters'][name]['tokens'],
                client_id=config['client_id'],
                client_secret=config['client_secret'])
            character_id = config['characters'][name]['character_id']
            print('\nimporting ' + name + '...')
            #Build the whole .chr file content in `output`, then write once.
            output = ''
            #Import skills first
            esi_response = esi_calling.call_esi(
                scope='/v4/characters/{par}/skills',
                url_parameter=character_id,
                tokens=tokens,
                calltype='get',
                job='import character skills')[0]
            skills = esi_response.json()['skills']
            for n, skill in enumerate(skills):
                skill_id = skill['skill_id']
                skill_level = skill['active_skill_level']
                try:
                    skill_name = item_id[str(skill_id)]
                except KeyError:
                    #Unknown skill. Get name from API
                    print(str(n + 1) + '/' + str(len(skills) + 1) +
                          ' Fetching skill name for ID:', skill_id, end="")
                    esi_response = esi_calling.call_esi(
                        scope='/v3/universe/types/{par}',
                        url_parameter=skill_id,
                        tokens=tokens,
                        calltype='get',
                        job='get implant name')[0]
                    skill_name = esi_response.json()['name']
                    print(' ', skill_name)
                    # BUGFIX: store under the string key the lookup above
                    # uses; the original stored the int key, so the
                    # in-memory cache never hit and every occurrence of a
                    # skill was re-fetched.
                    item_id[str(skill_id)] = skill_name
                output = output + skill_name + '=' + str(skill_level) + '\n'
            print('')
            #Then implants, appended to the same string
            esi_response = esi_calling.call_esi(
                scope='/v1/characters/{par}/implants',
                url_parameter=character_id,
                tokens=tokens,
                calltype='get',
                job='import character implants')[0]
            for implant_id in esi_response.json():
                try:
                    implant_name = item_id[str(implant_id)]
                except KeyError:
                    print('Fetching implant name for ID:', implant_id, end="")
                    esi_response = esi_calling.call_esi(
                        scope='/v3/universe/types/{par}',
                        url_parameter=implant_id,
                        tokens=tokens,
                        calltype='get',
                        job='get implant name')[0]
                    implant_name = esi_response.json()['name']
                    print(' ', implant_name)
                    # BUGFIX: string key, same reason as for skills.
                    item_id[str(implant_id)] = implant_name
                output = output + 'Implant=' + implant_name + '\n'
            print('')
            #save item id names
            with open("item_id.json", "w") as outfile:
                json.dump(item_id, outfile, indent=4)
            #Write skills to txt
            with open(name + '.chr', "w") as text_file:
                print(output, file=text_file)
    #stop running after imported
    run = False
    return
#Load cached item data
try:
    #Load cached type ID info
    with open('type_id_list.json') as infile:
        type_id_list = json.load(infile)
except FileNotFoundError:
    #No file found. Start from scratch
    type_id_list = {}

#Load cached categories
try:
    with open('categories.json') as infile:
        categories = json.load(infile)
except FileNotFoundError:
    #No cache yet: pull the category list and resolve every name from ESI.
    print('Importing category list...')
    response = esi_calling.call_esi(scope='/v1/universe/categories/',
                                    job='get list of categories')
    list_categories = response.json()
    print('\rImporting category names... ')
    categories = {}
    # enumerate replaces the original hand-rolled n counter.
    for n, category_id in enumerate(list_categories, start=1):
        print('\r' + str(n) + '/' + str(len(list_categories)), end="")
        response = esi_calling.call_esi(scope='/v1/universe/categories/{par}',
                                        url_parameter=category_id,
                                        job='get list of categories')
        categories[str(category_id)] = response.json()['name']
def _format_isk(amount):
    #Render an ISK amount as a short human-readable string ("3 billion isk").
    if amount > 1000000000:  #1b
        return str(round(amount / 1000000000)) + ' billion isk'
    if amount > 1000000:  #1m
        return str(round(amount / 1000000)) + ' million isk'
    if amount > 1000:  #1k
        return str(round(amount / 1000)) + ' thousand isk'
    return str(round(amount)) + ' isk'


def analyze_contracts():
    """Fetch, cache and evaluate every item-exchange contract of the
    configured region, writing the profitable ones to profitable.txt.

    Pipeline:
      1. fetch_contracts() for the region; add unseen ones to contract_cache.
      2. Batch-download item lists of uncached contracts (200 IDs per batch).
      3. Batch-import attributes of item types missing from item_cache.
      4. evaluate_items() per contract; collect profitable buy/sell entries.
      5. Sort by profit percentage and write clickable links to the file.
    """
    region_id = regions[config['region']]
    all_contracts = fetch_contracts(region_id)
    profitable_buy = ''
    profitable_sell = ''
    profit_buy_contracts_array = []
    profit_buy_percentage_array = []
    profit_sell_contracts_array = []
    profit_sell_percentage_array = []
    good_contracts = {}
    print('')
    #Remember contracts not seen before; only those need their items fetched.
    uncached_contracts = []
    for contract in all_contracts:
        contract_id = str(contract['contract_id'])
        if not contract_id in contract_cache:
            uncached_contracts.append(contract)
            contract_cache[contract_id] = contract
    #Import the item lists of the new contracts, 200 contract IDs per batch.
    print('Importing', len(uncached_contracts), 'contracts...')
    counter = 0
    ids = []
    print('')
    for contract in uncached_contracts:
        counter += 1
        if contract['type'] == 'item_exchange':
            ids.append(str(contract['contract_id']))
        if len(ids) == 200 or counter == len(uncached_contracts):
            print('\rImporting ', counter, '/', len(uncached_contracts),
                  end="", flush=True)
            response_array = esi_calling.call_esi(
                scope='/v1/contracts/public/items/{par}/',
                url_parameters=ids,
                job='get contract items')
            for index in range(len(ids)):
                contract_cache[ids[index]]['items'] = []
                #204/400/403/404 mean an expired or accepted contract:
                #leave [] for items in that case.
                if not response_array[index][0].status_code in [
                        204, 400, 403, 404
                ]:
                    contract_items = []
                    for response in response_array[index]:
                        contract_items.extend(response.json())
                    contract_cache[ids[index]]['items'].extend(contract_items)
            ids = []
    with gzip.GzipFile('contract_cache.gz', 'w') as outfile:
        outfile.write(json.dumps(contract_cache, indent=2).encode('utf-8'))
    #All contracts are now in cache.
    #Check contracts for items that need to be imported (not on market).
    print_time('\nchecking contracts for new items')
    print('   contracts:', len(contract_cache))
    all_items = []
    fetch_ids = []
    for contract_id in contract_cache:
        contract = contract_cache[contract_id]
        if 'items' in contract:
            for item_dict in contract['items']:
                type_id = item_dict['type_id']
                if not type_id in all_items:
                    all_items.append(type_id)
    print_time('Found' + str(len(all_items)) +
               'unique items in contracts. Checking items.')
    counter = 0
    for type_id in all_items:
        counter += 1
        if not str(type_id) in item_cache:
            fetch_ids.append(type_id)
        if len(fetch_ids) == 500 or (counter == len(all_items)
                                     and len(fetch_ids) != 0):
            print('   importing item attributes', counter, '/',
                  len(all_items))
            get_item_info(fetch_ids)
            fetch_ids = []
    with gzip.GzipFile('item_cache.gz', 'w') as outfile:
        outfile.write(json.dumps(item_cache, indent=2).encode('utf-8'))
    print_time('Item check done')
    number_of_contracts = len(all_contracts)
    index = 1
    #Now estimate the value of each contract.
    for contract in all_contracts:
        print('   \ranalyzing ', index, '/', number_of_contracts, end="")
        index = index + 1
        if contract["type"] != "item_exchange":
            continue
        elif contract["start_location_id"] != 60003760 and config[
                'jita_limit'] == True and config['region'] == 'The Forge':
            #Jita-only mode: skip contracts not started at Jita 4-4.
            continue
        contract_id = str(contract['contract_id'])
        if 'items' in contract_cache[contract_id]:
            contract_items = contract_cache[contract_id]['items']
            cost = contract['price'] - contract['reward']
            profit = evaluate_items(cost=cost, contract_items=contract_items)
        else:
            profit = {'profit_sell': [0, 0], 'profit_buy': [0, 0]}
        if profit['profit_buy'][0] > 0:
            profit_buy_contracts_array.append(contract['contract_id'])
            profit_buy_percentage_array.append(profit['profit_buy'][1])
            good_contracts[contract['contract_id']] = {
                'profit_isk': _format_isk(profit['profit_buy'][0]),
                'percentage': str(profit['profit_buy'][1])
            }
        elif profit['profit_sell'][0] > 0:
            profit_sell_contracts_array.append(contract['contract_id'])
            profit_sell_percentage_array.append(profit['profit_sell'][1])
            good_contracts[contract['contract_id']] = {
                'profit_isk': _format_isk(profit['profit_sell'][0]),
                'percentage': str(profit['profit_sell'][1])
            }
    if len(profit_buy_contracts_array) == 0:
        print('   No profitable contracts')
    else:
        #Sort by percentage, best first.
        profit_buy_percentage_array, profit_buy_contracts_array = zip(*sorted(
            zip(profit_buy_percentage_array, profit_buy_contracts_array)))
        profit_buy_contracts_array = list(profit_buy_contracts_array)
        profit_buy_contracts_array.reverse()
        #BUGFIX: the original unconditionally unpacked the sell arrays too,
        #raising ValueError when there were buy profits but no sell profits
        #(zip(*sorted(zip([], []))) yields nothing to unpack).
        if profit_sell_contracts_array:
            profit_sell_percentage_array, profit_sell_contracts_array = zip(
                *sorted(
                    zip(profit_sell_percentage_array,
                        profit_sell_contracts_array)))
            profit_sell_contracts_array = list(profit_sell_contracts_array)
            profit_sell_contracts_array.reverse()
        #profit buy
        for contract_id in profit_buy_contracts_array:
            string = '<url=contract:30003576//' + str(
                contract_id) + '>' + good_contracts[contract_id][
                    'profit_isk'] + '</url> ' + good_contracts[contract_id][
                        'percentage'] + '%'
            profitable_buy = profitable_buy + '\n' + string
        #profit sell
        for contract_id in profit_sell_contracts_array:
            string = '<url=contract:30003576//' + str(
                contract_id) + '>' + good_contracts[contract_id][
                    'profit_isk'] + '</url> ' + good_contracts[contract_id][
                        'percentage'] + '%'
            profitable_sell = profitable_sell + '\n' + string
        full_string = ('Profitable to sell to Jita buy orders:' +
                       profitable_buy +
                       '\n\nProfitable sell as Jita sell order' +
                       profitable_sell)
        with open('profitable.txt', 'w') as outfile:
            outfile.write(full_string)
#Pretty-print the dogma effects of a type (esi_response: a /universe/types
#ESI response). For each effect it prints the effect name and every field of
#the cached effect record; "modifiers" entries and *_attribute_id fields are
#resolved to attribute names, fetching unknown attribute/effect IDs from ESI
#and persisting them into dogma_effects.gz / dogma_attributes.gz on the fly.
#Unknown effect IDs (fetch failed) are reported as "Unknown dogma effect".
#NOTE(review): code intentionally left byte-identical — the source formatting
#is collapsed and a string literal spans the original line break, so only
#this header comment was added.
def print_dogma_effects(esi_response): type_info = esi_response.json() if 'dogma_effects' not in type_info: print(type_info['name'], ' has no dogma effects') else: print('\ndogma effects:\n') length = len(type_info['dogma_effects']) new_effects = [] for n in range(0, length): dogma_id = type_info['dogma_effects'][n]['effect_id'] if not str(dogma_id) in dogma_effects: #Find what this ID is for new_effects.append(dogma_id) if len(new_effects) != 0: #print('Getting info on', len(new_effects), 'dogma effects') esi_response_arrays = esi_calling.call_esi( scope='/v2/dogma/effects/{par}', url_parameters=new_effects, datasource=datasource, job='get info on dogma attribute') for array in esi_response_arrays: response_json = array[0].json() if 'effect_id' in response_json: dogma_effects[str( response_json['effect_id'])] = response_json else: print("Something wrong: ", response_json) #Save the ID list with gzip.GzipFile('dogma_effects.gz', 'w') as outfile: outfile.write( json.dumps(dogma_effects, indent=2).encode('utf-8')) for n in range(0, length): dogma_id = type_info['dogma_effects'][n]['effect_id'] print(' ') if str(dogma_id) in dogma_effects: name = dogma_effects[str(dogma_id)]['name'] print(' ', name) for key in dogma_effects[str(dogma_id)]: if key == "modifiers": print(" modifiers:") for arr_element in dogma_effects[str( dogma_id)]["modifiers"]: for key2 in arr_element: if key2 in [ "modified_attribute_id", "modifying_attribute_id" ]: attr_id = arr_element[key2] if not str(attr_id) in dogma_attributes: #Find what this ID is for esi_response = esi_calling.call_esi( scope='/v1/dogma/attributes/{par}', url_parameters=[str(attr_id)], job='get info on dogma attribute' )[0][0] response_json = esi_response.json() if 'attribute_id' in response_json: dogma_attributes[str( response_json['attribute_id'] )] = response_json #Save the ID list with gzip.GzipFile( 'dogma_attributes.gz', 'w') as outfile: outfile.write( json.dumps( dogma_attributes, indent=2).encode('utf-8')) print( " 
", key2, ":", str(attr_id), "-", dogma_attributes[str(attr_id)]["name"]) else: print(" ", key2, ":", arr_element[key2]) elif key in [ "discharge_attribute_id", "duration_attribute_id", "falloff_attribute_id", "tracking_speed_attribute_id", "range_attribute_id" ]: if not str(dogma_effects[str(dogma_id)] [key]) in dogma_attributes: esi_response = esi_calling.call_esi( scope='/v1/dogma/attributes/{par}', url_parameters=[ str(dogma_effects[str(dogma_id)][key]) ], job='get info on dogma attribute')[0][0] response_json = esi_response.json() if 'attribute_id' in response_json: dogma_attributes[str( response_json['attribute_id'] )] = response_json #Save the ID list with gzip.GzipFile('dogma_attributes.gz', 'w') as outfile: outfile.write( json.dumps(dogma_attributes, indent=2).encode('utf-8')) print( ' ', key, ': ', dogma_effects[str(dogma_id)][key], "-", dogma_attributes[str( dogma_effects[str(dogma_id)][key])]["name"]) else: print(' ', key, ': ', dogma_effects[str(dogma_id)][key]) else: print("Unknown dogma effect ", dogma_id)
def _load_gzip_cache(path):
    """Return the JSON object stored in a gzip file, or {} if absent."""
    try:
        with gzip.GzipFile(path, 'r') as fin:
            return json.loads(fin.read().decode('utf-8'))
    except FileNotFoundError:
        #No file found. Start from scratch
        return {}


#Load cached dogma attribute and effect ID info
dogma_attributes = _load_gzip_cache('dogma_attributes.gz')
dogma_effects = _load_gzip_cache('dogma_effects.gz')

print('Using', datasource, 'as data source')

#Interactive loop: look up type IDs until the process is interrupted.
while True:
    type_id = input("Give type ID: ")
    esi_response = esi_calling.call_esi(scope='/v3/universe/types/{par}',
                                        url_parameters=[type_id],
                                        datasource=datasource,
                                        job='get type ID attributes')[0][0]
    if esi_response.status_code == 404:
        print('404 - Type ID: ' + type_id + ' not found')
    else:
        parse_stats(esi_response)
def process_response(esi_response):
    """Turn a /universe/types ESI response into an npc_stats dict.

    Returns {'name', 'description', 'type_id',
             'dogma_attributes': {str(attr_id): value},
             'dogma_effects': [effect_id, ...]}.
    Unknown attribute/effect IDs are bulk-fetched, added to the module-level
    caches and persisted to their gzip files.
    """
    global dogma_attributes
    global dogma_attribute_names
    global dogma_effects
    global dogma_effect_names
    type_info = esi_response.json()
    npc_stats = {
        'name': type_info['name'],
        'description': type_info['description'],
        'type_id': type_info['type_id'],
        'dogma_attributes': {},
        'dogma_effects': [],
    }
    # -- Dogma attributes --------------------------------------------------
    if 'dogma_attributes' not in type_info:
        print('Type ID', npc_stats['type_id'], 'has no defined attributes')
    elif len(type_info['dogma_attributes']) == 0:
        print('Type ID', npc_stats['type_id'], 'has zero attributes')
    else:
        #IDs we have no cached metadata for yet.
        unknown_ids = [
            entry["attribute_id"] for entry in type_info['dogma_attributes']
            if str(entry["attribute_id"]) not in dogma_attributes
        ]
        if len(unknown_ids) != 0:
            reply_arrays = esi_calling.call_esi(
                scope='/v1/dogma/attributes/{par}',
                url_parameters=unknown_ids,
                job='get info on dogma attribute')
            for reply in reply_arrays:
                info = reply[0].json()
                if 'attribute_id' in info:
                    dogma_attributes[str(info['attribute_id'])] = info
                    dogma_attribute_names[info['name']] = info['attribute_id']
            #Save both ID caches.
            with gzip.GzipFile('dogma_attributes.gz', 'w') as outfile:
                outfile.write(
                    json.dumps(dogma_attributes, indent=2).encode('utf-8'))
            with gzip.GzipFile('dogma_attribute_names.gz', 'w') as outfile:
                outfile.write(
                    json.dumps(dogma_attribute_names,
                               indent=2).encode('utf-8'))
        for entry in type_info['dogma_attributes']:
            npc_stats['dogma_attributes'][str(
                entry["attribute_id"])] = entry["value"]
    # -- Dogma effects -----------------------------------------------------
    if 'dogma_effects' not in type_info:
        print('Type ID', npc_stats['type_id'], 'has no defined effects')
    elif len(type_info['dogma_effects']) == 0:
        print('Type ID', npc_stats['type_id'], 'has zero effects')
    else:
        unknown_ids = [
            entry['effect_id'] for entry in type_info['dogma_effects']
            if str(entry['effect_id']) not in dogma_effects
        ]
        if len(unknown_ids) != 0:
            reply_arrays = esi_calling.call_esi(
                scope='/v2/dogma/effects/{par}',
                url_parameters=unknown_ids,
                job='get info on dogma attribute')
            for reply in reply_arrays:
                info = reply[0].json()
                if 'effect_id' in info:
                    dogma_effects[str(info['effect_id'])] = info
                    dogma_effect_names[info['name']] = info['effect_id']
                else:
                    print("Something wrong: ", info)
            #Save both ID caches.
            with gzip.GzipFile('dogma_effects.gz', 'w') as outfile:
                outfile.write(
                    json.dumps(dogma_effects, indent=2).encode('utf-8'))
            with gzip.GzipFile('dogma_effect_names.gz', 'w') as outfile:
                outfile.write(
                    json.dumps(dogma_effect_names, indent=2).encode('utf-8'))
        for entry in type_info['dogma_effects']:
            npc_stats['dogma_effects'].append(entry["effect_id"])
    return npc_stats
def _damage_distribution(damage, total):
    #Return an 'EM: x% Th: y% Kin: z% Ex: w% ' breakdown string for a
    #4-element [em, th, kin, ex] damage array; zero entries are omitted.
    labels = ('EM: ', 'Th: ', 'Kin: ', 'Ex: ')
    distribution = ''
    for label, dmg in zip(labels, damage):
        if dmg > 0:
            distribution += label + str(round(100 * dmg / total)) + '% '
    return distribution


def print_damage(npc_stats):
    """Print the damage profile of an NPC: turret/disintegrator DPS,
    missile DPS (fetching the missile type's own stats via ESI), the
    combined total, and superweapon damage — each section only when the
    corresponding dogma effect is present on npc_stats.
    """
    print('\n-- DAMAGE --')
    miss_total = 0
    turr_total = 0
    # Turret damage
    if has_effect('targetAttack', npc_stats) or has_effect(
            'targetDisintegratorAttack', npc_stats):
        # em, th, kin, ex
        turr_damage = np.array([
            get_attribute('emDamage', npc_stats),
            get_attribute('thermalDamage', npc_stats),
            get_attribute('kineticDamage', npc_stats),
            get_attribute('explosiveDamage', npc_stats)
        ]) * get_attribute('damageMultiplier', npc_stats)
        turr_total = sum(turr_damage)
        if turr_total != 0:
            turr_dps = turr_damage * (1000 /
                                      get_attribute('speed', npc_stats))
            prnt_distribution = _damage_distribution(turr_damage, turr_total)
            # Renamed from `range` — the original shadowed the builtin.
            range_text = str(
                round(get_attribute('maxRange', npc_stats) / 1000,
                      1)) + ' + ' + str(
                          round(
                              get_attribute('falloff', npc_stats) / 1000,
                              1)) + ' km'
            tracking = str(
                round(
                    40000 * get_attribute('trackingSpeed', npc_stats) /
                    get_attribute('optimalSigRadius', npc_stats),
                    3)) + ' rad/s'
            if has_effect('targetDisintegratorAttack', npc_stats):
                print('Disintegrator: ')
                bonus = str(100 * get_attribute(
                    'damageMultiplierBonusPerCycle', npc_stats))
                max_bonus = str(
                    100 * get_attribute('damageMultiplierBonusMax', npc_stats))
                print('{:<2} {:<9} {:<10}'.format(
                    ' ', 'Ramps up:',
                    bonus + '% per cycle. Max: ' + max_bonus + '%'))
            else:
                print('Turrets: ')
            print('{:<2} {:<9} {:<10} {:<8}'.format(' ', 'DPS:',
                                                    round(sum(turr_dps)),
                                                    prnt_distribution))
            print('{:<2} {:<9} {:<10}'.format(' ', 'Range:', range_text))
            print('{:<2} {:<9} {:<10}'.format(' ', 'Tracking:', tracking))
    # Missile damage
    if has_effect('missileLaunchingForEntity', npc_stats):
        missile_id = round(get_attribute('entityMissileTypeID', npc_stats))
        esi_response = esi_calling.call_esi(scope='/v3/universe/types/{par}',
                                            url_parameters=[missile_id],
                                            job='get missile attributes')[0][0]
        missile_stats = process_response(esi_response)
        miss_damage = np.array([
            get_attribute('emDamage', missile_stats),
            get_attribute('thermalDamage', missile_stats),
            get_attribute('kineticDamage', missile_stats),
            get_attribute('explosiveDamage', missile_stats)
        ]) * get_attribute('missileDamageMultiplier', npc_stats)
        miss_total = sum(miss_damage)
        miss_dps = miss_damage * (
            1000 / get_attribute('missileLaunchDuration', npc_stats))
        # Flight range = missile velocity * flight time, both scaled by the
        # NPC's own multipliers.
        range_text = str(
            round(
                get_attribute('missileEntityVelocityMultiplier', npc_stats) *
                get_attribute('maxVelocity', missile_stats) *
                get_attribute('missileEntityFlightTimeMultiplier', npc_stats) *
                get_attribute('explosionDelay', missile_stats) / 1000 / 1000,
                1)) + ' km'
        expl_radius = str(
            round(
                get_attribute('aoeCloudSize', missile_stats) * get_attribute(
                    'missileEntityAoeCloudSizeMultiplier', npc_stats))) + ' m'
        expl_velocity = str(
            round(
                get_attribute('aoeVelocity', missile_stats) * get_attribute(
                    'missileEntityAoeVelocityMultiplier',
                    npc_stats))) + ' m/s'
        prnt_distribution = _damage_distribution(miss_damage, miss_total)
        print('Missiles: ')
        print('{:<2} {:<9} {:<10} {:<8}'.format(' ', 'DPS:',
                                                round(sum(miss_dps)),
                                                prnt_distribution))
        print('{:<2} {:<9} {:<10}'.format(' ', 'Range:', range_text))
        print('{:<2} {:<9} {:<10}'.format(' ', 'Expl rad:', expl_radius))
        print('{:<2} {:<9} {:<10}'.format(' ', 'Expl vel:', expl_velocity))
    # Total damage — only when both turrets and missiles contribute.
    if miss_total != 0 and turr_total != 0:
        total_dps = turr_dps + miss_dps
        total_total = sum(total_dps)
        prnt_distribution = _damage_distribution(total_dps, total_total)
        print('Total: ')
        print('{:<2} {:<9} {:<10} {:<8}'.format(' ', 'DPS:',
                                                round(total_total),
                                                prnt_distribution))
    # Superweapon
    if has_effect('entitySuperWeapon', npc_stats):
        super_damage = np.array([
            get_attribute('entitySuperWeaponEmDamage', npc_stats),
            get_attribute('entitySuperWeaponThermalDamage', npc_stats),
            get_attribute('entitySuperWeaponKineticDamage', npc_stats),
            get_attribute('entitySuperWeaponExplosiveDamage', npc_stats)
        ])
        super_total = sum(super_damage)
        prnt_distribution = _damage_distribution(super_damage, super_total)
        super_duration = str(
            round(
                get_attribute('entitySuperWeaponDuration', npc_stats) / 1000,
                1)) + ' s'
        range_text = str(
            round(
                get_attribute('entitySuperWeaponMaxRange', npc_stats) / 1000,
                1)) + ' + ' + str(
                    round(
                        get_attribute('entitySuperWeaponFallOff', npc_stats) /
                        1000, 1)) + ' km'
        tracking = str(
            round(
                40000 *
                get_attribute('entitySuperWeaponTrackingSpeed', npc_stats) /
                get_attribute('entitySuperWeaponOptimalSignatureRadius',
                              npc_stats), 3)) + ' rad/s'
        print('Superweapon: ')
        print('{:<2} {:<9} {:<10} {:<8}'.format(' ', 'Damage:', super_total,
                                                prnt_distribution))
        print('{:<2} {:<9} {:<10}'.format(' ', 'Range:', range_text))
        print('{:<2} {:<9} {:<10}'.format(' ', 'Tracking:', tracking))
        print('{:<2} {:<9} {:<10}'.format(' ', 'Duration:', super_duration))
    print('')