def find_trades(start_id=60008694, destination_id=10000002):
    """
    Returns inter-region trade opportunities.

    For every type with fresh stats (updated within the last three days)
    in both regions, computes the markup between the destination region's
    average bid and the start region's average ask and returns the 50
    highest positive markups.

    NOTE(review): the start_id default (60008694) looks like a station ID,
    but the query binds it to mapregion_id -- verify the default.
    """
    db_cursor = connection.cursor()

    sql = """SELECT * FROM (
                 SELECT t.id, t.name, t.volume, b.local_ask, a.foreign_bid,
                        ((a.foreign_bid / b.local_ask) - 1) * 100 AS markup
                 FROM eve_db_invtype t
                 INNER JOIN (SELECT invtype_id, buyavg AS foreign_bid
                             FROM market_data_itemregionstat
                             WHERE mapregion_id = %s
                             AND lastupdate > current_date - interval '3 days') a
                     ON (t.id = a.invtype_id AND foreign_bid > 0)
                 INNER JOIN (SELECT invtype_id, sellavg AS local_ask
                             FROM market_data_itemregionstat
                             WHERE mapregion_id = %s
                             AND lastupdate > current_date - interval '3 days') b
                     ON (t.id = b.invtype_id AND local_ask > 0)
                 WHERE t.id IN (SELECT market_data_itemregionstat.invtype_id
                                FROM market_data_itemregionstat
                                WHERE market_data_itemregionstat.mapregion_id = %s)
             ) q
             WHERE q.markup > 0
             ORDER BY q.markup DESC
             LIMIT 50;"""

    # Bind order: destination region (bid side), start region (ask side),
    # start region (availability filter). Read-only query - no commit needed.
    db_cursor.execute(sql, [destination_id, start_id, start_id])

    return dictfetchall(db_cursor)
# NOTE(review): this is a byte-identical re-definition of find_trades()
# defined immediately above in this module; at import time this second
# definition shadows the first, leaving the earlier copy dead code.
# Confirm intent and delete one of the two copies.
def find_trades(start_id=60008694, destination_id=10000002):
    """ Returns inter-region trade opportunities. """
    cursor = connection.cursor()
    # Placeholder order: destination region (foreign bid), start region
    # (local ask), start region (availability filter).
    # NOTE(review): start_id's default (60008694) looks like a station ID
    # but is bound to mapregion_id -- confirm the default value.
    params = [destination_id, start_id, start_id]
    # Markup = percentage gain from buying at the start region's average
    # ask and selling at the destination region's average bid; only stats
    # updated within the last three days are considered.
    query = """SELECT * FROM (
                   SELECT t.id, t.name, t.volume, b.local_ask, a.foreign_bid,
                          ((a.foreign_bid / b.local_ask) - 1) * 100 AS markup
                   FROM eve_db_invtype t
                   INNER JOIN (SELECT invtype_id, buyavg AS foreign_bid
                               FROM market_data_itemregionstat
                               WHERE mapregion_id = %s
                               AND lastupdate > current_date - interval '3 days') a
                       ON (t.id = a.invtype_id AND foreign_bid > 0)
                   INNER JOIN (SELECT invtype_id, sellavg AS local_ask
                               FROM market_data_itemregionstat
                               WHERE mapregion_id = %s
                               AND lastupdate > current_date - interval '3 days') b
                       ON (t.id = b.invtype_id AND local_ask > 0)
                   WHERE t.id IN (SELECT market_data_itemregionstat.invtype_id
                                  FROM market_data_itemregionstat
                                  WHERE market_data_itemregionstat.mapregion_id = %s)
               ) q
               WHERE q.markup > 0
               ORDER BY q.markup DESC
               LIMIT 50;"""
    # Data retrieval operation - no commit required
    cursor.execute(query, params)
    return dictfetchall(cursor)
def type_volume(station_id=60008694):
    """
    Returns the total/ask/bid ISK volume by type in a given station in
    descending order.

    Only active orders with minimum_volume = 1 are counted; volume is
    price * volume_remaining, split into bid and ask sides.
    """
    db_cursor = connection.cursor()

    sql = """SELECT invtype_id,
                    SUM (price * volume_remaining) AS total,
                    SUM (CASE WHEN is_bid = TRUE THEN price * volume_remaining ELSE 0 END) AS total_bid,
                    SUM (CASE WHEN is_bid = FALSE THEN price * volume_remaining ELSE 0 END) AS total_ask
             FROM market_data_orders
             WHERE (market_data_orders.is_active = TRUE
                    AND market_data_orders.stastation_id = %s
                    AND market_data_orders.minimum_volume = 1)
             GROUP BY invtype_id
             ORDER BY total DESC;"""

    # Read-only query - nothing to commit
    db_cursor.execute(sql, [station_id])

    return dictfetchall(db_cursor)
def _strip_size_markers(filename):
    """Collapse the _16_/_32_/_64_/_128_ icon size infixes to a single '_'."""
    for marker in ('_16_', '_32_', '_64_', '_128_'):
        filename = filename.replace(marker, '_')
    return filename


def _icon_filename(iconid):
    """Map an iconID from the YAML dump to a dynatree icon file name."""
    if not iconid:
        # No icon assigned - use the default icon
        return '22_42.png'

    filename = icons[iconid]['iconFile']

    # Handle the different path formats found in iconIDs.yaml
    if 'res:/UI/Texture/Icons/' in filename:
        return _strip_size_markers(filename.replace('res:/UI/Texture/Icons/', ''))

    if 'res:/UI/Texture/market/' in filename:
        return '22_42.png'

    # Remove leading zeros ('07' -> '7').
    # NOTE(review): this strips ANY '0<digit>' pair, not just leading ones
    # (e.g. '10' -> '1') -- behavior kept from the original, confirm intent.
    for digit in range(10):
        filename = filename.replace('0' + str(digit), str(digit))

    return _strip_size_markers(filename) + '.png'


def recadder(node):
    """
    Recursively builds a dynatree node from an eve_db_invmarketgroup row.

    Folder groups (has_items false) get their child groups fetched and
    attached first; then the row's DB column names are rewritten into the
    attributes dynatree expects (isFolder/noLink/title/tooltip/icon/key).
    Relies on the module-level `cursor`, `icons` and `dictfetchall`.
    Returns the mutated node dict.
    """
    if not node['has_items']:
        node.setdefault('children', [])
        # Parameterized query -- the original concatenated node['id']
        # straight into the SQL string.
        cursor.execute(
            'SELECT * FROM (SELECT * FROM eve_db_invmarketgroup '
            'WHERE parent_id = %s ORDER BY name) as main ORDER BY has_items',
            [node['id']])
        for child in dictfetchall(cursor):
            node['children'].append(recadder(child))

    # Casting and renaming to make dynatree happy
    is_folder = not bool(node.pop('has_items'))
    node['isFolder'] = is_folder
    node['noLink'] = is_folder
    node['title'] = node.pop('name')
    node['tooltip'] = node.pop('description')
    node['icon'] = _icon_filename(node.pop('icon_id'))
    node['key'] = node.pop('id')
    del node['parent_id']

    return node
def bid_ask_spread(station_id=60008694, region_id=10000002, market_group_id=1413):
    """
    Returns top 100 spread items on a given station.

    This can especially be useful for identifying items worth for station
    trading if you take the volume into account. Defaults to Jita IV -
    Moon 4 - Caldari Navy Assembly Plant.

    NOTE(review): the docstring says "top 100" but the query has no LIMIT
    clause -- it returns every matching row, ordered by
    potential_daily_profit. Confirm which is intended.
    """
    db_cursor = connection.cursor()

    sql = """SELECT id, name, min_ask, max_bid, spread, spread_percent, weekly_volume,
                    ((min_ask - max_bid) * weekly_volume / 7) AS potential_daily_profit
             FROM (
                 SELECT t.id, t.name, b.max_bid, a.min_ask,
                        (a.min_ask - b.max_bid) AS spread,
                        ((a.min_ask / b.max_bid) - 1) * 100 AS spread_percent
                 FROM (SELECT id, name FROM eve_db_invtype z
                       WHERE market_group_id = %s AND is_published = 't') t
                 INNER JOIN (SELECT invtype_id, Max(price) AS max_bid
                             FROM market_data_orders
                             WHERE stastation_id = %s AND is_bid = 't'
                             AND is_suspicious = 'f' AND minimum_volume = 1
                             AND is_active = 't'
                             GROUP BY invtype_id) b
                     ON (t.id = b.invtype_id AND max_bid > 0)
                 INNER JOIN (SELECT invtype_id, Min(price) AS min_ask
                             FROM market_data_orders
                             WHERE stastation_id = %s AND is_bid = 'f'
                             AND is_suspicious = 'f' AND minimum_volume = 1
                             AND is_active = 't'
                             GROUP BY invtype_id) a
                     ON (t.id = a.invtype_id AND min_ask > 0)
             ) q
             INNER JOIN (SELECT invtype_id, Sum(quantity) AS weekly_volume
                         FROM market_data_orderhistory
                         WHERE mapregion_id = %s
                         AND date >= current_date - interval '7 days'
                         GROUP BY invtype_id) v
                 ON (q.id = invtype_id AND weekly_volume > 0)
             ORDER BY potential_daily_profit DESC;"""

    # Bind order: market group, station (bids), station (asks), region (volume).
    # Read-only query - nothing to commit.
    db_cursor.execute(sql, [market_group_id, station_id, station_id, region_id])

    return dictfetchall(db_cursor)
# NOTE(review): this chunk originally began with a stray fragment
# ("node['key'] = key / del node['parent_id'] / return node") duplicating
# the tail of recadder() above; a bare `return` is a SyntaxError at module
# level, so the fragment has been dropped here.

# Load the icon metadata. The original used the Python 2-only `file()`
# builtin and never closed the handle; `open()` in a context manager is
# portable and closes it deterministically.
with open('iconIDs.yaml', 'r') as icon_yaml:
    # Parse YAML.
    # NOTE(review): plain `load` can construct arbitrary objects from YAML
    # tags -- fine for this trusted local dump, but switch to
    # yaml.safe_load if the source can ever be untrusted.
    icons = load(icon_yaml)

# Initialize database connection (module-level cursor is shared with recadder)
cursor = connection.cursor()

# Select root groups: market groups without a parent form the tree's top level
cursor.execute("SELECT * FROM eve_db_invmarketgroup WHERE parent_id IS NULL ORDER BY name")
result = dictfetchall(cursor)

# Recursively build the tree, one root group at a time
tree = [recadder(node) for node in result]

# Write file into appropriate asset folder
with open("element43/apps/market_browser/static/javascripts/groups.json", "w") as json_file:
    json_file.write(json.dumps(tree))
def import_markup(import_station_id=60008494, export_region_id=0, export_system_id=0, export_station_id=60003760):
    """
    Returns top 100 markup values above 0% for a given station in
    comparison to a certain region, system or station.

    Passing in an export_region will do a region->station comparison.
    If 0 is passed in it will do a station->system/station.

    Defaults:
        Export region:  None
        Export system:  None
        Import station: Amarr VIII (Oris) - Emperor Family Academy
        Export station: Jita IV - Moon 4 - Caldari Navy Assembly Plant

    Mapping: (invTypeID, invTypeName, foreign_sell, local_buy, markup, invTypeID)
    """
    db_cursor = connection.cursor()

    # Pick the export-side scope: region wins over system, system over station
    if export_region_id:
        scope_clause, export_id = "WHERE mapregion_id = %s ", export_region_id
    elif export_system_id:
        scope_clause, export_id = "WHERE mapsolarsystem_id = %s ", export_system_id
    else:
        scope_clause, export_id = "WHERE stastation_id = %s ", export_station_id

    # Query head up to the export-scope filter
    sql_head = """SELECT * FROM (
                      SELECT t.id, t.name, a.foreign_ask, b.local_bid,
                             ((b.local_bid / a.foreign_ask) - 1) * 100 AS markup
                      FROM eve_db_invtype t
                      INNER JOIN (SELECT invtype_id, Min(price) AS foreign_ask
                                  FROM market_data_orders
                  """

    # Remainder of the query after the export-scope filter
    sql_tail = """              AND is_bid = 'f'
                              AND is_suspicious = 'f'
                              AND is_active = 't'
                              AND minimum_volume = 1
                              GROUP BY invtype_id) a
                          ON (t.id = a.invtype_id AND foreign_ask > 0)
                      INNER JOIN (SELECT invtype_id, Max(price) AS local_bid
                                  FROM market_data_orders
                                  WHERE stastation_id = %s
                                  AND is_bid = 't'
                                  AND is_suspicious = 'f'
                                  AND is_active = 't'
                                  AND minimum_volume = 1
                                  GROUP BY invtype_id) b
                          ON (t.id = b.invtype_id AND local_bid > 0)
                      WHERE t.id IN (SELECT DISTINCT market_data_orders.invtype_id
                                     FROM market_data_orders
                                     WHERE market_data_orders.stastation_id = %s)
                  ) q
                  WHERE q.markup > 0
                  ORDER BY q.markup DESC
                  LIMIT 100;"""

    # Read-only query - nothing to commit.
    # Bind order: export scope id, import station (bids), import station
    # (availability filter).
    db_cursor.execute(sql_head + scope_clause + sql_tail,
                      [export_id, import_station_id, import_station_id])

    return dictfetchall(db_cursor)
def stats_json(request, region_id):
    """
    Returns stat JSON for the front page.

    1. Platform stats: active/archived order counts, history row count
       (pg_class estimates, memcached for an hour) and EMDR message rates.
    2. Per-type stats for the invtype IDs passed via the `type` GET
       parameter, priced against `region_id` and Jita (solar system
       30000142).
    """
    # Connect to memcache
    mc = get_memcache_client()
    # Cursor for faster counting
    cursor = connection.cursor()

    # Collect stats
    # 1. Platform stats
    # NOTE: there is a disconnect between "history" and history messages/min --
    # "history" is the orderhistory table (all rows), while messages/min is
    # based on EMDR, where one message carries multiple rows.

    # Active order count, cached for one hour
    if (mc.get("e43-stats-activeorders") is not None):
        active_orders = mc.get("e43-stats-activeorders")
    else:
        active_orders = Orders.active.count()
        mc.set("e43-stats-activeorders", active_orders, time=3600)

    # Archived order count -- pg_class row estimate instead of an exact
    # (and slow) COUNT(*); cached for one hour
    if (mc.get("e43-stats-archivedorders") is not None):
        archived_orders = mc.get("e43-stats-archivedorders")
    else:
        cursor.execute("SELECT reltuples::bigint AS estimate FROM pg_class WHERE relname='market_data_archivedorders'")
        archived_orders = dictfetchall(cursor)[0]['estimate']
        mc.set("e43-stats-archivedorders", archived_orders, time=3600)

    # History row count -- same pg_class estimate trick, cached for one hour
    if (mc.get("e43-stats-history") is not None):
        history = mc.get("e43-stats-history")
    else:
        cursor.execute("SELECT reltuples::bigint AS estimate FROM pg_class WHERE relname='market_data_orderhistory'")
        history = dictfetchall(cursor)[0]['estimate']
        mc.set("e43-stats-history", history, time=3600)

    # Latest EMDR counter per status type, converted to a per-minute rate.
    # NOTE(review): the /5 suggests status_count covers a 5-minute window --
    # confirm. Under Python 2 this is integer division.
    new_orders_per_minute = EmdrStats.objects.filter(
        status_type=1).order_by("-message_timestamp")[:1][0].status_count / 5
    updated_orders_per_minute = EmdrStats.objects.filter(
        status_type=2).order_by("-message_timestamp")[:1][0].status_count / 5
    old_orders_per_minute = EmdrStats.objects.filter(
        status_type=3).order_by("-message_timestamp")[:1][0].status_count / 5
    history_messages_per_minute = EmdrStats.objects.filter(
        status_type=4).order_by("-message_timestamp")[:1][0].status_count / 5

    # 2. Minerals and PLEX

    # Coerce the requested type IDs from strings to ints
    types = request.GET.getlist('type')
    new_types = []
    for item in types:
        new_types.append(int(item))
    types = new_types

    typestats = {}
    cache_item = {}
    buymedian = 0
    sellmedian = 0

    for item in types:
        # Still works if we have no data for that item
        # NOTE(review): only pylibmc.Error is caught below; the [:1][0]
        # lookups raise IndexError when no rows match, which would
        # propagate out of this view -- confirm that is intended.
        try:
            # check to see if it's in the cache, if so use those values
            if (mc.get("e43-stats" + str(item)) is not None):
                cache_item = ujson.loads(mc.get("e43-stats" + str(item)))
                buymedian = cache_item['buymedian']
                sellmedian = cache_item['sellmedian']
            # otherwise go to the DB for it
            else:
                # Catch error if we don't have any data for that type
                # NOTE(review): bare except -- hides ALL errors here, not
                # just the missing-row case; narrow to IndexError.
                try:
                    region_stats = ItemRegionStat.objects.filter(mapregion_id=region_id, invtype_id=item)[:1][0]
                    buymedian = region_stats.buymedian
                    sellmedian = region_stats.sellmedian
                except:
                    buymedian = 0
                    sellmedian = 0

            # Most recent historical medians, used for the *_move deltas below
            region_stats_history = ItemRegionStatHistory.objects.filter(mapregion_id=region_id, invtype_id=item).order_by("-date")[:1][0]

            # Get Jita prices (best bid / best ask in solar system 30000142)
            buy = Orders.active.filter(mapsolarsystem=30000142, invtype=item, is_bid=True).order_by("-price")[:1][0].price
            sell = Orders.active.filter(mapsolarsystem=30000142, invtype=item, is_bid=False).order_by("price")[:1][0].price

            stats = {'bid_max': buy,
                     'ask_min': sell,
                     'bid_median': buymedian,
                     'bid_median_move': region_stats_history.buymedian - buymedian,
                     'ask_median': sellmedian,
                     'ask_median_move': region_stats_history.sellmedian - sellmedian}

            # NOTE(review): stats is always a non-empty dict here, so this
            # check is always true.
            if stats:
                typestats[item] = stats

        except pylibmc.Error as e:
            # Memcache failure for this item: log to stdout and move on
            # (Python 2 print statement)
            print e

    # Create JSON
    stat_json = simplejson.dumps({'active_orders': active_orders,
                                  'archived_orders': archived_orders,
                                  'history_records': history,
                                  'new_orders': new_orders_per_minute,
                                  'updated_orders': updated_orders_per_minute,
                                  'old_orders': old_orders_per_minute,
                                  'history_messages': history_messages_per_minute,
                                  'typestats': typestats})

    # Save complete stats to memcached
    mc.set("e43-fullstats", stat_json)

    # Return JSON without using any template
    return HttpResponse(stat_json, mimetype='application/json')