def test_contains(self):
    """
    Exercise ``__contains__`` through the ``in`` keyword.

    Membership on a MarketOrderList is keyed on the order ID, unlike
    market history, which is keyed on type ID.
    """
    order_list = MarketOrderList()

    # Nothing has been added yet, so lookup by order ID must fail.
    self.assertNotIn(2413387906, order_list)

    order = MarketOrder(
        order_id=2413387906,
        is_bid=True,
        region_id=10000068,
        solar_system_id=30005316,
        station_id=60011521,
        type_id=10000068,
        price=52875,
        volume_entered=10,
        volume_remaining=4,
        minimum_volume=1,
        order_issue_date=now_dtime_in_utc(),
        order_duration=90,
        order_range=5,
        generated_at=now_dtime_in_utc(),
    )
    # Add an order to search for.
    order_list.add_order(order)

    # Lookup by the integer order ID now succeeds...
    self.assertIn(2413387906, order_list)
    # ...and so does lookup by the MarketOrder instance itself.
    self.assertIn(order, order_list)
def test_contains(self):
    """
    Exercise ``__contains__`` via the ``in`` keyword.

    Membership on a MarketHistoryList is keyed on the item's type ID.
    This is different from MarketOrderList, which uses order ID.
    """
    history_list = MarketHistoryList()

    # No entries yet, so this type ID cannot be present.
    self.assertNotIn(2413387906, history_list)

    entry = MarketHistoryEntry(
        type_id=2413387906,
        region_id=10000068,
        historical_date=now_dtime_in_utc(),
        num_orders=5,
        low_price=5.0,
        high_price=10.5,
        average_price=7.0,
        total_quantity=200,
        generated_at=now_dtime_in_utc(),
    )
    history_list.add_entry(entry)

    # Found by the integer type ID...
    self.assertIn(2413387906, history_list)
    # ...and by the MarketHistoryEntry object itself.
    self.assertIn(entry, history_list)
def test_naive_datetime(self):
    """
    Feeds naive datetimes to the various objects to make sure that
    they are rejected with NaiveDatetimeError.
    """
    naive = datetime.datetime.now()
    # Fields shared by every MarketOrder construction attempt below.
    base = dict(
        order_id=2413387906,
        is_bid=True,
        region_id=10000068,
        solar_system_id=30005316,
        station_id=60011521,
        type_id=10000068,
        price=52875,
        volume_entered=10,
        volume_remaining=4,
        minimum_volume=1,
        order_duration=90,
        order_range=5,
    )

    # A naive order_issue_date must be rejected.
    self.assertRaises(
        NaiveDatetimeError, MarketOrder,
        order_issue_date=naive,
        generated_at=now_dtime_in_utc(),
        **base)

    # A naive generated_at must be rejected.
    self.assertRaises(
        NaiveDatetimeError, MarketOrder,
        order_issue_date=now_dtime_in_utc(),
        generated_at=naive,
        **base)

    # The per-region container enforces the same rule on generated_at.
    self.assertRaises(
        NaiveDatetimeError, MarketItemsInRegionList,
        region_id=10000068,
        type_id=10000068,
        generated_at=naive)
def setUp(self):
    """
    One fixture of each kind is pre-registered on its list; a second
    of each kind is constructed but left un-added for individual
    tests to insert themselves.
    """
    self.order_list = MarketOrderList()
    self.order1 = MarketOrder(
        order_id=2413387906,
        is_bid=True,
        region_id=10000068,
        solar_system_id=30005316,
        station_id=60011521,
        type_id=10000068,
        price=52875,
        volume_entered=10,
        volume_remaining=4,
        minimum_volume=1,
        order_issue_date=now_dtime_in_utc(),
        order_duration=90,
        order_range=5,
        generated_at=now_dtime_in_utc(),
    )
    self.order_list.add_order(self.order1)

    # This order isn't added, but it's here for the test to add.
    self.order2 = MarketOrder(
        order_id=1234566,
        is_bid=False,
        region_id=10000032,
        solar_system_id=30005312,
        station_id=60011121,
        type_id=10000067,
        price=52,
        volume_entered=10,
        volume_remaining=500,
        minimum_volume=1,
        order_issue_date=now_dtime_in_utc(),
        order_duration=90,
        order_range=5,
        generated_at=now_dtime_in_utc(),
    )

    self.history = MarketHistoryList()
    self.history1 = MarketHistoryEntry(
        type_id=2413387906,
        region_id=10000068,
        historical_date=now_dtime_in_utc(),
        num_orders=5,
        low_price=5.0,
        high_price=10.5,
        average_price=7.0,
        total_quantity=200,
        generated_at=now_dtime_in_utc(),
    )
    self.history.add_entry(self.history1)

    # This entry isn't added, but it's here for the test to add.
    self.history2 = MarketHistoryEntry(
        type_id=1413387203,
        region_id=10000067,
        historical_date=now_dtime_in_utc(),
        num_orders=50,
        low_price=50.0,
        high_price=100.5,
        average_price=70.0,
        total_quantity=2000,
        generated_at=now_dtime_in_utc(),
    )
def encode_to_json(order_list):
    """
    Encodes this list of MarketOrder instances to a JSON string.

    :param MarketOrderList order_list: The order list to serialize.
    :rtype: str
    """
    rowsets = []
    for region_group in order_list._orders.values():
        # Each row's value order must line up exactly with
        # STANDARD_ENCODED_COLUMNS.
        rows = [
            [
                order.price,
                order.volume_remaining,
                order.order_range,
                order.order_id,
                order.volume_entered,
                order.minimum_volume,
                order.is_bid,
                gen_iso_datetime_str(order.order_issue_date),
                order.order_duration,
                order.station_id,
                order.solar_system_id,
            ]
            for order in region_group.orders
        ]
        rowsets.append({
            'generatedAt': gen_iso_datetime_str(region_group.generated_at),
            'regionID': region_group.region_id,
            'typeID': region_group.type_id,
            'rows': rows,
        })

    return json.dumps({
        'resultType': 'orders',
        'version': '0.1',
        'uploadKeys': order_list.upload_keys,
        'generator': order_list.order_generator,
        'currentTime': gen_iso_datetime_str(now_dtime_in_utc()),
        # Must stay in sync with the row layout assembled above.
        'columns': STANDARD_ENCODED_COLUMNS,
        'rowsets': rowsets,
    })
def encode_to_json(order_list):
    """
    Encodes this list of MarketOrder instances to a JSON string.

    :param MarketOrderList order_list: The order list to serialize.
    :rtype: str
    """
    def order_row(order):
        # The value order here is crucial: it must match
        # STANDARD_ENCODED_COLUMNS.
        return [
            order.price,
            order.volume_remaining,
            order.order_range,
            order.order_id,
            order.volume_entered,
            order.minimum_volume,
            order.is_bid,
            gen_iso_datetime_str(order.order_issue_date),
            order.order_duration,
            order.station_id,
            order.solar_system_id,
        ]

    rowsets = []
    for group in order_list._orders.values():
        rowsets.append(dict(
            generatedAt=gen_iso_datetime_str(group.generated_at),
            regionID=group.region_id,
            typeID=group.type_id,
            rows=[order_row(order) for order in group.orders],
        ))

    json_dict = {
        'resultType': 'orders',
        'version': '0.1',
        'uploadKeys': order_list.upload_keys,
        'generator': order_list.order_generator,
        'currentTime': gen_iso_datetime_str(now_dtime_in_utc()),
        # This must match the order of the values built by order_row().
        'columns': STANDARD_ENCODED_COLUMNS,
        'rowsets': rowsets,
    }
    return json.dumps(json_dict)
def test_naive_datetime(self):
    """
    Feeds naive datetimes to the various objects to make sure that
    they are rejected with NaiveDatetimeError.
    """
    naive = datetime.datetime.now()
    # Fields shared by every MarketHistoryEntry construction below.
    base = dict(
        type_id=2413387906,
        region_id=10000068,
        num_orders=5,
        low_price=5.0,
        high_price=10.5,
        average_price=7.0,
        total_quantity=200,
    )

    # A naive historical_date must be rejected.
    self.assertRaises(
        NaiveDatetimeError, MarketHistoryEntry,
        historical_date=naive,
        generated_at=now_dtime_in_utc(),
        **base)

    # A naive generated_at must be rejected.
    self.assertRaises(
        NaiveDatetimeError, MarketHistoryEntry,
        historical_date=now_dtime_in_utc(),
        generated_at=naive,
        **base)

    # The per-region history container enforces the same rule.
    self.assertRaises(
        NaiveDatetimeError, HistoryItemsInRegionList,
        region_id=10000068,
        type_id=10000068,
        generated_at=naive)
def encode_to_json(history_list):
    """
    Encodes this MarketHistoryList instance to a JSON string.

    :param MarketHistoryList history_list: The history instance to serialize.
    :rtype: str
    """
    rowsets = []
    for region_group in history_list._history.values():
        # Row layout must match STANDARD_ENCODED_COLUMNS exactly.
        rows = [
            [
                gen_iso_datetime_str(entry.historical_date),
                entry.num_orders,
                entry.total_quantity,
                entry.low_price,
                entry.high_price,
                entry.average_price,
            ]
            for entry in region_group.entries
        ]
        rowsets.append({
            'generatedAt': gen_iso_datetime_str(region_group.generated_at),
            'regionID': region_group.region_id,
            'typeID': region_group.type_id,
            'rows': rows,
        })

    return json.dumps({
        'resultType': 'history',
        'version': '0.1',
        'uploadKeys': history_list.upload_keys,
        'generator': history_list.history_generator,
        'currentTime': gen_iso_datetime_str(now_dtime_in_utc()),
        # Must stay in sync with the row layout built above.
        'columns': STANDARD_ENCODED_COLUMNS,
        'rowsets': rowsets,
    })
def encode_to_json(history_list):
    """
    Encodes this MarketHistoryList instance to a JSON string.

    :param MarketHistoryList history_list: The history instance to serialize.
    :rtype: str
    """
    def history_row(entry):
        # The value order here is crucial: it must match
        # STANDARD_ENCODED_COLUMNS.
        return [
            gen_iso_datetime_str(entry.historical_date),
            entry.num_orders,
            entry.total_quantity,
            entry.low_price,
            entry.high_price,
            entry.average_price,
        ]

    rowsets = []
    for group in history_list._history.values():
        rowsets.append(dict(
            generatedAt=gen_iso_datetime_str(group.generated_at),
            regionID=group.region_id,
            typeID=group.type_id,
            rows=[history_row(entry) for entry in group.entries],
        ))

    json_dict = {
        'resultType': 'history',
        'version': '0.1',
        'uploadKeys': history_list.upload_keys,
        'generator': history_list.history_generator,
        'currentTime': gen_iso_datetime_str(now_dtime_in_utc()),
        # This must match the order of the values built by history_row().
        'columns': STANDARD_ENCODED_COLUMNS,
        'rowsets': rowsets,
    }
    return json.dumps(json_dict)
def serialize_orders(doc_dict, region_data):
    """
    Serializes a GetOrders cache file's contents.

    :param dict doc_dict: The parsed cache document in dict form.
    :param region_data: Sequence whose third and fourth entries are the
        region ID and type ID this cache file was pulled for (only used
        when the document contains no orders).
    :rtype: str
    :returns: The UUDIF serialized JSON message.
    """
    order_list = MarketOrderList(
        order_generator=ORDER_GENERATOR,
        upload_keys=UPLOAD_KEYS,
    )
    # timezone-aware datetime.
    generated_at = now_dtime_in_utc()
    region_id = region_data[2]
    type_id = region_data[3]

    # 'lret' holds groups of raw order dicts straight out of the cache file.
    for order_item in doc_dict['lret']:
        for entry in order_item:
            order = MarketOrder(
                order_id=entry['orderID'],
                is_bid=entry['bid'],
                region_id=entry['regionID'],
                solar_system_id=entry['solarSystemID'],
                station_id=entry['stationID'],
                type_id=entry['typeID'],
                price=entry['price'],
                volume_entered=entry['volEntered'],
                volume_remaining=entry['volRemaining'],
                minimum_volume=entry['minVolume'],
                order_issue_date=wintime_to_datetime(entry['issueDate']),
                order_duration=entry['duration'],
                order_range=entry['range'],
                generated_at=generated_at,
            )
            order_list.add_order(order)

    # BUG FIX: was ``len(order_list) is 0`` -- ``is`` tests object
    # identity, which only happens to work for small ints as a CPython
    # implementation detail. Use equality instead.
    if len(order_list) == 0:
        # There were no orders for this item+region combo.
        order_list.set_empty_region(region_id, type_id, generated_at)

    return encode_to_json(order_list)
def test_entry_counting(self):
    """
    Test the various history counting methods.
    """
    def make_entry(type_id, region_id):
        # Only the type/region pair matters for counting; the rest of
        # the fields are filler.
        return MarketHistoryEntry(
            type_id=type_id,
            region_id=region_id,
            historical_date=now_dtime_in_utc(),
            num_orders=5,
            low_price=5.0,
            high_price=10.5,
            average_price=7.0,
            total_quantity=200,
            generated_at=now_dtime_in_utc(),
        )

    history_list = MarketHistoryList()
    # There are no history entries yet.
    self.assertEqual(0, len(history_list))

    history_list.add_entry(make_entry(2413387906, 10000068))
    # Just added one.
    self.assertEqual(1, len(history_list))

    # A different item type in the same region still increments the count.
    history_list.add_entry(make_entry(2413387905, 10000068))
    self.assertEqual(2, len(history_list))

    # So does the same item type in another region.
    history_list.add_entry(make_entry(2413387905, 10000067))
    self.assertEqual(3, len(history_list))
def serialize_history(doc_dict, region_data):
    """
    Serializes a GetOldPriceHistory cache file's contents.

    :param dict doc_dict: The parsed cache document in dict form.
    :param region_data: Sequence whose third and fourth entries are the
        region ID and type ID this cache file was pulled for.
    :rtype: str
    :returns: The UUDIF serialized JSON message.
    """
    hist_list = MarketHistoryList(
        order_generator=ORDER_GENERATOR,
        upload_keys=UPLOAD_KEYS,
    )
    # timezone-aware datetime.
    generated_at = now_dtime_in_utc()
    region_id = region_data[2]
    type_id = region_data[3]

    # 'lret' holds raw per-day history dicts straight out of the cache file.
    for hist_item in doc_dict['lret']:
        entry = MarketHistoryEntry(
            type_id=type_id,
            region_id=region_id,
            historical_date=wintime_to_datetime(hist_item['historyDate']),
            num_orders=hist_item['orders'],
            low_price=hist_item['lowPrice'],
            high_price=hist_item['highPrice'],
            average_price=hist_item['avgPrice'],
            total_quantity=hist_item['volume'],
            generated_at=generated_at,
        )
        hist_list.add_entry(entry)

    # BUG FIX: was ``len(hist_list) is 0`` -- ``is`` compares object
    # identity, which only happens to work for small ints on CPython.
    # Use equality instead.
    if len(hist_list) == 0:
        # There were no history entries for this item+region combo.
        hist_list.set_empty_region(region_id, type_id, generated_at)

    return encode_to_json(hist_list)
def test_order_counting(self):
    """
    Test the various order counting methods.
    """
    def make_order(order_id, region_id, type_id):
        # Only the IDs matter for counting; everything else is filler.
        return MarketOrder(
            order_id=order_id,
            is_bid=True,
            region_id=region_id,
            solar_system_id=30005316,
            station_id=60011521,
            type_id=type_id,
            price=52875,
            volume_entered=10,
            volume_remaining=4,
            minimum_volume=1,
            order_issue_date=now_dtime_in_utc(),
            order_duration=90,
            order_range=5,
            generated_at=now_dtime_in_utc(),
        )

    order_list = MarketOrderList()
    # There should be no orders so far.
    self.assertEqual(0, len(order_list))

    order_list.add_order(make_order(2413387906, 10000068, 10000068))
    # Added one order.
    self.assertEqual(1, len(order_list))

    # Adding a different item in the same region.
    order_list.add_order(make_order(2413387907, 10000068, 10000067))
    self.assertEqual(2, len(order_list))

    # Adding an item to a different region.
    order_list.add_order(make_order(2413387907, 10000067, 10000067))
    self.assertEqual(3, len(order_list))

    # Iterating over a MarketOrderList must yield the per-region
    # group instances.
    for group in order_list:
        self.assertIsInstance(group, MarketItemsInRegionList)
# NOTE(review): fragment of a history-processing routine -- it relies on
# ``market_list``, ``msgKey`` and ``now_dtime_in_utc`` being defined by
# enclosing code not visible here. Nesting below is a best-effort
# reconstruction from a whitespace-mangled source; verify against history.
data = {}
rowCount = 0
encodedData = {}
decodedData = {}
statsData = []
uniqueKey = ""
rows = []
regionID = 0
checkHash = 0
# Stat marker 4 -- presumably "history message received"; confirm against
# the stats consumer.
row = (4, )
statsData.append(row)
for history in market_list.get_all_entries_ungrouped():
    rowCount = rowCount + 1
    # Process the history rows
    # str() of an aware datetime ends in "+00:00"; split it off so only
    # the bare timestamp remains.
    date = str(history.historical_date).split("+", 1)[0]
    todayDate = now_dtime_in_utc().date()
    theDate = history.historical_date.date()
    generatedAt = str(history.generated_at).split("+", 1)[0]
    # Composite "<region>-<type>" key used later as the DB row id.
    uniqueKey = str(history.region_id) + "-" + str(history.type_id)
    regionID = history.region_id
    typeID = history.type_id
    # Skip today's data point -- presumably still incomplete; confirm.
    if todayDate != theDate:
        # clip the high and low if it's an order of magnitude too high or low to cut out the idiots
        if history.high_price > (history.average_price * 10):
            history.high_price = history.average_price * 10
        if history.low_price < (history.average_price / 10):
            history.low_price = history.average_price / 10
        data[date] = [
            history.num_orders, history.low_price, history.high_price,
            history.average_price, history.total_quantity, msgKey
        ]
# NOTE(review): fragment of a history-processing routine (truncated
# mid-flow) -- relies on ``market_list``, ``msgKey`` and
# ``now_dtime_in_utc`` from enclosing code not visible here. Nesting is a
# best-effort reconstruction from a whitespace-mangled source.
data = {}
rowCount = 0
encodedData = {}
decodedData = {}
statsData = []
uniqueKey = ""
rows = []
regionID = 0
checkHash = 0
# Stat marker 4 -- presumably "history message received"; confirm against
# the stats consumer.
row = (4,)
statsData.append(row)
for history in market_list.get_all_entries_ungrouped():
    rowCount = rowCount+1
    # Process the history rows
    # str() of an aware datetime ends in "+00:00"; strip that suffix.
    date = str(history.historical_date).split("+", 1)[0]
    todayDate = now_dtime_in_utc().date()
    theDate = history.historical_date.date()
    generatedAt = str(history.generated_at).split("+", 1)[0]
    # Composite "<region>-<type>" key used below as the DB row id.
    uniqueKey = str(history.region_id)+"-"+str(history.type_id)
    regionID = history.region_id
    typeID = history.type_id
    # Skip today's data point -- presumably still incomplete; confirm.
    if todayDate!=theDate:
        # clip the high and low if it's an order of magnitude too high or low to cut out the idiots
        if history.high_price > (history.average_price*10):
            history.high_price = history.average_price*10
        if history.low_price < (history.average_price/10):
            history.low_price = history.average_price/10
        data[date] = [history.num_orders, history.low_price, history.high_price,
                      history.average_price, history.total_quantity, msgKey]
# regionID is only non-zero if the loop saw at least one entry.
if regionID!=0:
    # NOTE(review): string-built SQL; uniqueKey derives from message data,
    # so a parameterized query would be safer.
    sql = "SELECT * FROM market_data_history WHERE id = '%s'" % uniqueKey
def thread(message):
    """ main flow of the app

    Consumes one zlib-compressed UUDIF message: decompresses and parses
    it, then (for order messages) batches inserts/updates against the
    market_data_orders table and records "seen" order IDs, using
    memcached to suppress duplicate work across workers.

    NOTE(review): reconstructed from a whitespace-mangled source; the
    block nesting below is a best-effort reading -- verify against
    version history. Globals assumed from enclosing module: dbcon,
    mcserver, mckey, DEBUG, TERM_OUT, max_order_age, stats().
    """
    #print "<<< entered thread"
    curs = dbcon.cursor()
    # One memcached client per message.
    mc = pylibmc.Client([mcserver], binary=True,
                        behaviors={"tcp_nodelay": True, "ketama": True})
    market_json = zlib.decompress(message)
    market_list = unified.parse_from_json(market_json)
    # Create unique identified for this message if debug is true
    if DEBUG==True:
        msgKey = str(uuid.uuid4())
    else:
        msgKey = ""
    #print "<<- parsed message"
    if market_list.list_type == 'orders':
        #print "* Recieved Orders from: %s" % market_list.order_generator
        insertData = [] # clear the data structures
        updateData = []
        insertSeen = []
        insertEmpty = []
        updateCounter = 0
        duplicateData = 0
        hashList = []
        statsData = []
        # Stat marker 5 -- presumably "orders message received"; confirm.
        row=(5,)
        statsData.append(row)
        sql = ""
        #print "* Recieved Orders from: %s" % market_list.order_generator
        statTypeID = 0
        statRegionID = 0
        oldCounter = 0
        ipHash = None
        # The EMDR relay attaches the uploader's hashed IP as an upload key.
        for uploadKey in market_list.upload_keys:
            if uploadKey['name'] == 'EMDR':
                ipHash = uploadKey['key']
        # empty order (no buy or sell orders)
        if len(market_list)==0:
            for item_region_list in market_list._orders.values():
                if TERM_OUT==True:
                    print "NO ORDERS for region: ", item_region_list.region_id, " item: ", item_region_list.type_id
                # Synthetic row id from the (region, type) pair;
                # abs(hash(...)) is not collision-free, hence the
                # DatabaseError guard below.
                row = (abs(hash(str(item_region_list.region_id)+str(item_region_list.type_id))), item_region_list.type_id, item_region_list.region_id)
                insertEmpty.append(row)
                row = (0,)
                statsData.append(row)
            for components in insertEmpty:
                # Memcached short-circuit: another worker handled this
                # id within the last 2 seconds.
                if mckey + str(components[0]) in mc:
                    continue
                try:
                    # NOTE(review): string-built SQL throughout this
                    # function; parameterized queries would be safer.
                    sql = "SELECT id FROM market_data_orders WHERE id = %s and is_active='f'" % components[0]
                    curs.execute(sql)
                    result = curs.fetchone()
                    if result is not None:
                        continue
                    sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)" % components
                    curs.execute(sql)
                    mc.set(mckey + str(components[0]), True, time=2)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT == True:
                        print "Key collision: ", components
        # at least some results to process
        else:
            for item_region_list in market_list.get_all_order_groups():
                for order in item_region_list:
                    # if order is in the future, skip it
                    if order.generated_at > now_dtime_in_utc():
                        if TERM_OUT==True:
                            print "000 Order has gen_at in the future 000"
                        continue
                    # Strip the "+00:00" suffix from the timestamps.
                    issue_date = str(order.order_issue_date).split("+", 1)[0]
                    generated_at = str(order.generated_at).split("+", 1)[0]
                    # Only process orders younger than max_order_age hours.
                    if (order.generated_at > (now_dtime_in_utc() - datetime.timedelta(hours=max_order_age))):
                        # convert the bid true/false to binary
                        if order.is_bid:
                            bid = True
                        else:
                            bid = False
                        # Check order if "supicious" which is an arbitrary definition. Any orders that are outside 2 standard deviations
                        # of the mean AND where there are more than 5 orders of like type in the region will be flagged. Flagging could
                        # be done on a per-web-request basis but doing it on order entry means you can report a little more on it.
                        # Flags: True = Yes (suspicious), False = No (not suspicious), NULL = not enough information to determine
                        suspicious = False
                        # Refresh the cached per-(type, region) price stats
                        # only when the group changes.
                        if (order.type_id!=statTypeID) or (order.region_id!=statRegionID):
                            gevent.sleep()
                            sql = "SELECT COUNT(id), STDDEV(price), AVG(price) FROM market_data_orders WHERE invtype_id=%s AND mapregion_id=%s" % (order.type_id, order.region_id)
                            statTypeID = order.type_id
                            statRegionID = order.region_id
                            recordCount = None
                            curs.execute(sql)
                            result = curs.fetchone()
                            if result:
                                recordCount = result[0]
                            # NOTE(review): placement of the flagging block
                            # inside this group-change branch is inferred --
                            # confirm against history.
                            if recordCount!=None:
                                stddev = result[1]
                                mean = result[2]
                                if (stddev!=None) and (recordCount > 3):
                                    # if price is outside 1 standard deviation of the mean flag as suspicious
                                    if ((float(order.price - mean)) > stddev) or ((float(order.price - mean)) < stddev):
                                        if bid and (order.price < mean):
                                            suspicious = True
                                        elif not bid and (order.price > mean):
                                            suspicious = True
                        # See if the order already exists, if so, update if needed otherwise insert
                        sql = "SELECT * FROM market_data_orders WHERE id = %s" % order.order_id
                        curs.execute(sql)
                        result = curs.fetchone()
                        if result:
                            # NOTE(review): result[0] compared to an aware
                            # datetime -- presumably the first selected
                            # column is a generated_at timestamp; confirm
                            # against the table schema.
                            if result[0] < order.generated_at:
                                row=(2,)
                                statsData.append(row)
                                row = (order.price, order.volume_remaining, order.generated_at, issue_date, msgKey, suspicious, ipHash, order.order_id)
                                updateData.append(row)
                            else:
                                if TERM_OUT==True:
                                    print "||| Older order, not updated |||"
                        else:
                            # set up the data insert for the specific order
                            row = (1,)
                            statsData.append(row)
                            row = (order.order_id, order.type_id, order.station_id, order.solar_system_id, order.region_id, bid, order.price, order.order_range, order.order_duration, order.volume_remaining, order.volume_entered, order.minimum_volume, order.generated_at, issue_date, msgKey, suspicious, ipHash)
                            insertData.append(row)
                            updateCounter += 1
                            row = (order.order_id, order.type_id, order.region_id)
                            if mckey + str(row[0]) in mc:
                                continue
                            insertSeen.append(row)
                            mc.set(mckey + str(row[0]), True, time=2)
                            stats(order.type_id, order.region_id)
                    else:
                        # Too old to bother with.
                        oldCounter += 1
                        row = (3,)
                        statsData.append(row)
        if TERM_OUT==True:
            if (oldCounter>0):
                print "<<< ", oldCounter, "OLD ORDERS >>>"
        # Flush batched updates in one executemany round trip.
        if len(updateData)>0:
            if TERM_OUT==True:
                print "::: UPDATING "+str(len(updateData))+" ORDERS :::"
            sql = """UPDATE market_data_orders SET price=%s, volume_remaining=%s, generated_at=%s, issue_date=%s, message_key=%s, is_suspicious=%s, uploader_ip_hash=%s, is_active='t' WHERE id = %s"""
            try:
                curs.executemany(sql, updateData)
            except psycopg2.DatabaseError, e:
                if TERM_OUT==True:
                    print e.pgerror
            updateData = []
        # Flush batched inserts.
        if len(insertData)>0:
            # Build our SQL statement
            if TERM_OUT==True:
                print "--- INSERTING "+str(len(insertData))+" ORDERS ---"
            #print insertData
            sql = "INSERT INTO market_data_orders (id, invtype_id, stastation_id, mapsolarsystem_id, mapregion_id,"
            sql += "is_bid, price, order_range, "
            sql += "duration, volume_remaining, volume_entered, minimum_volume, generated_at, "
            sql += "issue_date, message_key, is_suspicious, uploader_ip_hash, is_active) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 't')"
            curs.executemany(sql, insertData)
            insertData = []
        if duplicateData:
            if TERM_OUT==True:
                print "*** DUPLICATES: "+str(duplicateData)+" ORDERS ***"
        # Record newly-seen order IDs.
        if len(insertSeen)>0:
            try:
                sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)"
                curs.executemany(sql, insertSeen)
            except psycopg2.DatabaseError, e:
                if TERM_OUT==True:
                    print e.pgerror
            insertSeen = []
def thread(message):
    """ main flow of the app

    Variant of the order-consumer worker: decompresses and parses one
    UUDIF message, then batches inserts/updates of market orders into
    Postgres and tracks "seen" order IDs via memcached. Differs from the
    sibling version in that the price-stats query filters on
    is_active=true.

    NOTE(review): reconstructed from a whitespace-mangled source; block
    nesting is a best-effort reading -- verify against version history.
    Globals assumed from enclosing module: dbcon, mcserver, mckey, DEBUG,
    TERM_OUT, max_order_age, stats().
    """
    #print "<<< entered thread"
    curs = dbcon.cursor()
    # One memcached client per message.
    mc = pylibmc.Client([mcserver], binary=True, behaviors={
        "tcp_nodelay": True,
        "ketama": True
    })
    market_json = zlib.decompress(message)
    market_list = unified.parse_from_json(market_json)
    # Create unique identified for this message if debug is true
    if DEBUG == True:
        msgKey = str(uuid.uuid4())
    else:
        msgKey = ""
    #print "<<- parsed message"
    if market_list.list_type == 'orders':
        #print "* Recieved Orders from: %s" % market_list.order_generator
        insertData = []  # clear the data structures
        updateData = []
        insertSeen = []
        insertEmpty = []
        updateCounter = 0
        duplicateData = 0
        hashList = []
        statsData = []
        # Stat marker 5 -- presumably "orders message received"; confirm.
        row = (5, )
        statsData.append(row)
        sql = ""
        #print "* Recieved Orders from: %s" % market_list.order_generator
        statTypeID = 0
        statRegionID = 0
        oldCounter = 0
        ipHash = None
        # The EMDR relay attaches the uploader's hashed IP as an upload key.
        for uploadKey in market_list.upload_keys:
            if uploadKey['name'] == 'EMDR':
                ipHash = uploadKey['key']
        # empty order (no buy or sell orders)
        if len(market_list) == 0:
            for item_region_list in market_list._orders.values():
                if TERM_OUT == True:
                    print "NO ORDERS for region: ", item_region_list.region_id, " item: ", item_region_list.type_id
                # Synthetic row id from the (region, type) pair;
                # abs(hash(...)) is not collision-free, hence the
                # DatabaseError guard below.
                row = (abs(
                    hash(
                        str(item_region_list.region_id) +
                        str(item_region_list.type_id))),
                       item_region_list.type_id, item_region_list.region_id)
                insertEmpty.append(row)
                row = (0, )
                statsData.append(row)
            for components in insertEmpty:
                # Memcached short-circuit: another worker handled this
                # id within the last 2 seconds.
                if mckey + str(components[0]) in mc:
                    continue
                try:
                    # NOTE(review): string-built SQL throughout this
                    # function; parameterized queries would be safer.
                    sql = "SELECT id FROM market_data_orders WHERE id = %s and is_active='f'" % components[
                        0]
                    curs.execute(sql)
                    result = curs.fetchone()
                    if result is not None:
                        continue
                    sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)" % components
                    curs.execute(sql)
                    mc.set(mckey + str(components[0]), True, time=2)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT == True:
                        print "Key collision: ", components
        # at least some results to process
        else:
            for item_region_list in market_list.get_all_order_groups():
                for order in item_region_list:
                    # if order is in the future, skip it
                    if order.generated_at > now_dtime_in_utc():
                        if TERM_OUT == True:
                            print "000 Order has gen_at in the future 000"
                        continue
                    # Strip the "+00:00" suffix from the timestamps.
                    issue_date = str(order.order_issue_date).split("+", 1)[0]
                    generated_at = str(order.generated_at).split("+", 1)[0]
                    # Only process orders younger than max_order_age hours.
                    if (order.generated_at >
                        (now_dtime_in_utc() -
                         datetime.timedelta(hours=max_order_age))):
                        # convert the bid true/false to binary
                        if order.is_bid:
                            bid = True
                        else:
                            bid = False
                        # Check order if "supicious" which is an arbitrary definition. Any orders that are outside 2 standard deviations
                        # of the mean AND where there are more than 5 orders of like type in the region will be flagged. Flagging could
                        # be done on a per-web-request basis but doing it on order entry means you can report a little more on it.
                        # Flags: True = Yes (suspicious), False = No (not suspicious), NULL = not enough information to determine
                        suspicious = False
                        # Refresh the cached per-(type, region) price stats
                        # only when the group changes.
                        if (order.type_id != statTypeID) or (order.region_id != statRegionID):
                            gevent.sleep()
                            sql = "SELECT COUNT(id), STDDEV(price), AVG(price) FROM market_data_orders WHERE invtype_id=%s AND mapregion_id=%s AND is_active=true" % (
                                order.type_id, order.region_id)
                            statTypeID = order.type_id
                            statRegionID = order.region_id
                            recordCount = None
                            curs.execute(sql)
                            result = curs.fetchone()
                            if result:
                                recordCount = result[0]
                            # NOTE(review): placement of the flagging block
                            # inside this group-change branch is inferred --
                            # confirm against history.
                            if recordCount != None:
                                stddev = result[1]
                                mean = result[2]
                                if (stddev != None) and (recordCount > 3):
                                    # if price is outside 1 standard deviation of the mean flag as suspicious
                                    if ((float(order.price - mean)) > stddev
                                        ) or ((float(order.price - mean)) < stddev):
                                        if bid and (order.price < mean):
                                            suspicious = True
                                        elif not bid and (order.price > mean):
                                            suspicious = True
                        # See if the order already exists, if so, update if needed otherwise insert
                        sql = "SELECT * FROM market_data_orders WHERE id = %s" % order.order_id
                        curs.execute(sql)
                        result = curs.fetchone()
                        if result:
                            # NOTE(review): result[0] compared to an aware
                            # datetime -- presumably the first selected
                            # column is a generated_at timestamp; confirm
                            # against the table schema.
                            if result[0] < order.generated_at:
                                row = (2, )
                                statsData.append(row)
                                row = (order.price, order.volume_remaining,
                                       order.generated_at, issue_date, msgKey,
                                       suspicious, ipHash, order.order_id)
                                updateData.append(row)
                            else:
                                if TERM_OUT == True:
                                    print "||| Older order, not updated |||"
                        else:
                            # set up the data insert for the specific order
                            row = (1, )
                            statsData.append(row)
                            row = (order.order_id, order.type_id,
                                   order.station_id, order.solar_system_id,
                                   order.region_id, bid, order.price,
                                   order.order_range, order.order_duration,
                                   order.volume_remaining,
                                   order.volume_entered, order.minimum_volume,
                                   order.generated_at, issue_date, msgKey,
                                   suspicious, ipHash)
                            insertData.append(row)
                            updateCounter += 1
                            row = (order.order_id, order.type_id,
                                   order.region_id)
                            if mckey + str(row[0]) in mc:
                                continue
                            insertSeen.append(row)
                            mc.set(mckey + str(row[0]), True, time=2)
                            stats(order.type_id, order.region_id)
                    else:
                        # Too old to bother with.
                        oldCounter += 1
                        row = (3, )
                        statsData.append(row)
        if TERM_OUT == True:
            if (oldCounter > 0):
                print "<<< ", oldCounter, "OLD ORDERS >>>"
        # Flush batched updates in one executemany round trip.
        if len(updateData) > 0:
            if TERM_OUT == True:
                print "::: UPDATING " + str(
                    len(updateData)) + " ORDERS :::"
            sql = """UPDATE market_data_orders SET price=%s, volume_remaining=%s, generated_at=%s, issue_date=%s, message_key=%s, is_suspicious=%s, uploader_ip_hash=%s, is_active='t' WHERE id = %s"""
            try:
                curs.executemany(sql, updateData)
            except psycopg2.DatabaseError, e:
                if TERM_OUT == True:
                    print e.pgerror
            updateData = []
        # Flush batched inserts.
        if len(insertData) > 0:
            # Build our SQL statement
            if TERM_OUT == True:
                print "--- INSERTING " + str(
                    len(insertData)) + " ORDERS ---"
            #print insertData
            sql = "INSERT INTO market_data_orders (id, invtype_id, stastation_id, mapsolarsystem_id, mapregion_id,"
            sql += "is_bid, price, order_range, "
            sql += "duration, volume_remaining, volume_entered, minimum_volume, generated_at, "
            sql += "issue_date, message_key, is_suspicious, uploader_ip_hash, is_active) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 't')"
            curs.executemany(sql, insertData)
            insertData = []
        if duplicateData:
            if TERM_OUT == True:
                print "*** DUPLICATES: " + str(
                    duplicateData) + " ORDERS ***"
        # Record newly-seen order IDs.
        if len(insertSeen) > 0:
            try:
                sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)"
                curs.executemany(sql, insertSeen)
            except psycopg2.DatabaseError, e:
                if TERM_OUT == True:
                    print e.pgerror
            insertSeen = []
def setUp(self):
    """
    Pre-registers one fixture on each list and constructs a second of
    each kind, left un-added, for individual tests to insert.
    """
    self.order_list = MarketOrderList()
    order1_kwargs = dict(
        order_id=2413387906, is_bid=True,
        region_id=10000068, solar_system_id=30005316,
        station_id=60011521, type_id=10000068,
        price=52875, volume_entered=10, volume_remaining=4,
        minimum_volume=1,
        order_issue_date=now_dtime_in_utc(),
        order_duration=90, order_range=5,
        generated_at=now_dtime_in_utc(),
    )
    self.order1 = MarketOrder(**order1_kwargs)
    self.order_list.add_order(self.order1)

    # This order isn't added, but it's here for the test to add.
    order2_kwargs = dict(
        order_id=1234566, is_bid=False,
        region_id=10000032, solar_system_id=30005312,
        station_id=60011121, type_id=10000067,
        price=52, volume_entered=10, volume_remaining=500,
        minimum_volume=1,
        order_issue_date=now_dtime_in_utc(),
        order_duration=90, order_range=5,
        generated_at=now_dtime_in_utc(),
    )
    self.order2 = MarketOrder(**order2_kwargs)

    self.history = MarketHistoryList()
    history1_kwargs = dict(
        type_id=2413387906, region_id=10000068,
        historical_date=now_dtime_in_utc(),
        num_orders=5,
        low_price=5.0, high_price=10.5, average_price=7.0,
        total_quantity=200,
        generated_at=now_dtime_in_utc(),
    )
    self.history1 = MarketHistoryEntry(**history1_kwargs)
    self.history.add_entry(self.history1)

    # This entry isn't added, but it's here for the test to add.
    history2_kwargs = dict(
        type_id=1413387203, region_id=10000067,
        historical_date=now_dtime_in_utc(),
        num_orders=50,
        low_price=50.0, high_price=100.5, average_price=70.0,
        total_quantity=2000,
        generated_at=now_dtime_in_utc(),
    )
    self.history2 = MarketHistoryEntry(**history2_kwargs)