Exemplo n.º 1
0
 def test_simple_history_deserialization(self):
     """
     A minimal, well-formed history message should deserialize into a
     MarketHistoryList containing every row from the payload.
     """
     message_json = """
         {
           "resultType" : "history",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["date","orders","quantity","low","high","average"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : [
                 ["2011-12-03T00:00:00+00:00",40,40,1999,499999.99,35223.50],
                 ["2011-12-02T00:00:00+00:00",83,252,9999,11550,11550]
               ]
             }
           ]
         }
     """
     history_list = unified.parse_from_json(message_json)
     # The parser must hand back the history container type.
     self.assertIsInstance(history_list, MarketHistoryList)
     # Both rows from the single rowset should have been loaded.
     self.assertEqual(2, len(history_list))
Exemplo n.º 2
0
 def test_simple_history_deserialization(self):
     """
     Parsing a basic history payload must yield a MarketHistoryList
     with one entry per input row.
     """
     data = """
         {
           "resultType" : "history",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["date","orders","quantity","low","high","average"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : [
                 ["2011-12-03T00:00:00+00:00",40,40,1999,499999.99,35223.50],
                 ["2011-12-02T00:00:00+00:00",83,252,9999,11550,11550]
               ]
             }
           ]
         }
     """
     result = unified.parse_from_json(data)
     # Expect the dedicated history container, not a generic list.
     self.assertIsInstance(result, MarketHistoryList)
     # The message carries exactly two history rows.
     self.assertEqual(len(result), 2)
Exemplo n.º 3
0
 def test_history_serialization(self):
     """
     Round-trip the sample history fixture: encode, decode, re-encode,
     and require the two encoded strings to be identical.
     """
     # Serialize the fixture; the encoder must hand back a JSON string.
     first_pass = unified.encode_to_json(self.history)
     self.assertIsInstance(first_pass, str)
     # Parse the string again; the result must be a MarketHistoryList
     # equivalent to the original fixture.
     parsed = unified.parse_from_json(first_pass)
     self.assertIsInstance(parsed, MarketHistoryList)
     # Encoding the parsed copy must reproduce the first string exactly.
     second_pass = unified.encode_to_json(parsed)
     self.assertEqual(first_pass, second_pass,
                      "Encoded and re-encoded history don't match.")
Exemplo n.º 4
0
 def test_history_serialization(self):
     """
     Round-trip the sample history: encode to JSON, decode, re-encode,
     and verify the two encoded strings match.
     """
     # Encode the sample history instance.
     encoded_history = unified.encode_to_json(self.history)
     # Should return a string JSON representation. ``basestring`` was
     # removed in Python 3 and would raise NameError there; use ``str``,
     # matching the sibling serialization test in this file.
     self.assertIsInstance(encoded_history, str)
     # De-code the JSON to instantiate a MarketHistoryList instance that
     # should be identical to self.history.
     decoded_list = unified.parse_from_json(encoded_history)
     self.assertIsInstance(decoded_list, MarketHistoryList)
     re_encoded_history = unified.encode_to_json(decoded_list)
     # Re-encode the decoded history. Match the two encoded strings. They
     # should still be the same.
     self.assertEqual(
         encoded_history,
         re_encoded_history,
         "Encoded and re-encoded history don't match."
     )
Exemplo n.º 5
0
 def test_simple_order_deserialization(self):
     """
     A basic orders message — including a null regionID and null
     solarSystemID — should deserialize into a MarketOrderList.
     """
     order_payload = """
         {
           "resultType" : "orders",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:43:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : [
                 [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                 [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                 [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
               ]
             },
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : null,
               "typeID" : 11135,
               "rows" : [
                 [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                 [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                 [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
               ]
             }
           ]
         }
     """
     orders = unified.parse_from_json(order_payload)
     # The parser must recognize the message as an order list.
     self.assertIsInstance(orders, MarketOrderList)
Exemplo n.º 6
0
 def test_simple_order_deserialization(self):
     """
     Deserializing a straightforward orders message must produce a
     MarketOrderList instance.
     """
     data = """
         {
           "resultType" : "orders",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:43:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : [
                 [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                 [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                 [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
               ]
             },
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : null,
               "typeID" : 11135,
               "rows" : [
                 [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                 [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                 [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
               ]
             }
           ]
         }
     """
     # Hand the raw JSON to the unified parser and check the result type.
     parsed_orders = unified.parse_from_json(data)
     self.assertIsInstance(parsed_orders, MarketOrderList)
Exemplo n.º 7
0
 def test_empty_history_reencoding(self):
     """
     Decode a history message whose single rowset has no rows, re-encode
     it, and verify the empty rowset survives the round trip with its
     metadata intact.
     """
     data = """
         {
           "resultType" : "history",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["date","orders","quantity","low","high","average"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : []
             }
           ]
         }
     """
     # Full cycle: JSON -> history list -> JSON -> plain dict.
     history = unified.parse_from_json(data)
     round_tripped = json.loads(unified.encode_to_json(history))
     # The rowset must be preserved even though it holds no rows.
     rowsets = round_tripped['rowsets']
     self.assertEqual(1, len(rowsets))
     empty_rowset = rowsets[0]
     # Rows are empty, but the rowset metadata must all be present.
     self.assertListEqual(empty_rowset['rows'], [])
     self.assertIn('generatedAt', empty_rowset)
     self.assertIn('regionID', empty_rowset)
     self.assertIn('typeID', empty_rowset)
Exemplo n.º 8
0
 def test_empty_history_reencoding(self):
     """
     Uses a repeated encoding-decoding cycle to determine whether we're
     handling empty rows within rowsets correctly.
     """
     data = """
         {
           "resultType" : "history",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["date","orders","quantity","low","high","average"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : []
             }
           ]
         }
     """
     decoded_list = unified.parse_from_json(data)
     re_encoded_list = unified.encode_to_json(decoded_list)
     re_decoded_list = json.loads(re_encoded_list)
     # There should always be one rowset, even if it ends up being empty.
     self.assertEqual(1, len(re_decoded_list['rowsets']))
     first_rowset = re_decoded_list['rowsets'][0]
     # Check for the empty rowsets with all data intact.
     # ``dict.has_key`` was removed in Python 3; the ``in`` operator is
     # equivalent and works on both versions (matching the sibling test).
     self.assertListEqual(first_rowset['rows'], [])
     self.assertTrue('generatedAt' in first_rowset)
     self.assertTrue('regionID' in first_rowset)
     self.assertTrue('typeID' in first_rowset)
Exemplo n.º 9
0
def thread(message):
    """
    Main flow of the app: process one zlib-compressed EMDR market message.

    Decompresses and parses the unified-format JSON payload, then (for
    order messages) inserts new orders, updates existing ones when the
    incoming data is newer, and records "seen" order ids, using a
    short-lived memcached key per order id to suppress duplicate work.

    :param message: zlib-compressed unified-format JSON string.
    """
    #print "<<< entered thread"
    curs = dbcon.cursor()

    # Memcached client; used below as a 2-second dedupe cache keyed on
    # order id (see mc.set(..., time=2) calls).
    mc = pylibmc.Client([mcserver],
                        binary=True,
                        behaviors={
                            "tcp_nodelay": True,
                            "ketama": True
                        })

    market_json = zlib.decompress(message)
    market_list = unified.parse_from_json(market_json)
    # Create unique identifier for this message if debug is true
    if DEBUG == True:
        msgKey = str(uuid.uuid4())
    else:
        msgKey = ""

    #print "<<- parsed message"

    if market_list.list_type == 'orders':
        #print "* Recieved Orders from: %s" % market_list.order_generator
        insertData = []  # clear the data structures
        updateData = []
        insertSeen = []
        insertEmpty = []
        updateCounter = 0
        duplicateData = 0
        hashList = []
        statsData = []
        # NOTE(review): statsData rows look like event codes — 5=message
        # received, 0=empty rowset, 1=insert, 2=update, 3=stale order —
        # TODO confirm against the consumer of statsData.
        row = (5, )
        statsData.append(row)
        sql = ""
        #print "* Recieved Orders from: %s" % market_list.order_generator
        statTypeID = 0
        statRegionID = 0
        oldCounter = 0
        ipHash = None
        # The EMDR relay attaches the uploader's IP hash as an upload key.
        for uploadKey in market_list.upload_keys:
            if uploadKey['name'] == 'EMDR':
                ipHash = uploadKey['key']
        # empty order (no buy or sell orders)
        if len(market_list) == 0:
            for item_region_list in market_list._orders.values():
                if TERM_OUT == True:
                    print "NO ORDERS for region: ", item_region_list.region_id, " item: ", item_region_list.type_id
                # Synthesize a stable id from region+type so the empty
                # observation can still be recorded in seenorders.
                row = (abs(
                    hash(
                        str(item_region_list.region_id) +
                        str(item_region_list.type_id))),
                       item_region_list.type_id, item_region_list.region_id)
                insertEmpty.append(row)
                row = (0, )
                statsData.append(row)

            for components in insertEmpty:
                # Skip ids already handled within the memcached window.
                if mckey + str(components[0]) in mc:
                    continue
                try:
                    # NOTE(review): SQL built via % interpolation; values
                    # here are locally generated ints, but parameterized
                    # queries would be safer throughout.
                    sql = "SELECT id FROM market_data_orders WHERE id = %s and is_active='f'" % components[
                        0]
                    curs.execute(sql)
                    result = curs.fetchone()
                    if result is not None:
                        continue
                    sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)" % components
                    curs.execute(sql)
                    mc.set(mckey + str(components[0]), True, time=2)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT == True:
                        print "Key collision: ", components
        # at least some results to process
        else:
            for item_region_list in market_list.get_all_order_groups():

                for order in item_region_list:
                    # if order is in the future, skip it
                    if order.generated_at > now_dtime_in_utc():
                        if TERM_OUT == True:
                            print "000 Order has gen_at in the future 000"
                        continue
                    # Strip the "+HH:MM" timezone suffix; timestamps are
                    # stored naive.
                    issue_date = str(order.order_issue_date).split("+", 1)[0]
                    generated_at = str(order.generated_at).split("+", 1)[0]
                    if (order.generated_at >
                        (now_dtime_in_utc() -
                         datetime.timedelta(hours=max_order_age))):
                        # convert the bid true/false to binary
                        if order.is_bid:
                            bid = True
                        else:
                            bid = False

                        # Check order if "supicious" which is an arbitrary definition.  Any orders that are outside 2 standard deviations
                        # of the mean AND where there are more than 5 orders of like type in the region will be flagged.  Flagging could
                        # be done on a per-web-request basis but doing it on order entry means you can report a little more on it.
                        # Flags: True = Yes (suspicious), False = No (not suspicious), NULL = not enough information to determine
                        suspicious = False
                        # Only refresh the per-region/type price statistics
                        # when the region or type changes between orders.
                        if (order.type_id != statTypeID) or (order.region_id !=
                                                             statRegionID):
                            gevent.sleep()
                            sql = "SELECT COUNT(id), STDDEV(price), AVG(price) FROM market_data_orders WHERE invtype_id=%s AND mapregion_id=%s AND is_active=true" % (
                                order.type_id, order.region_id)
                            statTypeID = order.type_id
                            statRegionID = order.region_id
                            recordCount = None
                            curs.execute(sql)
                            result = curs.fetchone()
                            if result:
                                recordCount = result[0]
                                if recordCount != None:
                                    stddev = result[1]
                                    mean = result[2]
                                if (stddev != None) and (recordCount > 3):
                                    # if price is outside 1 standard deviation of the mean flag as suspicious
                                    if ((float(order.price - mean)) > stddev
                                        ) or ((float(order.price - mean)) <
                                              stddev):
                                        if bid and (order.price < mean):
                                            suspicious = True
                                        elif not bid and (order.price > mean):
                                            suspicious = True

                        # See if the order already exists, if so, update if needed otherwise insert
                        sql = "SELECT * FROM market_data_orders WHERE id = %s" % order.order_id
                        curs.execute(sql)
                        result = curs.fetchone()
                        if result:
                            if result[0] < order.generated_at:
                                row = (2, )
                                statsData.append(row)
                                row = (order.price, order.volume_remaining,
                                       order.generated_at, issue_date, msgKey,
                                       suspicious, ipHash, order.order_id)
                                updateData.append(row)
                            else:
                                if TERM_OUT == True:
                                    print "||| Older order, not updated |||"
                        else:
                            # set up the data insert for the specific order
                            row = (1, )
                            statsData.append(row)
                            row = (order.order_id, order.type_id,
                                   order.station_id, order.solar_system_id,
                                   order.region_id, bid, order.price,
                                   order.order_range, order.order_duration,
                                   order.volume_remaining,
                                   order.volume_entered, order.minimum_volume,
                                   order.generated_at, issue_date, msgKey,
                                   suspicious, ipHash)
                            insertData.append(row)
                            updateCounter += 1
                        row = (order.order_id, order.type_id, order.region_id)
                        # Memcached dedupe: skip the "seen" insert when this
                        # order id was handled within the last 2 seconds.
                        if mckey + str(row[0]) in mc:
                            continue
                        insertSeen.append(row)
                        mc.set(mckey + str(row[0]), True, time=2)
                        stats(order.type_id, order.region_id)
                    else:
                        # Order is older than max_order_age; only count it.
                        oldCounter += 1
                        row = (3, )
                        statsData.append(row)

            if TERM_OUT == True:
                if (oldCounter > 0):
                    print "<<< ", oldCounter, "OLD ORDERS >>>"

            # Batch-apply all pending updates for this message.
            if len(updateData) > 0:
                if TERM_OUT == True:
                    print "::: UPDATING " + str(
                        len(updateData)) + " ORDERS :::"
                sql = """UPDATE market_data_orders SET price=%s, volume_remaining=%s, generated_at=%s,
                            issue_date=%s, message_key=%s, is_suspicious=%s, uploader_ip_hash=%s, is_active='t' WHERE id = %s"""
                try:
                    curs.executemany(sql, updateData)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT == True:
                        print e.pgerror
                updateData = []

            # Batch-insert all new orders for this message.
            if len(insertData) > 0:
                # Build our SQL statement
                if TERM_OUT == True:
                    print "--- INSERTING " + str(
                        len(insertData)) + " ORDERS ---"
                #print insertData
                sql = "INSERT INTO market_data_orders (id, invtype_id, stastation_id, mapsolarsystem_id, mapregion_id,"
                sql += "is_bid, price, order_range, "
                sql += "duration, volume_remaining, volume_entered, minimum_volume, generated_at, "
                sql += "issue_date, message_key, is_suspicious, uploader_ip_hash, is_active) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 't')"

                curs.executemany(sql, insertData)
                insertData = []

            if duplicateData:
                if TERM_OUT == True:
                    print "*** DUPLICATES: " + str(
                        duplicateData) + " ORDERS ***"

            # Record the ids of every order seen in this message.
            if len(insertSeen) > 0:
                try:
                    sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)"
                    curs.executemany(sql, insertSeen)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT == True:
                        print e.pgerror
                insertSeen = []
Exemplo n.º 10
0
def thread(message):
    """
    Main flow of the app: process one zlib-compressed EMDR market message.

    Decompresses and parses the unified-format JSON payload, then (for
    order messages) inserts new orders, updates existing ones when the
    incoming data is newer, and records "seen" order ids, using a
    short-lived memcached key per order id to suppress duplicate work.

    :param message: zlib-compressed unified-format JSON string.
    """
    #print "<<< entered thread"
    curs = dbcon.cursor()

    # Memcached client; used below as a 2-second dedupe cache keyed on
    # order id (see mc.set(..., time=2) calls).
    mc = pylibmc.Client([mcserver], binary=True, behaviors={"tcp_nodelay": True, "ketama": True})

    market_json = zlib.decompress(message)
    market_list = unified.parse_from_json(market_json)
    # Create unique identifier for this message if debug is true
    if DEBUG==True:
        msgKey = str(uuid.uuid4())
    else:
        msgKey = ""

    #print "<<- parsed message"

    if market_list.list_type == 'orders':
        #print "* Recieved Orders from: %s" % market_list.order_generator
        insertData = []       # clear the data structures
        updateData = []
        insertSeen = []
        insertEmpty = []
        updateCounter = 0
        duplicateData = 0
        hashList = []
        statsData = []
        # NOTE(review): statsData rows look like event codes — 5=message
        # received, 0=empty rowset, 1=insert, 2=update, 3=stale order —
        # TODO confirm against the consumer of statsData.
        row=(5,)
        statsData.append(row)
        sql = ""
        #print "* Recieved Orders from: %s" % market_list.order_generator
        statTypeID = 0
        statRegionID = 0
        oldCounter = 0
        ipHash = None
        # The EMDR relay attaches the uploader's IP hash as an upload key.
        for uploadKey in market_list.upload_keys:
            if uploadKey['name'] == 'EMDR':
                ipHash = uploadKey['key']
        # empty order (no buy or sell orders)
        if len(market_list)==0:
            for item_region_list in market_list._orders.values():
                if TERM_OUT==True:
                    print "NO ORDERS for region: ", item_region_list.region_id, " item: ", item_region_list.type_id
                # Synthesize a stable id from region+type so the empty
                # observation can still be recorded in seenorders.
                row = (abs(hash(str(item_region_list.region_id)+str(item_region_list.type_id))), item_region_list.type_id, item_region_list.region_id)
                insertEmpty.append(row)
                row = (0,)
                statsData.append(row)

            for components in insertEmpty:
                # Skip ids already handled within the memcached window.
                if mckey + str(components[0]) in mc:
                    continue
                try:
                    # NOTE(review): SQL built via % interpolation; values
                    # here are locally generated ints, but parameterized
                    # queries would be safer throughout.
                    sql = "SELECT id FROM market_data_orders WHERE id = %s and is_active='f'" % components[0]
                    curs.execute(sql)
                    result = curs.fetchone()
                    if result is not None:
                        continue
                    sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)" % components
                    curs.execute(sql)
                    mc.set(mckey + str(components[0]), True, time=2)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT == True:
                        print "Key collision: ", components
        # at least some results to process
        else:
            for item_region_list in market_list.get_all_order_groups():

                for order in item_region_list:
                    # if order is in the future, skip it
                    if order.generated_at > now_dtime_in_utc():
                        if TERM_OUT==True:
                            print "000 Order has gen_at in the future 000"
                        continue
                    # Strip the "+HH:MM" timezone suffix; timestamps are
                    # stored naive.
                    issue_date = str(order.order_issue_date).split("+", 1)[0]
                    generated_at = str(order.generated_at).split("+", 1)[0]
                    if (order.generated_at > (now_dtime_in_utc() - datetime.timedelta(hours=max_order_age))):
                        # convert the bid true/false to binary
                        if order.is_bid:
                            bid = True
                        else:
                            bid = False

                        # Check order if "supicious" which is an arbitrary definition.  Any orders that are outside 2 standard deviations
                        # of the mean AND where there are more than 5 orders of like type in the region will be flagged.  Flagging could
                        # be done on a per-web-request basis but doing it on order entry means you can report a little more on it.
                        # Flags: True = Yes (suspicious), False = No (not suspicious), NULL = not enough information to determine
                        suspicious = False
                        # Only refresh the per-region/type price statistics
                        # when the region or type changes between orders.
                        if (order.type_id!=statTypeID) or (order.region_id!=statRegionID):
                            gevent.sleep()
                            sql = "SELECT COUNT(id), STDDEV(price), AVG(price) FROM market_data_orders WHERE invtype_id=%s AND mapregion_id=%s" % (order.type_id, order.region_id)
                            statTypeID = order.type_id
                            statRegionID = order.region_id
                            recordCount = None
                            curs.execute(sql)
                            result = curs.fetchone()
                            if result:
                                recordCount = result[0]
                                if recordCount!=None:
                                    stddev = result[1]
                                    mean = result[2]
                                if (stddev!=None) and (recordCount > 3):
                                    # if price is outside 1 standard deviation of the mean flag as suspicious
                                    if ((float(order.price - mean)) > stddev) or ((float(order.price - mean)) < stddev):
                                        if bid and (order.price < mean):
                                            suspicious = True
                                        elif not bid and (order.price > mean):
                                            suspicious = True

                        # See if the order already exists, if so, update if needed otherwise insert
                        sql = "SELECT * FROM market_data_orders WHERE id = %s" % order.order_id
                        curs.execute(sql)
                        result = curs.fetchone()
                        if result:
                            if result[0] < order.generated_at:
                                row=(2,)
                                statsData.append(row)
                                row = (order.price, order.volume_remaining, order.generated_at, issue_date, msgKey, suspicious, ipHash, order.order_id)
                                updateData.append(row)
                            else:
                                if TERM_OUT==True:
                                    print "||| Older order, not updated |||"
                        else:
                            # set up the data insert for the specific order
                            row = (1,)
                            statsData.append(row)
                            row = (order.order_id, order.type_id, order.station_id, order.solar_system_id,
                                order.region_id, bid, order.price, order.order_range, order.order_duration,
                                order.volume_remaining, order.volume_entered, order.minimum_volume, order.generated_at, issue_date, msgKey, suspicious, ipHash)
                            insertData.append(row)
                            updateCounter += 1
                        row = (order.order_id, order.type_id, order.region_id)
                        # Memcached dedupe: skip the "seen" insert when this
                        # order id was handled within the last 2 seconds.
                        if mckey + str(row[0]) in mc:
                            continue
                        insertSeen.append(row)
                        mc.set(mckey + str(row[0]), True, time=2)
                        stats(order.type_id, order.region_id)
                    else:
                        # Order is older than max_order_age; only count it.
                        oldCounter += 1
                        row = (3,)
                        statsData.append(row)

            if TERM_OUT==True:
                if (oldCounter>0):
                    print "<<< ", oldCounter, "OLD ORDERS >>>"

            # Batch-apply all pending updates for this message.
            if len(updateData)>0:
                if TERM_OUT==True:
                    print "::: UPDATING "+str(len(updateData))+" ORDERS :::"
                sql = """UPDATE market_data_orders SET price=%s, volume_remaining=%s, generated_at=%s,
                            issue_date=%s, message_key=%s, is_suspicious=%s, uploader_ip_hash=%s, is_active='t' WHERE id = %s"""
                try:
                    curs.executemany(sql, updateData)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT==True:
                        print e.pgerror
                updateData = []

            # Batch-insert all new orders for this message.
            if len(insertData)>0:
                # Build our SQL statement
                if TERM_OUT==True:
                    print "--- INSERTING "+str(len(insertData))+" ORDERS ---"
                #print insertData
                sql = "INSERT INTO market_data_orders (id, invtype_id, stastation_id, mapsolarsystem_id, mapregion_id,"
                sql += "is_bid, price, order_range, "
                sql += "duration, volume_remaining, volume_entered, minimum_volume, generated_at, "
                sql += "issue_date, message_key, is_suspicious, uploader_ip_hash, is_active) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 't')"

                curs.executemany(sql, insertData)
                insertData = []

            if duplicateData:
                if TERM_OUT==True:
                    print "*** DUPLICATES: "+str(duplicateData)+" ORDERS ***"

            # Record the ids of every order seen in this message.
            if len(insertSeen)>0:
                try:
                    sql = "INSERT INTO market_data_seenorders (id, type_id, region_id) values (%s, %s, %s)"
                    curs.executemany(sql, insertSeen)
                except psycopg2.DatabaseError, e:
                    if TERM_OUT==True:
                        print e.pgerror
                insertSeen = []
Exemplo n.º 11
0
    def test_orderless_region(self):
        """
        Tests deserializing a region that has no orders, and verifies the
        empty rowset survives a re-encoding round trip intact.
        """
        data = """
            {
              "resultType" : "orders",
              "version" : "0.1alpha",
              "uploadKeys" : [
                { "name" : "emk", "key" : "abc" },
                { "name" : "ec" , "key" : "def" }
              ],
              "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
              "currentTime" : "2011-10-22T15:46:00+00:00",
              "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
              "rowsets" : [
                {
                  "generatedAt" : "2011-10-22T15:43:00+00:00",
                  "regionID" : 10000065,
                  "typeID" : 11134,
                  "rows" : [
                    [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                    [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                    [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
                  ]
                },
                {
                  "generatedAt" : "2011-10-22T15:42:00+00:00",
                  "regionID" : 10000066,
                  "typeID" : 11135,
                  "rows" : []
                }
              ]
            }
        """
        # Parse JSON, spit out an order list.
        decoded_list = unified.parse_from_json(data)
        self.assertIsInstance(decoded_list, MarketOrderList)
        # There should be two item+region combos.
        self.assertEqual(len(list(decoded_list._orders.keys())), 2)
        # Now make sure there are three orders total.
        self.assertEqual(len(decoded_list), 3)
        # These are regionID_itemID. Make sure the keys are set correctly.
        # Sorted-equality is an order-insensitive comparison that works on
        # Python 2 and 3 alike (assertItemsEqual was renamed in Python 3).
        self.assertEqual(sorted(['10000065_11134', '10000066_11135']),
                         sorted(decoded_list._orders.keys()))

        # Re-encode for JSON and do some basic checks for sanity.
        re_encoded_list = unified.encode_to_json(decoded_list)
        # We're back to a dict. Check to make sure our custom JSON encoder
        # didn't butcher the entry-less region (10000066).
        re_decoded_list = json.loads(re_encoded_list)
        self.assertEqual(2, len(re_decoded_list['rowsets']))

        for rowset in re_decoded_list['rowsets']:
            # We only want to check the entry-less rowset with type 11135.
            if rowset['typeID'] != 11135:
                continue

            # BUG FIX: these assertions previously inspected
            # re_decoded_list['rowsets'][0] (the populated rowset) instead
            # of the matched empty rowset, so the empty case was never
            # actually verified. Check the matched rowset itself.
            self.assertListEqual(rowset['rows'], [])
            self.assertTrue('generatedAt' in rowset)
            self.assertTrue('regionID' in rowset)
            self.assertTrue('typeID' in rowset)
Exemplo n.º 12
0
# Minimal EMDR consumer: subscribe to a public relay over ZeroMQ and
# parse each incoming market message.
receiver_uri = 'tcp://relay-linode-atl-1.eve-emdr.com:8050'

# A SUB socket receives the relay's broadcast stream.
context = zmq.Context()
subscriber = context.socket(zmq.SUB)

# Connect to the first publicly available relay.
subscriber.connect(receiver_uri)
# Disable filtering (empty subscription prefix matches every message).
subscriber.setsockopt(zmq.SUBSCRIBE, "")

print("Connected to %s" % receiver_uri)

# Consume the feed forever: each frame is a zlib-compressed
# unified-format JSON market message.
while True:
    # Receive raw market JSON strings.
    market_json = zlib.decompress(subscriber.recv())
    market_list = unified.parse_from_json(market_json)

    # If you want to see the string representation for everything coming
    # down the pipe, this is how.
    #print data

    if market_list.list_type == 'orders':
        # This is a market order message.
        print "* Recieved Orders from: %s" % market_list.order_generator

        for order in market_list:
            # You can mess with the MarketOrder in here.
            pass
    else:
        # This is a history message.
        print "* Received History from: %s" % market_list.history_generator
Exemplo n.º 13
0
def process(message):
  """
  Decompress and parse one raw EMDR message, then mirror its contents
  into the database.

  Order messages are upserted into `marketOrdersMem`; stale orders for
  each (region, type) combination are deleted first. History messages
  are handed off to the `queue_history` queue for separate processing.

  Relies on module-level names defined elsewhere in the file:
  `pysqlpool`, `connection`, `zlib`, `unified`, `queue_history`,
  `gevent`, and `sys`.

  :param message: zlib-compressed unified-format JSON payload (bytes).
  """
  query = pysqlpool.getNewQuery(connection)
  
  market_json = zlib.decompress(message)
  market_data = unified.parse_from_json(market_json)
  insertData = []
  deleteData = []
  
  if market_data.list_type == 'orders':
    orderIDs = []
    typeIDs = []
    if len(market_data) == 0:
      pass
    else:
      stuff = {}
      for region in market_data.get_all_order_groups():
        for order in region:
          # Timezone is silently discarded since it doesn't seem to be used in any messages I've seen
          insertData.append((order.order_id, str(order.generated_at).split("+", 1)[0], str(order.order_issue_date).split("+", 1)[0], order.type_id, round(order.price, 2), order.volume_entered, order.volume_remaining, order.order_range, order.order_duration, order.minimum_volume, int(order.is_bid), order.station_id, order.solar_system_id, order.region_id))
          orderIDs.append(str(int(order.order_id)))  # hacky SQLi protection
          typeIDs.append(str(int(order.type_id)))
        deleteData.append((region.region_id,))
        # NOTE(review): `deleteData`, `orderIDs`, and `typeIDs` accumulate
        # across region iterations and `executeMany` runs on every pass,
        # so earlier regions are re-deleted each time with the combined
        # ID lists — confirm this repetition is intended.
        # NOTE(review): the IN(...) lists are string-built SQL; the int()
        # casts above are the only injection guard — consider placeholders.
        sql = "DELETE FROM `marketOrdersMem` WHERE `regionID` = %s AND `typeID` IN (" + ", ".join(list(set(typeIDs))) + ") AND `orderID` NOT IN (" + ", ".join(orderIDs) + ")"
        query.executeMany(sql, deleteData)

    # This query uses INSERT ... ON DUPLICATE KEY UPDATE syntax. It has a condition to only update the row if the new row's generationDate is newer than the stored generationDate. We don't want to replace our data with older data. We don't use REPLACE because we need to have this condition. Querying the table for existing data is possible for a cleaner statement, but it would probably result in slower inserts.
    sql  = 'INSERT INTO `marketOrdersMem` (`orderID`, `generationDate`, `issueDate`, `typeID`, `price`, `volEntered`, '
    sql += '`volRemaining`, `range`, `duration`, `minVolume`, `bid`, `stationID`, `solarSystemID`, `regionID`) '
    sql += 'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) '
    sql += 'ON DUPLICATE KEY UPDATE '
    sql += '`issueDate`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`issueDate`), `issueDate`), '
    sql += '`typeID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`typeID`), `typeID`), '
    sql += '`price`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`price`), `price`), '
    sql += '`volEntered`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`volEntered`), `volEntered`), '
    sql += '`volRemaining`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`volRemaining`), `volRemaining`), '
    sql += '`range`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`range`), `range`), '
    sql += '`duration`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`duration`), `duration`), '
    sql += '`minVolume`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`minVolume`), `minVolume`), '
    sql += '`bid`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`bid`), `bid`), '
    sql += '`stationID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`stationID`), `stationID`), '
    sql += '`solarSystemID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`solarSystemID`), `solarSystemID`), '
    sql += '`regionID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`regionID`), `regionID`), '
    sql += '`generationDate`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`generationDate`), `generationDate`)'
    query.executeMany(sql, insertData)
    # print("Finished a job of %d market orders" % len(market_data))
  
  elif market_data.list_type == 'history':
    # History rows are queued for a separate consumer instead of being
    # written here; the commented block below is the retired inline path.
    queue_history.put(market_data)
    #pass
    # insertData = []
    # for history in market_data.get_all_entries_ungrouped():
    #   insertData.append((history.type_id, history.region_id, history.historical_date, history.low_price, history.high_price, history.average_price, history.total_quantity, history.num_orders, history.generated_at))

    # sql  = 'INSERT INTO `items_history` (`type_id`, `region_id`, `date`, `price_low`, `price_high`, `price_average`, '
    # sql += '`quantity`, `num_orders`, `created`) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) '
    # sql += 'ON DUPLICATE KEY UPDATE '
    # sql += '`price_low`=VALUES(`price_low`), `price_high`=VALUES(`price_high`), `price_average`=VALUES(`price_average`), '
    # sql += '`quantity`=VALUES(`quantity`), `num_orders`=VALUES(`num_orders`)'
    # query.executeMany(sql, insertData)

  # Yield to other greenlets, then commit and emit a progress dot.
  gevent.sleep()
  pysqlpool.getNewPool().Commit()
  sys.stdout.write(".")
  sys.stdout.flush()
Exemplo n.º 14
0
    def test_orderless_region(self):
        """
        Tests deserializing a region that has no orders.

        Verifies that an empty ``rows`` list survives a full
        parse -> re-encode -> re-parse round trip with its
        ``generatedAt``/``regionID``/``typeID`` metadata intact.
        """
        data = """
            {
              "resultType" : "orders",
              "version" : "0.1alpha",
              "uploadKeys" : [
                { "name" : "emk", "key" : "abc" },
                { "name" : "ec" , "key" : "def" }
              ],
              "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
              "currentTime" : "2011-10-22T15:46:00+00:00",
              "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
              "rowsets" : [
                {
                  "generatedAt" : "2011-10-22T15:43:00+00:00",
                  "regionID" : 10000065,
                  "typeID" : 11134,
                  "rows" : [
                    [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                    [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                    [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
                  ]
                },
                {
                  "generatedAt" : "2011-10-22T15:42:00+00:00",
                  "regionID" : 10000066,
                  "typeID" : 11135,
                  "rows" : []
                }
              ]
            }
        """
        # Parse JSON, spit out an order list.
        decoded_list = unified.parse_from_json(data)
        self.assertIsInstance(decoded_list, MarketOrderList)
        # There should be two item+region combos.
        self.assertEqual(len(decoded_list._orders.keys()), 2)
        # Now make sure there are three.
        self.assertEqual(len(decoded_list), 3)
        # These are regionID_itemID. Make sure the keys are set correctly.
        self.assertItemsEqual(
            ['10000065_11134', '10000066_11135'],
            decoded_list._orders.keys()
        )

        # Re-encode for JSON and do some basic checks for sanity.
        re_encoded_list = unified.encode_to_json(decoded_list)
        # We're back to a dict. Check to make sure our custom JSON encoder
        # didn't butcher the entry-less region (10000066).
        re_decoded_list = json.loads(re_encoded_list)
        self.assertEqual(2, len(re_decoded_list['rowsets']))

        for rowset in re_decoded_list['rowsets']:
            # We only want to check the entry rowset with type 11135.
            if rowset['typeID'] != 11135:
                continue

            # There should always be one rowset, even if it ends up being empty.
            first_rowset = re_decoded_list['rowsets'][0]
            # Check for the empty rowsets with all data intact.
            self.assertListEqual(rowset['rows'], [])
            # Use the `in` operator rather than the deprecated dict.has_key()
            # (removed in Python 3); matches the sibling history test above.
            self.assertTrue('generatedAt' in first_rowset)
            self.assertTrue('regionID' in first_rowset)
            self.assertTrue('typeID' in first_rowset)