Ejemplo n.º 1
0
 def test_order_serialization(self):
     """
     A MarketOrderList must survive an encode -> decode -> encode
     round trip with an identical JSON representation.
     """
     # Serialize the sample order list; the encoder yields a JSON string.
     serialized = unified.encode_to_json(self.order_list)
     self.assertIsInstance(serialized, str)
     # Parsing that JSON should reconstruct a MarketOrderList equivalent
     # to self.order_list.
     parsed = unified.parse_from_json(serialized)
     self.assertIsInstance(parsed, MarketOrderList)
     # A second encoding pass must reproduce the first string exactly.
     reserialized = unified.encode_to_json(parsed)
     self.assertEqual(serialized, reserialized,
                      "Encoded and re-encoded orders don't match.")
Ejemplo n.º 2
0
 def test_history_serialization(self):
     """
     A MarketHistoryList must survive an encode -> decode -> encode
     round trip with an identical JSON representation.
     """
     # Encode the sample history instance.
     encoded_history = unified.encode_to_json(self.history)
     # Should return a string JSON representation. Fixed: ``basestring``
     # is Python 2 only; ``str`` matches the sibling order test.
     self.assertIsInstance(encoded_history, str)
     # De-code the JSON to instantiate a MarketHistoryList instance that
     # should be identical to self.history.
     decoded_list = unified.parse_from_json(encoded_history)
     self.assertIsInstance(decoded_list, MarketHistoryList)
     re_encoded_history = unified.encode_to_json(decoded_list)
     # Re-encode the decoded history. Match the two encoded strings. They
     # should still be the same.
     self.assertEqual(
         encoded_history,
         re_encoded_history,
         "Encoded and re-encoded history don't match."
     )
Ejemplo n.º 3
0
def push_message(parsed_message):
    """
    Spawned as a greenlet to push parsed messages through ZeroMQ.

    :param parsed_message: A parsed market message instance to serialize
        and broadcast to the Announcers.
    """
    try:
        # This will be the representation to send to the Announcers.
        json_str = unified.encode_to_json(parsed_message)
    except TypeError:
        logger.error('Unable to serialize a parsed message.')
        return

    # Push a zlib compressed JSON representation of the message to
    # announcers. Fixed: zlib.compress() requires bytes on Python 3 and
    # encode_to_json returns str, so encode before compressing.
    compressed_msg = zlib.compress(json_str.encode('utf-8'))
    sender.send(compressed_msg)
def serialize_orders(doc_dict, region_data):
    """
    Serializes a GetOrders cache file's contents.

    :param dict doc_dict: The parsed cache document in dict form.
    :param region_data: Indexable cache-key data; index 2 is the region
        ID and index 3 is the type ID the file was generated for.
    :rtype: str
    :returns: The UUDIF serialized JSON message.
    """
    order_list = MarketOrderList(
        order_generator=ORDER_GENERATOR,
        upload_keys=UPLOAD_KEYS,
    )
    # timezone-aware datetime.
    generated_at = now_dtime_in_utc()
    region_id = region_data[2]
    type_id = region_data[3]

    # 'lret' holds rowsets, each a list of raw order dicts.
    for order_item in doc_dict['lret']:
        for entry in order_item:
            order = MarketOrder(
                order_id=entry['orderID'],
                is_bid=entry['bid'],
                region_id=entry['regionID'],
                solar_system_id=entry['solarSystemID'],
                station_id=entry['stationID'],
                type_id=entry['typeID'],
                price=entry['price'],
                volume_entered=entry['volEntered'],
                volume_remaining=entry['volRemaining'],
                minimum_volume=entry['minVolume'],
                order_issue_date=wintime_to_datetime(entry['issueDate']),
                order_duration=entry['duration'],
                order_range=entry['range'],
                generated_at=generated_at,
            )
            order_list.add_order(order)

    # Fixed: 'len(...) is 0' tested identity, not equality -- it only
    # worked via CPython's small-int cache. Compare by value.
    if len(order_list) == 0:
        # There were no orders for this item+region combo.
        order_list.set_empty_region(region_id, type_id, generated_at)

    return encode_to_json(order_list)
def serialize_orders(doc_dict, region_data):
    """
    Serializes a GetOrders cache file's contents.

    :param dict doc_dict: The parsed cache document in dict form.
    :param region_data: Indexable cache-key data; index 2 is the region
        ID and index 3 is the type ID the file was generated for.
    :rtype: str
    :returns: The UUDIF serialized JSON message.
    """
    order_list = MarketOrderList(
        order_generator=ORDER_GENERATOR,
        upload_keys=UPLOAD_KEYS,
    )
    # timezone-aware datetime.
    generated_at = now_dtime_in_utc()
    region_id = region_data[2]
    type_id = region_data[3]

    # 'lret' holds rowsets, each a list of raw order dicts.
    for order_item in doc_dict['lret']:
        for entry in order_item:
            order = MarketOrder(
                order_id=entry['orderID'],
                is_bid=entry['bid'],
                region_id=entry['regionID'],
                solar_system_id=entry['solarSystemID'],
                station_id=entry['stationID'],
                type_id=entry['typeID'],
                price=entry['price'],
                volume_entered=entry['volEntered'],
                volume_remaining=entry['volRemaining'],
                minimum_volume=entry['minVolume'],
                order_issue_date=wintime_to_datetime(entry['issueDate']),
                order_duration=entry['duration'],
                order_range=entry['range'],
                generated_at=generated_at,
            )
            order_list.add_order(order)

    # Fixed: 'len(...) is 0' tested identity, not equality -- it only
    # worked via CPython's small-int cache. Compare by value.
    if len(order_list) == 0:
        # There were no orders for this item+region combo.
        order_list.set_empty_region(region_id, type_id, generated_at)

    return encode_to_json(order_list)
def serialize_history(doc_dict, region_data):
    """
    Serializes a GetOldPriceHistory cache file's contents.

    :param dict doc_dict: The parsed cache document in dict form.
    :param region_data: Indexable cache-key data; index 2 is the region
        ID and index 3 is the type ID the file was generated for.
    :rtype: str
    :returns: The UUDIF serialized JSON message.
    """
    hist_list = MarketHistoryList(
        order_generator=ORDER_GENERATOR,
        upload_keys=UPLOAD_KEYS,
    )
    # timezone-aware datetime.
    generated_at = now_dtime_in_utc()
    region_id = region_data[2]
    type_id = region_data[3]

    # 'lret' holds raw per-day history dicts.
    for hist_item in doc_dict['lret']:
        entry = MarketHistoryEntry(
            type_id=type_id,
            region_id=region_id,
            historical_date=wintime_to_datetime(hist_item['historyDate']),
            num_orders=hist_item['orders'],
            low_price=hist_item['lowPrice'],
            high_price=hist_item['highPrice'],
            average_price=hist_item['avgPrice'],
            total_quantity=hist_item['volume'],
            generated_at=generated_at,
        )
        hist_list.add_entry(entry)

    # Fixed: 'len(...) is 0' tested identity, not equality -- it only
    # worked via CPython's small-int cache. Compare by value.
    if len(hist_list) == 0:
        # There were no orders for this item+region combo.
        hist_list.set_empty_region(region_id, type_id, generated_at)

    return encode_to_json(hist_list)
def serialize_history(doc_dict, region_data):
    """
    Serializes a GetOldPriceHistory cache file's contents.

    :param dict doc_dict: The parsed cache document in dict form.
    :param region_data: Indexable cache-key data; index 2 is the region
        ID and index 3 is the type ID the file was generated for.
    :rtype: str
    :returns: The UUDIF serialized JSON message.
    """
    hist_list = MarketHistoryList(
        order_generator=ORDER_GENERATOR,
        upload_keys=UPLOAD_KEYS,
    )
    # timezone-aware datetime.
    generated_at = now_dtime_in_utc()
    region_id = region_data[2]
    type_id = region_data[3]

    # 'lret' holds raw per-day history dicts.
    for hist_item in doc_dict['lret']:
        entry = MarketHistoryEntry(
            type_id=type_id,
            region_id=region_id,
            historical_date=wintime_to_datetime(hist_item['historyDate']),
            num_orders=hist_item['orders'],
            low_price=hist_item['lowPrice'],
            high_price=hist_item['highPrice'],
            average_price=hist_item['avgPrice'],
            total_quantity=hist_item['volume'],
            generated_at=generated_at,
        )
        hist_list.add_entry(entry)

    # Fixed: 'len(...) is 0' tested identity, not equality -- it only
    # worked via CPython's small-int cache. Compare by value.
    if len(hist_list) == 0:
        # There were no orders for this item+region combo.
        hist_list.set_empty_region(region_id, type_id, generated_at)

    return encode_to_json(hist_list)
Ejemplo n.º 8
0
 def test_empty_history_reencoding(self):
     """
     Uses a repeated encoding-decoding cycle to determine whether we're
     handling empty rows within rowsets correctly.
     """
     data = """
         {
           "resultType" : "history",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["date","orders","quantity","low","high","average"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : []
             }
           ]
         }
     """
     # Round trip: JSON -> MarketHistoryList -> JSON -> plain dict.
     parsed = unified.parse_from_json(data)
     round_tripped = json.loads(unified.encode_to_json(parsed))
     # The empty rowset must survive re-encoding instead of vanishing.
     self.assertEqual(1, len(round_tripped['rowsets']))
     rowset = round_tripped['rowsets'][0]
     # Rows stay empty, but the rowset metadata must remain intact.
     self.assertListEqual(rowset['rows'], [])
     for expected_key in ('generatedAt', 'regionID', 'typeID'):
         self.assertTrue(expected_key in rowset)
Ejemplo n.º 9
0
 def test_empty_history_reencoding(self):
     """
     Uses a repeated encoding-decoding cycle to determine whether we're
     handling empty rows within rowsets correctly.
     """
     data = """
         {
           "resultType" : "history",
           "version" : "0.1alpha",
           "uploadKeys" : [
             { "name" : "emk", "key" : "abc" },
             { "name" : "ec" , "key" : "def" }
           ],
           "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
           "currentTime" : "2011-10-22T15:46:00+00:00",
           "columns" : ["date","orders","quantity","low","high","average"],
           "rowsets" : [
             {
               "generatedAt" : "2011-10-22T15:42:00+00:00",
               "regionID" : 10000065,
               "typeID" : 11134,
               "rows" : []
             }
           ]
         }
     """
     decoded_list = unified.parse_from_json(data)
     re_encoded_list = unified.encode_to_json(decoded_list)
     re_decoded_list = json.loads(re_encoded_list)
     # There should always be one rowset, even if it ends up being empty.
     self.assertEqual(1, len(re_decoded_list['rowsets']))
     first_rowset = re_decoded_list['rowsets'][0]
     # Check for the empty rowsets with all data intact. Fixed:
     # dict.has_key() was removed in Python 3; use the 'in' operator.
     self.assertListEqual(first_rowset['rows'], [])
     self.assertTrue('generatedAt' in first_rowset)
     self.assertTrue('regionID' in first_rowset)
     self.assertTrue('typeID' in first_rowset)
Ejemplo n.º 10
0
    def test_orderless_region(self):
        """
        Tests deserializing a region that has no orders.
        """
        data = """
            {
              "resultType" : "orders",
              "version" : "0.1alpha",
              "uploadKeys" : [
                { "name" : "emk", "key" : "abc" },
                { "name" : "ec" , "key" : "def" }
              ],
              "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
              "currentTime" : "2011-10-22T15:46:00+00:00",
              "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
              "rowsets" : [
                {
                  "generatedAt" : "2011-10-22T15:43:00+00:00",
                  "regionID" : 10000065,
                  "typeID" : 11134,
                  "rows" : [
                    [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                    [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                    [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
                  ]
                },
                {
                  "generatedAt" : "2011-10-22T15:42:00+00:00",
                  "regionID" : 10000066,
                  "typeID" : 11135,
                  "rows" : []
                }
              ]
            }
        """
        # Parse JSON, spit out an order list.
        decoded_list = unified.parse_from_json(data)
        self.assertIsInstance(decoded_list, MarketOrderList)
        # There should be two item+region combos.
        self.assertEqual(len(list(decoded_list._orders.keys())), 2)
        # Now make sure there are three orders in total.
        self.assertEqual(len(decoded_list), 3)
        # These are regionID_itemID. Make sure the keys are set correctly.
        # Fixed: assertItemsEqual is Python 2 only; it was renamed
        # assertCountEqual in Python 3.
        self.assertCountEqual(['10000065_11134', '10000066_11135'],
                              list(decoded_list._orders.keys()))

        # Re-encode for JSON and do some basic checks for sanity.
        re_encoded_list = unified.encode_to_json(decoded_list)
        # We're back to a dict. Check to make sure our custom JSON encoder
        # didn't butcher the entry-less region (10000066).
        re_decoded_list = json.loads(re_encoded_list)
        self.assertEqual(2, len(re_decoded_list['rowsets']))

        for rowset in re_decoded_list['rowsets']:
            # We only want to check the entry rowset with type 11135.
            if rowset['typeID'] != 11135:
                continue

            # The empty rowset must keep its rows list and metadata.
            self.assertListEqual(rowset['rows'], [])
            self.assertTrue('generatedAt' in rowset)
            self.assertTrue('regionID' in rowset)
            self.assertTrue('typeID' in rowset)
Ejemplo n.º 11
0
    def test_orderless_region(self):
        """
        Tests deserializing a region that has no orders.
        """
        data = """
            {
              "resultType" : "orders",
              "version" : "0.1alpha",
              "uploadKeys" : [
                { "name" : "emk", "key" : "abc" },
                { "name" : "ec" , "key" : "def" }
              ],
              "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
              "currentTime" : "2011-10-22T15:46:00+00:00",
              "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
              "rowsets" : [
                {
                  "generatedAt" : "2011-10-22T15:43:00+00:00",
                  "regionID" : 10000065,
                  "typeID" : 11134,
                  "rows" : [
                    [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                    [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                    [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
                  ]
                },
                {
                  "generatedAt" : "2011-10-22T15:42:00+00:00",
                  "regionID" : 10000066,
                  "typeID" : 11135,
                  "rows" : []
                }
              ]
            }
        """
        # Parse JSON, spit out an order list.
        decoded_list = unified.parse_from_json(data)
        self.assertIsInstance(decoded_list, MarketOrderList)
        # There should be two item+region combos.
        self.assertEqual(len(decoded_list._orders.keys()), 2)
        # Now make sure there are three orders in total.
        self.assertEqual(len(decoded_list), 3)
        # These are regionID_itemID. Make sure the keys are set correctly.
        # Fixed: assertItemsEqual is Python 2 only; it was renamed
        # assertCountEqual in Python 3.
        self.assertCountEqual(
            ['10000065_11134', '10000066_11135'],
            decoded_list._orders.keys()
        )

        # Re-encode for JSON and do some basic checks for sanity.
        re_encoded_list = unified.encode_to_json(decoded_list)
        # We're back to a dict. Check to make sure our custom JSON encoder
        # didn't butcher the entry-less region (10000066).
        re_decoded_list = json.loads(re_encoded_list)
        self.assertEqual(2, len(re_decoded_list['rowsets']))

        for rowset in re_decoded_list['rowsets']:
            # We only want to check the entry rowset with type 11135.
            if rowset['typeID'] != 11135:
                continue

            # The empty rowset must keep its rows list and metadata.
            # Fixed: dict.has_key() was removed in Python 3; use 'in'.
            self.assertListEqual(rowset['rows'], [])
            self.assertTrue('generatedAt' in rowset)
            self.assertTrue('regionID' in rowset)
            self.assertTrue('typeID' in rowset)