def test_empty_history_reencoding(self):
    """
    Uses a repeated encoding-decoding cycle to determine whether we're
    handling empty rows within rowsets correctly.
    """
    data = """
        {
          "resultType" : "history",
          "version" : "0.1alpha",
          "uploadKeys" : [
            { "name" : "emk", "key" : "abc" },
            { "name" : "ec" , "key" : "def" }
          ],
          "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
          "currentTime" : "2011-10-22T15:46:00+00:00",
          "columns" : ["date","orders","quantity","low","high","average"],
          "rowsets" : [
            {
              "generatedAt" : "2011-10-22T15:42:00+00:00",
              "regionID" : 10000065,
              "typeID" : 11134,
              "rows" : []
            }
          ]
        }
    """
    # Round-trip: JSON -> list object -> JSON -> plain dict.
    decoded_list = unified.parse_from_json(data)
    re_encoded_list = unified.encode_to_json(decoded_list)
    re_decoded_list = json.loads(re_encoded_list)
    # There should always be one rowset, even if it ends up being empty.
    self.assertEqual(1, len(re_decoded_list['rowsets']))
    first_rowset = re_decoded_list['rowsets'][0]
    # Check for the empty rowset with all metadata intact.
    self.assertListEqual(first_rowset['rows'], [])
    # assertIn reports the container contents on failure, unlike
    # assertTrue('k' in d), which only reports "False is not true".
    self.assertIn('generatedAt', first_rowset)
    self.assertIn('regionID', first_rowset)
    self.assertIn('typeID', first_rowset)
def test_empty_history_reencoding(self):
    """
    Uses a repeated encoding-decoding cycle to determine whether we're
    handling empty rows within rowsets correctly.
    """
    data = """
        {
          "resultType" : "history",
          "version" : "0.1alpha",
          "uploadKeys" : [
            { "name" : "emk", "key" : "abc" },
            { "name" : "ec" , "key" : "def" }
          ],
          "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
          "currentTime" : "2011-10-22T15:46:00+00:00",
          "columns" : ["date","orders","quantity","low","high","average"],
          "rowsets" : [
            {
              "generatedAt" : "2011-10-22T15:42:00+00:00",
              "regionID" : 10000065,
              "typeID" : 11134,
              "rows" : []
            }
          ]
        }
    """
    # Round-trip: JSON -> list object -> JSON -> plain dict.
    decoded_list = unified.parse_from_json(data)
    re_encoded_list = unified.encode_to_json(decoded_list)
    re_decoded_list = json.loads(re_encoded_list)
    # There should always be one rowset, even if it ends up being empty.
    self.assertEqual(1, len(re_decoded_list['rowsets']))
    first_rowset = re_decoded_list['rowsets'][0]
    # Check for the empty rowset with all metadata intact.
    self.assertListEqual(first_rowset['rows'], [])
    # dict.has_key() was removed in Python 3; use membership checks via
    # assertIn, which also gives a useful failure message.
    self.assertIn('generatedAt', first_rowset)
    self.assertIn('regionID', first_rowset)
    self.assertIn('typeID', first_rowset)
def parse_from_json(json_str):
    """
    Given a Unified Uploader message, parse the contents and return a
    MarketOrderList or MarketHistoryList instance.

    :param str json_str: A Unified Uploader message as a JSON string.
    :rtype: MarketOrderList or MarketHistoryList
    :raises: ParseError when invalid JSON or a malformed message is passed in.
    """
    try:
        message_dict = json.loads(json_str)
    except ValueError as exc:
        raise ParseError("Mal-formed JSON input.") from exc

    # False is used as the sentinel (rather than None) so that an explicit
    # ``"uploadKeys": null`` is still rejected by the isinstance check below.
    upload_keys = message_dict.get('uploadKeys', False)
    if upload_keys is False:
        raise ParseError(
            "uploadKeys does not exist. At minimum, an empty array is required."
        )
    elif not isinstance(upload_keys, list):
        raise ParseError("uploadKeys must be an array object.")

    try:
        upload_type = message_dict['resultType']
    except KeyError as exc:
        # Surface a ParseError (like the other validation failures above)
        # instead of leaking a bare KeyError to the caller.
        raise ParseError("resultType key is missing from message.") from exc

    try:
        if upload_type == 'orders':
            return orders.parse_from_dict(message_dict)
        elif upload_type == 'history':
            return history.parse_from_dict(message_dict)
        else:
            raise ParseError(
                'Unified message has unknown upload_type: %s' % upload_type)
    except TypeError as exc:
        # MarketOrder and HistoryEntry both raise TypeError exceptions if
        # invalid input is encountered. Note: exc.message is Python 2 only
        # and would raise AttributeError on Python 3 -- use str(exc).
        raise ParseError(str(exc)) from exc
def test_orderless_region(self):
    """
    Tests deserializing a region that has no orders.
    """
    data = """
        {
          "resultType" : "orders",
          "version" : "0.1alpha",
          "uploadKeys" : [
            { "name" : "emk", "key" : "abc" },
            { "name" : "ec" , "key" : "def" }
          ],
          "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
          "currentTime" : "2011-10-22T15:46:00+00:00",
          "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
          "rowsets" : [
            {
              "generatedAt" : "2011-10-22T15:43:00+00:00",
              "regionID" : 10000065,
              "typeID" : 11134,
              "rows" : [
                [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
              ]
            },
            {
              "generatedAt" : "2011-10-22T15:42:00+00:00",
              "regionID" : 10000066,
              "typeID" : 11135,
              "rows" : []
            }
          ]
        }
    """
    # Parse JSON, spit out an order list.
    decoded_list = unified.parse_from_json(data)
    self.assertIsInstance(decoded_list, MarketOrderList)
    # There should be two item+region combos.
    self.assertEqual(len(list(decoded_list._orders.keys())), 2)
    # Now make sure there are three orders in total.
    self.assertEqual(len(decoded_list), 3)
    # These are regionID_itemID. Make sure the keys are set correctly.
    # assertCountEqual is the Python 3 name for assertItemsEqual, which
    # does not exist in the py3 unittest module.
    self.assertCountEqual(
        ['10000065_11134', '10000066_11135'],
        list(decoded_list._orders.keys()))
    # Re-encode for JSON and do some basic checks for sanity.
    re_encoded_list = unified.encode_to_json(decoded_list)
    # We're back to a dict. Check to make sure our custom JSON encoder
    # didn't butcher the entry-less region (10000066).
    re_decoded_list = json.loads(re_encoded_list)
    self.assertEqual(2, len(re_decoded_list['rowsets']))
    for rowset in re_decoded_list['rowsets']:
        # We only want to check the entry-less rowset with type 11135.
        if rowset['typeID'] != 11135:
            continue
        # The empty rowset must survive re-encoding with all metadata
        # intact. Assert on the matched ``rowset`` -- the original code
        # mistakenly inspected rowsets[0] (typeID 11134) here.
        self.assertListEqual(rowset['rows'], [])
        self.assertIn('generatedAt', rowset)
        self.assertIn('regionID', rowset)
        self.assertIn('typeID', rowset)
def test_orderless_region(self):
    """
    Tests deserializing a region that has no orders.
    """
    data = """
        {
          "resultType" : "orders",
          "version" : "0.1alpha",
          "uploadKeys" : [
            { "name" : "emk", "key" : "abc" },
            { "name" : "ec" , "key" : "def" }
          ],
          "generator" : { "name" : "Yapeal", "version" : "11.335.1737" },
          "currentTime" : "2011-10-22T15:46:00+00:00",
          "columns" : ["price","volRemaining","range","orderID","volEntered","minVolume","bid","issueDate","duration","stationID","solarSystemID"],
          "rowsets" : [
            {
              "generatedAt" : "2011-10-22T15:43:00+00:00",
              "regionID" : 10000065,
              "typeID" : 11134,
              "rows" : [
                [8999,1,32767,2363806077,1,1,false,"2011-12-03T08:10:59+00:00",90,60008692,30005038],
                [11499.99,10,32767,2363915657,10,1,false,"2011-12-03T10:53:26+00:00",90,60006970,null],
                [11500,48,32767,2363413004,50,1,false,"2011-12-02T22:44:01+00:00",90,60006967,30005039]
              ]
            },
            {
              "generatedAt" : "2011-10-22T15:42:00+00:00",
              "regionID" : 10000066,
              "typeID" : 11135,
              "rows" : []
            }
          ]
        }
    """
    # Parse JSON, spit out an order list.
    decoded_list = unified.parse_from_json(data)
    self.assertIsInstance(decoded_list, MarketOrderList)
    # There should be two item+region combos.
    self.assertEqual(len(list(decoded_list._orders.keys())), 2)
    # Now make sure there are three orders in total.
    self.assertEqual(len(decoded_list), 3)
    # These are regionID_itemID. Make sure the keys are set correctly.
    # assertCountEqual is the Python 3 name for assertItemsEqual, which
    # does not exist in the py3 unittest module. keys() is wrapped in
    # list() since py3 returns a view, not a list.
    self.assertCountEqual(
        ['10000065_11134', '10000066_11135'],
        list(decoded_list._orders.keys()))
    # Re-encode for JSON and do some basic checks for sanity.
    re_encoded_list = unified.encode_to_json(decoded_list)
    # We're back to a dict. Check to make sure our custom JSON encoder
    # didn't butcher the entry-less region (10000066).
    re_decoded_list = json.loads(re_encoded_list)
    self.assertEqual(2, len(re_decoded_list['rowsets']))
    for rowset in re_decoded_list['rowsets']:
        # We only want to check the entry-less rowset with type 11135.
        if rowset['typeID'] != 11135:
            continue
        # The empty rowset must survive re-encoding with all metadata
        # intact. Assert on the matched ``rowset`` -- the original code
        # mistakenly inspected rowsets[0] (typeID 11134), and used the
        # Python-2-only dict.has_key() method.
        self.assertListEqual(rowset['rows'], [])
        self.assertIn('generatedAt', rowset)
        self.assertIn('regionID', rowset)
        self.assertIn('typeID', rowset)