def test_interval_schema_failure_no_request_end_date(self):
    """The parser raises an exception if a time series is missing an end date."""
    bad_text = """
    {
        "feed": {
            "component": [],
            "requestStartDate": 1547553600000
        },
        "success": true,
        "message": ""
    }
    """
    with self.assertRaises(SchemaValidationFailure):
        p.parse_intervals(bad_text)
def test_interval_schema_failure_bad_date_type_2(self):
    """The parser raises an exception if the timeseries end time is not in epoch format."""
    bad_text = """
    {
        "feed": {
            "component": [],
            "requestStartDate": 1547553600000,
            "requestEndDate": "foo"
        },
        "success": true,
        "message": ""
    }
    """
    with self.assertRaises(SchemaValidationFailure):
        p.parse_intervals(bad_text)
def test_interval_parse(self):
    """The parser returns a chronologically sorted list of Interval tuples for valid data."""
    with open(
            os.path.join(TEST_DATA_DIR, "fixtures",
                         "test_grovestreams_data.json")) as dataset:
        text = dataset.read()
    stream_id, intervals = p.parse_intervals(text)

    self.assertEqual(stream_id, "bb809707-e759-34bd-bb3c-1e13d1ab3b7c")

    # The fixture contains 128 fifteen-minute intervals covering 2019-01-27 UTC.
    expected_kwhs = [0, 1, 2, 3, 4] + 123 * [0]
    actual_kwhs = [ivl.kwh for ivl in intervals]
    self.assertEqual(expected_kwhs, actual_kwhs)

    start_dt = datetime(2019, 1, 27, tzinfo=tzutc())
    expected_starts = [
        start_dt + timedelta(minutes=15 * ii) for ii in range(0, 128)
    ]
    actual_starts = [ivl.start for ivl in intervals]
    self.assertEqual(expected_starts, actual_starts)

    expected_ends = [
        start_dt + timedelta(minutes=15 * ii) for ii in range(1, 129)
    ]
    actual_ends = [ivl.end for ivl in intervals]
    self.assertEqual(expected_ends, actual_ends)
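# Illustrative sketch only: the assertions in test_interval_parse rely on the
# parsed intervals exposing kwh/start/end attributes. The real Interval type
# lives in the parser module; this hypothetical stand-in just documents the
# assumed shape.
from collections import namedtuple

_IntervalSketch = namedtuple("_IntervalSketch", ["start", "end", "kwh"])

# For example, the first interval the fixture should yield:
#     _IntervalSketch(start=datetime(2019, 1, 27, tzinfo=tzutc()),
#                     end=datetime(2019, 1, 27, 0, 15, tzinfo=tzutc()),
#                     kwh=0)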
def test_interval_schema_failure_no_stream_uid(self):
    """The parser raises an exception if the timeseries identifier (streamUid) is missing."""
    bad_text = """
    {
        "feed": {
            "component": [
                {
                    "stream": [
                        {
                            "statistic": [
                                {
                                    "data": [0, 0, 0, 0]
                                }
                            ],
                            "lastValueStartDate": 1548661500000,
                            "lastValueEndDate": 1548662400000
                        }
                    ]
                }
            ],
            "requestEndDate": 1547640000000,
            "requestStartDate": 1547553600000
        }
    }
    """
    with self.assertRaises(SchemaValidationFailure):
        p.parse_intervals(bad_text)
def test_interval_schema_failure_no_last_value_end_date(self):
    """The parser raises an exception if the lastValueEndDate (interval-size) metadata is missing."""
    bad_text = """
    {
        "feed": {
            "component": [
                {
                    "stream": [
                        {
                            "statistic": [
                                {
                                    "data": [0, 0, 0, 0]
                                }
                            ],
                            "lastValueStartDate": 1548661500000,
                            "streamUid": "bb809707-e759-34bd-bb3c-1e13d1ab3b7c"
                        }
                    ]
                }
            ],
            "requestEndDate": 1547640000000,
            "requestStartDate": 1547553600000
        }
    }
    """
    with self.assertRaises(SchemaValidationFailure):
        p.parse_intervals(bad_text)
def test_incoherent_stream_dates(self):
    """The parser raises an exception if there is a conflict between the metadata and interval data."""
    bad_text = """
    {
        "feed": {
            "component": [
                {
                    "stream": [
                        {
                            "statistic": [
                                {
                                    "data": [0, 0, 0, null]
                                }
                            ],
                            "lastValueStartDate": 1548661500000,
                            "lastValueEndDate": 1548662400000,
                            "streamUid": "bb809707-e759-34bd-bb3c-1e13d1ab3b7c"
                        }
                    ]
                }
            ],
            "requestEndDate": 1547640000000,
            "requestStartDate": 1547553600000
        }
    }
    """
    with self.assertRaises(GrovestreamsDataIntegrityException):
        p.parse_intervals(bad_text)
def test_interval_schema_failure_bad_interval_data_type(self):
    """The parser raises an exception if the timeseries data is not of type Option(number)."""
    bad_text = """
    {
        "feed": {
            "component": [
                {
                    "stream": [
                        {
                            "statistic": [
                                {
                                    "data": [0, "bad", 0, 0]
                                }
                            ],
                            "lastValueStartDate": 1548661500000,
                            "lastValueEndDate": 1548662400000,
                            "streamUid": "bb809707-e759-34bd-bb3c-1e13d1ab3b7c"
                        }
                    ]
                }
            ],
            "requestEndDate": 1547640000000,
            "requestStartDate": 1547553600000
        }
    }
    """
    with self.assertRaises(SchemaValidationFailure):
        p.parse_intervals(bad_text)
def test_interval_schema_failure_bad_date_type_4(self):
    """The parser raises an exception if the interval metadata (lastValueEndDate) is not in epoch format."""
    bad_text = """
    {
        "feed": {
            "component": [
                {
                    "stream": [
                        {
                            "statistic": [
                                {
                                    "data": [0, 0, 0, 0]
                                }
                            ],
                            "lastValueStartDate": 1548661500000,
                            "lastValueEndDate": "abcd",
                            "streamUid": "bb809707-e759-34bd-bb3c-1e13d1ab3b7c"
                        }
                    ]
                }
            ],
            "requestEndDate": 1547640000000,
            "requestStartDate": 1547553600000
        }
    }
    """
    with self.assertRaises(SchemaValidationFailure):
        p.parse_intervals(bad_text)
def _gather_interval_data(self, start_dt, end_dt):
    """Fetch and parse interval data for the configured component/stream between start_dt and end_dt."""
    start_ms = dt_to_epoch_ms(start_dt)
    end_ms = dt_to_epoch_ms(end_dt)
    # The feed endpoint takes epoch-millisecond bounds and a JSON-encoded list
    # identifying the component/stream pairs to return.
    params = dict(
        org=self.organization_id,
        startDate=start_ms,
        endDate=end_ms,
        itemsById='[{"compId":"%s","streamId":"%s"}]' %
        (self.component_id, self.stream_id),
    )
    response = self.sess.get(API_BASE + "/feed", params=params)
    if response.status_code != requests.codes.ok:
        log.error(response.text)
        raise ApiError(
            f"Failed to acquire data from the API. "
            f"Status Code: {response.status_code} Message: {response.text}")
    stream_id, intervals = parse_intervals(response.text)
    # Guarantee chronological order for downstream consumers.
    intervals.sort(key=lambda x: x.start)
    return stream_id, intervals
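# A minimal sketch of the epoch-millisecond conversion _gather_interval_data
# depends on. dt_to_epoch_ms is imported from elsewhere; this assumed
# equivalent is shown only to document the expected contract: a timezone-aware
# datetime in, integer milliseconds out, matching the epoch-ms values in the
# fixtures above (e.g. 1547553600000).
def _dt_to_epoch_ms_sketch(dt):
    # datetime.timestamp() yields seconds since the Unix epoch as a float;
    # the Grovestreams feed API takes startDate/endDate in whole milliseconds.
    return int(dt.timestamp() * 1000)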
def test_interval_parse_failure(self):
    """The parser raises an exception when the input is not valid JSON."""
    bad_text = '{"feed": }'
    with self.assertRaises(JsonParseFailure):
        p.parse_intervals(bad_text)
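# Taken together, the three exception types exercised above imply a staged
# validation pipeline inside the parser module. A sketch of that flow (the
# helper names below are hypothetical; only the ordering is asserted by these
# tests):
#
#     def parse_intervals(text):
#         try:
#             payload = json.loads(text)   # malformed input -> JsonParseFailure
#         except ValueError as exc:
#             raise JsonParseFailure(str(exc))
#         _validate_schema(payload)        # wrong shape/types -> SchemaValidationFailure
#         _check_coherence(payload)        # metadata vs. data -> GrovestreamsDataIntegrityException
#         return _extract_stream_id_and_intervals(payload)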