def test_data_for_non_existing_sensor(self, client):
    """Messages referencing an unknown sensor code are still parsed into observations."""
    # Start from an empty ingress queue, then post three messages whose
    # sensor code does not exist.
    Message.objects.all().delete()
    payload = json.loads(TEST_POST)
    payload['data'][0]['sensor'] = 'does not exist'
    body = json.dumps(payload)
    for _ in range(3):
        client.post(self.URL, body, **AUTHORIZATION_HEADER,
                    content_type='application/json')
    assert Message.objects.count() == 3

    # Then run the parser
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)

    # All three ingress records must be marked as successfully consumed,
    # and none of them may end up on the failed queue
    assert Message.objects.filter(
        consume_succeeded_at__isnull=False).count() == 3
    assert FailedMessage.objects.count() == 0
    for message in Message.objects.all():
        assert message.consume_started_at is not None
        assert message.consume_succeeded_at is not None

    # Each message must have produced an observation in the database
    assert Observation.objects.all().count() == 3
def test_get_15min_aggregation_timezone_with_both_v1_and_v2_records(self):
    """The 15-minute aggregation endpoint responds with data after ingesting v2 records."""
    # Insert one v2 record for every hour of the day
    for hour in range(24):
        stamp = datetime.now().replace(hour=hour, minute=0, second=0)
        stamp = stamp.astimezone().replace(microsecond=0)
        response = self.client.post(
            self.POST_URL_V2,
            create_new_v2_json(timestamp_str=stamp.isoformat()),
            **POST_AUTHORIZATION_HEADER,
            content_type='application/json')
        self.assertEqual(response.status_code, 200)

    # Then run the parse_ingress script
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)

    # Complete aggregate because the query in the endpoint depends on it
    call_man_command('complete_aggregate', 'continuousaggregate_cmsa15min')

    # The endpoint must respond with at least one aggregated record
    response = self.client.get(self.URL, **GET_AUTHORIZATION_HEADER)
    self.assertEqual(response.status_code, 200)
    self.assertGreater(len(json.loads(response.content)), 0)
def test_post_new_record_with_double_zone(self, client):
    """
    Test posting a new message with a double zone in the count message.

    Verifies that the parsed Observation mirrors the posted payload and
    that one CountAggregate row exists per posted aggregate entry.
    """
    Message.objects.all().delete()
    post_data = json.loads(TEST_POST_DOUBLE_ZONE)
    client.post(self.URL, json.dumps(post_data), **AUTHORIZATION_HEADER,
                content_type='application/json')
    assert Message.objects.count() == 1

    # Then run the parser
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)

    # Check the Observation record
    assert Observation.objects.all().count() == 1
    observation = Observation.objects.get()
    fields_to_check = ('sensor', 'sensor_type', 'sensor_state', 'owner',
                       'supplier', 'purpose', 'latitude', 'longitude',
                       'interval', 'timestamp_message', 'timestamp_start')
    for attr in fields_to_check:
        value = getattr(observation, attr)
        expected = post_data['data'][0][attr]
        # isinstance() is the correct type check here; the original
        # `type(x) is T` comparisons break for subclasses and are
        # flagged by linters.
        if isinstance(value, Decimal):
            assert float(value) == expected
        elif isinstance(value, datetime):
            assert value == dateparser.parse(expected)
        else:
            assert value == expected

    # Check the CountAggregate records
    assert CountAggregate.objects.all().count() == len(
        post_data['data'][0]['aggregate'])
    for count_aggr in CountAggregate.objects.all():
        # Get the post data for this CountAggregate by external id
        # (the rows might not be in the same order as the payload)
        posted_count_aggregate = next(
            (aggregate for aggregate in post_data['data'][0]['aggregate']
             if aggregate['id'] == count_aggr.external_id),
            None)

        # Check whether we actually found the correct posted count aggregate
        assert isinstance(posted_count_aggregate, dict)
        for attr in ('type', 'area', 'count'):
            assert getattr(count_aggr, attr) == posted_count_aggregate[attr]
        assert count_aggr.external_id == posted_count_aggregate['id']
        # In both zone counts the geom is an empty string,
        # so we check whether they are None
        assert count_aggr.geom is None
        assert count_aggr.message == post_data['data'][0]['message']
        assert count_aggr.version == post_data['data'][0]['version']
def test_no_distances_key(self, client):
    """A message whose aggregates lack the 'distances' key still parses."""
    # Strip the 'distances' key from both aggregates of the second record
    payload = json.loads(TEST_POST)
    for idx in (0, 1):
        del payload['data'][1]['aggregate'][idx]['distances']

    Message.objects.all().delete()
    client.post(self.URL, json.dumps(payload), **AUTHORIZATION_HEADER,
                content_type='application/json')
    assert Message.objects.count() == 1

    # Then run the parser
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)
    assert Observation.objects.all().count() == 1
def test_lat_lng_with_many_decimals(self, client):
    """Coordinates with high decimal precision are accepted and parsed."""
    payload = json.loads(TEST_POST)
    lat, lng = 52.3921439524031, 4.885872984800177
    for record in (payload['data'][0], payload['data'][1]):
        record['latitude'] = lat
        record['longitude'] = lng

    Message.objects.all().delete()
    client.post(self.URL, json.dumps(payload), **AUTHORIZATION_HEADER,
                content_type='application/json')
    assert Message.objects.count() == 1

    # Then run the parser
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)
    assert Observation.objects.all().count() == 1
def test_parse_ingress_fail_with_wrong_input(self, client):
    """An ingress record that is not valid JSON moves to the failed queue."""
    # First add an ingress record which is not correct json
    Message.objects.all().delete()
    client.post(self.URL, "NOT JSON", **AUTHORIZATION_HEADER,
                content_type='application/json')
    assert Message.objects.count() == 1

    # Then run the parse_ingress script
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)

    # The record must have moved from the ingress queue to the failed queue
    assert Message.objects.count() == 0
    assert FailedMessage.objects.count() == 1
    for failed in FailedMessage.objects.all():
        assert failed.consume_started_at is not None
        assert failed.consume_failed_at is not None
        assert failed.consume_succeeded_at is None
def test_vanilla(self, client):
    """End-to-end: ingest 5-minute records for two days, parse, aggregate, verify."""
    # Add records every 5 minutes for multiple days
    Message.objects.all().delete()
    test_days = 2
    today = date.today()
    start_date = today - timedelta(days=test_days)
    cursor = datetime(start_date.year, start_date.month, start_date.day)
    end = datetime(today.year, today.month, today.day)
    step = timedelta(minutes=5)
    while cursor < end:
        for sensor_name in self.sensor_names:
            payload = json.loads(TEST_POST)
            payload['data'][0]['sensor'] = sensor_name
            payload['data'][0]['timestamp_start'] = cursor.isoformat()
            client.post(self.URL, json.dumps(payload), **AUTHORIZATION_HEADER,
                        content_type='application/json')
        cursor += step

    # Then run the parse_ingress script
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)

    # Make sure we've got source data
    assert Observation.objects.all().count() > 100

    # Run the aggregator
    call_man_command('complete_aggregate', 'continuousaggregate_cmsa15min')

    # Do we have any records in the continuous aggregate table?
    assert Cmsa15Min.objects.all().count() > 500

    # Take a record in the middle of the data in the continuous aggregate table
    # and check whether the record is made up of exactly 3 messages
    # (one every 5 min)
    last_record = Cmsa15Min.objects\
        .filter(sensor=self.sensor_names[0])\
        .filter(timestamp_rounded__gte=(today - timedelta(days=1)).isoformat())\
        .order_by('timestamp_rounded')\
        .first()
    assert last_record.basedonxmessages == 3
def test_parse_ingress(self, client):
    """Three posted ingress messages are each parsed into an observation."""
    # First add a couple ingress records
    Message.objects.all().delete()
    for _ in range(3):
        client.post(self.URL, TEST_POST, **AUTHORIZATION_HEADER,
                    content_type='application/json')
    assert Message.objects.count() == 3

    # Then run the parse_ingress script
    parser = TelcameraParser()
    parser.consume(end_at_empty_queue=True)

    # All three ingress records must be marked as successfully consumed
    assert Message.objects.filter(
        consume_succeeded_at__isnull=False).count() == 3
    assert FailedMessage.objects.count() == 0
    for message in Message.objects.all():
        assert message.consume_started_at is not None
        assert message.consume_succeeded_at is not None

    # Test whether the records were added to the database
    assert Observation.objects.all().count() == 3