def testSinceAlarmProcessing(self):
    self.process = ProcessTask.Create(self.e)
    self.process.Update(rule_ids=[self.ign_on_alarm.key().id(), self.ign_off_alarm.key().id()])
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.put()

    BATCH_1 = {
        'speed':   [0, 5, 15, 35, 60, 80, 83, 88, 85, 20, 0, 0, 0, 0, 15, 92, 90, 0, 0],
        'ign_on':  [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],  # Ignition on twice
        'ign_off': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0]   # Ignition off twice
    }  # In total, on for 12 data points

    self.__createNewRecords(BATCH_1,
                            first_dt=datetime.now() - timedelta(minutes=5),
                            interval_secs=INTERVAL_SECS)
    self.__runProcessing()

    # Confirm analyzed total on seconds
    a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
    self.assertIsNotNone(a)
    self.assertEqual(a.columnValue('on_secs'), 12 * INTERVAL_SECS)

    self.sp = SensorProcessTask.Get(self.process, self.vehicle_1)
    self.assertEqual(self.sp.status_last_run, PROCESS.OK)
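# Note on testSinceAlarmProcessing (above): the expected 12 "on" data points come from the
# two ignition windows in BATCH_1 -- on at index 1 / off at index 10 (9 intervals), then on
# at index 14 / off at index 17 (3 intervals): (10 - 1) + (17 - 14) == 12, hence the
# assertion that on_secs == 12 * INTERVAL_SECS.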
def testParsingIssues(self):
    # Try to run calculation against nonexistent column
    self.process = ProcessTask.Create(self.e)
    spec = json.dumps({
        'processers': [{
            'calculation': 'MAX({missing_column})',
            'column': 'output',
            'analysis_key_pattern': ANALYSIS_KEY_PATTERN
        }]
    })
    self.process.Update(spec=spec, rule_ids=[self.speeding_alarm.key().id()])
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.put()

    BATCH_1 = {
        'speed': [0 for x in range(10)],
    }

    self.__createNewRecords(BATCH_1, first_dt=datetime.now())
    self.__runProcessing()

    self.sp = SensorProcessTask.Get(self.process, self.vehicle_1)
    self.assertEqual(self.sp.status_last_run, PROCESS.OK)
def testNoDataAlarm(self):
    self.mia_alarm = Rule.Create(self.e)
    self.mia_alarm.Update(name="No Data",
                          sensortype_id=self.spedometer.key().id(),
                          column="speed",
                          trigger=RULE.NO_DATA,
                          duration=5000)  # No data for > 5s
    self.mia_alarm.put()

    self.process = ProcessTask.Create(self.e)
    self.process.Update(spec=None, rule_ids=[self.mia_alarm.key().id()])
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.dt_last_run = datetime.now() - timedelta(minutes=2)
    self.sp.put()

    self.__runProcessing()  # Process last 2 minutes, with no new data, fires alarm
    alarms = Alarm.Fetch(self.vehicle_1, self.mia_alarm)
    # self.assertEqual(len(alarms), 1)

    batch_1 = {'speed': [1, 1, 1, 1, 1]}
    self.__createNewRecords(batch_1, interval_secs=3)
    self.__runProcessing()  # We get data every 3 seconds, so 5-second no-data alarm doesn't fire

    alarms = Alarm.Fetch(self.vehicle_1, self.mia_alarm)
    self.assertEqual(len(alarms), 0)
def testAlarmWithPayment(self):
    # Create smartphone report sensor
    self.smartphone_sensor_type = SensorType.Create(self.e)
    schema = {'agreement': {'unit': '1-5 scale'}}
    self.smartphone_sensor_type.Update(name="Report Sensor", schema=json.dumps(schema))
    self.smartphone_sensor_type.put()

    self.smartphone_sensor = Sensor.Create(self.e, "1000", self.smartphone_sensor_type.key().id())
    self.smartphone_sensor.Update(sensortype_id=self.smartphone_sensor_type.key().id(),
                                  name="Smartphone Reports 1",
                                  contacts={"user": self.owner.key().id()})
    self.smartphone_sensor.put()

    # Create smartphone report rule with payment on any report
    PMNT_AMOUNT = 10.0
    self.any_report_rule = Rule.Create(self.e)
    self.any_report_rule.Update(name="Any Report",
                                sensortype_id=self.smartphone_sensor_type.key().id(),
                                column="agreement",
                                trigger=RULE.ANY_DATA,
                                payment_contacts=["user"],
                                payment_amount=PMNT_AMOUNT,
                                consecutive_limit=RULE.ANY,  # Deactivate immediately (should be == 1)
                                duration=0)
    self.any_report_rule.put()

    self.assertTrue(self.any_report_rule.payments_enabled())

    self.process = ProcessTask.Create(self.e)
    self.process.Update(rule_ids=[self.any_report_rule.key().id()])
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.smartphone_sensor)
    self.sp.put()

    BATCH_SIZE = 3
    BATCH_1 = {
        'agreement': [random.randint(1, 5) for x in range(BATCH_SIZE)],
    }

    self.__createNewRecords(BATCH_1, first_dt=datetime.now(), sensor=self.smartphone_sensor)
    self.__runProcessing()

    # This batch should have fired 3 alarms for any report, and created 3 payments.
    pmnts = Payment.Fetch(self.owner)
    self.assertEqual(len(pmnts), 3)
    total_payments = BATCH_SIZE * PMNT_AMOUNT
    self.assertEqual(total_payments, sum([p.amount for p in pmnts]))
def testGeoFenceAlarm(self):
    self.geosensor = SensorType.Create(self.e)
    schema = {
        'location': {
            'unit': 'deg',
            'label': "Location",
            'role': [COLUMN.LOCATION],
            'type': 'latlng'
        }
    }
    self.geosensor.Update(name="Geo Sensor", schema=json.dumps(schema))
    self.geosensor.put()

    # Create off route alarm
    self.offroute_alarm = Rule.Create(self.e)
    self.offroute_alarm.Update(name="Off Route",
                               sensortype_id=self.spedometer.key().id(),
                               column="location",
                               trigger=RULE.GEOFENCE_OUT,
                               value_complex=json.dumps(DUMMY_GEOFENCE))
    self.offroute_alarm.put()

    self.process = ProcessTask.Create(self.e)
    self.process.Update(rule_ids=[self.offroute_alarm.key().id()])
    self.process.put()

    self.vehicle_2 = Sensor.Create(self.e, TEST_SENSOR_ID, self.geosensor.key().id())
    self.vehicle_2.Update(name="Vehicle Sensor 2")

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_2)
    self.sp.put()

    # Process 8 location datapoints (3 in bounds, 3 out, 2 back in)
    BATCH_1 = {
        'location': ["%s,%s" % (coord[0], coord[1]) for coord in ROUTE_DIVERSION]
    }

    self.__createNewRecords(BATCH_1,
                            first_dt=datetime.now() - timedelta(minutes=20),
                            interval_secs=30)
    self.__runProcessing()

    # Confirm off-route alarm fired upon datapoint 4, and deactivates on 7 (back in fence)
    alarms = Alarm.Fetch(self.vehicle_2, self.offroute_alarm)
    self.assertEqual(len(alarms), 1)
    a = alarms[0]
    first_record_in_alarm = a.first_record
    self.assertEqual(a.duration().seconds, 60)  # 3 datapoints, 30 second gap
    oob_record = ROUTE_DIVERSION[3]
    self.assertEqual(first_record_in_alarm.columnValue('location'),
                     "%s,%s" % (oob_record[0], oob_record[1]))
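# Note on testGeoFenceAlarm (above): the 60-second duration assertion follows from the
# ROUTE_DIVERSION layout -- datapoints 4-6 (indices 3-5) are out of the fence, and at
# interval_secs=30 the first and last out-of-bounds records are 2 * 30 == 60 seconds apart.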
def testAlarmPeriodLimit(self):
    # Create hard braking (boolean) alarm
    self.brake_rule = Rule.Create(self.e)
    self.brake_rule.Update(name="Braking",
                           sensortype_id=self.spedometer.key().id(),
                           column="hard_braking",
                           trigger=RULE.CEILING,
                           consecutive_limit=RULE.ANY,
                           value2=0.0,
                           alert_contacts=["owner"],
                           plimit_type=RULE.HOUR,
                           plimit=1)  # 1 alarm each hour
    self.brake_rule.put()

    self.process = ProcessTask.Create(self.e)
    self.process.Update(rule_ids=[self.brake_rule.key().id()])
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.put()

    # Batch 1 creates 1 alarm in the 11am window, skips a second alarm,
    # and then creates another alarm in the 12pm window.
    start = datetime(2016, 1, 1, 11, 57)  # 11:57am 2016-01-01
    BATCH_1 = {
        # v below should alarm, s should skip (already 1 in same period)
        # | is passing an hour marker (12pm)
        #                   v  s     |  v
        'hard_braking': [0, 1, 1, 0, 1]
    }

    self.__createNewRecords(BATCH_1, first_dt=start, interval_secs=60)
    self.__runProcessing()

    alarms = Alarm.Fetch(self.vehicle_1, self.brake_rule)
    self.assertEqual(len(alarms), 2)  # 1 in each hour
    last_alarm = alarms[0]  # Most recent first
    self.assertEqual(last_alarm.dt_start.hour, 12)
    self.assertEqual(last_alarm.dt_start.minute, 1)

    # Batch 2 fetches the prior 12pm-window alarm, and fails to create a second alarm.
    start = datetime(2016, 1, 1, 12, 2)  # 12:02pm 2016-01-01
    BATCH_2 = {
        #                      s     s
        'hard_braking': [0, 0, 1, 0, 1]
    }

    self.__createNewRecords(BATCH_2, first_dt=start, interval_secs=60)
    self.__runProcessing()

    alarms = Alarm.Fetch(self.vehicle_1, self.brake_rule)
    self.assertEqual(len(alarms), 2)  # still 2, no new alarms created
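# Note on testAlarmPeriodLimit (above): with first_dt at 11:57 and interval_secs=60, the
# BATCH_1 braking events land at 11:58, 11:59 and 12:01. plimit=1 per RULE.HOUR allows the
# 11:58 alarm, suppresses 11:59 (same hour window), and allows 12:01 (new window) -- 2 alarms.
# BATCH_2 starts at 12:02, so both of its braking events fall in the already-used 12pm window.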
def testGeoProcessing(self):
    self.process = ProcessTask.Create(self.e)
    spec = json.dumps({
        'processers': [{
            'calculation': '. + DISTANCE({location})',
            'column': 'total_distance',
            'analysis_key_pattern': ANALYSIS_KEY_PATTERN
        }]
    })
    self.process.Update(spec=spec)
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.put()

    loc = db.GeoPt(1.3, 36)
    MOVE_SIZE = 5  # m
    N_POINTS = 150  # 2 batches in process worker
    DELAY_SECS = 1
    now = datetime.now()

    # Populate dummy data with random moves
    total_distance = 0.0
    locations = []
    last_gp = None
    for x in range(N_POINTS):
        locations.append(str(loc))
        now += timedelta(seconds=DELAY_SECS)
        bearing = random.random() * 180
        loc = tools.geoOffset(loc, bearing, MOVE_SIZE / 1000.)
        if last_gp:
            total_distance += MOVE_SIZE
        last_gp = loc

    BATCH_1 = {'location': locations}

    self.__createNewRecords(BATCH_1, first_dt=datetime.now())
    self.__runProcessing()

    # Confirm analyzed distance
    a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
    self.assertIsNotNone(a)
    # Almost equal because we miss the distance between batches (FIX)
    self.assertAlmostEqual(a.columnValue('total_distance'),
                           total_distance,
                           delta=MOVE_SIZE)
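# Note on testGeoProcessing (above): every point after the first is offset by MOVE_SIZE
# meters, so the locally-computed total_distance is (N_POINTS - 1) * MOVE_SIZE == 745 m.
# The delta of one MOVE_SIZE on assertAlmostEqual tolerates the distance the DISTANCE()
# calculation misses between the worker's two batches (see the FIX comment above).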
def testCrossBatchAlarm(self):
    # TODO: Make this actually test analysis across batches (fetch prior active alarms)
    self.process = ProcessTask.Create(self.e)
    self.process.Update(rule_ids=[self.speeding_alarm.key().id()])
    self.process.put()

    now = datetime.now()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.put()

    BATCH_1 = {'speed': [0, 5, 15, 90, 93, 90]}  # Speeding ends on last data point
    BATCH_2 = {'speed': [70, 50, 0, 0, 50, 90]}  # Second speeding starts on last
    BATCH_3 = {'speed': [91, 85, 60, 1, 0, 0]}   # Second speeding ends on second datapoint

    self.__createNewRecords(BATCH_1, first_dt=now - timedelta(minutes=6))
    self.__createNewRecords(BATCH_2, first_dt=now - timedelta(minutes=4))
    self.__createNewRecords(BATCH_3, first_dt=now - timedelta(minutes=2))
    self.__runProcessing()

    # Confirm 2 alarms, second straddling batch 2 and 3
    alarms = Alarm.Fetch(self.vehicle_1, self.speeding_alarm)
    self.assertEqual(len(alarms), 2)

    # Most recent first
    a2 = alarms[0]
    a1 = alarms[1]
    self.assertEqual(a1.duration().seconds, 6)  # 3 datapoints, 3 second gap
    self.assertEqual(a1.apex, 93)
    self.assertTrue(a2.duration().seconds > 30)  # 2 datapoints, large gap between batch 2 & 3
    self.assertEqual(a2.apex, 91)
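# Note on testCrossBatchAlarm (above): the first speeding alarm covers the last 3 points of
# BATCH_1 (90, 93, 90); at the default 3-second record interval that is 2 gaps == 6 seconds,
# apex 93. The second alarm straddles the last point of BATCH_2 (90) and the first point of
# BATCH_3 (91); those batches start 2 minutes apart, so its duration is well over 30 seconds,
# apex 91.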
def testAlarmBuffer(self):
    # Create hard braking (boolean) alarm
    self.brake_alarm = Rule.Create(self.e)
    self.brake_alarm.Update(name="Braking",
                            sensortype_id=self.spedometer.key().id(),
                            column="hard_braking",
                            trigger=RULE.CEILING,
                            value2=0.0,
                            alert_contacts=["owner"],
                            buffer=30000,  # 30 s
                            duration=0)
    self.brake_alarm.put()

    self.process = ProcessTask.Create(self.e)
    self.process.Update(rule_ids=[self.brake_alarm.key().id()])
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.put()

    BATCH_1 = {
        # v below should alarm, s are skipped since they fall within 30s buffer
        #                      v     s     s  s                          v
        'hard_braking': [0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0]  # Alternating boolean alarms
    }

    self.__createNewRecords(BATCH_1,
                            first_dt=datetime.now() - timedelta(minutes=5),
                            interval_secs=5)
    self.__runProcessing()

    # Confirm braking alarms (2)
    alarms = Alarm.Fetch(self.vehicle_1, self.brake_alarm)
    self.assertEqual(len(alarms), 2)
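# Note on testAlarmBuffer (above): at interval_secs=5 the braking events occur at roughly
# t = 10, 20, 30, 35 and 80 seconds into the batch. The first fires an alarm, the next three
# land inside the 30-second buffer and are skipped, and the final event at t = 80 is well
# outside it and fires the second alarm -- hence the expected count of 2.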
def testGeoRadiusAlarm(self):
    # Create in radius alarm
    self.in_radius_alarm = Rule.Create(self.e)
    self.in_radius_alarm.Update(name="In Town",
                                sensortype_id=self.tracker.key().id(),
                                column="location",
                                trigger=RULE.GEORADIUS_IN,
                                value2=RADIUS,  # m
                                value_complex=json.dumps(RADIUS_CENTER),
                                alert_contacts=["owner"],
                                consecutive_limit=RULE.DISABLED,
                                duration=0)
    self.in_radius_alarm.put()

    self.process = ProcessTask.Create(self.e)
    self.process.Update(rule_ids=[self.in_radius_alarm.key().id()])
    self.process.put()

    self.vehicle_2 = Sensor.Create(self.e, TEST_SENSOR_ID, self.tracker.key().id())
    self.vehicle_2.Update(name="Vehicle Sensor 2")

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_2)
    self.sp.put()

    INTERVAL_SECS = 4
    test_data_start = datetime.now() - timedelta(minutes=20)

    # Process first data points entering radius
    BATCH_1 = {
        'location': ["%s,%s" % (coord[0], coord[1]) for coord in ENTERS_RADIUS]
    }
    last_record = self.__createNewRecords(BATCH_1,
                                          first_dt=test_data_start,
                                          interval_secs=INTERVAL_SECS)
    self.__runProcessing()

    # Confirm in-radius alarm fired upon datapoint 4...
    alarms = Alarm.Fetch(self.vehicle_2, self.in_radius_alarm)
    self.assertEqual(len(alarms), 1)
    a = alarms[0]

    # Process second batch of data points exiting radius
    BATCH_2 = {
        'location': ["%s,%s" % (coord[0], coord[1]) for coord in EXITS_RADIUS]
    }
    self.__createNewRecords(BATCH_2, interval_secs=INTERVAL_SECS)
    self.__runProcessing()

    # Confirm we still just have the single alarm record
    alarms = Alarm.Fetch(self.vehicle_2, self.in_radius_alarm)
    self.assertEqual(len(alarms), 1)
    a = alarms[0]
    duration_td = a.duration()
    self.assertIsNotNone(duration_td)  # 3 datapoints in radius
    print a.json()
def testCeilingAlarmAndStandardProcessing(self):
    self.process = ProcessTask.Create(self.e)
    spec = json.dumps({
        'processers': [{
            'calculation': 'MAX({speed})',
            'column': 'max_speed',
            'analysis_key_pattern': ANALYSIS_KEY_PATTERN
        }, {
            'calculation': '. + SUM({bearing})',
            'column': 'total_bearing',
            'analysis_key_pattern': ANALYSIS_KEY_PATTERN
        }, {
            'calculation': '. + COUNT({bearing})',
            'column': 'count_bearing',
            'analysis_key_pattern': ANALYSIS_KEY_PATTERN
        }, {
            'calculation': '. + COUNT(ALARMS())',
            'column': 'count_alarms',
            'analysis_key_pattern': ANALYSIS_KEY_PATTERN
        }]
    })
    self.process.Update(spec=spec, rule_ids=[self.speeding_alarm.key().id()])
    self.process.put()

    # Apply our process to our sensor
    self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
    self.sp.put()

    BATCH_1 = {
        'speed': [0, 5, 15, 35, 60, 80, 83, 88, 85, 78, 75, 75, 76, 81, 89, 92, 90, 83, 78],  # We speed twice
        'bearing': [0, 0, 0, 0, 5, 3, 3, 3, 4, 5, 0, 0, 0, 0, 1, 1, 2, 3, 2]
    }

    self.__createNewRecords(BATCH_1, first_dt=datetime.now() - timedelta(minutes=5))
    self.__runProcessing()

    # Confirm analyzed max speed
    a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
    self.assertIsNotNone(a)
    self.assertEqual(a.columnValue('max_speed'), max(BATCH_1['speed']))

    # Confirm we counted new alarms in analysis
    # self.assertEqual(a.columnValue('count_alarms'), 2)  # TODO: This fails!

    self.sp = SensorProcessTask.Get(self.process, self.vehicle_1)
    self.assertEqual(self.sp.status_last_run, PROCESS.OK)

    # Confirm speeding alarms (2)
    alarms = Alarm.Fetch(self.vehicle_1, self.speeding_alarm)
    self.assertEqual(len(alarms), 2)

    # Test alarm notifications
    # TODO: Test output of notification (e.g. log messages or contact records)
    a = alarms[0]  # second alarm
    message = a.render_alert_message(recipient=self.owner)
    SPEEDING_ALERT_MESSAGE_RENDERED = "Hello Dan Owner, %s was speeding at 81 at %s" % (
        TEST_SENSOR_ID,
        tools.sdatetime(a.dt_start, fmt="%H:%M", tz="Africa/Nairobi"))
    self.assertEqual(message, SPEEDING_ALERT_MESSAGE_RENDERED)

    BATCH_2 = {'speed': [76, 75, 78, 73, 60], 'bearing': [0, 0, 2, 0, 5]}
    self.__createNewRecords(BATCH_2)
    self.__runProcessing()

    a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
    self.assertEqual(a.columnValue('total_bearing'),
                     sum(BATCH_1['bearing']) + sum(BATCH_2['bearing']))
    self.assertEqual(a.columnValue('count_bearing'),
                     len(BATCH_1['bearing']) + len(BATCH_2['bearing']))
    self.assertEqual(a.columnValue('count_alarms'), 2)
    self.assertEqual(self.sp.status_last_run, PROCESS.OK)