    def testSinceAlarmProcessing(self):
        self.process = ProcessTask.Create(self.e)
        self.process.Update(rule_ids=[self.ign_on_alarm.key().id(), self.ign_off_alarm.key().id()])
        self.process.put()

        # Apply our process to our sensor
        self.sp = SensorProcessTask.Create(self.e, self.process, self.vehicle_1)
        self.sp.put()

        BATCH_1 = {
            'speed':   [0,  5,  15, 35, 60, 80, 83, 88, 85, 20, 0,  0,  0,  0,  15, 92, 90, 0,  0],
            'ign_on':  [0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0], # Ignition on twice
            'ign_off': [0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  1,  0] # Ignition off twice
        }
        # In total, on for 12 data points
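        # (ign_on at index 1 until ign_off at index 10 = 9 intervals, plus
        # ign_on at index 14 until ign_off at index 17 = 3 intervals = 12)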
        self.__createNewRecords(BATCH_1, first_dt=datetime.now() - timedelta(minutes=5), interval_secs=INTERVAL_SECS)
        self.__runProcessing()

        # Confirm analyzed total on seconds
        a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
        self.assertIsNotNone(a)
        self.assertEqual(a.columnValue('on_secs'), 12 * INTERVAL_SECS)

        self.sp = SensorProcessTask.Get(self.process, self.vehicle_1)
        self.assertEqual(self.sp.status_last_run, PROCESS.OK)

    def testGeoProcessing(self):
        self.process = ProcessTask.Create(self.e)
        spec = json.dumps({
            'processers': [{
                'calculation': '. + DISTANCE({location})',
                'column': 'total_distance',
                'analysis_key_pattern': ANALYSIS_KEY_PATTERN
            }]
        })
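        # The spec accumulates into the existing column value: '.' appears to
        # reference the column's current value, and DISTANCE({location}) the
        # distance covered by the batch's location records (see the
        # total_distance assertion below).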
        self.process.Update(spec=spec)
        self.process.put()

        # Apply our process to our sensor
        self.sp = SensorProcessTask.Create(self.e, self.process,
                                           self.vehicle_1)
        self.sp.put()

        loc = db.GeoPt(1.3, 36)
        MOVE_SIZE = 5  # m
        N_POINTS = 150  # 2 batches in process worker
        DELAY_SECS = 1
        now = datetime.now()

        # Populate dummy data with random moves
        total_distance = 0.0
        locations = []
        last_gp = None
        for x in range(N_POINTS):
            locations.append(str(loc))
            now += timedelta(seconds=DELAY_SECS)
            bearing = random.random() * 180
            loc = tools.geoOffset(loc, bearing, MOVE_SIZE / 1000.)
            if last_gp:
                total_distance += MOVE_SIZE
            last_gp = loc
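        # N_POINTS locations yield N_POINTS - 1 segments of MOVE_SIZE metres
        # each, so total_distance ends up as (N_POINTS - 1) * MOVE_SIZE.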
        BATCH_1 = {'location': locations}
        self.__createNewRecords(BATCH_1, first_dt=datetime.now())
        self.__runProcessing()

        # Confirm analyzed distance
        a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
        self.assertIsNotNone(a)
        # Almost equal because we miss the distance between batches (FIX)
        self.assertAlmostEqual(a.columnValue('total_distance'),
                               total_distance,
                               delta=MOVE_SIZE)

    def testCeilingAlarmAndStandardProcessing(self):
        self.process = ProcessTask.Create(self.e)
        spec = json.dumps({
            'processers': [{
                'calculation': 'MAX({speed})',
                'column': 'max_speed',
                'analysis_key_pattern': ANALYSIS_KEY_PATTERN
            }, {
                'calculation': '. + SUM({bearing})',
                'column': 'total_bearing',
                'analysis_key_pattern': ANALYSIS_KEY_PATTERN
            }, {
                'calculation': '. + COUNT({bearing})',
                'column': 'count_bearing',
                'analysis_key_pattern': ANALYSIS_KEY_PATTERN
            }, {
                'calculation': '. + COUNT(ALARMS())',
                'column': 'count_alarms',
                'analysis_key_pattern': ANALYSIS_KEY_PATTERN
            }]
        })
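        # max_speed takes the per-run maximum; the '. +' columns (total_bearing,
        # count_bearing, count_alarms) should accumulate across runs, as the
        # BATCH_1 + BATCH_2 assertions below verify.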
        self.process.Update(spec=spec,
                            rule_ids=[self.speeding_alarm.key().id()])
        self.process.put()

        # Apply our process to our sensor
        self.sp = SensorProcessTask.Create(self.e, self.process,
                                           self.vehicle_1)
        self.sp.put()

        BATCH_1 = {
            'speed': [
                0, 5, 15, 35, 60, 80, 83, 88, 85, 78, 75, 75, 76, 81, 89, 92,
                90, 83, 78
            ],  # We speed twice
            'bearing':
            [0, 0, 0, 0, 5, 3, 3, 3, 4, 5, 0, 0, 0, 0, 1, 1, 2, 3, 2]
        }
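        # The speeding_alarm threshold (set up elsewhere in this suite) appears
        # to be speeds above 80, giving two speeding runs in BATCH_1:
        # indices 6-8 (83, 88, 85) and indices 13-17 (81, 89, 92, 90, 83).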
        self.__createNewRecords(BATCH_1,
                                first_dt=datetime.now() - timedelta(minutes=5))
        self.__runProcessing()

        # Confirm analyzed max speed
        a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
        self.assertIsNotNone(a)
        self.assertEqual(a.columnValue('max_speed'), max(BATCH_1['speed']))

        # Confirm we counted new alarms in analysis
        # self.assertEqual(a.columnValue('count_alarms'), 2) TODO: This fails!
        self.sp = SensorProcessTask.Get(self.process, self.vehicle_1)
        self.assertEqual(self.sp.status_last_run, PROCESS.OK)

        # Confirm speeding alarms (2)
        alarms = Alarm.Fetch(self.vehicle_1, self.speeding_alarm)
        self.assertEqual(len(alarms), 2)

        # Test alarm notifications
        # TODO: Test output of notification (e.g. log messages or contact records)
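        # Alarm.Fetch appears to return newest first, so alarms[0] is the second
        # speeding event, which started at 81 (index 13); the timestamp renders
        # in the Africa/Nairobi timezone per the alert template.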
        a = alarms[0]  # second alarm
        message = a.render_alert_message(recipient=self.owner)
        SPEEDING_ALERT_MESSAGE_RENDERED = "Hello Dan Owner, %s was speeding at 81 at %s" % (
            TEST_SENSOR_ID,
            tools.sdatetime(a.dt_start, fmt="%H:%M", tz="Africa/Nairobi"))
        self.assertEqual(message, SPEEDING_ALERT_MESSAGE_RENDERED)

        BATCH_2 = {'speed': [76, 75, 78, 73, 60], 'bearing': [0, 0, 2, 0, 5]}
        self.__createNewRecords(BATCH_2)
        self.__runProcessing()

        a = Analysis.GetOrCreate(self.vehicle_1, ANALYSIS_KEY_PATTERN)
        self.assertEqual(a.columnValue('total_bearing'),
                         sum(BATCH_1['bearing']) + sum(BATCH_2['bearing']))
        self.assertEqual(a.columnValue('count_bearing'),
                         len(BATCH_1['bearing']) + len(BATCH_2['bearing']))
        self.assertEqual(a.columnValue('count_alarms'), 2)
        # Re-fetch the task so we check the status of the second run, not the first
        self.sp = SensorProcessTask.Get(self.process, self.vehicle_1)
        self.assertEqual(self.sp.status_last_run, PROCESS.OK)