# Example 1 (score: 0)
def send_to_influx(model_data, site, timestamp, to_exclude=None, client=None, send_status=True):
    """
    Utility function to send a model instance's field values to influx.

    Args:
        model_data: Django model instance whose fields are submitted.
        site: object providing ``id`` and ``site_name`` used as influx tags.
        timestamp: timestamp attached to the submitted measurements.
        to_exclude: optional iterable of field names stripped before sending
            (generally used with datetime measurements). Default is None
            (nothing excluded) instead of a shared mutable ``[]``.
        client: optional pre-built Influx client; a new ``Influx()`` is
            created when omitted.
        send_status: when True, a ``status`` field of 1.0 is added
            (will be used for RMC_Status).

    Any exception is caught and forwarded to ``handle_task_failure``.
    """
    # Pre-initialise so the failure message below can never raise a
    # NameError when model_to_dict() itself is what failed.
    model_data_dict = {}
    try:
        i = client if client else Influx()

        model_data_dict = model_to_dict(model_data)

        if to_exclude:
            # Remove any value we wish not to be submitted.
            # Missing keys are ignored (matches the intent of the old
            # commented-out membership check).
            for field_name in to_exclude:
                model_data_dict.pop(field_name, None)

        # Add our status, will be used for RMC_Status
        if send_status:
            model_data_dict['status'] = 1.0

        i.send_object_measurements(model_data_dict,
                                   timestamp=timestamp,
                                   tags={"site_id": site.id,
                                         "site_name": site.site_name})
    except Exception as e:
        message = "Error sending to influx with exception %s in datapoint %s" % (e, model_data_dict)
        handle_task_failure(message=message, name='send_to_influx', exception=e, data=model_data)
def create_test_data(site, start=None, end="now", interval=5, units='minutes', random=True, val=50, db='test_db'):
    """
    Create Data_Point rows (and mirror them into influx) for a site.

    Args:
        site: site the generated data points belong to.
        start, end, interval, units: passed to ``generate_date_array`` to
            build the list of timestamps.
        random: when True, each point gets randomised readings; otherwise
            every reading is the fixed value ``val``.
        val: fixed reading used when ``random`` is False.
        db: influx database name to write into.

    Returns:
        The number of data points created.
    """
    # TODO test weekly and monthly reports
    i = Influx(database=db)
    data_point_dates = generate_date_array(start=start, end=end, interval=interval, units=units)
    voltage_in = 220
    voltage_out = 220
    soc = val
    R1 = R2 = R3 = R4 = R5 = val
    print("creating %s test data points" % len(data_point_dates))
    # Bug fix: report the last timestamp itself, not a one-element list
    # slice ([len(x)-1:]) of it.
    print("between %s and %s " % (data_point_dates[0], data_point_dates[-1]))
    # Simulate Grid outage
    for time_val in data_point_dates:
        if random:
            soc = get_random_int()
            # R1 is 0 or mains voltage -> simulates the grid dropping out
            R1 = voltage_in * get_random_binary()
            R2 = get_random_interval(100, 500)
            R3 = get_random_interval(22, 28)
            R4 = get_random_interval(100, 500)
            R5 = get_random_interval(100, 500)

        dp = Data_Point.objects.create(site=site,
                                       soc=soc,
                                       battery_voltage=R3,
                                       time=time_val,
                                       AC_Voltage_in=R1,
                                       AC_Voltage_out=voltage_out,
                                       AC_input=R4,
                                       AC_output=R5,
                                       AC_output_absolute=R2,
                                       AC_Load_in=R2,
                                       AC_Load_out=R4,
                                       pv_production=R5)
        # Also send to influx; drop fields influx should not receive.
        dp_dict = model_to_dict(dp)
        dp_dict.pop('time')
        dp_dict.pop('inverter_state')
        dp_dict.pop('id')
        i.send_object_measurements(dp_dict, timestamp=time_val.isoformat(), tags={"site_name": site.site_name})
    return len(data_point_dates)
# Example 3 (score: 0)
def send_to_influx(model_data,
                   site,
                   timestamp,
                   to_exclude=None,
                   client=None,
                   send_status=True):
    """
    Utility function to send a model instance's field values to influx.

    Args:
        model_data: Django model instance whose fields are submitted.
        site: object providing ``id`` and ``site_name`` used as influx tags.
        timestamp: timestamp attached to the submitted measurements.
        to_exclude: optional iterable of field names stripped before sending
            (generally used with datetime measurements). Default is None
            (nothing excluded) instead of a shared mutable ``[]``.
        client: optional pre-built Influx client; a new ``Influx()`` is
            created when omitted.
        send_status: when True, a ``status`` field of 1.0 is added
            (will be used for RMC_Status).

    Any exception is caught and forwarded to ``handle_task_failure``.
    """
    # Pre-initialise so the failure message below can never raise a
    # NameError when model_to_dict() itself is what failed.
    model_data_dict = {}
    try:
        i = client if client else Influx()

        model_data_dict = model_to_dict(model_data)

        if to_exclude:
            # Remove any value we wish not to be submitted.
            # Missing keys are ignored (matches the intent of the old
            # commented-out membership check).
            for field_name in to_exclude:
                model_data_dict.pop(field_name, None)

        # Add our status, will be used for RMC_Status
        if send_status:
            model_data_dict['status'] = 1.0

        i.send_object_measurements(model_data_dict,
                                   timestamp=timestamp,
                                   tags={
                                       "site_id": site.id,
                                       "site_name": site.site_name
                                   })
    except Exception as e:
        message = "Error sending to influx with exception %s in datapoint %s" % (
            e, model_data_dict)
        handle_task_failure(message=message,
                            name='send_to_influx',
                            exception=e,
                            data=model_data)
# Example 4 (score: 0)
def _sample_or(data, key, index, fallback):
    """Return data[key][index] when present, else fallback().

    Replaces six copy-pasted bare ``try/except`` blocks. The except is
    narrowed to what indexing into ``data.get(key, [])`` can raise when a
    sample is missing or the value is not a sequence.
    """
    try:
        return data.get(key, [])[index]
    except (IndexError, TypeError):
        return fallback()


def create_test_data(site, start=None, end="now", interval=5, units='minutes', val=50, db='test_db', data=None):
    """
    Create Data_Point rows (and mirror them into influx) for a site.

    data = {'R1':[0,0,0,..],'R2':[0,0,123,12,...]...} will not generate
    random data but use the fixed data set; if ``val`` is not set, random
    data is generated wherever a sample is missing from ``data``.

    Args:
        site: site the generated data points belong to.
        start, end, interval, units: passed to ``generate_date_array``.
        val: fixed reading; when falsy, values come from ``data``/random.
        db: influx database name to write into.
        data: optional dict of per-register sample lists. Default is None
            (empty) instead of a shared mutable ``{}``.

    Returns:
        The number of data points created.
    """
    if data is None:
        data = {}
    i = Influx(database=db)
    data_point_dates = generate_date_array(start=start, end=end, interval=interval, units=units)
    voltage_in = 220
    voltage_out = 220
    soc = val
    R1 = R2 = R3 = R4 = R5 = val
    count = 0
    print("creating %s test data points" % len(data_point_dates))
    # Bug fix: report the last timestamp itself, not a one-element list
    # slice ([len(x)-1:]) of it.
    print("between %s and %s " % (data_point_dates[0], data_point_dates[-1]))
    # Simulate Grid outage
    for time_val in data_point_dates:
        if not val:
            soc = _sample_or(data, 'soc', count, get_random_int)
            # R1 is 0 or mains voltage -> simulates the grid dropping out
            R1 = _sample_or(data, 'R1', count, lambda: voltage_in * get_random_binary())
            R2 = _sample_or(data, 'R2', count, lambda: get_random_interval(100, 500))
            R3 = _sample_or(data, 'R3', count, lambda: get_random_interval(22, 28))
            R4 = _sample_or(data, 'R4', count, lambda: get_random_interval(100, 500))
            R5 = _sample_or(data, 'R5', count, lambda: get_random_interval(100, 500))

        dp = Data_Point.objects.create(site=site,
                                       soc=soc,
                                       battery_voltage=R3,
                                       time=time_val,
                                       AC_Voltage_in=R1,
                                       AC_Voltage_out=voltage_out,
                                       AC_input=R4,
                                       AC_output=R5,
                                       AC_output_absolute=R2,
                                       AC_Load_in=R2,
                                       AC_Load_out=R4,
                                       pv_production=R5)
        # Also send to influx; drop fields influx should not receive.
        dp_dict = model_to_dict(dp)
        dp_dict.pop('time')
        dp_dict.pop('inverter_state')
        dp_dict.pop('id')
        i.send_object_measurements(dp_dict, timestamp=time_val.isoformat(), tags={"site_name": site.site_name})
        count = count + 1
        # Count number of outages

    return len(data_point_dates)
class KapacitorTestCase(TestCase):
    """Integration tests for Kapacitor template/task management.

    Uses a throwaway influx database plus a test site so rendered alert
    tasks have real dbrps to bind to.
    """

    def setUp(self):
        # Need this to create a Site
        self.VRM = VRM_Account.objects.create(vrm_user_id='*****@*****.**',
                                              vrm_password="******")

        # Setup Influx
        self._influx_db_name = 'test_db'
        self.i = Influx(database=self._influx_db_name)

        try:
            self.i.create_database(self._influx_db_name)
            # Generate random data points for 24h
        except Exception:
            # Database left over from a previous run: recreate it so the
            # test starts from a clean slate.
            self.i.delete_database(self._influx_db_name)
            sleep(1)
            self.i.create_database(self._influx_db_name)

        # Setup Kapacitor
        self.kap = Kapacitor()
        self.template_id = 'test_template'
        self.task_id = 'test_task'
        self.dj_template_name = 'alert_template'

        self.dbrps = [{'db': self._influx_db_name, 'rp': 'autogen'}]

        self.location = Geoposition(52.5, 24.3)
        dt = timezone.make_aware(timezone.datetime(2015, 12, 11, 22, 0))

        self.site = Sesh_Site.objects.create(site_name=u"Test_aggregate",
                                             comission_date=dt,
                                             location_city=u"kigali",
                                             location_country=u"rwanda",
                                             vrm_account=self.VRM,
                                             installed_kw=123.0,
                                             position=self.location,
                                             system_voltage=12,
                                             number_of_panels=12,
                                             vrm_site_id=213,
                                             battery_bank_capacity=12321,
                                             has_genset=True,
                                             has_grid=True)

        # create test user
        self.test_user = Sesh_User.objects.create_user("john doe",
                                                       "*****@*****.**",
                                                       "asdasd12345")
        # assign a user to the sites
        assign_perm("view_Sesh_Site", self.test_user, self.site)

    def tearDown(self):
        # Best-effort cleanup of everything setUp/tests may have created.
        self.i.delete_database(self._influx_db_name)
        self.kap.delete_template(self.template_id)
        self.kap.delete_task(self.task_id)

    @override_settings(INFLUX_DB='test_db')
    def test_template_creation(self):
        """
        Test creating, fetching, listing, updating and deleting a template
        in kapacitor.
        """

        temp_script = """
        // Which measurement to consume
        var measurement string
        // Optional where filter
        var where_filter = lambda: TRUE
        // Optional list of group by dimensions
        var groups = [*]
        // Which field to process
        var field string
        // Warning criteria, has access to 'mean' field
        var warn lambda
        // Critical criteria, has access to 'mean' field
        var crit lambda
        // How much data to window
        var window = 5m
        // The slack channel for alerts
        var slack_channel = '#alerts'

        stream
            |from()
                .measurement(measurement)
                .where(where_filter)
                .groupBy(groups)
            |window()
                .period(window)
                .every(window)
            |mean(field)
            |alert()
                 .warn(warn)
                 .crit(crit)
                 .slack()
                 .channel(slack_channel)

        """
        temp_id = self.template_id
        temp_type = 'stream'

        # Create template (has_key is Python-2-only; assertIn works on both)
        temp = self.kap.create_template(temp_id, temp_type, temp_script)
        self.assertIn('vars', temp)

        # Verify template creation
        temp_res = self.kap.get_template(temp_id)
        self.assertIn('vars', temp_res)

        # List template
        temp_res = self.kap.list_templates()
        self.assertIn('templates', temp_res)

        # Update Template

        temp_script = """
        // Which measurement to consume
        var measurement = 'cpu'
        // Optional where filter
        var where_filter = lambda: TRUE
        // Optional list of group by dimensions
        var groups = [*]
        // Which field to process
        var field string
        // Warning criteria, has access to 'mean' field
        var warn lambda
        // Critical criteria, has access to 'mean' field
        var crit lambda
        // How much data to window
        var window = 5m
        // The slack channel for alerts
        var slack_channel = '#alerts'

        stream
            |from()
                .measurement(measurement)
                .where(where_filter)
                .groupBy(groups)
            |window()
                .period(window)
                .every(window)
            |mean(field)
            |alert()
                 .warn(warn)
                 .crit(crit)
                 .slack()
                 .channel(slack_channel)

        """
        temp_res = self.kap.update_template(temp_id, temp_script)

        # Delete template
        self.kap.delete_template(self.template_id)

    def test_task_creation(self):
        """
        Create a task and check if it actually causes an alert to trigger
        """

        temp_script = """
                    stream
                        |from()
                            .measurement('cpu')
                        |alert()
                            .crit(lambda: "value" <  70)
                            .log('/tmp/alerts.log')
                        """

        task_id = self.task_id

        # Create task
        temp = self.kap.create_task(task_id,
                                    dbrps=self.dbrps,
                                    script=temp_script,
                                    task_type='stream')
        self.assertEqual(temp['status'], 'enabled')
        # Give kapacitor time to start consuming the stream
        sleep(20)

        for i in reversed(range(0, 5)):
            sleep(1)
            dp_dict = {'cpu': i}
            self.i.send_object_measurements(dp_dict,
                                            tags={"site_name": "test_site"},
                                            database=self._influx_db_name)
        temp = self.kap.get_task(task_id)

        self.assertGreater(
            temp['stats']['node-stats']['alert2']['alerts_triggered'], 0)

    def test_task_dj_template(self):
        """
        test task creation with django templates
        """

        template = get_template('seshdash/kapacitor_tasks/%s.tick' %
                                self.dj_template_name)

        alert_id = self.task_id
        alert_info = {
            'field': 'cpu',
            'where_filter_lambda': 'lambda: TRUE',
            'error_lambda': 'lambda: \"value\" < 30',
            'time_window': '5m',
            'slack_channel': '#alerts'
        }

        rendered_alert = template.render(alert_info)
        result = self.kap.create_task(alert_id,
                                      dbrps=self.dbrps,
                                      script=rendered_alert)
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(result['status'], 'enabled')
class KapacitorTestCase(TestCase):
    """Integration tests for Kapacitor template/task management.

    Uses a throwaway influx database plus a test site so rendered alert
    tasks have real dbrps to bind to.
    """

    def setUp(self):
        # Need this to create a Site
        self.VRM = VRM_Account.objects.create(vrm_user_id='*****@*****.**', vrm_password="******")

        # Setup Influx
        self._influx_db_name = 'test_db'
        self.i = Influx(database=self._influx_db_name)

        try:
            self.i.create_database(self._influx_db_name)
            # Generate random data points for 24h
        except Exception:
            # Database left over from a previous run: recreate it so the
            # test starts from a clean slate.
            self.i.delete_database(self._influx_db_name)
            sleep(1)
            self.i.create_database(self._influx_db_name)

        # Setup Kapacitor
        self.kap = Kapacitor()
        self.template_id = 'test_template'
        self.task_id = 'test_task'
        self.dj_template_name = 'alert_template'

        self.dbrps = [{'db': self._influx_db_name, 'rp': 'autogen'}]

        self.location = Geoposition(52.5, 24.3)
        dt = timezone.make_aware(timezone.datetime(2015, 12, 11, 22, 0))

        self.site = Sesh_Site.objects.create(site_name=u"Test_aggregate",
                                             comission_date=dt,
                                             location_city=u"kigali",
                                             location_country=u"rwanda",
                                             vrm_account=self.VRM,
                                             installed_kw=123.0,
                                             position=self.location,
                                             system_voltage=12,
                                             number_of_panels=12,
                                             vrm_site_id=213,
                                             battery_bank_capacity=12321,
                                             has_genset=True,
                                             has_grid=True)

        # create test user
        self.test_user = Sesh_User.objects.create_user("john doe", "*****@*****.**", "asdasd12345")
        # assign a user to the sites
        assign_perm("view_Sesh_Site", self.test_user, self.site)

    def tearDown(self):
        # Best-effort cleanup of everything setUp/tests may have created.
        self.i.delete_database(self._influx_db_name)
        self.kap.delete_template(self.template_id)
        self.kap.delete_task(self.task_id)

    @override_settings(INFLUX_DB='test_db')
    def test_template_creation(self):
        """
        Test creating, fetching, listing, updating and deleting a template
        in kapacitor.
        """

        temp_script = """
        // Which measurement to consume
        var measurement string
        // Optional where filter
        var where_filter = lambda: TRUE
        // Optional list of group by dimensions
        var groups = [*]
        // Which field to process
        var field string
        // Warning criteria, has access to 'mean' field
        var warn lambda
        // Critical criteria, has access to 'mean' field
        var crit lambda
        // How much data to window
        var window = 5m
        // The slack channel for alerts
        var slack_channel = '#alerts'

        stream
            |from()
                .measurement(measurement)
                .where(where_filter)
                .groupBy(groups)
            |window()
                .period(window)
                .every(window)
            |mean(field)
            |alert()
                 .warn(warn)
                 .crit(crit)
                 .slack()
                 .channel(slack_channel)

        """
        temp_id = self.template_id
        temp_type = 'stream'

        # Create template (has_key is Python-2-only; assertIn works on both)
        temp = self.kap.create_template(temp_id, temp_type, temp_script)
        self.assertIn('vars', temp)

        # Verify template creation
        temp_res = self.kap.get_template(temp_id)
        self.assertIn('vars', temp_res)

        # List template
        temp_res = self.kap.list_templates()
        self.assertIn('templates', temp_res)

        # Update Template

        temp_script = """
        // Which measurement to consume
        var measurement = 'cpu'
        // Optional where filter
        var where_filter = lambda: TRUE
        // Optional list of group by dimensions
        var groups = [*]
        // Which field to process
        var field string
        // Warning criteria, has access to 'mean' field
        var warn lambda
        // Critical criteria, has access to 'mean' field
        var crit lambda
        // How much data to window
        var window = 5m
        // The slack channel for alerts
        var slack_channel = '#alerts'

        stream
            |from()
                .measurement(measurement)
                .where(where_filter)
                .groupBy(groups)
            |window()
                .period(window)
                .every(window)
            |mean(field)
            |alert()
                 .warn(warn)
                 .crit(crit)
                 .slack()
                 .channel(slack_channel)

        """
        temp_res = self.kap.update_template(temp_id, temp_script)

        # Delete template
        self.kap.delete_template(self.template_id)

    def test_task_creation(self):
        """
        Create a task and check if it actually causes an alert to trigger
        """

        temp_script = """
                    stream
                        |from()
                            .measurement('cpu')
                        |alert()
                            .crit(lambda: "value" <  70)
                            .log('/tmp/alerts.log')
                        """

        task_id = self.task_id

        # Create task
        temp = self.kap.create_task(task_id, dbrps=self.dbrps, script=temp_script, task_type='stream')
        self.assertEqual(temp['status'], 'enabled')
        # Give kapacitor time to start consuming the stream
        sleep(20)

        for i in reversed(range(0, 5)):
            sleep(1)
            dp_dict = {'cpu': i}
            self.i.send_object_measurements(dp_dict, tags={"site_name": "test_site"}, database=self._influx_db_name)
        temp = self.kap.get_task(task_id)

        self.assertGreater(temp['stats']['node-stats']['alert2']['alerts_triggered'], 0)

    def test_task_dj_template(self):
        """
        test task creation with django templates
        """

        template = get_template('seshdash/kapacitor_tasks/%s.tick' % self.dj_template_name)

        alert_id = self.task_id
        alert_info = {
            'field': 'cpu',
            'where_filter_lambda': 'lambda: TRUE',
            'error_lambda': 'lambda: \"value\" < 30',
            'time_window': '5m',
            'slack_channel': '#alerts'
        }

        rendered_alert = template.render(alert_info)
        result = self.kap.create_task(alert_id, dbrps=self.dbrps, script=rendered_alert)
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(result['status'], 'enabled')