def get_measurement_range_bucket(self, measurement_name, start, end, group_by, site=None, database=None):
    """
    Return mean-aggregated measurement points for a time range, bucketed.

    @param measurement_name - name of the measurement
    @param start - datetime lower bound (exclusive)
    @param end - datetime upper bound (inclusive)
    @param group_by - bucket resolution passed to GROUP BY time(...)
    @param site - optional Sesh_Site instance used to filter by site_name
    @param database - influx database to use (defaults to self.db)
    """
    db = database if database else self.db

    # Build the WHERE conditions first: the site filter must sit inside
    # the WHERE clause *before* GROUP BY. The previous version appended
    # " and site_name=..." after GROUP BY, which is invalid InfluxQL.
    # This assumes the precision of influx db is set to nanoseconds.
    conditions = [
        'time > %s' % epoch_s_to_ns(get_epoch_from_datetime(start)),
        'time <= %s' % epoch_s_to_ns(get_epoch_from_datetime(end)),
    ]
    if site:
        # NOTE(review): site_name is interpolated directly into the
        # query string -- injection-prone if it is ever untrusted.
        conditions.append("site_name='%s'" % site.site_name)

    query = 'SELECT mean("value") FROM %s WHERE %s GROUP BY time(%s)' % (
        measurement_name, ' AND '.join(conditions), group_by)

    results = list(self._influx_client.query(query, database=db).get_points())
    return results
    def get_measurement_range(self, measurement_name, start, end, site=None, database=None):
        """
        Fetch every point of a measurement between two datetimes.

        @param measurement_name - name of the measurement
        @param start - datetime lower bound (exclusive)
        @param end - datetime upper bound (inclusive)
        @param site - optional Sesh_Site Model instance to filter on
        @param database - Influx db to use (default settings.influx_db)
        """
        target_db = database or self.db

        # Timestamps go in as nanoseconds; this assumes the precision of
        # the influx db is set to nanoseconds.
        start_ns = epoch_s_to_ns(get_epoch_from_datetime(start))
        end_ns = epoch_s_to_ns(get_epoch_from_datetime(end))

        query = 'SELECT * FROM {measurement_name} WHERE time > {start} and time <= {end}'.format(
            measurement_name=measurement_name, start=start_ns, end=end_ns)

        if site:
            query += " and site_name='%s'" % site.site_name

        return list(self._influx_client.query(query, database=target_db).get_points())
Example #3
0
def create_alert_instance(site, rule, data_point):
    """
    Create and persist a Sesh_Alert for a triggered rule.

    @param site - Sesh_Site the alert belongs to
    @param rule - the alert rule that fired (mysql- or influx-backed)
    @param data_point - the offending point; a model instance for mysql
           rules, a dict with a 'time' key for influx rules
    Raises ValueError for a rule that is neither mysql- nor influx-backed
    (the previous version hit a NameError on 'alert_obj' in that case).
    """
    if is_mysql_rule(rule):
        alert_obj = Sesh_Alert.objects.create(
            site=site,
            alert=rule,
            date=timezone.now(),
            isSilence=False,
            emailSent=False,
            slackSent=False,
            smsSent=False,
            point_model=type(data_point).__name__,
            point_id=str(data_point.id))

        # Set data point to point to alert
        data_point.target_alert = alert_obj
        data_point.save()
    elif is_influx_rule(rule):
        alert_obj = Sesh_Alert.objects.create(
            site=site,
            alert=rule,
            date=timezone.now(),
            isSilence=False,
            emailSent=False,
            slackSent=False,
            smsSent=False,
            point_model='influx',
            point_id=get_epoch_from_datetime(parser.parse(data_point['time'])))
    else:
        raise ValueError("Unknown rule type for rule %s" % rule)

    # objects.create() already persists the row, so the extra save()
    # calls the previous version made were redundant.
    return alert_obj
def create_alert_instance(site, rule, data_point):
    """
    Create and persist a Sesh_Alert for a triggered rule.

    @param site - Sesh_Site the alert belongs to
    @param rule - the alert rule that fired (mysql- or influx-backed)
    @param data_point - the offending point; a model instance for mysql
           rules, a dict with a 'time' key for influx rules
    Raises ValueError when the rule matches neither backend; previously
    that path raised NameError because 'alert_obj' was never bound.
    """
    if is_mysql_rule(rule):
        alert_obj = Sesh_Alert.objects.create(
            site=site,
            alert=rule,
            date=timezone.now(),
            isSilence=False,
            emailSent=False,
            slackSent=False,
            smsSent=False,
            point_model=type(data_point).__name__,
            point_id=str(data_point.id))

        # Link the data point back to the alert that it raised.
        data_point.target_alert = alert_obj
        data_point.save()
    elif is_influx_rule(rule):
        alert_obj = Sesh_Alert.objects.create(
            site=site,
            alert=rule,
            date=timezone.now(),
            isSilence=False,
            emailSent=False,
            slackSent=False,
            smsSent=False,
            point_model='influx',
            point_id=get_epoch_from_datetime(parser.parse(data_point['time'])))
    else:
        raise ValueError("Unknown rule type for rule %s" % rule)

    # objects.create() already saves the row; no additional save() needed.
    return alert_obj
def download_vrm_historical_data():
    """
    Kick off the one-time historical download for every eligible site.
    """
    # NOTE(review): the filter field 'vrm_site_is' looks like a typo for
    # 'vrm_site_id' -- confirm against the Sesh_Site model fields.
    eligible_sites = Sesh_Site.objects.filter(vrm_site_is__isnull=True)
    for site in eligible_sites:
        if not site.vrm_site_id:
            continue
        start_epoch = time_utils.get_epoch_from_datetime(site.comission_date)
        get_historical_BoM.delay(site.pk, start_epoch)
        run_aggregate_on_historical(site.pk)
def _download_data(request):
    """
    Trigger download of vrm upon loading site data
    """
    for site in _get_user_sites(request):
        # Only sites explicitly flagged for import are downloaded.
        if not site.import_data:
            continue
        comission_epoch = time_utils.get_epoch_from_datetime(site.comission_date)
        get_historical_BoM.delay(site.pk, comission_epoch)
def download_vrm_historical_data():
    """
    Helper function to initiate one time download
    """
    # NOTE(review): 'vrm_site_is' in this filter may be a typo for
    # 'vrm_site_id' -- verify against the Sesh_Site model.
    for site in Sesh_Site.objects.filter(vrm_site_is__isnull=True):
        if not site.vrm_site_id:
            continue
        get_historical_BoM.delay(
            site.pk,
            time_utils.get_epoch_from_datetime(site.comission_date))
        run_aggregate_on_historical(site.pk)
Example #8
0
    def get_measurement_range_bucket(self,
                                     measurement_name,
                                     start,
                                     end,
                                     group_by,
                                     site=None,
                                     database=None):
        """
        Return mean-aggregated measurement points for a range, bucketed.

        @param measurement_name - name of the measurement
        @param start - datetime lower bound (exclusive)
        @param end - datetime upper bound (inclusive)
        @param group_by - bucket resolution for GROUP BY time(...)
        @param site - optional Sesh_Site instance to filter by site_name
        @param database - influx database to use (defaults to self.db)
        """
        db = database if database else self.db

        # All WHERE conditions are assembled before GROUP BY is added.
        # The previous version appended the site filter after GROUP BY,
        # which is invalid InfluxQL and broke site-filtered queries.
        # This assumes the precision of influx db is set to nanoseconds.
        conditions = [
            'time > %s' % epoch_s_to_ns(get_epoch_from_datetime(start)),
            'time <= %s' % epoch_s_to_ns(get_epoch_from_datetime(end)),
        ]
        if site:
            # NOTE(review): direct string interpolation -- injection-prone
            # if site_name can ever contain untrusted input.
            conditions.append("site_name='%s'" % site.site_name)

        query = 'SELECT mean("value") FROM %s WHERE %s GROUP BY time(%s)' % (
            measurement_name, ' AND '.join(conditions), group_by)

        results = list(
            self._influx_client.query(query, database=db).get_points())
        return results
Example #9
0
def get_historical_BoM(date_range=5):
    """
    Get Historical Data from VRM to backfill any days.

    @param date_range - number of days back from now to fetch (default 5)
    """
    datetime_now = datetime.now()
    datetime_start = datetime_now - timedelta(date_range)

    datetime_now_epoch = time_utils.get_epoch_from_datetime(datetime_now)
    datetime_start_epoch = time_utils.get_epoch_from_datetime(datetime_start)

    count = 0
    for site in Sesh_Site.objects.all():
        v_client = VictronAPI(site.vrm_user_id, site.vrm_password)
        vh_client = VictronHistoricalAPI(site.vrm_user_id, site.vrm_password)
        # The previous version assigned site.vrm_site_id to site_id here,
        # but the loop below immediately shadowed it -- dead code removed.
        for site_id in v_client.SYSTEMS_IDS:
            # site_id is a tuple; element 0 is the actual system id
            data = vh_client.get_data(site_id[0], datetime_start_epoch, datetime_now_epoch)
            for row in data:
                data_point = BoM_Data_Point(
                    site=site,
                    time=row['Date Time'],
                    soc=row['Battery State of Charge (System)'],
                    battery_voltage=row['Battery voltage'],
                    AC_input=row['Input power 1'],
                    AC_output=row['Output power 1'],
                    AC_Load_in=row['Input current phase 1'],
                    AC_Load_out=row['Output current phase 1'],
                    inverter_state=row['VE.Bus Error'],
                    # TODO these need to be activated
                    genset_state="off",
                    relay_state="off",
                )
                data_point.save()
                count = count + 1
        # print parenthesized so the function parses on both py2 and py3;
        # stays inside the loop to preserve the per-site running total.
        print("saved %s BoM data points" % count)
Example #10
0
    def get_measurement_range(self,
                              measurement_name,
                              start,
                              end,
                              site=None,
                              database=None):
        """
        Fetch every point of a measurement between two datetimes.

        @param measurement_name - name of the measurement
        @param start - datetime lower bound (exclusive)
        @param end - datetime upper bound (inclusive)
        @param site - optional Sesh_Site Model instance to filter on
        @param database - Influx db to use (default settings.influx_db)
        """
        target_db = database if database else self.db

        # Timestamps go in as nanoseconds -- this assumes the precision
        # of the influx db is set to nanoseconds.
        bounds = {
            'measurement_name': measurement_name,
            'start': epoch_s_to_ns(get_epoch_from_datetime(start)),
            'end': epoch_s_to_ns(get_epoch_from_datetime(end))
        }
        query = 'SELECT * FROM {measurement_name} WHERE time > {start} and time <= {end}'.format(**bounds)

        if site:
            query += " and site_name='%s'" % site.site_name

        points = self._influx_client.query(query, database=target_db).get_points()
        return list(points)
Example #11
0
def get_enphase_daily_stats(date=None):
    """
    Get enphase daily data or get aggregate data.

    @param date - optional datetime; when given, fetch the 24h window
           ending at this date instead of at now()
    """
    # TODO time interval shouldn't be static; this needs to be
    # calculated based on the data returned.
    calc_range = timedelta(minutes=15)

    sites = Sesh_Site.objects.all()

    # Window defaults to the last 24 hours ending now.
    datetime_now = datetime.now()
    if date:
        datetime_now = date
    datetime_start = datetime_now - timedelta(days=1)
    system_results = {}

    # turn the start into epoch seconds for the API call
    datetime_start_epoch = time_utils.get_epoch_from_datetime(datetime_start)
    for site in sites:
        en_client = EnphaseAPI(settings.ENPHASE_KEY, site.enphase_ID)
        system_id = site.enphase_site_id
        # prints parenthesized so the function parses on py2 and py3
        print("gettig stats for %s" % system_id)
        system_results = en_client.get_stats(system_id, start=datetime_start_epoch)

        # TODO handle exception of empty result
        print(len(system_results['intervals']))
        for interval in system_results['intervals']:
            # store the data
            print(interval)
            end_time_str = time_utils.epoch_to_datetime(interval['end_at'])
            system_pv_data = PV_Production_Point(
                site=site,
                time=end_time_str,
                wh_production=interval['enwh'],
                w_production=interval['powr'],
                data_duration=calc_range,
            )
            system_pv_data.save()
    # NOTE(review): 'site' is the loop variable, so this raises NameError
    # when there are no sites -- confirm whether that can happen.
    return "updated enphase data %s" % site
Example #12
0
def prep_time_series(data, field_1_y, field_2_date, field_2_y=None):
    """
    Create time series data from model data to use for graphing.

    @param data - iterable of model instances
    @param field_1_y - attribute name for the primary y series
    @param field_2_date - attribute name holding each point's datetime
    @param field_2_y - optional attribute name for a second y series
    Returns (y_data, x_data), or (y_data, y2_data, x_data) when
    field_2_y is given.
    """
    y_data = []
    y2_data = []
    x_data = []

    for point in data:
        y_data.append(getattr(point, field_1_y))
        if field_2_y:
            # Bug fix: the second series was previously appended to
            # y_data, leaving y2_data always empty in the return value
            # and interleaving both series in y_data.
            y2_data.append(getattr(point, field_2_y))
        date = getattr(point, field_2_date)
        # javascript expects this in milliseconds, so multiply by 1000
        time_epoch = int(time_utils.get_epoch_from_datetime(date)) * 1000
        x_data.append(time_epoch)
    if field_2_y:
        return y_data, y2_data, x_data
    return y_data, x_data
def get_enphase_daily_stats(date=None):
    """
    Fetch enphase production intervals for the last 24 hours (optionally
    ending at *date*) and store them as PV_Production_Point rows.
    """
    # TODO time interval shouldn't be static; this needs to be
    # calculated based on data returned
    calc_range = timedelta(minutes=15)

    sites = Sesh_Site.objects.all()

    window_end = datetime.now()
    if date:
        window_end = date
    window_start = window_end - timedelta(days=1)
    system_results = {}

    # turn the window start into epoch seconds for the API call
    start_epoch = time_utils.get_epoch_from_datetime(window_start)
    for site in sites:
        en_client = EnphaseAPI(settings.ENPHASE_KEY, site.enphase_ID)
        # TODO handle exception of empty result
        system_results = en_client.get_stats(site.enphase_site_id,
                                             start=start_epoch)
        for interval in system_results['intervals']:
            pv_point = PV_Production_Point(
                site=site,
                time=time_utils.epoch_to_datetime(interval['end_at']),
                wh_production=interval['enwh'],
                w_production=interval['powr'],
                data_duration=calc_range)
            pv_point.save()
    return "updated enphase data %s" % site
def graphs(request): 
    """
    Returns json, containing data that is used in data analysis graphs.

    GET params:
      choice[]            - measurement names to query
      active_site_id      - id of the site being viewed
      start_time/end_time - query range bounds (default: last week)
      resolution          - influx GROUP BY bucket (default '1h')
    Each result entry is {'measurement', 'si_unit', 'data': [[epoch, value], ...]}.
    """
    results = []

    # Getting values from the request. (The unused 'time' parameter and
    # the time_delta/time_bucket lookup tables from the previous version
    # were dead code -- their only reads were commented out.)
    choices = request.GET.getlist('choice[]')
    active_id = request.GET.get('active_site_id', None)
    # NOTE(review): when supplied by the client these are strings, not
    # datetimes -- confirm downstream helpers accept both forms.
    start_time = request.GET.get('start_time', datetime.now() - timedelta(weeks=1))
    end_time = request.GET.get('end_time', datetime.now())
    resolution = request.GET.get('resolution', '1h')

    current_site = Sesh_Site.objects.filter(id=active_id).first()
    if (not current_site) or current_site.organisation != request.user.organisation:
        return HttpResponseBadRequest("Invalid site id, No site was found for the given site id")

    # One influx client serves every measurement query (hoisted out of
    # the loop; it was rebuilt per choice before).
    client = Influx()

    for choice in choices:
        choice_dict = {
            'measurement': choice,
            'si_unit': get_measurement_unit(choice),
        }

        query_results = client.get_measurement_range_bucket(
            choice, start_time, end_time, group_by=resolution)

        choice_data = []
        for result in query_results:
            result_time = get_epoch_from_datetime(parser.parse(result['time']))
            # Empty buckets come back with mean=None; report them as 0.
            mean = result['mean']
            result_value = round(mean, 2) if mean is not None else 0
            choice_data.append([result_time, result_value])

        choice_dict['data'] = choice_data
        results.append(choice_dict)

    return HttpResponse(json.dumps(results))