Example no. 1
    def test_get_all_meter_ids(self):
        """ Unit tests for function get_all_meter_ids(). """

        result = get_all_meter_ids(db.session)

        # Check return types
        self.assertIsInstance(result, list)
        for meter_id in result:
            self.assertIsInstance(meter_id, str)
            self.assertTrue(meter_id.isalnum())

        # Check return values
        self.assertEqual(result, ALL_METER_IDS)
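
For context, a minimal sketch of what the function under test might look like, assuming a SQLAlchemy User model with a meter_id column (model name, column name and module path are assumptions, not taken from the examples):

    from models.user import User  # assumed module path

    def get_all_meter_ids(session):
        """ Sketch only: return the meter id of every user as a list of
        strings, matching what the unit test above expects. """
        return [row.meter_id for row in session.query(User.meter_id).all()]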
Example no. 2
    def calculate_average_power(self, session):
        # pylint: disable=global-statement
        global end_next_interval
        date_interval = (end_next_interval -
                         timedelta(minutes=15)).strftime("%Y-%m-%d")
        hour_interval = (end_next_interval -
                         timedelta(minutes=15)).strftime("%H")

        for meter_id in get_all_meter_ids(session):
            average_power_key = 'average_power_' + meter_id + '_' + date_interval
            power_sum = 0
            divider = 0
            for key in get_keys_date_hour_prefix(self.redis_client, meter_id,
                                                 date_interval, hour_interval):

                reading_date, data = get_entry_date(self.redis_client,
                                                    meter_id, key, 'reading')

                if reading_date is None or data is None:
                    continue

                reading_timestamp = reading_date.timestamp()

                if ((end_next_interval - timedelta(minutes=15)).timestamp() <
                        reading_timestamp <= end_next_interval.timestamp()):
                    power_sum += data.get('values').get('power')
                    divider += 1

            if divider != 0:
                average = power_sum / divider
            else:
                average = 0
                message = f"No readings available for {meter_id} between " \
                          f"{(end_next_interval - timedelta(minutes=15))} and {end_next_interval}"
                logger.info(message)

            if len(self.redis_client.keys(average_power_key)) == 0:
                data = {
                    end_next_interval.strftime("%Y-%m-%d %H:%M:%S"): average
                }

            else:
                data = json.loads(self.redis_client.get(average_power_key))
                data[end_next_interval.strftime("%Y-%m-%d %H:%M:%S")] = average

            self.redis_client.set(average_power_key, json.dumps(data))
            self.redis_client.expire(average_power_key,
                                     int(timedelta(days=3).total_seconds()))
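
The averages are stored as one JSON object per meter and day under the key 'average_power_<meter_id>_<date>'. A minimal sketch of reading such an entry back, assuming only the key layout used above (the helper name is hypothetical):

    import json

    def read_average_power(redis_client, meter_id, date_str):
        """ Hypothetical helper: return the stored 15-minute average power
        values for one meter and one day ('%Y-%m-%d') as a dict mapping
        interval-end timestamps to averages, or an empty dict if nothing
        was stored. """
        raw = redis_client.get('average_power_' + meter_id + '_' + date_str)
        return json.loads(raw) if raw is not None else {}

The returned keys are the '%Y-%m-%d %H:%M:%S' interval ends written by calculate_average_power() above.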
Example no. 3
    def write_readings(self, session, end):
        """ Get all readings for all meters from one the beginning of the BAFA support
        year until now with one-week interval (this is the finest granularity we get for one
        year back in time, cf. https://api.discovergy.com/docs/) and write them
        to the redis database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        :param int end: end of interval in the format required by the
        discovergy API
        """

        for meter_id in get_all_meter_ids(session):

            try:
                readings = self.d.get_readings(meter_id,
                                               calc_support_year_start(), end,
                                               'one_week')

                if readings == []:
                    message = 'No readings available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                for reading in readings:
                    adjusted_reading = check_and_nullify_power_value(
                        reading, meter_id)
                    timestamp = adjusted_reading['time']

                    # Convert unix epoch time in milliseconds to UTC format
                    new_timestamp = datetime.utcfromtimestamp(timestamp/1000).\
                        strftime('%Y-%m-%d %H:%M:%S')

                    key = meter_id + '_' + str(new_timestamp)

                    # Write adjusted reading to redis database as key-value-pair
                    # The unique key consists of the meter id (16 chars), the
                    # separator '_' and the UTC timestamp (19 chars)
                    data = dict(type='reading',
                                values=adjusted_reading['values'])
                    self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
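
Each reading is stored under the key '<meter_id>_<UTC timestamp>' with a JSON body of the form {'type': 'reading', 'values': ...}. A minimal sketch of fetching one entry back, again assuming only the key layout above (the helper name is hypothetical):

    import json

    def read_reading(redis_client, meter_id, utc_timestamp):
        """ Hypothetical helper: return the 'values' dict of one stored
        reading, keyed by '<meter_id>_<YYYY-mm-dd HH:MM:SS>', or None if
        the key does not exist. """
        raw = redis_client.get(meter_id + '_' + utc_timestamp)
        if raw is None:
            return None
        return json.loads(raw)['values']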
Example no. 4
    def write_energy_consumption(self, session):
        """ Get readings for all meters at start and end dates of
        previous and ongoing terms and write them to the redis database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        for meter_id in get_all_meter_ids(session):

            try:
                for timestamp in calc_term_boundaries():
                    end_of_day = round(
                        (datetime.utcfromtimestamp(timestamp / 1000) +
                         timedelta(hours=24, minutes=59,
                                   seconds=59)).timestamp() * 1000)

                    readings = self.d.get_readings(meter_id, timestamp,
                                                   end_of_day, 'one_hour')

                    if readings == []:
                        message = 'No readings available for metering id {}'.format(
                            meter_id)
                        logger.info(message)
                        continue

                    for reading in readings:
                        adjusted_reading = check_and_nullify_power_value(
                            reading, meter_id)
                        timestamp = adjusted_reading['time']

                        # Convert unix epoch time in milliseconds to UTC format
                        new_timestamp = datetime.utcfromtimestamp(
                            timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S')

                        key = meter_id + '_' + str(new_timestamp)

                        # Write adjusted reading to redis database as key-value-pair
                        data = dict(type='reading',
                                    values=adjusted_reading['values'])
                        self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
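
The interval end used above is derived from each term boundary, which calc_term_boundaries() apparently returns in epoch milliseconds (the code divides by 1000). A sketch of that computation in isolation, mirroring the lines above:

    from datetime import datetime, timedelta

    def end_of_day_ms(start_ms):
        """ Sketch of the interval-end computation above: turn a term boundary
        in epoch milliseconds into the matching end timestamp, also in epoch
        milliseconds, using the same offset of 24 hours, 59 minutes and
        59 seconds as write_energy_consumption(). """
        start = datetime.utcfromtimestamp(start_ms / 1000)
        end = start + timedelta(hours=24, minutes=59, seconds=59)
        # Note: .timestamp() interprets the naive datetime in local time,
        # exactly as the original code does.
        return round(end.timestamp() * 1000)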
Example no. 5
    def write_last_disaggregations(self, session):
        """ Get the last disaggregation values for all meters and write them to the redis
        database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        two_days_back = calc_two_days_back()
        for meter_id in get_all_meter_ids(session):

            try:
                disaggregation = self.d.get_disaggregation(
                    meter_id, two_days_back, calc_end())

                if disaggregation in ({}, []):
                    message = 'No disaggregation available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                timestamps = sorted(disaggregation.keys())
                if len(timestamps) > 0:
                    timestamp = timestamps[-1]

                    # Convert unix epoch time in milliseconds to UTC format
                    new_timestamp = datetime.utcfromtimestamp(int(timestamp)/1000).\
                        strftime('%Y-%m-%d %H:%M:%S')

                    key = meter_id + '_' + str(new_timestamp)

                    # Write disaggregation to redis database as key-value-pair
                    # The unique key consists of meter id (16 chars), separator '_' and UTC
                    # timestamp
                    data = dict(type='disaggregation',
                                values=disaggregation[timestamp])

                    self.redis_client.set(meter_id + '_last_disaggregation',
                                          json.dumps(data))
                    self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
Example no. 6
    def write_disaggregations(self, session, end):
        """ Get all disaggregation values for all meters from one week back
        until now. This is the earliest data we get, otherwise you'll end up
        with a '400 Bad Request: Duration of the data cannot be larger than 1
        week. Please try for a smaller duration.' If one week back lies before
        the current BAFA support year start, start with that value instead.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        :param int end: end of interval in the format required by the
        discovergy API
        """

        for meter_id in get_all_meter_ids(session):

            try:

                disaggregation = self.d.get_disaggregation(
                    meter_id, calc_support_week_start(), end)

                if disaggregation == {}:
                    message = 'No disaggregation available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                for timestamp in disaggregation:

                    # Convert unix epoch time in milliseconds to UTC format
                    new_timestamp = datetime.utcfromtimestamp(
                        int(timestamp) / 1000).strftime('%Y-%m-%d %H:%M:%S')
                    key = meter_id + '_' + str(new_timestamp)

                    # Write disaggregation to redis database as key-value-pair
                    # The unique key consists of meter id, separator '_' and UTC timestamp
                    data = dict(type='disaggregation',
                                values=disaggregation[timestamp])

                    self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
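
Disaggregation entries share the '<meter_id>_<UTC timestamp>' key layout with readings and are only distinguished by their 'type' field. A sketch of collecting them by scanning the keyspace (hypothetical helper; assumes a redis-py client created without decode_responses, so keys come back as bytes):

    import json

    def read_disaggregations(redis_client, meter_id):
        """ Hypothetical helper: return a dict mapping redis keys to
        disaggregation values for one meter, filtering on the 'type'
        field written above. """
        result = {}
        for key in redis_client.scan_iter(match=meter_id + '_*'):
            raw = redis_client.get(key)
            if raw is None:
                continue
            entry = json.loads(raw)
            if isinstance(entry, dict) and entry.get('type') == 'disaggregation':
                result[key.decode('utf-8')] = entry['values']
        return result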
Example no. 7
    def write_last_readings(self, session):
        """ Get the last reading for all meters and write them to the redis
        database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        for meter_id in get_all_meter_ids(session):

            try:
                reading = self.d.get_last_reading(meter_id)
                if reading == {}:
                    message = 'No last reading available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                adjusted_reading = check_and_nullify_power_value(
                    reading, meter_id)
                reading_timestamp = str(
                    datetime.utcfromtimestamp(adjusted_reading['time'] /
                                              1000).strftime('%F %T'))
                key = meter_id + '_' + reading_timestamp
                date_key = datetime.utcnow().strftime('%Y-%m-%d')
                # Write reading to redis database as key-value-pair
                # The unique key consists of the meter id, the separator '_' and the UTC
                # timestamp
                data = dict(type='reading', values=adjusted_reading['values'])
                self.redis_client.set(key, json.dumps(data))
                self.redis_client.set(meter_id + '_last', json.dumps(data))
                data["time"] = reading_timestamp
                self.redis_client.set(meter_id + '_' + date_key + '_last',
                                      json.dumps(data))

                if self.redis_client.get(meter_id + '_' + date_key +
                                         '_first') is None:
                    self.redis_client.set(meter_id + '_' + date_key + '_first',
                                          json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
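
Per meter and day, the first and last reading are kept under '<meter_id>_<date>_first' and '<meter_id>_<date>_last', so a daily lookup only needs those two keys. A minimal sketch (hypothetical helper):

    import json

    def read_first_and_last(redis_client, meter_id, date_str):
        """ Hypothetical helper: return the first and last reading stored for
        one meter on one day (date_str in '%Y-%m-%d'), each as a dict or None
        if the key is missing. """
        first = redis_client.get(meter_id + '_' + date_str + '_first')
        last = redis_client.get(meter_id + '_' + date_str + '_last')
        return (json.loads(first) if first is not None else None,
                json.loads(last) if last is not None else None)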