コード例 #1
0
    def test_fetch_request_token(self, post, fetch_request_token):
        """ Test function _fetch_request_token() of class Discovergy. """

        d = Discovergy('TestClient')

        # Called for its side effect of populating d._oauth_key and
        # d._oauth_secret; the response object itself is not needed (the
        # original bound it to a misspelled, unused variable 'reponse').
        d._fetch_consumer_tokens()
        request_token_oauth = OAuth1Session(d._oauth_key,
                                            client_secret=d._oauth_secret,
                                            callback_uri='oob')

        # Open OAuth1Session for _fetch_request_token()
        oauth_token_response = request_token_oauth.fetch_request_token(
            d._request_token_url)

        # Close OAuth1Session, otherwise it will generate a warning
        request_token_oauth.close()

        # Check response values: both tokens must be 32-char alphanumerics
        self.assertTrue('oauth_token' in oauth_token_response.keys())
        self.assertTrue('oauth_token_secret' in oauth_token_response.keys())
        self.assertTrue(oauth_token_response.get('oauth_token').isalnum())
        self.assertTrue(
            oauth_token_response.get('oauth_token_secret').isalnum())
        self.assertEqual(len(oauth_token_response.get('oauth_token')), 32)
        self.assertEqual(len(oauth_token_response.get('oauth_token_secret')),
                         32)

        # Check response types
        self.assertTrue(
            isinstance(oauth_token_response.get('oauth_token'), str))
        self.assertTrue(
            isinstance(oauth_token_response.get('oauth_token_secret'), str))
コード例 #2
0
    def test_login(self, post, get, fetch_access_token, fetch_request_token):
        """ Test function login() of class Discovergy. """

        d = Discovergy('TestClient')
        login = d.login('*****@*****.**', '123test')

        # assertIsInstance/assertIs give clearer failure messages than the
        # original assertTrue(isinstance(...)) / assertEqual(..., True) and
        # additionally pin the exact True singleton.
        self.assertIsInstance(login, bool)
        self.assertIs(login, True)
コード例 #3
0
    def test_authorize_request_token(self, get):
        """ Test function _authorize_request_token() of class Discovergy. """

        discovergy = Discovergy('TestClient')
        token_verifier = discovergy._authorize_request_token(
            '*****@*****.**', '123test', '719095064cbc476680700ec5bf274453')

        # The verifier must be a plain string carrying the mocked value.
        self.assertIsInstance(token_verifier, str)
        self.assertEqual(token_verifier, '3bfea9ada8c144afb81b5992b992303e')
コード例 #4
0
    def test_get_last_reading(self, get_reading, post, get, fetch_access_token,
                              fetch_request_token):
        """ Test function get_last_reading() of class Discovergy. """

        d = Discovergy('TestClient')
        # Log in for its side effect only; the returned flag is covered by
        # test_login(), so the unused 'login' binding was dropped.
        d.login('*****@*****.**', '123test')
        measurement = d.get_last_reading(METER_ID)

        # Check result type
        self.assertTrue(isinstance(measurement, dict))

        # Check result values
        self.assertEqual(measurement, READING)
コード例 #5
0
    def test_get_fieldnames_for_meter(self, get_fieldnames, post, get,
                                      fetch_access_token, fetch_request_token):
        """ Test function get_fieldnames_for_meter() of class Discovergy. """

        d = Discovergy('TestClient')
        # Log in for its side effect only; the returned flag is covered by
        # test_login(), so the unused 'login' binding was dropped.
        d.login('*****@*****.**', '123test')
        fieldnames = d.get_fieldnames_for_meter(METER_ID)

        # Check result type
        self.assertTrue(isinstance(fieldnames, list))

        # Check result values
        self.assertEqual(fieldnames, FIELDNAMES)
コード例 #6
0
    def test_fetch_consumer_tokens(self, post):
        """ Test function _fetch_consumer_token() of class Discovergy. """

        discovergy = Discovergy("TestClient")
        response = discovergy._fetch_consumer_tokens()

        # The mocked HTTP layer must hand back a MockResponse whose raw
        # payload is a byte string.
        self.assertIsInstance(response, MockResponse)
        self.assertIsInstance(response.content, bytes)

        # The consumer key/secret stored on the instance must match the
        # values carried in the response body.
        payload = response.json()
        self.assertEqual(discovergy._oauth_key, payload['key'])
        self.assertEqual(discovergy._oauth_secret, payload['secret'])
コード例 #7
0
    def __init__(self):
        """ Create the Discovergy client and the redis connection, and align
        the first averaging interval end to the next quarter-hour boundary. """

        # client_name and redis_host/port/db are module-level settings
        # (defined outside this excerpt).
        self.d = Discovergy(client_name)
        self.redis_client = redis.Redis(host=redis_host,
                                        port=redis_port,
                                        db=redis_db)  # connect to server

        # pylint: disable=global-statement
        global end_next_interval
        # set end_next_interval to end of next quarter-hour
        current_time = datetime.utcnow()
        # Seconds elapsed since the start of the current hour (incl. µs).
        nsecs = current_time.minute * 60 + current_time.second + \
                current_time.microsecond * 1e-6
        # Seconds remaining until the next 900 s (quarter-hour) boundary.
        delta = math.ceil(nsecs / 900) * 900 - nsecs
        end_next_interval = current_time + timedelta(seconds=delta)
コード例 #8
0
    def test_get_meters(self, get_meters, post, get, fetch_access_token,
                        fetch_request_token):
        """ Test function get_meters() of class Discovergy. """

        d = Discovergy('TestClient')
        # Log in for its side effect only; the returned flag is covered by
        # test_login(), so the unused 'login' binding was dropped.
        d.login('*****@*****.**', '123test')
        meters = d.get_meters()

        # Check return types
        self.assertTrue(isinstance(meters, list))
        self.assertTrue(isinstance(meters[0], dict))

        # Check return values
        self.assertEqual(list(meters[0].keys()), METER_KEYS)
        self.assertEqual(list(meters[0].get('location').keys()), LOCATION_KEYS)
コード例 #9
0
    def test_get_readings(self, get_readings, post, get, fetch_access_token,
                          fetch_request_token):
        """ Test function get_readings() of class Discovergy. """

        d = Discovergy('TestClient')
        # Log in for its side effect only; the returned flag is covered by
        # test_login(), so the unused 'login' binding was dropped.
        d.login('*****@*****.**', '123test')
        # Interval start: two hours ago, as unix epoch milliseconds.
        end = datetime.now()
        start = end - timedelta(hours=2)
        start = round(start.timestamp() * 1e3)
        measurement = d.get_readings(METER_ID, start, None, 'one_hour')

        # Check response type
        self.assertTrue(isinstance(measurement, list))

        # Check response values
        self.assertEqual(measurement, READINGS)
コード例 #10
0
    def test_get_disaggregation(self, get_disaggregation, post, get,
                                fetch_access_token, fetch_request_token):
        """ Test function get_disaggregation() of class Discovergy. """

        d = Discovergy('TestClient')
        # Log in for its side effect only; the returned flag is covered by
        # test_login(), so the unused 'login' binding was dropped.
        d.login('*****@*****.**', '123test')
        # Interval: the last eleven hours, as unix epoch milliseconds.
        end = datetime.now()
        start = end - timedelta(hours=11)
        start = round(start.timestamp() * 1e3)
        end = round(end.timestamp() * 1e3)
        measurement = d.get_disaggregation(METER_ID, start, end)

        # Check result type
        self.assertTrue(isinstance(measurement, dict))

        # Check result values
        self.assertEqual(measurement, DISAGGREGATION)
コード例 #11
0
    def test_fetch_access_token(self, get, fetch_access_token):
        """ Test function _fetch_access_token() of class Discovergy. """

        discovergy = Discovergy('TestClient')
        verifier = discovergy._authorize_request_token(
            '*****@*****.**', '123test', '719095064cbc476680700ec5bf274453')
        access_token = discovergy._fetch_access_token(
            '719095064cbc476680700ec5bf274453',
            '1f51232ace6a403a9bd2cdfff8d63a28', verifier)

        # The access token must be a dict holding the mocked token pair.
        self.assertIsInstance(access_token, dict)
        expected = {
            'token': '2a28117b269e4f99893e9f758136becc',
            'token_secret': 'b75c7fc5142842afb3fd6686cacb675b',
        }
        self.assertEqual(access_token, expected)
コード例 #12
0
    def test_init(self):
        """ Test function __init__() of class Discovergy. """

        d = Discovergy('TestClient')
        # Plain state set by the constructor.
        self.assertEqual(d._client_name, "TestClient")
        self.assertEqual(d._email, "")
        self.assertEqual(d._password, "")
        self.assertEqual(d._consumer_key, "")
        self.assertEqual(d._consumer_secret, "")
        # assertIsNone is the idiomatic, identity-based check for None
        # (the original used assertEqual(x, None)).
        self.assertIsNone(d._discovergy_oauth)
        # All endpoint URLs are derived from the base URL.
        self.assertEqual(d._base_url, 'https://api.discovergy.com/public/v1')
        self.assertEqual(d._consumer_token_url,
                         d._base_url + '/oauth1/consumer_token')
        self.assertEqual(d._request_token_url,
                         d._base_url + '/oauth1/request_token')
        self.assertEqual(d._authorization_base_url,
                         d._base_url + '/oauth1/authorize')
        self.assertEqual(d._access_token_url,
                         d._base_url + '/oauth1/access_token')
        self.assertIsNone(d._oauth_key)
        self.assertIsNone(d._oauth_secret)
コード例 #13
0
class Task:
    """ Handle discovergy login, data retrieval, populating and updating the
    redis database. """
    def __init__(self):
        """ Create the Discovergy client and the redis connection, and align
        the first averaging interval end to the next quarter-hour boundary. """

        # client_name and redis_host/port/db are module-level settings
        # (defined outside this excerpt).
        self.d = Discovergy(client_name)
        self.redis_client = redis.Redis(host=redis_host,
                                        port=redis_port,
                                        db=redis_db)  # connect to server

        # pylint: disable=global-statement
        global end_next_interval
        # set end_next_interval to end of next quarter-hour
        current_time = datetime.utcnow()
        # Seconds elapsed since the start of the current hour (incl. µs).
        nsecs = current_time.minute * 60 + current_time.second + \
                current_time.microsecond * 1e-6
        # Seconds remaining until the next 900 s (quarter-hour) boundary.
        delta = math.ceil(nsecs / 900) * 900 - nsecs
        end_next_interval = current_time + timedelta(seconds=delta)

    def login(self):
        """ Authenticate against the discovergy backend.

        Uses the module-level ``email`` and ``password`` credentials
        (defined outside this excerpt).
        """

        self.d.login(email, password)

    def write_readings(self, session, end):
        """ Get all readings for all meters from the beginning of the BAFA
        support year until now with one-week interval (this is the finest
        granularity we get for one year back in time, cf.
        https://api.discovergy.com/docs/) and write them to the redis database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        :param int end: end of interval in the format required by the
        discovergy API
        """

        for meter_id in get_all_meter_ids(session):

            try:
                readings = self.d.get_readings(meter_id,
                                               calc_support_year_start(), end,
                                               'one_week')

                # Truthiness check instead of '== []' also treats None or
                # any other empty result as "nothing to store".
                if not readings:
                    message = 'No readings available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                for reading in readings:
                    adjusted_reading = check_and_nullify_power_value(
                        reading, meter_id)
                    timestamp = adjusted_reading['time']

                    # Convert unix epoch time in milliseconds to UTC format
                    new_timestamp = datetime.utcfromtimestamp(timestamp/1000).\
                        strftime('%Y-%m-%d %H:%M:%S')

                    key = meter_id + '_' + str(new_timestamp)

                    # Write adjusted reading to redis database as key-value-pair
                    # The unique key consists of the meter id (16 chars), the
                    # separator '_' and the UTC timestamp (19 chars)
                    data = dict(type='reading',
                                values=adjusted_reading['values'])
                    self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                # One broken meter must not abort the whole run; log the
                # error and move on to the next meter id.
                message = exception_message(e)
                logger.error(message)

    def write_last_readings(self, session):
        """ Get the last reading for all meters and write them to the redis
        database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        for meter_id in get_all_meter_ids(session):

            try:
                reading = self.d.get_last_reading(meter_id)
                if reading == {}:
                    message = 'No last reading available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                adjusted_reading = check_and_nullify_power_value(
                    reading, meter_id)
                # '%F %T' is strftime shorthand for '%Y-%m-%d %H:%M:%S'.
                reading_timestamp = str(
                    datetime.utcfromtimestamp(adjusted_reading['time'] /
                                              1000).strftime('%F %T'))
                key = meter_id + '_' + reading_timestamp
                date_key = datetime.utcnow().strftime('%Y-%m-%d')
                # Write reading to redis database as key-value-pair
                # The unique key consists meter id, separator '_' and UTC
                # timestamp
                data = dict(type='reading', values=adjusted_reading['values'])
                self.redis_client.set(key, json.dumps(data))
                self.redis_client.set(meter_id + '_last', json.dumps(data))
                # NOTE: 'time' is added only AFTER the two writes above, so
                # the '<meter>_<ts>' and '<meter>_last' entries do not carry
                # it while the dated '_last'/'_first' entries below do.
                data["time"] = reading_timestamp
                self.redis_client.set(meter_id + '_' + date_key + '_last',
                                      json.dumps(data))

                # Record the first reading of the day exactly once.
                if self.redis_client.get(meter_id + '_' + date_key +
                                         '_first') is None:
                    self.redis_client.set(meter_id + '_' + date_key + '_first',
                                          json.dumps(data))

            except Exception as e:
                # Log the failure and continue with the next meter.
                message = exception_message(e)
                logger.error(message)

    def write_energy_consumption(self, session):
        """ Get readings for all meters at start and end dates of
        previous and ongoing terms and write them to the redis database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        for meter_id in get_all_meter_ids(session):

            try:
                for timestamp in calc_term_boundaries():
                    # End of the day that starts at `timestamp`.
                    # NOTE(review): hours=24, minutes=59, seconds=59 lands
                    # at 00:59:59 on the NEXT day; 23:59:59 may have been
                    # intended — confirm before changing.
                    end_of_day = round(
                        (datetime.utcfromtimestamp(timestamp / 1000) +
                         timedelta(hours=24, minutes=59,
                                   seconds=59)).timestamp() * 1000)

                    readings = self.d.get_readings(meter_id, timestamp,
                                                   end_of_day, 'one_hour')

                    if readings == []:
                        message = 'No readings available for metering id {}'.format(
                            meter_id)
                        logger.info(message)
                        continue

                    for reading in readings:
                        adjusted_reading = check_and_nullify_power_value(
                            reading, meter_id)
                        # Dedicated name instead of rebinding the outer loop
                        # variable `timestamp` (the original shadowed it,
                        # which was confusing; behavior is unchanged).
                        reading_time = adjusted_reading['time']

                        # Convert unix epoch time in milliseconds to UTC format
                        new_timestamp = datetime.utcfromtimestamp(
                            reading_time / 1000).strftime('%Y-%m-%d %H:%M:%S')

                        key = meter_id + '_' + str(new_timestamp)

                        # Write adjusted reading to redis database as key-value-pair
                        data = dict(type='reading',
                                    values=adjusted_reading['values'])
                        self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                # Log the failure and continue with the next meter.
                message = exception_message(e)
                logger.error(message)

    def write_disaggregations(self, session, end):
        """ Fetch the disaggregation values of every meter from one week
        back until now and store them in the redis database. One week is the
        longest range the API accepts, otherwise it answers '400 Bad
        Request: Duration of the data cannot be larger than 1 week. Please
        try for a smaller duration.' If one week back lies before the
        current BAFA support year start, that start is used instead.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        :param int end: end of interval in the format required by the
        discovergy API
        """

        for meter_id in get_all_meter_ids(session):

            try:

                disaggregation = self.d.get_disaggregation(
                    meter_id, calc_support_week_start(), end)

                if disaggregation == {}:
                    message = 'No disaggregation available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                for ts, values in disaggregation.items():

                    # Milliseconds since the epoch -> 'YYYY-MM-DD HH:MM:SS'.
                    utc_string = datetime.utcfromtimestamp(
                        int(ts) / 1000).strftime('%Y-%m-%d %H:%M:%S')

                    # Unique key: meter id + separator '_' + UTC timestamp.
                    entry_key = meter_id + '_' + utc_string
                    entry = dict(type='disaggregation', values=values)

                    self.redis_client.set(entry_key, json.dumps(entry))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)

    def write_last_disaggregations(self, session):
        """ Store the most recent disaggregation value of every meter in the
        redis database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        two_days_back = calc_two_days_back()
        for meter_id in get_all_meter_ids(session):

            try:
                disaggregation = self.d.get_disaggregation(
                    meter_id, two_days_back, calc_end())

                if disaggregation in ({}, []):
                    message = 'No disaggregation available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                timestamps = sorted(disaggregation.keys())
                if timestamps:
                    # The newest timestamp sorts last.
                    latest = timestamps[-1]

                    # Milliseconds since the epoch -> 'YYYY-MM-DD HH:MM:SS'.
                    utc_string = datetime.utcfromtimestamp(
                        int(latest) / 1000).strftime('%Y-%m-%d %H:%M:%S')

                    # Unique key: meter id (16 chars) + separator '_' + UTC
                    # timestamp.
                    entry_key = meter_id + '_' + utc_string
                    entry = dict(type='disaggregation',
                                 values=disaggregation[latest])

                    self.redis_client.set(meter_id + '_last_disaggregation',
                                          json.dumps(entry))
                    self.redis_client.set(entry_key, json.dumps(entry))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)

    def calculate_average_power(self, session):
        """ Compute the average power over the just-finished quarter-hour for
        every meter and append it to the meter's daily 'average_power_...'
        redis entry.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        # pylint: disable=global-statement
        global end_next_interval
        # Date and hour of the interval start (15 min before its end).
        date_interval = (end_next_interval -
                         timedelta(minutes=15)).strftime("%Y-%m-%d")
        hour_interval = (end_next_interval -
                         timedelta(minutes=15)).strftime("%H")

        for meter_id in get_all_meter_ids(session):
            average_power_key = 'average_power_' + meter_id + '_' + date_interval
            power_sum = 0
            divider = 0
            for key in get_keys_date_hour_prefix(self.redis_client, meter_id,
                                                 date_interval, hour_interval):

                reading_date, data = get_entry_date(self.redis_client,
                                                    meter_id, key, 'reading')

                if reading_date is None or data is None:
                    continue

                reading_timestamp = reading_date.timestamp()

                # Only readings inside the (end - 15 min, end] window count.
                if ((end_next_interval - timedelta(minutes=15)).timestamp() <
                        reading_timestamp <= end_next_interval.timestamp()):
                    power_sum += data.get('values').get('power')
                    divider += 1

            if divider != 0:
                average = power_sum / divider
            else:
                # No readings in the window: store 0 and log the gap.
                average = 0
                message = f"No readings available for {meter_id} between " \
                          f"{(end_next_interval - timedelta(minutes=15))} and {end_next_interval}"
                logger.info(message)

            # Create the per-day entry on first use, otherwise append the
            # new interval value to the existing JSON mapping.
            if len(self.redis_client.keys(average_power_key)) == 0:
                data = {
                    end_next_interval.strftime("%Y-%m-%d %H:%M:%S"): average
                }

            else:
                data = json.loads(self.redis_client.get(average_power_key))
                data[end_next_interval.strftime("%Y-%m-%d %H:%M:%S")] = average

            self.redis_client.set(average_power_key, json.dumps(data))
            # Keep the averages for three days only.
            self.redis_client.expire(average_power_key,
                                     int(timedelta(days=3).total_seconds()))

    def populate_redis(self):
        """ Populate the redis database with all discovergy data from the past. """

        # pylint: disable=global-statement
        global last_data_flush
        # Remember when the last full population happened; update_redis()
        # uses this to re-populate at most once per 24 h.
        last_data_flush = datetime.utcnow()
        end = calc_end()

        # Connect to sqlite database
        session = create_session()

        try:
            # Authenticate against the discovergy backend
            self.login()

        except Exception as e:
            # Without valid credentials there is nothing to fetch; bail out.
            message = exception_message(e)
            logger.error(message)
            logger.error('Wrong or missing discovergy credentials.')
            return

        self.write_readings(session, end)
        self.write_energy_consumption(session)
        self.write_disaggregations(session, end)

    def update_redis(self):
        """ Update the redis database every 60s with the latest discovergy data. """

        message = 'Started redis task at {}'.format(
            datetime.now().strftime("%H:%M:%S"))
        logger.info(message)

        # Runs forever; intended to be executed as a background task.
        while True:
            stdlib_time.sleep(60)
            message = 'Fill redis at {}'.format(
                datetime.now().strftime("%H:%M:%S"))
            logger.info(message)

            # Populate redis if last data flush was more than 24h ago
            # pylint: disable=global-statement
            global last_data_flush
            global end_next_interval

            # Connect to SQLite database
            session = create_session()

            if (last_data_flush is None) or (
                    datetime.utcnow() - last_data_flush > timedelta(hours=24)):
                # Full refresh (populate_redis() also refreshes
                # last_data_flush) plus the derived aggregate values.
                self.populate_redis()
                self.write_last_readings(session)
                self.write_last_disaggregations(session)
                write_baselines(session)
                write_savings(session)
                write_base_values_or_per_capita_consumption(session)
            else:
                # Cheap incremental update in between full refreshes.
                self.write_last_readings(session)
                self.write_last_disaggregations(session)

            # Once the current quarter-hour boundary has passed, compute the
            # average power for the elapsed interval and advance the boundary.
            if (datetime.utcnow() - end_next_interval) > timedelta(0):
                self.calculate_average_power(session)
                end_next_interval = end_next_interval + timedelta(minutes=15)