Example 1
def write_baselines(session):
    """ Write the baseline for each user to the SQLite database. """

    start = calc_support_year_start_datetime()
    for user in get_all_users(session):

        try:
            # Check if entry exists
            baseline = session.query(
                User.baseline).filter_by(meter_id=user.meter_id).first()[0]

            # Create a baseline entry if none exists yet
            if not baseline:
                baseline = calc_energy_consumption_last_term(
                    user.meter_id, start)
                if not baseline:
                    message = ERROR_MESSAGE_BASELINE.format(
                        user.meter_id, message_timestamp)
                    logger.info(message)
                else:
                    user.baseline = baseline
                    session.add(user)

        except Exception as e:
            message = exception_message(e)
            logger.error(message)

    session.commit()
Example 2
def get_community_saving():
    """ Retrieve the last community saving prognosis from the SQLite database.
    :returns: the last saving together with its timestamp or None if there are
    no values
    :rtype: dict or type(None) if there are no values
    """

    try:
        # Query the latest community saving prognosis only; there is no need
        # to fetch all rows just to use the first one
        row = db.session.query(CommunitySaving).\
            order_by(CommunitySaving.timestamp.desc()).first()

        timestamp = row.timestamp.strftime("%Y-%m-%d %H:%M:%S")
        return {timestamp: row.saving}

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return None
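The return value is a single-entry dict mapping the timestamp string to the saving. A caller might unpack it like this (a minimal sketch; the literal dict stands in for a real query result):

# Minimal sketch: unpacking the single-entry dict that get_community_saving()
# returns. The literal below stands in for a real query result.
community_saving = {"2020-03-01 00:00:00": 85000.0}

(timestamp, saving), = community_saving.items()
print(timestamp, saving)  # -> 2020-03-01 00:00:00 85000.0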
Example 3
def individual_global_challenge():
    """ Shows the individual saving prognosis for today in μWh.
    :returns: (a JSON object where the saving is mapped to the timestamp, 200) or
    ({}, 206) if there is no value
    :rtype: tuple
    swagger_from_file: swagger_files/get_individual-global-challenge.yml
    """

    user_id = get_jwt_identity()
    user = db.session.query(User).filter_by(id=user_id).first()
    if user is None:
        return UNKNOWN_USER.make_json_response(status.HTTP_400_BAD_REQUEST)

    result = {}

    try:
        saving = get_individual_saving(user.meter_id)
        if saving is None:
            return NO_GLOBAL_CHALLENGE.make_json_response(
                status.HTTP_206_PARTIAL_CONTENT)

        baseline = get_individual_baseline(user.id)
        if baseline is None:
            return NO_BASELINE.make_json_response(
                status.HTTP_206_PARTIAL_CONTENT)

        result['saving'] = saving
        result['baseline'] = baseline

        return jsonify(result), status.HTTP_200_OK

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return jsonify(result), status.HTTP_206_PARTIAL_CONTENT
Example 4
def get_individual_saving(meter_id):
    """ Retrieve the last individual saving prognosis for the given meter id
    from the SQLite database.
    :param str meter_id: the user's meter id
    :returns: the last saving together with its timestamp or None if there are
    no values
    :rtype: dict or type(None) if there are no values
    """

    try:
        # Query the latest individual saving prognosis for the given meter id;
        # there is no need to fetch all rows just to use the first one
        row = db.session.query(UserSaving).\
            filter(UserSaving.meter_id == meter_id).\
            order_by(UserSaving.timestamp.desc()).first()

        timestamp = row.timestamp.strftime("%Y-%m-%d %H:%M:%S")
        return {timestamp: row.saving}

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return None
Example 5
def get_data_day_before(dt, meter_id, session):
    """ Get the values from the day before from the SQLite database.
    :param datetime.date: the request date
    :param str meter_id: the user's meter id
    :param sqlalchemy.orm.scoping.scoped_session session: the database session
    :returns: date, meter_id, consumption, consumption_cumulated, inhabitants,
    per_capita_consumption, per_capita_consumption_cumulated, days, moving_average
    and moving_average_annualized
    :rtype: list or type(None) if there was an error
    """

    day_before = dt - timedelta(days=1)

    try:
        result = session.query(PerCapitaConsumption).filter_by(
            meter_id=meter_id).filter(
                extract('year', PerCapitaConsumption.date) == day_before.year,
                extract('month',
                        PerCapitaConsumption.date) == day_before.month,
                extract('day',
                        PerCapitaConsumption.date) == day_before.day).first()

        return result

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return None
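The date-part filtering above is the standard way to match a DATE column against a datetime.date in SQLAlchemy. A minimal self-contained sketch of the same technique against an in-memory SQLite table (the model and values are hypothetical; on SQLAlchemy < 1.4, declarative_base lives in sqlalchemy.ext.declarative):

from datetime import date
from sqlalchemy import Column, Date, Integer, create_engine, extract
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Entry(Base):
    """ Hypothetical stand-in for PerCapitaConsumption. """
    __tablename__ = 'entry'
    id = Column(Integer, primary_key=True)
    date = Column(Date)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Entry(date=date(2020, 2, 29)))
session.commit()

# Match year, month and day separately, as get_data_day_before() does
target = date(2020, 2, 29)
print(session.query(Entry).filter(
    extract('year', Entry.date) == target.year,
    extract('month', Entry.date) == target.month,
    extract('day', Entry.date) == target.day).first())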
Example 6
def calc_ratio_values(start):
    """ Calculates the percentages of energy consumption for the specified
    term. A term is a year where the start may be specified by the caller.
    :param datetime.date start: the start date of the term
    :returns: sum of all standard load profile ratio values of the given term
    :rtype: float
    """

    end = datetime(start.year + 1, start.month, start.day).date()
    term_end = datetime.utcnow().date()
    engine = get_engine()
    energy_total = 0.0
    ratio_values = 0.0
    try:
        with engine.connect() as con:
            # Query the term's total energy, which should be ~ 1,000,000 kWh
            energy_total = con.execute(
                "SELECT SUM(energy) FROM loadprofile WHERE date BETWEEN '"
                + str(start) + "' AND '" + str(end) + "'").first()[0]

            # Query the sum of the energy values accrued so far
            energy_promille = con.execute(
                "SELECT SUM(energy) FROM loadprofile WHERE date BETWEEN '"
                + str(start) + "' AND '" + str(term_end) + "'").first()[0]

        if (energy_promille is not None) and (energy_total is not None):
            ratio_values = energy_promille / energy_total

    except Exception as e:
        message = exception_message(e)
        logger.error(message)

    return ratio_values
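Building SQL by string concatenation as above works but is brittle; bound parameters let the driver handle quoting. A minimal sketch of the same aggregate query with sqlalchemy.text (the database path is hypothetical; the table layout is taken from the example above):

from sqlalchemy import create_engine, text

# Same aggregate query with bound parameters instead of concatenation
engine = create_engine('sqlite:///loadprofile.db')  # hypothetical path
stmt = text("SELECT SUM(energy) FROM loadprofile "
            "WHERE date BETWEEN :start AND :end")

with engine.connect() as con:
    energy_total = con.execute(
        stmt, {"start": "2020-03-01", "end": "2021-03-01"}).scalar()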
Example 7
def write_savings(session):
    """ Write the energy savings of each user and the community to the
    SQLite database.
    """

    start = calc_support_year_start_datetime()
    try:
        for key, value in estimate_energy_saving_each_user(start,
                                                           session).items():

            if value is None:
                message = ERROR_MESSAGE_SAVING.format(key, message_timestamp)
                logger.info(message)

                # Create UserSaving instance and set saving to 0.0
                user_saving = UserSaving(datetime.utcnow(), key, 0.0)

            else:
                user_saving = UserSaving(datetime.utcnow(), key, value)

            session.add(user_saving)

        # Create CommunitySaving instance
        community_saving = estimate_energy_saving_all_users(start, session)
        session.add(CommunitySaving(datetime.utcnow(), community_saving))

        session.commit()
    except Exception as e:
        message = exception_message(e)
        logger.error(message)
Example 8
def per_capita_consumption():
    """ Shows the the last annualized moving average in kWh.
    :returns: (a JSON object where the moving average is mapped to the
    timestamp, 200) or ({}, 206) if there is no value
    :rtype: tuple
    swagger_from_file: swagger_files/get_per-capita-consumption.yml
    """

    user_id = get_jwt_identity()
    user = db.session.query(User).filter_by(id=user_id).first()

    if user is None:
        return UNKNOWN_USER.make_json_response(status.HTTP_400_BAD_REQUEST)

    result = {}

    try:
        result = get_moving_average_annualized(user.meter_id)
        if result is None:
            return NO_PER_CAPITA_CONSUMPTION.make_json_response(
                status.HTTP_206_PARTIAL_CONTENT)

        return jsonify(result), status.HTTP_200_OK

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return jsonify(result), status.HTTP_206_PARTIAL_CONTENT
Example 9
def build_data_package(data_day_before, consumption, inhabitants, date):
    """ Build a per capita consumption data package from the retrieved database values.
    :param list data_day_before: the data from the day before the date in
    question from the SQLite database (result of get_data_day_before())
    :param float consumption: the date's calculated consumption
    :param int inhabitants: the number of inhabitants in the user's flat
    :param datetime.date date: the date to build the data package for
    :returns: a data package with all relevant user values on the given date
    :rtype: dict
    """

    try:
        # Calculate consumption_cumulated := consumption_cumulated of the day
        # before + consumption (kWh)
        consumption_cumulated = data_day_before.consumption_cumulated + consumption

        # Calculate per_capita_consumption := consumption/inhabitants (kWh)
        per_capita_consumption = consumption / inhabitants

        # Calculate per_capita_consumption_cumulated := per_capita_consumption_cumulated of the day
        # before + per_capita_consumption (kWh)
        per_capita_consumption_cumulated = data_day_before.per_capita_consumption_cumulated + \
                                           per_capita_consumption

        # Calculate days (since calculation start) := days of the day before + 1
        # (number)
        days = data_day_before.days + 1

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return None

    # Calculate moving_average := per_capita_consumption_cumulated/days (kWh)
    moving_average = per_capita_consumption_cumulated / days

    # Calculate moving_average_annualized := moving_average * 365 (kWh, rounded)
    moving_average_annualized = round(moving_average * 365)

    # Return base values as dict
    return dict(
        date=date,
        consumption=consumption,
        consumption_cumulated=consumption_cumulated,
        inhabitants=inhabitants,
        per_capita_consumption=per_capita_consumption,
        per_capita_consumption_cumulated=per_capita_consumption_cumulated,
        days=days,
        moving_average=moving_average,
        moving_average_annualized=moving_average_annualized)
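A tiny worked example of the arithmetic above, using a stub object in place of the day-before row (all numbers hypothetical):

from datetime import date
from types import SimpleNamespace

# Stub standing in for the day-before row from get_data_day_before()
day_before = SimpleNamespace(consumption_cumulated=12.0,
                             per_capita_consumption_cumulated=6.0,
                             days=3)

package = build_data_package(day_before, consumption=2.0, inhabitants=2,
                             date=date(2020, 3, 4))
# consumption_cumulated:            12.0 + 2.0 = 14.0
# per_capita_consumption:           2.0 / 2 = 1.0
# per_capita_consumption_cumulated: 6.0 + 1.0 = 7.0
# days: 3 + 1 = 4; moving_average:  7.0 / 4 = 1.75
# moving_average_annualized:        round(1.75 * 365) = 639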
Example 10
def background_thread():
    """ Emit server-generated live data to the clients every 60s. """
    while True:
        with app.app_context():
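            # Iterate over a copy so clients can be removed during iteration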
            for key in dict(clients):
                try:
                    user = db.session.query(User).filter_by(
                        meter_id=clients[key].get('meter_id')).first()
                    message = json.dumps(wp.create_data(user.id))
                    socketio.emit('live_data', {'data': message},
                                  namespace='/live', room=key)
                except Exception as e:
                    message = exception_message(e)
                    logger.error(message)
                    del clients[key]
            socketio.sleep(20)
Example 11
def get_individual_baseline(user_id):
    """ Retrieve the last baseline value for the given meter id from the SQLite
    database.
    :param str user_id: the user's id
    :returns: the user's baseline value
    :rtype: int or type(None) in case of error
    """

    try:
        user = db.session.query(User).filter_by(id=user_id).first()
        return user.baseline

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return None
Example 12
def group_profile_pictures():
    """ Get all profile pictures of the user's group, together with their user
    ids.
    swagger_from_file: swagger_files/get_assets_group-profile-pictures.yml
    """

    user_id = get_jwt_identity()
    try:
        result = get_group_members(user_id)
        if result is None:
            return NO_USERS.make_json_response(status.HTTP_400_BAD_REQUEST)
        return jsonify(result), status.HTTP_200_OK

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return jsonify({}), status.HTTP_206_PARTIAL_CONTENT
Example 13
def community_global_challenge():
    """ Shows the community saving prognosis for today in μWh.
    :return: (a JSON object where the saving is mapped to the timestamp, 200)
    or ({}, 206) if there is no value
    :rtype: tuple
    swagger_from_file: swagger_files/get_community-global-challenge.yml
    """

    result = {}

    try:
        result = get_community_saving()
        return jsonify(result), status.HTTP_200_OK

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return jsonify(result), status.HTTP_206_PARTIAL_CONTENT
Example 14
def get_first_meter_reading_date(redis_client, meter_id, date):
    """ Return the first reading for the given meter id on the given day which
    is stored in the redis database. As we were using unix timestamps as
    basis for our dates all along, there is no need to convert the stored,
    timezone-unaware date to UTC.
    : param str meter_id: the meter id for which to get the value
    : param str date: the date for which to get the value
    : returns: the last reading for the given meter id on the given date or
    None if there are no values
    : rtype: float or type(None)
    """
    key_date_first = f"{meter_id}_{date}_first"
    redis_key_date_first = redis_client.get(key_date_first)

    if redis_key_date_first is None:
        logger.info("No key %s_%s_first available. Iteration needed.",
                    meter_id, date)
        sorted_keys_date = get_sorted_keys_date_prefix(redis_client, meter_id,
                                                       date)

        if len(sorted_keys_date) == 0:
            logger.info('No first reading available for meter id %s on %s',
                        meter_id, str(date))
            return None

        for key in sorted_keys_date:
            reading_date, data = get_entry_date(redis_client, meter_id, key,
                                                'reading')

            if reading_date is None or data is None:
                continue

            data["time"] = reading_date.timestamp()
            redis_client.set(key_date_first, json.dumps(data))
            return data.get('values').get('energy')

    try:
        data = json.loads(redis_key_date_first)

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
    else:
        return data.get('values').get('energy')
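The lookup above hinges on the '<meter_id>_<date>_first' cache key. A self-contained sketch of that key scheme with an in-memory stand-in for the redis client (meter id and values are placeholders):

import json

class FakeRedis:
    """ In-memory stand-in for the redis client (get/set only). """
    def __init__(self):
        self.store = {}

    def get(self, key):
        return self.store.get(key)

    def set(self, key, value):
        self.store[key] = value

client = FakeRedis()
meter_id, day = '0123456789abcdef', '2020-03-01'  # placeholders

# Cache a first reading under the per-day key, as the iteration branch does
client.set(f"{meter_id}_{day}_first",
           json.dumps({"time": 1583020800.0,
                       "values": {"energy": 1234567.0}}))

cached = client.get(f"{meter_id}_{day}_first")
print(json.loads(cached).get('values').get('energy'))  # -> 1234567.0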
Example 15
    def write_readings(self, session, end):
        """ Get all readings for all meters from one the beginning of the BAFA support
        year until now with one-week interval (this is the finest granularity we get for one
        year back in time, cf. https://api.discovergy.com/docs/) and write them
        to the redis database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        :param int end: end of interval in the format required by the
        discovergy API
        """

        for meter_id in get_all_meter_ids(session):

            try:
                readings = self.d.get_readings(meter_id,
                                               calc_support_year_start(), end,
                                               'one_week')

                if readings == []:
                    message = 'No readings available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                for reading in readings:
                    adjusted_reading = check_and_nullify_power_value(
                        reading, meter_id)
                    timestamp = adjusted_reading['time']

                    # Convert unix epoch time in milliseconds to UTC format
                    new_timestamp = datetime.utcfromtimestamp(timestamp/1000).\
                        strftime('%Y-%m-%d %H:%M:%S')

                    key = meter_id + '_' + str(new_timestamp)

                    # Write adjusted reading to redis database as key-value-pair
                    # The unique key consists of the meter id (16 chars), the
                    # separator '_' and the UTC timestamp (19 chars)
                    data = dict(type='reading',
                                values=adjusted_reading['values'])
                    self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
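The Discovergy API reports reading times as unix epoch milliseconds; the key timestamp conversion above boils down to this standalone snippet (the value is hypothetical):

from datetime import datetime

timestamp_ms = 1583056800000  # hypothetical reading time in epoch milliseconds
new_timestamp = datetime.utcfromtimestamp(timestamp_ms / 1000).\
    strftime('%Y-%m-%d %H:%M:%S')
print(new_timestamp)  # -> 2020-03-01 10:00:00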
Example 16
    def write_energy_consumption(self, session):
        """ Get readings for all meters at start and end dates of
        previous and ongoing terms and write them to the redis database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        for meter_id in get_all_meter_ids(session):

            try:
                for timestamp in calc_term_boundaries():
                    # 23:59:59 after the term boundary, i.e. the end of that day
                    end_of_day = round(
                        (datetime.utcfromtimestamp(timestamp / 1000) +
                         timedelta(hours=23, minutes=59,
                                   seconds=59)).timestamp() * 1000)

                    readings = self.d.get_readings(meter_id, timestamp,
                                                   end_of_day, 'one_hour')

                    if readings == []:
                        message = 'No readings available for metering id {}'.format(
                            meter_id)
                        logger.info(message)
                        continue

                    for reading in readings:
                        adjusted_reading = check_and_nullify_power_value(
                            reading, meter_id)
                        timestamp = adjusted_reading['time']

                        # Convert unix epoch time in milliseconds to UTC format
                        new_timestamp = datetime.utcfromtimestamp(
                            timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S')

                        key = meter_id + '_' + str(new_timestamp)

                        # Write adjusted reading to redis database as key-value-pair
                        data = dict(type='reading',
                                    values=adjusted_reading['values'])
                        self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
Example 17
    def write_last_disaggregations(self, session):
        """ Get the last disaggregation values for all meters and write them to the redis
        database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        two_days_back = calc_two_days_back()
        for meter_id in get_all_meter_ids(session):

            try:
                disaggregation = self.d.get_disaggregation(
                    meter_id, two_days_back, calc_end())

                if disaggregation in ({}, []):
                    message = 'No disaggregation available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                timestamps = sorted(disaggregation.keys())
                if len(timestamps) > 0:
                    timestamp = timestamps[-1]

                    # Convert unix epoch time in milliseconds to UTC format
                    new_timestamp = datetime.utcfromtimestamp(int(timestamp)/1000).\
                        strftime('%Y-%m-%d %H:%M:%S')

                    key = meter_id + '_' + str(new_timestamp)

                    # Write disaggregation to redis database as key-value-pair
                    # The unique key consists of meter id (16 chars), separator '_' and UTC
                    # timestamp
                    data = dict(type='disaggregation',
                                values=disaggregation[timestamp])

                    self.redis_client.set(meter_id + '_last_disaggregation',
                                          json.dumps(data))
                    self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
Example 18
    def write_disaggregations(self, session, end):
        """ Get all disaggregation values for all meters from one week back
        until now. This is the earliest data we get, otherwise you'll end up
        with a '400 Bad Request: Duration of the data cannot be larger than 1
        week. Please try for a smaller duration.' If one week back lies before
        the current BAFA support year start, start with that value instead.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        :param int end: end of interval in the format required by the
        discovergy API
        """

        for meter_id in get_all_meter_ids(session):

            try:

                disaggregation = self.d.get_disaggregation(
                    meter_id, calc_support_week_start(), end)

                if disaggregation == {}:
                    message = 'No disaggregation available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                for timestamp in disaggregation:

                    # Convert unix epoch time in milliseconds to UTC format
                    new_timestamp = datetime.utcfromtimestamp(
                        int(timestamp) / 1000).strftime('%Y-%m-%d %H:%M:%S')
                    key = meter_id + '_' + str(new_timestamp)

                    # Write disaggregation to redis database as key-value-pair
                    # The unique key consists of meter id, separator '_' and UTC timestamp
                    data = dict(type='disaggregation',
                                values=disaggregation[timestamp])

                    self.redis_client.set(key, json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
Example 19
    def write_last_readings(self, session):
        """ Get the last reading for all meters and write them to the redis
        database.
        :param sqlalchemy.orm.scoping.scoped_session session: the database session
        """

        for meter_id in get_all_meter_ids(session):

            try:
                reading = self.d.get_last_reading(meter_id)
                if reading == {}:
                    message = 'No last reading available for metering id {}'.format(
                        meter_id)
                    logger.info(message)
                    continue

                adjusted_reading = check_and_nullify_power_value(
                    reading, meter_id)
                reading_timestamp = str(
                    datetime.utcfromtimestamp(adjusted_reading['time'] /
                                              1000).strftime('%F %T'))
                key = meter_id + '_' + reading_timestamp
                date_key = datetime.utcnow().strftime('%Y-%m-%d')
                # Write reading to redis database as key-value pair
                # The unique key consists of the meter id, the separator '_'
                # and the UTC timestamp
                data = dict(type='reading', values=adjusted_reading['values'])
                self.redis_client.set(key, json.dumps(data))
                self.redis_client.set(meter_id + '_last', json.dumps(data))
                data["time"] = reading_timestamp
                self.redis_client.set(meter_id + '_' + date_key + '_last',
                                      json.dumps(data))

                if self.redis_client.get(meter_id + '_' + date_key +
                                         '_first') is None:
                    self.redis_client.set(meter_id + '_' + date_key + '_first',
                                          json.dumps(data))

            except Exception as e:
                message = exception_message(e)
                logger.error(message)
Example 20
def get_moving_average_annualized(meter_id):
    """ Retrieve the last annualized moving average for the given meter id from the SQLite database.
    :param str meter_id: the user's meter id
    :returns: the last per capita consumption mapped to its timestamp or None if there are no
    values
    :rtype: dict or type(None) if there are no values
    """

    try:
        result = db.session.query(PerCapitaConsumption.date,
                                  PerCapitaConsumption.moving_average_annualized).\
            filter_by(meter_id=meter_id).order_by(PerCapitaConsumption.date.desc()).first()

        timestamp = result[0].strftime('%Y-%m-%d %H:%M:%S')
        moving_average_annualized = result[1]
        return {timestamp: moving_average_annualized}

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return None
Example 21
def get_group_members(user_id):
    """ Get the parameters from the database to create a group picture list for
    the given user.
    :param int user_id: the user's id
    :returns: the group members' ids and profile pictures
    :rtype: list or type(None) in case of error
    """

    try:
        target_user = User.query.filter_by(id=user_id).first()
        target_group = Group.query.filter_by(id=target_user.group_id).first()
        group_users = User.query.filter_by(group_id=target_group.id).all()
        group_members = []
        for group_user in group_users:
            group_members.append(
                dict(id=group_user.id, avatar=group_user.avatar))
        return group_members

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
        return None
Example 22
def write_per_capita_consumption(dt, session):
    """ Write the per capita consumption for each user to the SQLite database.
    If for one user there are no yesterday's values in the database, write the
    base values for that user.
    If today's entry already exists for a user, skip writing that entry.
    :param datetime dt: the date to write the values for
    :param sqlalchemy.orm.scoping.scoped_session session: the database session
    """

    for user in get_all_users(session):

        try:
            # Check if entry exists
            pcc_today = session.query(PerCapitaConsumption).filter_by(
                date=dt, meter_id=user.meter_id).first()
            # Create today's entry if it does not exist
            if not pcc_today:
                dataset = calc_per_capita_consumption(user.meter_id,
                                                      user.inhabitants, dt,
                                                      session)
                # If there are no values from the previous day for this user,
                # define the base values
                if dataset is None:
                    dataset = define_base_values(user.inhabitants, dt)

                # Create PerCapitaConsumption instance
                session.add(
                    PerCapitaConsumption(
                        dt, user.meter_id, dataset['consumption'],
                        dataset['consumption_cumulated'],
                        dataset['inhabitants'],
                        dataset['per_capita_consumption'],
                        dataset['per_capita_consumption_cumulated'],
                        dataset['days'], dataset['moving_average'],
                        dataset['moving_average_annualized']))
        except Exception as e:
            message = exception_message(e)
            logger.error(message)

    session.commit()
Example 23
def get_entry_date(redis_client, meter_id, key, entry_type):
    """ Return creation date of an entry in the redis database.
    :param str meter_id: the meter id the entry belongs to
    :param str key: the entry's key
    :param str entry_type: the entry's type (reading or disaggregation)
    """
    if (key[len(meter_id) + 1:].endswith("last")
            or key[len(meter_id) + 1:].endswith("first")
            or key[len(meter_id) + 1:].endswith("last_disaggregation")
            or key.startswith('average_power')):
        return None, None

    try:
        data = json.loads(redis_client.get(key))

    except Exception as e:
        message = exception_message(e)
        logger.error(message)
    else:
        if data is not None and data.get('type') == entry_type:
            entry_date = parser.parse(key[len(meter_id) + 1:])
            return entry_date, data

    return None, None
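The entry date is recovered purely from the key suffix. A minimal sketch of that parsing step (placeholder meter id; dateutil's parser as imported in the example):

from dateutil import parser

# Keys have the form '<meter_id>_<UTC timestamp>' (see the writers above)
meter_id = '0123456789abcdef'  # placeholder, 16 chars
key = meter_id + '_2020-03-01 11:00:00'

entry_date = parser.parse(key[len(meter_id) + 1:])
print(entry_date)  # -> 2020-03-01 11:00:00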
Example 24
    def populate_redis(self):
        """ Populate the redis database with all discovergy data from the past. """

        # pylint: disable=global-statement
        global last_data_flush
        last_data_flush = datetime.utcnow()
        end = calc_end()

        # Connect to sqlite database
        session = create_session()

        try:
            # Authenticate against the discovergy backend
            self.login()

        except Exception as e:
            message = exception_message(e)
            logger.error(message)
            logger.error('Wrong or missing discovergy credentials.')
            return

        self.write_readings(session, end)
        self.write_energy_consumption(session)
        self.write_disaggregations(session, end)