Example #1
    async def get_meters_list(self,
                              profile_id,
                              meter_type,
                              is_superuser=False):
        slugs = self.get_slugs()

        field_names = self.meter_fields().values()

        query = """SELECT /*<select_names>*/*/*</select_names>*/ FROM meters_meter as meter
         INNER JOIN users_profile_meters as profile_meters 
         ON meter.id = profile_meters.meter_id
         WHERE (%(empty_slugs)s OR meter.name IN (%(slugs)s)) 
         AND %(all_users)s OR (profile_id = %(profile_id)s)
         AND (meter.type = %(type)s);""".replace(
            '/*<select_names>*/*/*</select_names>*/', ','.join(field_names))

        async with database.openmetrics(self.request.app) as connection:
            async with connection.cursor() as cursor:
                await cursor.execute(
                    query, {
                        'profile_id': profile_id,
                        'all_users': is_superuser,
                        'empty_slugs': not slugs,
                        'slugs': ",".join(slugs) if slugs else ('', ),
                        'type': meter_type
                    })
                async for row in cursor:
                    response_item = dict(zip(self.meter_fields(), row))
                    yield response_item
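
The method above relies on self.get_slugs() and self.meter_fields(), which are not shown in the listing. A minimal sketch of the shape they would need, assuming meter_fields() maps response keys to DB column names (the concrete columns here are illustrative guesses, not the original helpers):

    def meter_fields(self):
        # Hypothetical mapping: response key -> column used in the SELECT list.
        return {
            'name': 'meter.name',
            'reference': 'meter.mpan',
            'description': 'meter.location',
            'type': 'meter.type',
        }

    def get_slugs(self):
        # Hypothetical: meter names requested as a comma-separated query arg.
        slug_param = self.request.query.get('slugs', '')
        return [slug for slug in slug_param.split(',') if slug]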
Example #2
    async def get_profile_id(self, username):
        query = """SELECT auth_user.id FROM auth_user WHERE auth_user.username = %(username)s;"""

        async with database.openmetrics(self.request.app) as connection:
            async with connection.cursor() as cursor:
                await cursor.execute(query, {
                    'username': username,
                })
                row = await cursor.fetchone()
                # fetchone() returns None when the username does not exist
                return row[0] if row else None
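
A possible way to combine the two methods above (assumed usage, not part of the original source; the handler object and the 'electricity' meter type are illustrative):

async def list_profile_meters(handler, username):
    profile_id = await handler.get_profile_id(username)
    meters = []
    async for meter in handler.get_meters_list(profile_id,
                                               meter_type='electricity'):
        meters.append(meter)
    return meters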
Example #3
async def wifi_iter(app, username, slugs, fields, date_from, date_to):
    # Map the requested fields to their column names in the DB
    field_names = wifi_field_names()
    select_names = [field_names[field] for field in fields]

    select_query = f"""
      SELECT /*<select_names>*/*/*</select_names>*/
      FROM readings_wifireading as wifi_reading
        INNER JOIN meters_meter as meter
          ON wifi_reading.meter_id = meter.id
      WHERE meter.id IN (
        SELECT profile_meters.meter_id
        FROM users_profile_meters as profile_meters
          INNER JOIN meters_meter as meter
          ON meter.id = profile_meters.meter_id
        WHERE (
            %(empty_slugs)s OR -- TRUE if no slugs 
            meter.name IN %(slugs)s
          )
          AND (
            %(all_users)s OR -- True if superuser
            profile_meters.profile_id = (SELECT auth_user.id FROM auth_user WHERE auth_user.username = %(username)s)
          )
      )
      AND %(date_from)s <= wifi_reading.datetime AND wifi_reading.datetime <= %(date_to)s
      ORDER BY wifi_reading.datetime
    ;
    """.replace('/*<select_names>*/*/*</select_names>*/',
                ','.join(select_names))

    async with database.openmetrics(app) as connection:
        async with connection.cursor() as cursor:
            await cursor.execute(
                select_query,
                {
                    'empty_slugs': not slugs,
                    # avoid sql syntax error: 'meter.name IN ()'
                    #                                     ^^^^^
                    'slugs': tuple(slugs) if slugs else ('', ),
                    'all_users': username is None,
                    'username': '' if username is None else username,
                    'date_from': date_from,
                    'date_to': date_to,
                })
            async for row in cursor:
                response_item = dict(zip(fields, row))
                if 'date' in response_item:
                    response_item['date'] = response_item['date'].strftime(
                        "%Y-%m-%d")
                yield response_item
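
One way the generator above might be consumed, mirroring the JSON handlers elsewhere in the listing (assumed usage; the query parameter names and requested field keys are illustrative and depend on wifi_field_names(), which is not shown):

async def wifi_json(request):
    rows = []
    async for item in wifi_iter(request.app,
                                username=request['username'],
                                slugs=[],
                                fields=['name', 'date'],
                                date_from=request.query['fromdate'],
                                date_to=request.query['todate']):
        rows.append(item)
    return web.json_response({'data': rows})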
Example #4
async def emc1sp_csv(request, request_args):
    field_names = (
        'name',
        'reference',
        'description',
        'date',
        'domestic_load_kwh',
        'grid_energy_utilised_kwh',
        'grid_export_kwh',
        'solar_storage_utilised_kwh',
        'generation_kwh',
        'battery_charge_kwh',
        'solar_generation_kwh',
        'gas_total_m3',
    )

    csv_response = io.StringIO()
    writer = csv.DictWriter(csv_response, field_names)
    writer.writeheader()

    from_date = request_args['fd']
    to_date = request_args['td']
    remote_name = await user_keys.get_remote_username(
        local_db=database.local_storage(request.app),
        username=request_args['usr'],
        api_key=request_args['key'])

    async for response_row in emc1sp_query_iter(
            database.openmetrics(request.app), remote_name, from_date,
            to_date):
        writer.writerow(response_row)

    current_time = datetime.datetime.now().strftime('%Y%m%d%H%M')
    filename = f"{request_args['usr']}_{current_time}_csvexport.csv"

    response = web.StreamResponse()
    response.headers[
        'CONTENT-DISPOSITION'] = f'attachment; filename="{filename}"'
    await response.prepare(request)
    try:
        csv_response.seek(0)
        while True:
            file_part = csv_response.read(4096 * 4096)
            if len(file_part) <= 0:
                break
            await response.write(file_part.encode())
    finally:
        await response.write_eof()
    return response
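
Both emc1sp handlers consume emc1sp_query_iter(), which is not part of the listing. Judging from how it is called, it would be an async generator shaped roughly like this; the column list and SQL text below are placeholders, not the real implementation:

async def emc1sp_query_iter(connection_ctx, username, from_date, to_date):
    field_names = ('name', 'reference', 'description', 'date')  # assumed subset
    query = "SELECT ... ;"  # the real query is not shown in the listing
    async with connection_ctx as connection:
        async with connection.cursor() as cursor:
            await cursor.execute(query, {'username': username,
                                         'fromdate': from_date,
                                         'todate': to_date})
            async for row in cursor:
                item = dict(zip(field_names, row))
                if isinstance(item.get('date'), datetime.date):
                    item['date'] = item['date'].strftime('%Y-%m-%d')
                yield item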
Example #5
async def emc1sp_json(request):
    body = await request.post()

    if "fromdate" not in body:
        return web.json_response(
            {"error": "Body must contains 'fromdate' (DATE) field"})
    from_date = body['fromdate']

    if "todate" not in body:
        return web.json_response(
            {"error": "Body must contains 'todate' (DATE) field"})
    to_date = body['todate']

    response = []
    async for item in emc1sp_query_iter(database.openmetrics(request.app),
                                        request['username'], from_date,
                                        to_date):
        response.append(item)

    return web.json_response({'data': response})
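
emc1sp_json() reads request['username'], so some authentication middleware presumably populates it before the handler runs; route wiring is likewise not shown. A minimal, assumed sketch of both (the header name and path are illustrative):

@web.middleware
async def username_middleware(request, handler):
    # Assumed: the real project resolves the user during authentication.
    request['username'] = request.headers.get('X-Username', '')
    return await handler(request)

def setup_emc1sp_routes(app):
    app.router.add_post('/emc1sp/json', emc1sp_json)  # illustrative path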
Example #6
async def readings_csv(request, request_args):
    rename_args(
        request_args, {
            'ir': 'import_reads',
            'er': 'export_reads',
            'fd': 'fromdate',
            'td': 'todate',
            'usr': 'username',
            'key': 'api_key'
        })

    remote_name = await user_keys.get_remote_username(
        local_db=database.local_storage(request.app),
        username=request_args['username'],
        api_key=request_args['api_key'])

    meter_alias = 'm'
    reading_alias = 'r'

    # always included in each request
    both_names = [
        f'{meter_alias}.name', f'{meter_alias}.mpan',
        f'{meter_alias}.location', f'{reading_alias}.date'
    ]

    # IMPORT READINGS
    import_names = [
        reading_alias + '.' + name for name in (
            'import_total_wh',
            'import_total',
            'import0030',
            'import0100',
            'import0130',
            'import0200',
            'import0230',
            'import0300',
            'import0330',
            'import0400',
            'import0430',
            'import0500',
            'import0530',
            'import0600',
            'import0630',
            'import0700',
            'import0730',
            'import0800',
            'import0830',
            'import0900',
            'import0930',
            'import1000',
            'import1030',
            'import1100',
            'import1130',
            'import1200',
            'import1230',
            'import1300',
            'import1330',
            'import1400',
            'import1430',
            'import1500',
            'import1530',
            'import1600',
            'import1630',
            'import1700',
            'import1730',
            'import1800',
            'import1830',
            'import1900',
            'import1930',
            'import2000',
            'import2030',
            'import2100',
            'import2130',
            'import2200',
            'import2230',
            'import2300',
            'import2330',
            'import0000',
        )
    ]

    # EXPORT READINGS
    export_names = [
        reading_alias + '.' + name
        for name in ('export_total_wh', 'export_total', 'export0030',
                     'export0100', 'export0130', 'export0200', 'export0230',
                     'export0300', 'export0330', 'export0400', 'export0430',
                     'export0500', 'export0530', 'export0600', 'export0630',
                     'export0700', 'export0730', 'export0800', 'export0830',
                     'export0900', 'export0930', 'export1000', 'export1030',
                     'export1100', 'export1130', 'export1200', 'export1230',
                     'export1300', 'export1330', 'export1400', 'export1430',
                     'export1500', 'export1530', 'export1600', 'export1630',
                     'export1700', 'export1730', 'export1800', 'export1830',
                     'export1900', 'export1930', 'export2000', 'export2030',
                     'export2100', 'export2130', 'export2200', 'export2230',
                     'export2300', 'export2330', 'export0000')
    ]

    # Rename table column name here
    rename_dict = {
        f'{meter_alias}.name': 'name',
        f'{meter_alias}.mpan': 'reference',
        f'{meter_alias}.location': 'description',
        f'{reading_alias}.import_total': 'day_total_wh',
        f'{reading_alias}.export_total': 'export_day_total_wh'
    }

    select_names = []
    select_names.extend(both_names)

    if request_args['import_reads']:
        select_names.extend(import_names)

    if request_args['export_reads']:
        select_names.extend(export_names)

    select_query = "SELECT " + f"""
        { ','.join(select_names) }
        FROM readings_reading AS {reading_alias}
        INNER JOIN meters_meter AS {meter_alias} ON {meter_alias}.id = {reading_alias}.meter_id
        WHERE {reading_alias}.meter_id IN
        (SELECT meter_id FROM users_profile_meters WHERE profile_id =
        (SELECT id FROM auth_user WHERE username = %(username)s)) 
        AND date >= %(fromdate)s AND date <= %(todate)s; 
    """

    parameters = {
        'fromdate': request_args["fromdate"],
        'todate': request_args['todate'],
        'username': remote_name
    }

    csv_response = io.StringIO()
    writer = csv.DictWriter(csv_response, select_names)
    writer.writerow({
        selected_column: (rename_dict.get(selected_column)
                          or selected_column.replace(f'{reading_alias}.', ''))
        for selected_column in select_names
    })

    async with database.openmetrics(request.app) as conn:
        async with conn.cursor() as cursor:
            await cursor.execute(select_query, parameters)
            async for selected_row in cursor:
                response_row = {}
                for selected_column, value in zip(select_names, selected_row):
                    if isinstance(value, (datetime.date, datetime.datetime)):
                        value = value.strftime("%Y-%m-%d")
                    response_row[selected_column] = value
                writer.writerow(response_row)

    current_time = datetime.datetime.now().strftime('%Y%m%d%H%M')
    filename = f"{request_args['username']}_{current_time}_csvexport.csv"

    response = web.StreamResponse()
    response.headers[
        'CONTENT-DISPOSITION'] = f'attachment; filename="{filename}"'
    await response.prepare(request)
    try:
        csv_response.seek(0)
        while True:
            file_part = csv_response.read(4096 * 4096)
            if len(file_part) <= 0:
                break
            await response.write(file_part.encode())
    finally:
        await response.write_eof()
    return response
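
rename_args() itself is not included in the listing; given how it is called at the top of readings_csv(), a minimal sketch (assuming request_args is a plain mutable dict) could be:

def rename_args(request_args, mapping):
    # Move values from the short query-string keys to the long names used in
    # the rest of the handler (e.g. 'usr' -> 'username').
    for short_key, long_key in mapping.items():
        if short_key in request_args:
            request_args[long_key] = request_args.pop(short_key)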