Example 1
def prepare_limit_skip_payload(request, _dict):
    """ limit skip clause validation

    Args:
        request: request query params
        _dict: main payload dict
    Returns:
        chain payload dict
    """
    limit = __DEFAULT_LIMIT
    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    offset = __DEFAULT_OFFSET
    if 'skip' in request.query and request.query['skip'] != '':
        try:
            offset = int(request.query['skip'])
            if offset < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Skip/Offset must be a positive integer")

    payload = PayloadBuilder(_dict).LIMIT(limit)
    if offset:
        # Chain SKIP onto the same builder so the LIMIT clause is preserved
        payload = payload.SKIP(offset)

    return payload.chain_payload()
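
A minimal usage sketch, assuming an aiohttp-style request that exposes its query-string parameters as a dict-like query attribute; the fake_request stand-in below is illustrative and not part of the FogLAMP API:

from types import SimpleNamespace

# Illustrative stand-in for an aiohttp request carrying ?limit=5&skip=10
fake_request = SimpleNamespace(query={'limit': '5', 'skip': '10'})

chain = prepare_limit_skip_payload(fake_request, {})
# Assumed result: the input dict extended with the validated clauses,
# roughly {'limit': 5, 'skip': 10}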
Example 2
        @classmethod
        async def filter(cls, **kwargs):
            user_id = kwargs['uid']
            user_name = kwargs['username']

            q = PayloadBuilder().SELECT("id", "uname", "role_id").WHERE(['enabled', '=', 't'])

            if user_id is not None:
                q = q.AND_WHERE(['id', '=', user_id])

            if user_name is not None:
                q = q.AND_WHERE(['uname', '=', user_name])

            storage_client = connect.get_storage_async()
            q_payload = PayloadBuilder(q.chain_payload()).payload()
            result = await storage_client.query_tbl_with_payload('users', q_payload)
            return result['rows']
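
A hedged sketch of calling this classmethod from other async code; the User.Objects container name is an assumption, and only the keyword arguments come from the code above:

import asyncio

async def demo():
    # Both keyword arguments are expected; pass None to skip that filter
    rows = await User.Objects.filter(uid=None, username='admin')
    # Each row should carry the selected columns, e.g.
    # {'id': 2, 'uname': 'admin', 'role_id': 1}
    return rows

# asyncio.get_event_loop().run_until_complete(demo())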
Example 3
async def asset(request):
    """ Browse a particular asset for which we have recorded readings and
    return a readings with timestamps for the asset. The number of readings
    return is defaulted to a small number (20), this may be changed by supplying
    the query parameter ?limit=xx&skip=xx

    Return the result of the query
    SELECT TO_CHAR(user_ts, '__TIMESTAMP_FMT') as "timestamp", (reading)::json FROM readings WHERE asset_code = 'asset_code' ORDER BY user_ts DESC LIMIT 20 OFFSET 0
    """
    asset_code = request.match_info.get('asset_code', '')

    # TODO: FOGL-637, 640
    timestamp = {
        "column": "user_ts",
        "format": __TIMESTAMP_FMT,
        "alias": "timestamp"
    }
    d = OrderedDict()
    d['return'] = [timestamp, "reading"]
    _where = PayloadBuilder().WHERE(["asset_code", "=",
                                     asset_code]).chain_payload()
    _and_where = where_clause(request, _where)
    d.update(_and_where)

    # Add the order by and limit clause
    limit = int(request.query.get('limit')) if 'limit' in request.query else __DEFAULT_LIMIT
    offset = int(request.query.get('skip')) if 'skip' in request.query else __DEFAULT_OFFSET
    _sort_limit_skip_payload = PayloadBuilder(d).ORDER_BY(["user_ts", "desc"]).LIMIT(limit)
    if offset:
        # Chain SKIP onto the same builder so ORDER BY and LIMIT are preserved
        _sort_limit_skip_payload = _sort_limit_skip_payload.SKIP(offset)

    d.update(_sort_limit_skip_payload.chain_payload())

    payload = json.dumps(d)
    _storage = connect.get_storage()
    results = _storage.query_tbl_with_payload('readings', payload)

    if 'rows' in results:
        return web.json_response(results['rows'])
    else:
        raise web.HTTPBadRequest(reason=results['message'])
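
For reference, a sketch of the JSON this handler would send to the storage service for a request such as GET /foglamp/asset/pump1?limit=2; the key shapes mirror the payload construction above, while the asset name, timestamp format and the assumed ORDER_BY shape are illustrative:

import json
from collections import OrderedDict

d = OrderedDict()
d['return'] = [{"column": "user_ts", "format": "YYYY-MM-DD HH24:MI:SS.MS",
                "alias": "timestamp"}, "reading"]
d['where'] = {"column": "asset_code", "condition": "=", "value": "pump1"}
d['sort'] = {"column": "user_ts", "direction": "desc"}  # assumed ORDER_BY shape
d['limit'] = 2
print(json.dumps(d))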
Example 4
async def get_asset_tracker_events(request):
    """
    Args:
        request: aiohttp request; optional query parameters: asset, event, service

    Returns:
            asset tracker records

    :Example:
            curl -X GET http://localhost:8081/foglamp/track
            curl -X GET http://localhost:8081/foglamp/track?asset=XXX
            curl -X GET http://localhost:8081/foglamp/track?event=XXX
            curl -X GET http://localhost:8081/foglamp/track?service=XXX
    """
    payload = PayloadBuilder().SELECT("asset", "event", "service", "foglamp", "plugin", "ts") \
        .ALIAS("return", ("ts", 'timestamp')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS")) \
        .WHERE(['1', '=', 1])
    if 'asset' in request.query and request.query['asset'] != '':
        asset = urllib.parse.unquote(request.query['asset'])
        payload.AND_WHERE(['asset', '=', asset])
    if 'event' in request.query and request.query['event'] != '':
        event = request.query['event']
        payload.AND_WHERE(['event', '=', event])
    if 'service' in request.query and request.query['service'] != '':
        service = urllib.parse.unquote(request.query['service'])
        payload.AND_WHERE(['service', '=', service])

    storage_client = connect.get_storage_async()
    payload = PayloadBuilder(payload.chain_payload())
    try:
        result = await storage_client.query_tbl_with_payload(
            'asset_tracker', payload.payload())
        response = result['rows']
    except KeyError:
        raise web.HTTPBadRequest(reason=result['message'])
    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({'track': response})
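
The dummy WHERE ['1', '=', 1] clause lets each optional filter be chained unconditionally with AND_WHERE. A runnable sketch of the nesting this is expected to produce; the nested "and" shape is an assumption about FogLAMP's payload conventions:

import json

# Hand-built equivalent of WHERE(['1', '=', 1]).AND_WHERE(['asset', '=', 'pump1'])
where = {"column": "1", "condition": "=", "value": 1,
         "and": {"column": "asset", "condition": "=", "value": "pump1"}}
print(json.dumps({"where": where}))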
Example 5
async def get_audit_entries(request):
    """ Returns a list of audit trail entries sorted with most recent first and total count
        (including the criteria search if applied)

    :Example:

        curl -X GET http://localhost:8081/foglamp/audit

        curl -X GET http://localhost:8081/foglamp/audit?limit=5

        curl -X GET http://localhost:8081/foglamp/audit?limit=5&skip=3

        curl -X GET http://localhost:8081/foglamp/audit?skip=2

        curl -X GET http://localhost:8081/foglamp/audit?source=PURGE

        curl -X GET http://localhost:8081/foglamp/audit?severity=FAILURE

        curl -X GET http://localhost:8081/foglamp/audit?source=LOGGN&severity=INFORMATION&limit=10
    """

    limit = __DEFAULT_LIMIT
    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    offset = __DEFAULT_OFFSET
    if 'skip' in request.query and request.query['skip'] != '':
        try:
            offset = int(request.query['skip'])
            if offset < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(
                reason="Skip/Offset must be a positive integer")

    source = None
    if 'source' in request.query and request.query['source'] != '':
        try:
            source = request.query.get('source')
            # SELECT * FROM log_codes
            storage_client = connect.get_storage_async()
            result = await storage_client.query_tbl("log_codes")
            log_codes = [key['code'] for key in result['rows']]
            if source not in log_codes:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(
                reason="{} is not a valid source".format(source))

    severity = None
    if 'severity' in request.query and request.query['severity'] != '':
        try:
            severity = Severity[request.query['severity'].upper()].value
        except KeyError as ex:
            raise web.HTTPBadRequest(
                reason="{} is not a valid severity".format(ex))

    try:
        # HACK: Start from a dummy WHERE '1' = 1 clause so that each optional
        # filter below can simply be chained with AND_WHERE, avoiding an
        # exponential explosion of if statements
        payload = PayloadBuilder().SELECT("code", "level", "log", "ts")\
            .ALIAS("return", ("ts", 'timestamp')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS"))\
            .WHERE(['1', '=', 1])

        if source is not None:
            payload.AND_WHERE(['code', '=', source])

        if severity is not None:
            payload.AND_WHERE(['level', '=', severity])

        _and_where_payload = payload.chain_payload()
        # SELECT *, count(*) OVER() FROM log - No support yet from storage layer
        # TODO: FOGL-740, FOGL-663 once ^^ resolved we should replace below storage call for getting total rows
        _and_where_copy = copy.deepcopy(_and_where_payload)
        total_count_payload = PayloadBuilder(_and_where_copy).AGGREGATE(["count", "*"])\
            .ALIAS("aggregate", ("*", "count", "count")).payload()

        # SELECT count (*) FROM log <_and_where_payload>
        storage_client = connect.get_storage_async()
        result = await storage_client.query_tbl_with_payload(
            'log', total_count_payload)
        total_count = result['rows'][0]['count']

        payload = PayloadBuilder(_and_where_payload)
        payload.ORDER_BY(['ts', 'desc'])
        payload.LIMIT(limit)

        if offset > 0:
            payload.OFFSET(offset)

        # SELECT * FROM log <payload.payload()>
        results = await storage_client.query_tbl_with_payload(
            'log', payload.payload())
        res = []
        for row in results['rows']:
            r = dict()
            r["details"] = row["log"]
            severity_level = int(row["level"])
            r["severity"] = Severity(
                severity_level).name if severity_level in (0, 1, 2,
                                                           4) else "UNKNOWN"
            r["source"] = row["code"]
            r["timestamp"] = row["timestamp"]

            res.append(r)

    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({'audit': res, 'totalCount': total_count})
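
The Severity enum is imported elsewhere in the module. A plausible sketch of its shape, inferred solely from the (0, 1, 2, 4) membership check above; the authoritative FogLAMP definition may differ:

from enum import IntEnum

class Severity(IntEnum):
    # Assumed values, not the authoritative FogLAMP definition
    SUCCESS = 0
    FAILURE = 1
    WARNING = 2
    INFORMATION = 4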
Example 6
async def get_audit_entries(request):
    """ Returns a list of audit trail entries sorted with most recent first and total count
        (including the criteria search if applied)

    :Example:

        curl -X GET http://localhost:8081/foglamp/audit

        curl -X GET http://localhost:8081/foglamp/audit?limit=5

        curl -X GET http://localhost:8081/foglamp/audit?limit=5&skip=3

        curl -X GET http://localhost:8081/foglamp/audit?skip=2

        curl -X GET http://localhost:8081/foglamp/audit?source=PURGE

        curl -X GET http://localhost:8081/foglamp/audit?severity=ERROR

        curl -X GET http://localhost:8081/foglamp/audit?source=LOGGN&severity=INFORMATION&limit=10
    """
    try:
        limit = request.query.get('limit', __DEFAULT_LIMIT)
        offset = request.query.get('skip', __DEFAULT_OFFSET)
        source = request.query.get('source')
        severity = request.query.get('severity')

        # HACK: Start from a dummy WHERE '1' = '1' clause so that each optional
        # filter below can simply be chained with AND_WHERE, avoiding an
        # exponential explosion of if statements
        payload = PayloadBuilder().WHERE(['1', '=', '1'])
        if source is not None and source != "":
            payload.AND_WHERE(['code', '=', source])

        if severity is not None and severity != "":
            payload.AND_WHERE(['level', '=', Severity[severity].value])

        _and_where_payload = payload.chain_payload()
        # SELECT *, count(*) OVER() FROM log - No support yet from storage layer
        # TODO: FOGL-740, FOGL-663 once ^^ resolved we should replace below storage call for getting total rows
        # TODO: FOGL-643 - Aggregate with alias support needed to use payload builder
        aggregate = {"operation": "count", "column": "*", "alias": "count"}
        d = OrderedDict()
        d['aggregate'] = aggregate
        d.update(_and_where_payload)
        total_count_payload = json.dumps(d)

        # SELECT count (*) FROM log <_and_where_payload>
        storage_client = connect.get_storage()
        result = storage_client.query_tbl_with_payload('log', total_count_payload)
        total_count = result['rows'][0]['count']

        payload.ORDER_BY(['ts', 'desc'])
        payload.LIMIT(int(limit))

        if offset != '' and int(offset) > 0:
            payload.OFFSET(int(offset))

        # SELECT * FROM log <payload.payload()>
        results = storage_client.query_tbl_with_payload('log', payload.payload())
        res = []
        for row in results['rows']:
            r = dict()
            r["details"] = row["log"]
            # TODO: FOGL-695 fix PURGE logging level
            severity_level = int(row["level"])
            r["severity"] = Severity(severity_level).name if severity_level in range(1, 5) else "UNKNOWN"
            r["source"] = row["code"]
            r["timestamp"] = row["ts"]

            res.append(r)

        return web.json_response({'audit': res, 'total_count': total_count})

    except (ValueError, KeyError) as ex:
        raise web.HTTPBadRequest(reason=str(ex))
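
Because aggregate-with-alias support in PayloadBuilder was still pending (FOGL-643), the count payload above is assembled by hand. A self-contained sketch of the JSON that assembly yields, assuming the aggregate shape shown in the code:

import json
from collections import OrderedDict

d = OrderedDict()
d['aggregate'] = {"operation": "count", "column": "*", "alias": "count"}
d['where'] = {"column": "1", "condition": "=", "value": "1"}
print(json.dumps(d))
# {"aggregate": {"operation": "count", "column": "*", "alias": "count"},
#  "where": {"column": "1", "condition": "=", "value": "1"}}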
Example 7
async def asset_averages(request):
    """ Browse all the assets for which we have recorded readings and
    return a series of averages per second, minute or hour.

    The readings averaged can also be time limited by use of the query
    parameter seconds=sss. This defines a number of seconds that the reading
    must have been processed in. Older readings than this will not be summarised.

    The readings averaged can also be time limited by use of the query
    parameter minutes=mmm. This defines a number of minutes that the reading
    must have been processed in. Older readings than this will not be summarised.

    The readings averaged can also be time limited by use of the query
    parameter hours=hh. This defines a number of hours that the reading
    must have been processed in. Older readings than this will not be summarised.

    Only one of hours, minutes or seconds should be supplied

    The amount of time covered by each returned value is set using the
    query parameter group. This may be set to seconds, minutes or hours

    Return the result of the query
    SELECT user_ts, AVG((reading->>'reading')::float) FROM readings WHERE asset_code = 'asset_code' GROUP BY user_ts
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')

    ts_restraint = 'YYYY-MM-DD HH24:MI:SS'
    if 'group' in request.query:
        if request.query['group'] == 'seconds':
            ts_restraint = 'YYYY-MM-DD HH24:MI:SS'
        elif request.query['group'] == 'minutes':
            ts_restraint = 'YYYY-MM-DD HH24:MI'
        elif request.query['group'] == 'hours':
            ts_restraint = 'YYYY-MM-DD HH24'

    # TODO: FOGL-637, 640
    timestamp = {
        "column": "user_ts",
        "format": ts_restraint,
        "alias": "timestamp"
    }
    prop_dict = {"column": "reading", "properties": reading}
    min_dict = {"operation": "min", "json": prop_dict, "alias": "min"}
    max_dict = {"operation": "max", "json": prop_dict, "alias": "max"}
    avg_dict = {"operation": "avg", "json": prop_dict, "alias": "average"}

    d = OrderedDict()
    d['aggregate'] = [min_dict, max_dict, avg_dict]
    _where = PayloadBuilder().WHERE(["asset_code", "=",
                                     asset_code]).chain_payload()
    _and_where = where_clause(request, _where)
    d.update(_and_where)

    # Add the group by and limit clause
    d['group'] = timestamp
    limit = int(request.query.get('limit')) if 'limit' in request.query else __DEFAULT_LIMIT
    _limit_payload = PayloadBuilder(d).LIMIT(limit)
    d.update(_limit_payload.chain_payload())

    payload = json.dumps(d)
    _storage = connect.get_storage()
    results = _storage.query_tbl_with_payload('readings', payload)

    if 'rows' in results:
        return web.json_response(results['rows'])
    else:
        raise web.HTTPBadRequest(reason=results['message'])
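
A sketch of the final aggregate payload for a request grouped by minutes; the asset and reading names are illustrative, the key shapes are taken from the construction above, and the optional seconds/minutes/hours restriction added by where_clause is omitted:

import json
from collections import OrderedDict

prop = {"column": "reading", "properties": "temperature"}
d = OrderedDict()
d['aggregate'] = [{"operation": "min", "json": prop, "alias": "min"},
                  {"operation": "max", "json": prop, "alias": "max"},
                  {"operation": "avg", "json": prop, "alias": "average"}]
d['where'] = {"column": "asset_code", "condition": "=", "value": "pump1"}
d['group'] = {"column": "user_ts", "format": "YYYY-MM-DD HH24:MI", "alias": "timestamp"}
d['limit'] = 20
print(json.dumps(d))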
Example 8
async def asset_reading(request):
    """ Browse a particular sensor value of a particular asset for which we have recorded readings and
    return the timestamp and reading value for that sensor. The number of rows returned
    is limited to a small number, this number may be altered by use of
    the query parameter limit=xxx&skip=xxx.

    The readings returned can also be time limited by use of the query
    parameter seconds=sss. This defines a number of seconds that the reading
    must have been processed in. Older readings than this will not be returned.

    The readings returned can also be time limited by use of the query
    parameter minutes=mmm. This defines a number of minutes that the reading
    must have been processed in. Older readings than this will not be returned.

    The readings returned can also be time limited by use of the query
    parameter hours=hh. This defines a number of hours that the reading
    must have been processed in. Older readings than this will not be returned.

    Only one of hours, minutes or seconds should be supplied

    Return the result of the query
    SELECT TO_CHAR(user_ts, '__TIMESTAMP_FMT') as "timestamp", reading->>'reading' FROM readings WHERE asset_code = 'asset_code' ORDER BY user_ts DESC LIMIT 20 OFFSET 0
    """
    asset_code = request.match_info.get('asset_code', '')
    reading = request.match_info.get('reading', '')

    # TODO: FOGL-637, 640
    timestamp = {
        "column": "user_ts",
        "format": __TIMESTAMP_FMT,
        "alias": "timestamp"
    }
    json_property = OrderedDict()
    json_property['json'] = {"column": "reading", "properties": reading}
    json_property['alias'] = reading

    d = OrderedDict()
    d['return'] = [timestamp, json_property]
    _where = PayloadBuilder().WHERE(["asset_code", "=",
                                     asset_code]).chain_payload()
    _and_where = where_clause(request, _where)
    d.update(_and_where)

    # Add the order by and limit clause
    limit = int(request.query.get('limit')) if 'limit' in request.query else __DEFAULT_LIMIT
    offset = int(request.query.get('skip')) if 'skip' in request.query else __DEFAULT_OFFSET
    _sort_limit_skip_payload = PayloadBuilder(d).ORDER_BY(["user_ts", "desc"]).LIMIT(limit)

    if offset:
        # Chain SKIP onto the same builder so ORDER BY and LIMIT are preserved
        _sort_limit_skip_payload = _sort_limit_skip_payload.SKIP(offset)

    d.update(_sort_limit_skip_payload.chain_payload())

    payload = json.dumps(d)
    _storage = connect.get_storage()
    results = _storage.query_tbl_with_payload('readings', payload)

    if 'rows' in results:
        return web.json_response(results['rows'])
    else:
        raise web.HTTPBadRequest(reason=results['message'])
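
The json_property entry pulls a single sensor value out of the reading JSON column. A sketch of the resulting 'return' clause and the row shape it should yield; the sensor name is illustrative and the row shape is an assumption:

import json
from collections import OrderedDict

json_property = OrderedDict()
json_property['json'] = {"column": "reading", "properties": "temperature"}
json_property['alias'] = "temperature"

d = OrderedDict()
d['return'] = [{"column": "user_ts", "format": "YYYY-MM-DD HH24:MI:SS.MS",
                "alias": "timestamp"}, json_property]
print(json.dumps(d))
# Expected rows (assumed shape):
# [{"timestamp": "2018-04-03 12:00:01.123", "temperature": 23.5}, ...]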