Example #1
async def get_audit_entries(request):
    """ Returns a list of audit trail entries sorted with most recent first and total count
        (including the criteria search if applied)

    :Example:

        curl -X GET http://localhost:8081/foglamp/audit

        curl -X GET http://localhost:8081/foglamp/audit?limit=5

        curl -X GET http://localhost:8081/foglamp/audit?limit=5&skip=3

        curl -X GET http://localhost:8081/foglamp/audit?skip=2

        curl -X GET http://localhost:8081/foglamp/audit?source=PURGE

        curl -X GET http://localhost:8081/foglamp/audit?severity=FAILURE

        curl -X GET http://localhost:8081/foglamp/audit?source=LOGGN&severity=INFORMATION&limit=10
    """

    limit = __DEFAULT_LIMIT
    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    offset = __DEFAULT_OFFSET
    if 'skip' in request.query and request.query['skip'] != '':
        try:
            offset = int(request.query['skip'])
            if offset < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(
                reason="Skip/Offset must be a positive integer")

    source = None
    if 'source' in request.query and request.query['source'] != '':
        try:
            source = request.query.get('source')
            # SELECT * FROM log_codes
            storage_client = connect.get_storage_async()
            result = await storage_client.query_tbl("log_codes")
            log_codes = [key['code'] for key in result['rows']]
            if source not in log_codes:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(
                reason="{} is not a valid source".format(source))

    severity = None
    if 'severity' in request.query and request.query['severity'] != '':
        try:
            severity = Severity[request.query['severity'].upper()].value
        except KeyError as ex:
            raise web.HTTPBadRequest(
                reason="{} is not a valid severity".format(ex))

    try:
        # HACK: Start with an always-true WHERE clause so that, as more filters
        # are added in the future, we do not get an exponential explosion of if statements
        payload = PayloadBuilder().SELECT("code", "level", "log", "ts")\
            .ALIAS("return", ("ts", 'timestamp')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS"))\
            .WHERE(['1', '=', 1])

        if source is not None:
            payload.AND_WHERE(['code', '=', source])

        if severity is not None:
            payload.AND_WHERE(['level', '=', severity])

        _and_where_payload = payload.chain_payload()
        # SELECT *, count(*) OVER() FROM log - No support yet from storage layer
        # TODO: FOGL-740, FOGL-663 once ^^ resolved we should replace below storage call for getting total rows
        _and_where_copy = copy.deepcopy(_and_where_payload)
        total_count_payload = PayloadBuilder(_and_where_copy).AGGREGATE(["count", "*"])\
            .ALIAS("aggregate", ("*", "count", "count")).payload()

        # SELECT count (*) FROM log <_and_where_payload>
        storage_client = connect.get_storage_async()
        result = await storage_client.query_tbl_with_payload(
            'log', total_count_payload)
        total_count = result['rows'][0]['count']

        payload = PayloadBuilder(_and_where_payload)
        payload.ORDER_BY(['ts', 'desc'])
        payload.LIMIT(limit)

        if offset > 0:
            payload.OFFSET(offset)

        # SELECT * FROM log <payload.payload()>
        results = await storage_client.query_tbl_with_payload(
            'log', payload.payload())
        res = []
        for row in results['rows']:
            r = dict()
            r["details"] = row["log"]
            severity_level = int(row["level"])
            r["severity"] = Severity(
                severity_level).name if severity_level in (0, 1, 2,
                                                           4) else "UNKNOWN"
            r["source"] = row["code"]
            r["timestamp"] = row["timestamp"]

            res.append(r)

    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({'audit': res, 'totalCount': total_count})
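
For reference, the queries shown with curl in the docstring can also be issued from Python. Below is a minimal client sketch using the aiohttp client; the URL, query parameters, and the 'audit'/'totalCount' response keys come from the handler above, and a FogLAMP instance listening on port 8081 is assumed:

import asyncio
import aiohttp


async def fetch_audit_entries():
    # Mirrors: curl -X GET "http://localhost:8081/foglamp/audit?source=PURGE&severity=FAILURE&limit=5"
    params = {"source": "PURGE", "severity": "FAILURE", "limit": "5"}
    async with aiohttp.ClientSession() as session:
        async with session.get("http://localhost:8081/foglamp/audit", params=params) as resp:
            body = await resp.json()
            # The handler responds with {'audit': [...], 'totalCount': <n>}
            print(body["totalCount"], body["audit"])


if __name__ == "__main__":
    asyncio.run(fetch_audit_entries())
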
Example #2
async def get_audit_entries(request):
    """ Returns a list of audit trail entries sorted with most recent first and total count
        (including the criteria search if applied)

    :Example:

        curl -X GET http://localhost:8081/foglamp/audit

        curl -X GET http://localhost:8081/foglamp/audit?limit=5

        curl -X GET http://localhost:8081/foglamp/audit?limit=5&skip=3

        curl -X GET http://localhost:8081/foglamp/audit?skip=2

        curl -X GET http://localhost:8081/foglamp/audit?source=PURGE

        curl -X GET http://localhost:8081/foglamp/audit?severity=ERROR

        curl -X GET http://localhost:8081/foglamp/audit?source=LOGGN&severity=INFORMATION&limit=10
    """
    try:
        limit = request.query.get('limit', __DEFAULT_LIMIT)
        offset = request.query.get('skip', __DEFAULT_OFFSET)
        source = request.query.get('source')
        severity = request.query.get('severity')

        # HACK: Start with an always-true WHERE clause so that, as more filters
        # are added in the future, we do not get an exponential explosion of if statements
        payload = PayloadBuilder().WHERE(['1', '=', '1'])
        if source is not None and source != "":
            payload.AND_WHERE(['code', '=', source])

        if severity is not None and severity != "":
            payload.AND_WHERE(['level', '=', Severity[severity].value])

        _and_where_payload = payload.chain_payload()
        # SELECT *, count(*) OVER() FROM log - No support yet from storage layer
        # TODO: FOGL-740, FOGL-663 once ^^ resolved we should replace below storage call for getting total rows
        # TODO: FOGL-643 - Aggregate with alias support needed to use payload builder
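        # Roughly, the hand-built count payload ends up looking like (the exact
        # WHERE shape depends on what PayloadBuilder produces):
        #   {"aggregate": {"operation": "count", "column": "*", "alias": "count"},
        #    "where": {...}}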
        aggregate = {"operation": "count", "column": "*", "alias": "count"}
        d = OrderedDict()
        d['aggregate'] = aggregate
        d.update(_and_where_payload)
        total_count_payload = json.dumps(d)

        # SELECT count (*) FROM log <_and_where_payload>
        storage_client = connect.get_storage()
        result = storage_client.query_tbl_with_payload('log', total_count_payload)
        total_count = result['rows'][0]['count']

        payload.ORDER_BY(['ts', 'desc'])
        payload.LIMIT(int(limit))

        if offset != '' and int(offset) > 0:
            payload.OFFSET(int(offset))

        # SELECT * FROM log <payload.payload()>
        results = storage_client.query_tbl_with_payload('log', payload.payload())
        res = []
        for row in results['rows']:
            r = dict()
            r["details"] = row["log"]
            # TODO: FOGL-695 fix PURGE logging level
            severity_level = int(row["level"])
            r["severity"] = Severity(severity_level).name if severity_level in range(1, 5) else "UNKNOWN"
            r["source"] = row["code"]
            r["timestamp"] = row["ts"]

            res.append(r)

        return web.json_response({'audit': res, 'total_count': total_count})

    except (ValueError, KeyError) as ex:
        # An invalid limit/skip value or an unknown severity name lands here
        raise web.HTTPNotFound(reason=str(ex))
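
Either variant is a plain aiohttp handler, so wiring it into an application follows the usual aiohttp pattern. A minimal sketch, assuming the handler is importable (the module path below is illustrative, not the actual FogLAMP routing code, which registers routes in its core REST server):

from aiohttp import web

# Illustrative import path; adjust to wherever get_audit_entries actually lives
from foglamp.services.core.api.audit import get_audit_entries

app = web.Application()
app.router.add_get('/foglamp/audit', get_audit_entries)

if __name__ == '__main__':
    # Serve on the same port the curl examples use
    web.run_app(app, port=8081)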