Example #1
0
async def get_audit_entries(request):
    """ Returns a list of audit trail entries sorted with most recent first and total count
        (including the criteria search if applied)

    Query parameters (all optional):
        limit    -- max rows to return (non-negative int; defaults to __DEFAULT_LIMIT)
        skip     -- offset into the result set (non-negative int; defaults to __DEFAULT_OFFSET)
        source   -- comma-separated audit log codes; each must exist in the log_codes table
        severity -- a Severity enum name (case-insensitive)

    :Raises:
        web.HTTPBadRequest -- invalid limit/skip/source/severity value
        web.HTTPInternalServerError -- any failure while querying the log table

    :Example:

        curl -X GET http://localhost:8081/fledge/audit

        curl -X GET http://localhost:8081/fledge/audit?limit=5

        curl -X GET http://localhost:8081/fledge/audit?limit=5&skip=3

        curl -X GET http://localhost:8081/fledge/audit?skip=2

        curl -X GET http://localhost:8081/fledge/audit?source=PURGE

        curl -X GET http://localhost:8081/fledge/audit?source=NTFSD,NTFSN,NTFAD,NTFST,NTFDL,NTFCL
        curl -X GET http://localhost:8081/fledge/audit?severity=FAILURE

        curl -X GET http://localhost:8081/fledge/audit?source=LOGGN&severity=INFORMATION&limit=10
    """

    limit = __DEFAULT_LIMIT
    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                # Negative values are rejected via the same path as non-numeric ones.
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    offset = __DEFAULT_OFFSET
    if 'skip' in request.query and request.query['skip'] != '':
        try:
            offset = int(request.query['skip'])
            if offset < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Skip/Offset must be a positive integer")

    source = None
    source_list = []
    if 'source' in request.query and request.query['source'] != '':
        try:
            source = request.query.get('source')
            source_list = source.split(',')
            # Validate each requested code against the log_codes table.
            # SELECT * FROM log_codes
            storage_client = connect.get_storage_async()
            result = await storage_client.query_tbl("log_codes")
            log_codes = [key['code'] for key in result['rows']]
            for code in source_list:
                if code not in log_codes:
                    # Carry the offending code in the exception so the 400 reason names it.
                    raise ValueError(code)
        except ValueError as e:
            raise web.HTTPBadRequest(reason="{} is not a valid source".format(str(e)))

    severity = None
    if 'severity' in request.query and request.query['severity'] != '':
        try:
            # Map the (case-insensitive) severity name to its numeric enum value.
            severity = Severity[request.query['severity'].upper()].value
        except KeyError as ex:
            raise web.HTTPBadRequest(reason="{} is not a valid severity".format(ex))

    try:
        # HACK: This way when we can more future we do not get an exponential
        # explosion of if statements
        # The always-true "1 = 1" seed lets optional filters be attached
        # uniformly with AND_WHERE below.
        payload = PayloadBuilder().SELECT("code", "level", "log", "ts")\
            .ALIAS("return", ("ts", 'timestamp')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS"))\
            .WHERE(['1', '=', 1])

        if source is not None:
            if len(source_list) == 1:
                payload.AND_WHERE(['code', '=', source])
            else:
                # Multiple sources need an IN clause as the primary WHERE, so the
                # builder is recreated without the "1 = 1" seed condition.
                payload = PayloadBuilder().SELECT("code", "level", "log", "ts") \
                    .ALIAS("return", ("ts", 'timestamp')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS"))
                payload.WHERE(['code', 'in', source_list])
        if severity is not None:
            payload.AND_WHERE(['level', '=', severity])

        _and_where_payload = payload.chain_payload()
        # SELECT *, count(*) OVER() FROM log - No support yet from storage layer
        # TODO: FOGL-740, FOGL-663 once ^^ resolved we should replace below storage call for getting total rows
        # Deep-copy the filter chain so the count query does not mutate the
        # chain reused for the data query below.
        _and_where_copy = copy.deepcopy(_and_where_payload)
        total_count_payload = PayloadBuilder(_and_where_copy).AGGREGATE(["count", "*"])\
            .ALIAS("aggregate", ("*", "count", "count")).payload()

        # SELECT count (*) FROM log <_and_where_payload>
        storage_client = connect.get_storage_async()
        result = await storage_client.query_tbl_with_payload('log', total_count_payload)
        total_count = result['rows'][0]['count']

        # Second query: the actual page of rows, same filters plus ordering/paging.
        payload = PayloadBuilder(_and_where_payload)
        payload.ORDER_BY(['ts', 'desc'])
        payload.LIMIT(limit)

        if offset > 0:
            payload.OFFSET(offset)

        # SELECT * FROM log <payload.payload()>
        results = await storage_client.query_tbl_with_payload('log', payload.payload())
        res = []
        for row in results['rows']:
            r = dict()
            r["details"] = row["log"]
            severity_level = int(row["level"])
            # NOTE(review): valid levels are hard-coded as (0, 1, 2, 4) here —
            # presumably the Severity enum's values; anything else maps to "UNKNOWN".
            r["severity"] = Severity(severity_level).name if severity_level in (0, 1, 2, 4) else "UNKNOWN"
            r["source"] = row["code"]
            r["timestamp"] = row["timestamp"]

            res.append(r)

    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({'audit': res, 'totalCount': total_count})
Example #2
0
async def get_tasks_latest(request):
    """
    Returns:
            the list of the most recent task execution for each name from tasks table

    Query parameters:
        name -- (optional) restrict results to a single schedule name

    :Raises:
        web.HTTPNotFound -- no matching rows, or a ValueError/TaskNotFoundError
                            occurred while processing the results

    :Example:
              curl -X GET  http://localhost:8081/fledge/task/latest

              curl -X GET  http://localhost:8081/fledge/task/latest?name=xxx
    """
    payload = PayloadBuilder().SELECT("id", "schedule_name", "process_name", "state", "start_time", "end_time", "reason", "pid", "exit_code")\
        .ALIAS("return", ("start_time", 'start_time'), ("end_time", 'end_time'))\
        .FORMAT("return", ("start_time", "YYYY-MM-DD HH24:MI:SS.MS"), ("end_time", "YYYY-MM-DD HH24:MI:SS.MS"))\
        .ORDER_BY(["schedule_name", "asc"], ["start_time", "desc"])

    if 'name' in request.query and request.query['name'] != '':
        name = request.query['name']
        payload.WHERE(["schedule_name", "=", name])

    try:
        _storage = connect.get_storage_async()
        results = await _storage.query_tbl_with_payload(
            'tasks', payload.payload())

        if len(results['rows']) == 0:
            raise web.HTTPNotFound(reason="No Tasks found")

        # Rows arrive ordered by schedule_name asc, start_time desc, so the
        # first row seen for each schedule_name is its most recent execution.
        tasks = []
        previous_schedule = None
        for row in results['rows']:
            if not row['schedule_name'].strip():
                # Skip rows with blank schedule names.
                continue
            if previous_schedule != row['schedule_name']:
                tasks.append(row)
                previous_schedule = row['schedule_name']

        # Hoisted out of the loop: the state-name table is loop-invariant and
        # was previously rebuilt for every task. Task.State values are 1-based,
        # hence the "- 1" when indexing.
        state_names = [t.name.capitalize() for t in Task.State]
        new_tasks = [
            {
                'id': str(task['id']),
                'name': task['schedule_name'],
                'processName': task['process_name'],
                'state': state_names[int(task['state']) - 1],
                'startTime': str(task['start_time']),
                'endTime': str(task['end_time']),
                'exitCode': task['exit_code'],
                'reason': task['reason'],
                'pid': task['pid']
            }
            for task in tasks
        ]
        return web.json_response({'tasks': new_tasks})
    except (ValueError, TaskNotFoundError) as ex:
        raise web.HTTPNotFound(reason=str(ex))