def __init__(self, *args, **kwargs):
     #super(LiveStatusWaitQuery, self).__init__(*args, **kwargs)
     LiveStatusQuery.__init__(self, *args, **kwargs)
     self.response = LiveStatusResponse(responseheader='off', outputformat='csv', keepalive='off', columnheaders='undef', separators=LiveStatusResponse.separators)
     self.response = LiveStatusResponse()
     self.wait_start = time.time()
     self.wait_timeout = 0
     self.wait_trigger = 'all'
Example #3
 def handle_request(self, data):
     try:
         return self.handle_request_and_fail(data)
     except LiveStatusQueryError, exp:
         # LiveStatusQueryError(404, table)
         # LiveStatusQueryError(450, column)
         code, detail = exp.args
         response = LiveStatusResponse()
         response.output = LiveStatusQueryError.messages[code] % detail
         response.statuscode = code
         if 'fixed16' in data:
             response.responseheader = 'fixed16'
         return response.respond()
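The handler above maps a LiveStatusQueryError onto a Livestatus error response: the exception carries a (code, detail) pair, and the class-level messages table turns the code into a format string. A minimal sketch of that pattern (the message texts here are only illustrative; the real class defines its own wording):

class LiveStatusQueryError(Exception):
    # Illustrative message table; the real class ships its own texts
    messages = {
        404: "Invalid GET request, no such table '%s'",
        450: "Invalid request, no such column '%s'",
        452: "Completely invalid GET request '%s'",
    }

try:
    # e.g. what the query parser raises for an unknown table
    raise LiveStatusQueryError(404, 'hostsbyfoo')
except LiveStatusQueryError, exp:
    code, detail = exp.args
    print LiveStatusQueryError.messages[code] % detail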
Example #4
class LiveStatus(object):
    """A class that represents the status of all objects in the broker

    """
    def __init__(self, datamgr, query_cache, db, pnp_path, return_queue):
        self.datamgr = datamgr
        self.query_cache = query_cache
        self.db = db
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = LiveStatusCounters()

    def handle_request(self, data):
        try:
            return self.handle_request_and_fail(data)
        except LiveStatusQueryError, exp:
            # LiveStatusQueryError(404, table)
            # LiveStatusQueryError(450, column)
            code, detail = exp.args
            response = LiveStatusResponse()
            response.output = LiveStatusQueryError.messages[code] % detail
            response.statuscode = code
            if 'fixed16' in data:
                response.responseheader = 'fixed16'
            return response.respond()
        except Exception, exp:
            print "exception!!!", exp
            response = LiveStatusResponse()
            response.output = LiveStatusQueryError.messages[452] % data
            response.statuscode = 452
            if 'fixed16' in data:
                response.responseheader = 'fixed16'
            return response.respond()
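handle_request() receives the raw request text exactly as it arrived on the socket; the 'fixed16' test in both error branches simply looks for that substring in the raw data. A typical request, using the keywords handled by the query parser shown further down this page (livestatus_broker here is a hypothetical, already constructed LiveStatus instance):

request = (
    "GET hosts\n"
    "Columns: name state\n"
    "Filter: state = 1\n"
    "ResponseHeader: fixed16\n"
    "OutputFormat: csv\n"
    "\n"
)
# response_text = livestatus_broker.handle_request(request)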
Example #5
    def __init__(self, datamgr, query_cache, db, pnp_path, return_queue,
                 counters):
        # Runtime data from the global LiveStatus object
        self.datamgr = datamgr
        self.query_cache = query_cache
        self.db = db
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = counters

        # Private attributes for this specific request
        self.response = LiveStatusResponse()
        self.authuser = None
        self.table = None
        self.columns = []
        self.filtercolumns = []
        self.prefiltercolumns = []
        self.outputcolumns = []
        self.stats_group_by = []
        self.stats_columns = []
        self.aliases = []
        self.limit = None
        self.extcmd = False

        # Initialize the stacks which are needed for the Filter: and Stats:
        # filter- and count-operations
        self.filter_stack = LiveStatusStack()
        self.stats_filter_stack = LiveStatusStack()
        self.stats_postprocess_stack = LiveStatusStack()
        self.stats_query = False

        # When was this query launched?
        self.tic = time.time()
        # Clients can also send their local time with the request
        self.client_localtime = self.tic

        # This is mostly used in the Response.format... which needs to know
        # the class behind a query's table
        self.table_class_map = table_class_map
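The three LiveStatusStack instances created here are what the parser pushes filter closures onto: every Filter: line contributes one callable, and an And: n or Or: n line collapses the top n callables into a single combined one (the And:/Or: handling in parse_input() further down describes the same operation). A rough, hypothetical stand-in that only illustrates the and_elements() idea, not the real LiveStatusStack implementation:

import Queue

class SimpleFilterStack(Queue.LifoQueue):
    # Hypothetical helper, for illustration only
    def and_elements(self, num):
        if num > 1:
            filters = [self.get() for _ in range(num)]
            self.put(lambda obj: all(f(obj) for f in filters))

stack = SimpleFilterStack()
stack.put(lambda host: host['state'] == 1)       # Filter: state = 1
stack.put(lambda host: host['name'] == 'web01')  # Filter: name = web01
stack.and_elements(2)                            # And: 2
combined = stack.get()
print combined({'name': 'web01', 'state': 1})    # True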
Example #7
class LiveStatus(object):
    """A class that represents the status of all objects in the broker

    """
    def __init__(self, datamgr, query_cache, db, pnp_path, return_queue):
        self.datamgr = datamgr
        self.query_cache = query_cache
        self.db = db
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = LiveStatusCounters()

    def handle_request(self, data):
        try:
            return self.handle_request_and_fail(data)
        except LiveStatusQueryError, exp:
            # LiveStatusQueryError(404, table)
            # LiveStatusQueryError(450, column)
            code, detail = exp.args
            response = LiveStatusResponse()
            response.output = LiveStatusQueryError.messages[code] % detail
            response.statuscode = code
            if 'fixed16' in data:
                response.responseheader = 'fixed16'
            return response.respond()
        except Exception, exp:
            logger.error("[Livestatus] Exception! %s" % exp)
            # Also show the exception
            output = cStringIO.StringIO()
            traceback.print_exc(file=output)
            logger.error("[Livestatus] Back trace of this exception: %s" %
                         (output.getvalue()))
            output.close()
            # Ok now we can return something
            response = LiveStatusResponse()
            response.output = LiveStatusQueryError.messages[452] % data
            response.statuscode = 452
            if 'fixed16' in data:
                response.responseheader = 'fixed16'
            return response.respond()
Example #8
    def __init__(self, datamgr, query_cache, db, pnp_path, return_queue, counters):
        # Runtime data from the global LiveStatus object
        self.datamgr = datamgr
        self.query_cache = query_cache
        self.db = db
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = counters

        # Private attributes for this specific request
        self.response = LiveStatusResponse()
        self.raw_data = ''
        self.authuser = None
        self.table = None
        self.columns = []
        self.filtercolumns = []
        self.prefiltercolumns = []
        self.outputcolumns = []
        self.stats_group_by = []
        self.stats_columns = []
        self.aliases = []
        self.limit = None
        self.extcmd = False

        # Initialize the stacks which are needed for the Filter: and Stats:
        # filter- and count-operations
        self.filter_stack = LiveStatusStack()
        self.stats_filter_stack = LiveStatusStack()
        self.stats_postprocess_stack = LiveStatusStack()
        self.stats_query = False

        # When was this query launched?
        self.tic = time.time()
        # Clients can also send their local time with the request
        self.client_localtime = self.tic

        # This is mostly used in the Response.format... which needs to know
        # the class behind a query's table
        self.table_class_map = table_class_map
Example #9
class LiveStatusQuery(object):

    my_type = 'query'

    def __init__(self, datamgr, query_cache, db, pnp_path, return_queue, counters):
        # Runtime data from the global LiveStatus object
        self.datamgr = datamgr
        self.query_cache = query_cache
        self.db = db
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = counters

        # Private attributes for this specific request
        self.response = LiveStatusResponse()
        self.authuser = None
        self.table = None
        self.columns = []
        self.filtercolumns = []
        self.prefiltercolumns = []
        self.outputcolumns = []
        self.stats_group_by = []
        self.stats_columns = []
        self.aliases = []
        self.limit = None
        self.extcmd = False

        # Initialize the stacks which are needed for the Filter: and Stats:
        # filter- and count-operations
        self.filter_stack = LiveStatusStack()
        self.stats_filter_stack = LiveStatusStack()
        self.stats_postprocess_stack = LiveStatusStack()
        self.stats_query = False

        # When was this query launched?
        self.tic = time.time()
        # Clients can also send their local time with the request
        self.client_localtime = self.tic

        # This is mostly used in the Response.format... which needs to know
        # the class behind a queries table
        self.table_class_map = table_class_map

    def __str__(self):
        output = "LiveStatusRequest:\n"
        for attr in ["table", "columns", "filtercolumns", "prefiltercolumns", "aliases", "stats_group_by", "stats_query"]:
            output += "request %s: %s\n" % (attr, getattr(self, attr))
        return output

    def split_command(self, line, splits=1):
        """Create a list from the words of a line"""
        return line.split(' ', splits)

    def split_option(self, line, splits=1):
        """Like split_commands, but converts numbers to int data type"""
        x = map(lambda i: (i.isdigit() and int(i)) or i, [token.strip() for token in re.split(r"[\s]+", line, splits)])
        return x

    def split_option_with_columns(self, line):
        """Split a line in a command and a list of words"""
        cmd, columns = self.split_option(line)
        return cmd, [self.strip_table_from_column(c) for c in re.compile(r'\s+').split(columns)]

    def strip_table_from_column(self, column):
        """Cut off the table name, because it is possible
        to say service_state instead of state"""
        bygroupmatch = re.compile('(\w+)by.*group').search(self.table)
        if bygroupmatch:
            return re.sub(re.sub('s$', '', bygroupmatch.group(1)) + '_', '', column, 1)
        else:
            return re.sub(re.sub('s$', '', self.table) + '_', '', column, 1)

    def parse_input(self, data):
        """Parse the lines of a livestatus request.

        This function looks for keywords in input lines and
        sets the attributes of the request object

        """
        for line in data.splitlines():
            line = line.strip()
            # Tools like NagVis send KEYWORD:option, and we prefer to have
            # a space following the ':'
            if ':' in line and not ' ' in line:
                line = line.replace(':', ': ')
            keyword = line.split(' ')[0].rstrip(':')
            if keyword == 'GET':  # Get the name of the base table
                _, self.table = self.split_command(line)
                if self.table not in table_class_map.keys():
                    raise LiveStatusQueryError(404, self.table)
            elif keyword == 'Columns':  # Get the names of the desired columns
                _, self.columns = self.split_option_with_columns(line)
                self.response.columnheaders = 'off'
            elif keyword == 'ResponseHeader':
                _, responseheader = self.split_option(line)
                self.response.responseheader = responseheader
            elif keyword == 'OutputFormat':
                _, outputformat = self.split_option(line)
                self.response.outputformat = outputformat
            elif keyword == 'KeepAlive':
                _, keepalive = self.split_option(line)
                self.response.keepalive = keepalive
            elif keyword == 'ColumnHeaders':
                _, columnheaders = self.split_option(line)
                self.response.columnheaders = columnheaders
            elif keyword == 'Limit':
                _, self.limit = self.split_option(line)
            elif keyword == 'AuthUser':
                if self.table in ['hosts', 'hostgroups', 'services', 'servicegroups', 'hostsbygroup', 'servicesbygroup', 'servicesbyhostgroup']:
                    _, self.authuser = self.split_option(line)
                # else self.authuser stays None and will be ignored
            elif keyword == 'Filter':
                try:
                    _, attribute, operator, reference = self.split_option(line, 3)
                except:
                    _, attribute, operator, reference = self.split_option(line, 2) + ['']
                if operator in ['=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>', '!>=', '!<', '!<=']:
                    # Cut off the table name
                    attribute = self.strip_table_from_column(attribute)
                    # Some operators can simply be negated
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = {'!>': '<=', '!>=': '<', '!<': '>=', '!<=': '>'}[operator]
                    # Put a function on top of the filter_stack which implements
                    # the desired operation
                    self.filtercolumns.append(attribute)
                    self.prefiltercolumns.append(attribute)
                    self.filter_stack.put_stack(self.make_filter(operator, attribute, reference))
                    if self.table == 'log':
                        self.db.add_filter(operator, attribute, reference)
                else:
                    print "illegal operation", operator
                    pass  # illegal operation
            elif keyword == 'And':
                _, andnum = self.split_option(line)
                # Take the last andnum functions from the stack
                # Construct a new function which makes a logical and
                # Put the function back onto the stack
                self.filter_stack.and_elements(andnum)
                if self.table == 'log':
                    self.db.add_filter_and(andnum)
            elif keyword == 'Or':
                _, ornum = self.split_option(line)
                # Take the last ornum functions from the stack
                # Construct a new function which makes a logical or
                # Put the function back onto the stack
                self.filter_stack.or_elements(ornum)
                if self.table == 'log':
                    self.db.add_filter_or(ornum)
            elif keyword == 'Negate':
                self.filter_stack.not_elements()
                if self.table == 'log':
                    self.db.add_filter_not()
            elif keyword == 'StatsGroupBy':
                _, stats_group_by = self.split_option_with_columns(line)
                self.filtercolumns.extend(stats_group_by)
                self.stats_group_by.extend(stats_group_by)
                # Deprecated. If your query contains at least one Stats:-header
                # then Columns: has the meaning of the old StatsGroupBy: header
            elif keyword == 'Stats':
                self.stats_query = True
                try:
                    _, attribute, operator, reference = self.split_option(line, 3)
                    if attribute in ['sum', 'min', 'max', 'avg', 'std'] and reference.startswith('as '):
                        attribute, operator = operator, attribute
                        _, alias = reference.split(' ')
                        self.aliases.append(alias)
                    elif attribute in ['sum', 'min', 'max', 'avg', 'std'] and reference == '=':
                        # Workaround for thruk-cmds like: Stats: sum latency =
                        attribute, operator = operator, attribute
                        reference = ''
                except:
                    _, attribute, operator = self.split_option(line, 3)
                    if attribute in ['sum', 'min', 'max', 'avg', 'std']:
                        attribute, operator = operator, attribute
                    reference = ''
                attribute = self.strip_table_from_column(attribute)
                if operator in ['=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>', '!>=', '!<', '!<=']:
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = {'!>': '<=', '!>=': '<', '!<': '>=', '!<=': '>'}[operator]
                    self.filtercolumns.append(attribute)
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put_stack(self.make_filter(operator, attribute, reference))
                    self.stats_postprocess_stack.put_stack(self.make_filter('count', attribute, None))
                elif operator in ['sum', 'min', 'max', 'avg', 'std']:
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put_stack(self.make_filter('dummy', attribute, None))
                    self.stats_postprocess_stack.put_stack(self.make_filter(operator, attribute, None))
                else:
                    print "illegal operation", operator
                    pass  # illegal operation
            elif keyword == 'StatsAnd':
                _, andnum = self.split_option(line)
                self.stats_filter_stack.and_elements(andnum)
            elif keyword == 'StatsOr':
                _, ornum = self.split_option(line)
                self.stats_filter_stack.or_elements(ornum)
            elif keyword == 'Separators':
                _, sep1, sep2, sep3, sep4 = line.split(' ', 5)
                self.response.separators = map(lambda x: chr(int(x)), [sep1, sep2, sep3, sep4])
            elif keyword == 'Localtime':
                _, self.client_localtime = self.split_option(line)
            elif keyword == 'COMMAND':
                _, self.extcmd = line.split(' ', 1)
            else:
                # This line is not valid or not implemented
                print "Received a line of input which i can't handle: '%s'" % line
                pass
        self.metainfo = LiveStatusQueryMetainfo(data)

    def launch_query(self):
        """ Prepare the request object's filter stacks """

        # The Response object needs to access the Query
        self.response.load(self)

        # A minimal integrity check
        if not self.table:
            return []

        # Ask the cache if this request was already answered under the same
        # circumstances. (And if not, whether this query is cacheable at all)
        cacheable, cache_hit, cached_response = self.query_cache.get_cached_query(self.metainfo)
        if cache_hit:
            self.columns = cached_response['columns']
            self.response.columnheaders = cached_response['columnheaders']
            return cached_response['result']

        # Make columns unique
        self.filtercolumns = list(set(self.filtercolumns))
        self.prefiltercolumns = list(set(self.prefiltercolumns))
        self.stats_columns = list(set(self.stats_columns))

        if self.stats_query:
            if len(self.columns) > 0:
                # StatsGroupBy is deprecated. Columns: can be used instead
                self.stats_group_by = self.columns
            elif len(self.stats_group_by) > 0:
                self.columns = self.stats_group_by + self.stats_columns
            #if len(self.stats_columns) > 0 and len(self.columns) == 0:
            if len(self.stats_columns) > 0:
                self.columns = self.stats_columns + self.columns
        else:
            if len(self.columns) == 0:
                self.outputcolumns = list_livestatus_attributes(self.table)
            else:
                self.outputcolumns = self.columns

        # Make one big filter where the single filters are anded
        self.filter_stack.and_elements(self.filter_stack.qsize())

        # Get the function which implements the Filter: statements
        filter_func = self.filter_stack.get_stack()
        without_filter = len(self.filtercolumns) == 0
        cs = LiveStatusConstraints(filter_func, without_filter, self.authuser)

        try:
            # Remember the number of stats filters. We need these numbers as columns later.
            # But we need to ask now, because get_live_data() will empty the stack
            num_stats_filters = self.stats_filter_stack.qsize()
            if self.table == 'log':
                result = self.get_live_data_log(cs)
            else:
                # If the pnpgraph_present column is involved, then check
                # with each request if the pnp perfdata path exists
                if 'pnpgraph_present' in self.columns + self.filtercolumns + self.prefiltercolumns and self.pnp_path and os.access(self.pnp_path, os.R_OK):
                    self.pnp_path_readable = True
                else:
                    self.pnp_path_readable = False
                # Apply the filters on the broker's host/service/etc elements

                result = self.get_live_data(cs)

            if self.stats_query:
                self.columns = range(num_stats_filters)
                if self.stats_group_by:
                    self.columns = tuple(list(self.stats_group_by) + list(self.columns))
                if len(self.aliases) == 0:
                    # If there were Stats: statements without "as", show no column headers at all
                    self.response.columnheaders = 'off'
                else:
                    self.response.columnheaders = 'on'

            if self.stats_query:
                result = self.statsify_result(result)
                # statsify_result returns a dict with column numbers as keys
            elif self.table == 'columns':
                # With stats_request set to True, format_output expects result
                # to be a list of dicts instead of a list of objects
                self.stats_query = True

        except Exception, e:
            import traceback
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print e
            traceback.print_exc(32)
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            result = []

        if cacheable and not cache_hit:
            # We cannot cache generators, so we must first read them into a list
            result = [r for r in result]
            # For stats requests in particular, the columns and headers
            # are also modified, so we need to save them too.
            self.query_cache.cache_query(self.metainfo, {
                'result': result,
                'columns': self.columns,
                'columnheaders': self.response.columnheaders,
            })

        return result
class LiveStatusWaitQuery(LiveStatusQuery):

    my_type = 'wait'

    def __init__(self, *args, **kwargs):
        #super(LiveStatusWaitQuery, self).__init__(*args, **kwargs)
        LiveStatusQuery.__init__(self, *args, **kwargs)
        self.response = LiveStatusResponse(responseheader='off', outputformat='csv', keepalive='off', columnheaders='undef', separators=LiveStatusResponse.separators)
        self.response = LiveStatusResponse()
        self.wait_start = time.time()
        self.wait_timeout = 0
        self.wait_trigger = 'all'

    def parse_input(self, data):
        """Parse the lines of a livestatus request.

        This function looks for keywords in input lines and
        sets the attributes of the request object.
        WaitCondition statements are written into the metafilter string as if they
        were ordinary Filter:-statements. (metafilter is then used for a MetaData object)

        """
        metafilter = ""
        for line in data.splitlines():
            line = line.strip()
            # Tools like NagVis send KEYWORD:option, and we prefer to have
            # a space following the ':'
            if ':' in line and not ' ' in line:
                line = line.replace(':', ': ')
            keyword = line.split(' ')[0].rstrip(':')
            if keyword == 'GET':  # Get the name of the base table
                _, self.table = self.split_command(line)
                metafilter += "GET %s\n" % self.table
            elif keyword == 'WaitObject':  # Pick a specific object by name
                _, item = self.split_option(line)
                # It's like Filter: name = %s
                # Only for services it's host<blank>servicedesc
                if self.table == 'services':
                    if ';' in item:
                        host_name, service_description = item.split(';', 1)
                    else:
                        host_name, service_description = item.split(' ', 1)
                    self.filtercolumns.append('host_name')
                    self.prefiltercolumns.append('host_name')
                    self.filter_stack.put(self.make_filter('=', 'host_name', host_name))
                    self.filtercolumns.append('description')
                    self.prefiltercolumns.append('description')
                    self.filter_stack.put(self.make_filter('=', 'description', service_description))
                    # A WaitQuery works like an ordinary Query. But if
                    # we already know which object we're watching for
                    # changes, instead of scanning the entire list and
                    # applying a Filter:, we simply reduce the list
                    # so it has just one element.
                    metafilter += "Filter: host_name = %s\n" % host_name
                    metafilter += "Filter: service_description = %s\n" % service_description
                elif self.table == 'hosts':
                    attribute = self.strip_table_from_column('name')
                    self.filtercolumns.append('name')
                    self.prefiltercolumns.append('name')
                    self.filter_stack.put(self.make_filter('=', 'name', item))
                    metafilter += "Filter: host_name = %s\n" % (item,)
                else:
                    attribute = self.strip_table_from_column('name')
                    self.filtercolumns.append('name')
                    self.prefiltercolumns.append('name')
                    self.filter_stack.put(self.make_filter('=', 'name', item))
                    # For the other tables this works like an ordinary query.
                    # In the future there might be more lookup-tables
            elif keyword == 'WaitTrigger':
                _, self.wait_trigger = self.split_option(line)
                if self.wait_trigger not in ['check', 'state', 'log', 'downtime', 'comment', 'command']:
                    self.wait_trigger = 'all'
            elif keyword == 'WaitCondition':
                try:
                    _, attribute, operator, reference = self.split_option(line, 3)
                except:
                    _, attribute, operator, reference = self.split_option(line, 2) + ['']
                if operator in ['=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>', '!>=', '!<', '!<=']:
                    # We need to set columns; otherwise columnheaders will be set to "on"
                    self.columns.append(attribute)
                    # Cut off the table name
                    attribute = self.strip_table_from_column(attribute)
                    # Some operators can simply be negated
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = {'!>': '<=', '!>=': '<', '!<': '>=', '!<=': '>'}[operator]
                    # Put a function on top of the filter_stack which implements
                    # the desired operation
                    self.filtercolumns.append(attribute)
                    self.prefiltercolumns.append(attribute)
                    self.filter_stack.put(self.make_filter(operator, attribute, reference))
                    if self.table == 'log':
                        self.db.add_filter(operator, attribute, reference)
                else:
                    logger.warning("[Livestatus Wait Query] Illegal operation: %s" % str(operator))
                    pass  # illegal operation
            elif keyword == 'WaitConditionAnd':
                _, andnum = self.split_option(line)
                # Take the last andnum functions from the stack
                # Construct a new function which makes a logical and
                # Put the function back onto the stack
                self.filter_stack.and_elements(andnum)
                if self.table == 'log':
                    self.db.add_filter_and(andnum)
            elif keyword == 'WaitConditionOr':
                _, ornum = self.split_option(line)
                # Take the last ornum functions from the stack
                # Construct a new function which makes a logical or
                # Put the function back onto the stack
                self.filter_stack.or_elements(ornum)
                if self.table == 'log':
                    self.db.add_filter_or(ornum)
            elif keyword == 'WaitTimeout':
                _, self.wait_timeout = self.split_option(line)
                self.wait_timeout = int(self.wait_timeout) / 1000
            else:
                # This line is not valid or not implemented
                logger.warning("[Livestatus Wait Query] Received a line of input which i can't handle: '%s'" % line)
                pass
        # Make columns unique
        self.filtercolumns = list(set(self.filtercolumns))
        self.prefiltercolumns = list(set(self.prefiltercolumns))

        # Make one big filter where the single filters are anded
        self.filter_stack.and_elements(self.filter_stack.qsize())

        if self.table == 'log':
            self.sql_filter_stack.and_elements(self.sql_filter_stack.qsize())

        self.metainfo = LiveStatusQueryMetainfo(metafilter)

    def launch_query(self):
        """ Prepare the request object's filter stacks """

        # The Response object needs to access the Query
        self.response.load(self)

        # A minimal integrity check
        if not self.table:
            return []

        try:
            # Remember the number of stats filters. We need these numbers as columns later.
            # But we need to ask now, because get_live_data() will empty the stack
            if self.table == 'log':
                result = self.get_live_data_log()
            else:
                # If the pnpgraph_present column is involved, then check
                # with each request if the pnp perfdata path exists
                if 'pnpgraph_present' in self.columns + self.filtercolumns + self.prefiltercolumns and self.pnp_path and os.access(self.pnp_path, os.R_OK):
                    self.pnp_path_readable = True
                else:
                    self.pnp_path_readable = False
                # Apply the filters on the broker's host/service/etc elements
                result = self.get_live_data()
        except Exception, e:
            import traceback
            logger.error("[Livestatus Wait Query]  Error: %s" % e)
            traceback.print_exc(32)
            result = []
        return result
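For reference, a stats-style request exercising the Stats: branches of parse_input() above could look like the following (the column names are standard Livestatus service columns and only illustrative; the 'as' suffix becomes an alias and therefore a column header):

stats_request = (
    "GET services\n"
    "Filter: host_name = web01\n"
    "Stats: state = 0\n"
    "Stats: state = 2\n"
    "Stats: sum latency\n"
    "Stats: avg execution_time as avg_exec\n"
    "\n"
)

With Stats: headers present, launch_query() replaces self.columns with one slot per Stats: line (plus any group-by columns), and statsify_result() turns the matching objects into the requested counts and aggregates.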
Example #11
class LiveStatusQuery(object):

    my_type = 'query'

    def __init__(self, datamgr, query_cache, db, pnp_path, return_queue,
                 counters):
        # Runtime data from the global LiveStatus object
        self.datamgr = datamgr
        self.query_cache = query_cache
        self.db = db
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = counters

        # Private attributes for this specific request
        self.response = LiveStatusResponse()
        self.authuser = None
        self.table = None
        self.columns = []
        self.filtercolumns = []
        self.prefiltercolumns = []
        self.outputcolumns = []
        self.stats_group_by = []
        self.stats_columns = []
        self.aliases = []
        self.limit = None
        self.extcmd = False

        # Initialize the stacks which are needed for the Filter: and Stats:
        # filter- and count-operations
        self.filter_stack = LiveStatusStack()
        self.stats_filter_stack = LiveStatusStack()
        self.stats_postprocess_stack = LiveStatusStack()
        self.stats_query = False

        # When was this query launched?
        self.tic = time.time()
        # Clients can also send their local time with the request
        self.client_localtime = self.tic

        # This is mostly used in the Response.format... which needs to know
        # the class behind a query's table
        self.table_class_map = table_class_map

    def __str__(self):
        output = "LiveStatusRequest:\n"
        for attr in [
                "table", "columns", "filtercolumns", "prefiltercolumns",
                "aliases", "stats_group_by", "stats_query"
        ]:
            output += "request %s: %s\n" % (attr, getattr(self, attr))
        return output

    def split_command(self, line, splits=1):
        """Create a list from the words of a line"""
        return line.split(' ', splits)

    def split_option(self, line, splits=1):
        """Like split_commands, but converts numbers to int data type"""
        x = map(lambda i: (i.isdigit() and int(i)) or i,
                [token.strip() for token in re.split(r"[\s]+", line, splits)])
        return x

    def split_option_with_columns(self, line):
        """Split a line in a command and a list of words"""
        cmd, columns = self.split_option(line)
        return cmd, [
            self.strip_table_from_column(c)
            for c in re.compile(r'\s+').split(columns)
        ]

    def strip_table_from_column(self, column):
        """Cut off the table name, because it is possible
        to say service_state instead of state"""
        bygroupmatch = re.compile('(\w+)by.*group').search(self.table)
        if bygroupmatch:
            return re.sub(
                re.sub('s$', '', bygroupmatch.group(1)) + '_', '', column, 1)
        else:
            return re.sub(re.sub('s$', '', self.table) + '_', '', column, 1)

    def parse_input(self, data):
        """Parse the lines of a livestatus request.

        This function looks for keywords in input lines and
        sets the attributes of the request object

        """
        for line in data.splitlines():
            line = line.strip()
            # Tools like NagVis send KEYWORD:option, and we prefer to have
            # a space following the ':'
            if ':' in line and not ' ' in line:
                line = line.replace(':', ': ')
            keyword = line.split(' ')[0].rstrip(':')
            if keyword == 'GET':  # Get the name of the base table
                _, self.table = self.split_command(line)
                if self.table not in table_class_map.keys():
                    raise LiveStatusQueryError(404, self.table)
            elif keyword == 'Columns':  # Get the names of the desired columns
                _, self.columns = self.split_option_with_columns(line)
                self.response.columnheaders = 'off'
            elif keyword == 'ResponseHeader':
                _, responseheader = self.split_option(line)
                self.response.responseheader = responseheader
            elif keyword == 'OutputFormat':
                _, outputformat = self.split_option(line)
                self.response.outputformat = outputformat
            elif keyword == 'KeepAlive':
                _, keepalive = self.split_option(line)
                self.response.keepalive = keepalive
            elif keyword == 'ColumnHeaders':
                _, columnheaders = self.split_option(line)
                self.response.columnheaders = columnheaders
            elif keyword == 'Limit':
                _, self.limit = self.split_option(line)
            elif keyword == 'AuthUser':
                if self.table in [
                        'hosts', 'hostgroups', 'services', 'servicegroups',
                        'hostsbygroup', 'servicesbygroup',
                        'servicesbyhostgroup'
                ]:
                    _, self.authuser = self.split_option(line)
                # else self.authuser stays None and will be ignored
            elif keyword == 'Filter':
                try:
                    _, attribute, operator, reference = self.split_option(
                        line, 3)
                except:
                    _, attribute, operator, reference = self.split_option(
                        line, 2) + ['']
                if operator in [
                        '=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>',
                        '!>=', '!<', '!<='
                ]:
                    # Cut off the table name
                    attribute = self.strip_table_from_column(attribute)
                    # Some operators can simply be negated
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = {
                            '!>': '<=',
                            '!>=': '<',
                            '!<': '>=',
                            '!<=': '>'
                        }[operator]
                    # Put a function on top of the filter_stack which implements
                    # the desired operation
                    self.filtercolumns.append(attribute)
                    self.prefiltercolumns.append(attribute)
                    self.filter_stack.put_stack(
                        self.make_filter(operator, attribute, reference))
                    if self.table == 'log':
                        self.db.add_filter(operator, attribute, reference)
                else:
                    print "illegal operation", operator
                    pass  # illegal operation
            elif keyword == 'And':
                _, andnum = self.split_option(line)
                # Take the last andnum functions from the stack
                # Construct a new function which makes a logical and
                # Put the function back onto the stack
                self.filter_stack.and_elements(andnum)
                if self.table == 'log':
                    self.db.add_filter_and(andnum)
            elif keyword == 'Or':
                _, ornum = self.split_option(line)
                # Take the last ornum functions from the stack
                # Construct a new function which makes a logical or
                # Put the function back onto the stack
                self.filter_stack.or_elements(ornum)
                if self.table == 'log':
                    self.db.add_filter_or(ornum)
            elif keyword == 'Negate':
                self.filter_stack.not_elements()
                if self.table == 'log':
                    self.db.add_filter_not()
            elif keyword == 'StatsGroupBy':
                _, stats_group_by = self.split_option_with_columns(line)
                self.filtercolumns.extend(stats_group_by)
                self.stats_group_by.extend(stats_group_by)
                # Deprecated. If your query contains at least one Stats:-header
                # then Columns: has the meaning of the old StatsGroupBy: header
            elif keyword == 'Stats':
                self.stats_query = True
                try:
                    _, attribute, operator, reference = self.split_option(
                        line, 3)
                    if attribute in ['sum', 'min', 'max', 'avg', 'std'
                                     ] and reference.startswith('as '):
                        attribute, operator = operator, attribute
                        _, alias = reference.split(' ')
                        self.aliases.append(alias)
                    elif attribute in ['sum', 'min', 'max', 'avg', 'std'
                                       ] and reference == '=':
                        # Workaround for thruk-cmds like: Stats: sum latency =
                        attribute, operator = operator, attribute
                        reference = ''
                except:
                    _, attribute, operator = self.split_option(line, 3)
                    if attribute in ['sum', 'min', 'max', 'avg', 'std']:
                        attribute, operator = operator, attribute
                    reference = ''
                attribute = self.strip_table_from_column(attribute)
                if operator in [
                        '=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>',
                        '!>=', '!<', '!<='
                ]:
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = {
                            '!>': '<=',
                            '!>=': '<',
                            '!<': '>=',
                            '!<=': '>'
                        }[operator]
                    self.filtercolumns.append(attribute)
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put_stack(
                        self.make_filter(operator, attribute, reference))
                    self.stats_postprocess_stack.put_stack(
                        self.make_filter('count', attribute, None))
                elif operator in ['sum', 'min', 'max', 'avg', 'std']:
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put_stack(
                        self.make_filter('dummy', attribute, None))
                    self.stats_postprocess_stack.put_stack(
                        self.make_filter(operator, attribute, None))
                else:
                    print "illegal operation", operator
                    pass  # illegal operation
            elif keyword == 'StatsAnd':
                _, andnum = self.split_option(line)
                self.stats_filter_stack.and_elements(andnum)
            elif keyword == 'StatsOr':
                _, ornum = self.split_option(line)
                self.stats_filter_stack.or_elements(ornum)
            elif keyword == 'Separators':
                _, sep1, sep2, sep3, sep4 = line.split(' ', 5)
                self.response.separators = map(lambda x: chr(int(x)),
                                               [sep1, sep2, sep3, sep4])
            elif keyword == 'Localtime':
                _, self.client_localtime = self.split_option(line)
            elif keyword == 'COMMAND':
                _, self.extcmd = line.split(' ', 1)
            else:
                # This line is not valid or not implemented
                print "Received a line of input which i can't handle: '%s'" % line
                pass
        self.metainfo = LiveStatusQueryMetainfo(data)

    def launch_query(self):
        """ Prepare the request object's filter stacks """

        # The Response object needs to access the Query
        self.response.load(self)

        # A minimal integrity check
        if not self.table:
            return []

        # Ask the cache if this request was already answered under the same
        # circumstances. (And if not, whether this query is cacheable at all)
        cacheable, cache_hit, cached_response = self.query_cache.get_cached_query(
            self.metainfo)
        if cache_hit:
            self.columns = cached_response['columns']
            self.response.columnheaders = cached_response['columnheaders']
            return cached_response['result']

        # Make columns unique
        self.filtercolumns = list(set(self.filtercolumns))
        self.prefiltercolumns = list(set(self.prefiltercolumns))
        self.stats_columns = list(set(self.stats_columns))

        if self.stats_query:
            if len(self.columns) > 0:
                # StatsGroupBy is deprecated. Columns: can be used instead
                self.stats_group_by = self.columns
            elif len(self.stats_group_by) > 0:
                self.columns = self.stats_group_by + self.stats_columns
            #if len(self.stats_columns) > 0 and len(self.columns) == 0:
            if len(self.stats_columns) > 0:
                self.columns = self.stats_columns + self.columns
        else:
            if len(self.columns) == 0:
                self.outputcolumns = list_livestatus_attributes(self.table)
            else:
                self.outputcolumns = self.columns

        # Make one big filter where the single filters are anded
        self.filter_stack.and_elements(self.filter_stack.qsize())

        # Get the function which implements the Filter: statements
        filter_func = self.filter_stack.get_stack()
        without_filter = len(self.filtercolumns) == 0
        cs = LiveStatusConstraints(filter_func, without_filter, self.authuser)

        try:
            # Remember the number of stats filters. We need these numbers as columns later.
            # But we need to ask now, because get_live_data() will empty the stack
            num_stats_filters = self.stats_filter_stack.qsize()
            if self.table == 'log':
                result = self.get_live_data_log(cs)
            else:
                # If the pnpgraph_present column is involved, then check
                # with each request if the pnp perfdata path exists
                if 'pnpgraph_present' in self.columns + self.filtercolumns + self.prefiltercolumns and self.pnp_path and os.access(
                        self.pnp_path, os.R_OK):
                    self.pnp_path_readable = True
                else:
                    self.pnp_path_readable = False
                # Apply the filters on the broker's host/service/etc elements

                result = self.get_live_data(cs)

            if self.stats_query:
                self.columns = range(num_stats_filters)
                if self.stats_group_by:
                    self.columns = tuple(
                        list(self.stats_group_by) + list(self.columns))
                if len(self.aliases) == 0:
                    # If there were Stats: statements without "as", show no column headers at all
                    self.response.columnheaders = 'off'
                else:
                    self.response.columnheaders = 'on'

            if self.stats_query:
                result = self.statsify_result(result)
                # statsify_result returns a dict with column numbers as keys
            elif self.table == 'columns':
                # With stats_request set to True, format_output expects result
                # to be a list of dicts instead of a list of objects
                self.stats_query = True

        except Exception, e:
            import traceback
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print e
            traceback.print_exc(32)
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            result = []

        if cacheable and not cache_hit:
            # We cannot cache generators, so we must first read them into a list
            result = [r for r in result]
            # For stats requests in particular, the columns and headers
            # are also modified, so we need to save them too.
            self.query_cache.cache_query(
                self.metainfo, {
                    'result': result,
                    'columns': self.columns,
                    'columnheaders': self.response.columnheaders,
                })

        return result
class LiveStatusWaitQuery(LiveStatusQuery):

    my_type = 'wait'

    def __init__(self, *args, **kwargs):
        #super(LiveStatusWaitQuery, self).__init__(*args, **kwargs)
        LiveStatusQuery.__init__(self, *args, **kwargs)
        self.response = LiveStatusResponse(responseheader='off', outputformat='csv', keepalive='off', columnheaders='undef', separators=LiveStatusResponse.separators)
        self.response = LiveStatusResponse()
        self.wait_start = time.time()
        self.wait_timeout = 0
        self.wait_trigger = 'all'

    def parse_input(self, data):
        """Parse the lines of a livestatus request.

        This function looks for keywords in input lines and
        sets the attributes of the request object.
        WaitCondition statements are written into the metafilter string as if they
        were ordinary Filter:-statements. (metafilter is then used for a MetaData object)

        """
        metafilter = ""
        for line in data.splitlines():
            line = line.strip()
            # Tools like NagVis send KEYWORD:option, and we prefer to have
            # a space following the ':'
            if ':' in line and not ' ' in line:
                line = line.replace(':', ': ')
            keyword = line.split(' ')[0].rstrip(':')
            if keyword == 'GET':  # Get the name of the base table
                _, self.table = self.split_command(line)
                metafilter += "GET %s\n" % self.table
            elif keyword == 'WaitObject':  # Pick a specific object by name
                _, item = self.split_option(line)
                # It's like Filter: name = %s
                # Only for services it's host<blank>servicedesc
                if self.table == 'services':
                    if ';' in item:
                        host_name, service_description = item.split(';', 1)
                    else:
                        host_name, service_description = item.split(' ', 1)
                    self.filtercolumns.append('host_name')
                    self.prefiltercolumns.append('host_name')
                    self.filter_stack.put(self.make_filter('=', 'host_name', host_name))
                    self.filtercolumns.append('description')
                    self.prefiltercolumns.append('description')
                    self.filter_stack.put(self.make_filter('=', 'description', service_description))
                    # A WaitQuery works like an ordinary Query. But if
                    # we already know which object we're watching for
                    # changes, instead of scanning the entire list and
                    # applying a Filter:, we simply reduce the list
                    # so it has just one element.
                    metafilter += "Filter: host_name = %s\n" % host_name
                    metafilter += "Filter: service_description = %s\n" % service_description
                elif self.table == 'hosts':
                    attribute = self.strip_table_from_column('name')
                    self.filtercolumns.append('name')
                    self.prefiltercolumns.append('name')
                    self.filter_stack.put(self.make_filter('=', 'name', item))
                    metafilter += "Filter: host_name = %s\n" % (item,)
                else:
                    attribute = self.strip_table_from_column('name')
                    self.filtercolumns.append('name')
                    self.prefiltercolumns.append('name')
                    self.filter_stack.put(self.make_filter('=', 'name', item))
                    # For the other tables this works like an ordinary query.
                    # In the future there might be more lookup-tables
            elif keyword == 'WaitTrigger':
                _, self.wait_trigger = self.split_option(line)
                if self.wait_trigger not in ['check', 'state', 'log', 'downtime', 'comment', 'command']:
                    self.wait_trigger = 'all'
            elif keyword == 'WaitCondition':
                try:
                    _, attribute, operator, reference = self.split_option(line, 3)
                except:
                    _, attribute, operator, reference = self.split_option(line, 2) + ['']
                if operator in ['=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>', '!>=', '!<', '!<=']:
                    # We need to set columns; otherwise columnheaders will be set to "on"
                    self.columns.append(attribute)
                    # Cut off the table name
                    attribute = self.strip_table_from_column(attribute)
                    # Some operators can simply be negated
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = {'!>': '<=', '!>=': '<', '!<': '>=', '!<=': '>'}[operator]
                    # Put a function on top of the filter_stack which implements
                    # the desired operation
                    self.filtercolumns.append(attribute)
                    self.prefiltercolumns.append(attribute)
                    self.filter_stack.put(self.make_filter(operator, attribute, reference))
                    if self.table == 'log':
                        self.db.add_filter(operator, attribute, reference)
                else:
                    logger.warning("[Livestatus Wait Query] Illegal operation: %s" % str(operator))
                    pass  # illegal operation
            elif keyword == 'WaitConditionAnd':
                _, andnum = self.split_option(line)
                # Take the last andnum functions from the stack
                # Construct a new function which makes a logical and
                # Put the function back onto the stack
                self.filter_stack.and_elements(andnum)
                if self.table == 'log':
                    self.db.add_filter_and(andnum)
            elif keyword == 'WaitConditionOr':
                _, ornum = self.split_option(line)
                # Take the last ornum functions from the stack
                # Construct a new function which makes a logical or
                # Put the function back onto the stack
                self.filter_stack.or_elements(ornum)
                if self.table == 'log':
                    self.db.add_filter_or(ornum)
            elif keyword == 'WaitTimeout':
                _, self.wait_timeout = self.split_option(line)
                self.wait_timeout = int(self.wait_timeout) / 1000
            else:
                # This line is not valid or not implemented
                logger.warning("[Livestatus Wait Query] Received a line of input which i can't handle: '%s'" % line)
                pass
        # Make columns unique
        self.filtercolumns = list(set(self.filtercolumns))
        self.prefiltercolumns = list(set(self.prefiltercolumns))

        # Make one big filter where the single filters are anded
        self.filter_stack.and_elements(self.filter_stack.qsize())

        #if self.table == 'log':
        #    self.sql_filter_stack.and_elements(self.sql_filter_stack.qsize())

        self.metainfo = LiveStatusQueryMetainfo(metafilter)

    def launch_query(self):
        """ Prepare the request object's filter stacks """

        # The Response object needs to access the Query
        self.response.load(self)

        # A minimal integrity check
        if not self.table:
            return []

        try:
            # Remember the number of stats filters. We need these numbers as columns later.
            # But we need to ask now, because get_live_data() will empty the stack
            if self.table == 'log':
                result = self.get_live_data_log()
            else:
                # If the pnpgraph_present column is involved, then check
                # with each request if the pnp perfdata path exists
                if 'pnpgraph_present' in self.columns + self.filtercolumns + self.prefiltercolumns and self.pnp_path and os.access(self.pnp_path, os.R_OK):
                    self.pnp_path_readable = True
                else:
                    self.pnp_path_readable = False
                # Apply the filters on the broker's host/service/etc elements
                result = self.get_live_data()
        except Exception, e:
            import traceback
            logger.error("[Livestatus Wait Query]  Error: %s" % e)
            traceback.print_exc(32)
            result = []
        return result
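A wait query is driven by the Wait* keywords handled above. A request of roughly the following shape picks one service via WaitObject:, then waits until the WaitCondition: holds (re-evaluated on the 'check' trigger) or the WaitTimeout: of 10000 ms (converted to seconds in parse_input()) expires before the query is answered; the host and service names are placeholders:

wait_request = (
    "GET services\n"
    "WaitObject: web01;HTTP\n"
    "WaitCondition: last_check >= 1234567890\n"
    "WaitTrigger: check\n"
    "WaitTimeout: 10000\n"
    "Columns: host_name description state last_check\n"
    "\n"
)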
 def __init__(self, *args, **kwargs):
     super(LiveStatusWaitQuery, self).__init__(*args, **kwargs)
     self.response = LiveStatusResponse()
     self.wait_start = time.time()
     self.wait_timeout = 0
     self.wait_trigger = 'all'