Example #1
class LiveStatusQuery(Hooker):

    my_type = "query"

    def __init__(
        self,
        configs,
        hosts,
        services,
        contacts,
        hostgroups,
        servicegroups,
        contactgroups,
        timeperiods,
        commands,
        schedulers,
        pollers,
        reactionners,
        brokers,
        dbconn,
        pnp_path,
        return_queue,
        counters,
    ):
        # Runtime data from the global LiveStatus object
        self.configs = configs
        self.hosts = hosts
        self.services = services
        self.contacts = contacts
        self.hostgroups = hostgroups
        self.servicegroups = servicegroups
        self.contactgroups = contactgroups
        self.timeperiods = timeperiods
        self.commands = commands
        self.schedulers = schedulers
        self.pollers = pollers
        self.reactionners = reactionners
        self.brokers = brokers
        self.dbconn = dbconn
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = counters

        # Private attributes for this specific request
        self.response = LiveStatusResponse(
            responseheader="off",
            outputformat="csv",
            keepalive="off",
            columnheaders="undef",
            separators=LiveStatusResponse.separators,
        )
        self.table = None
        self.columns = []
        self.filtercolumns = []
        self.prefiltercolumns = []
        self.stats_group_by = []
        self.stats_columns = []
        self.aliases = []
        self.limit = None
        self.extcmd = False
        self.out_map = self.copy_out_map_hooks()

        # Initialize the stacks which are needed for the Filter: and Stats:
        # filter- and count-operations
        self.filter_stack = LiveStatusStack()
        self.sql_filter_stack = LiveStatusStack()
        self.sql_filter_stack.type = "sql"
        self.stats_filter_stack = LiveStatusStack()
        self.stats_postprocess_stack = LiveStatusStack()
        self.stats_request = False

        # When was this query launched?
        self.tic = time.time()
        # Clients can also send their local time with the request
        self.client_localtime = self.tic

    def find_converter(self, attribute):
        """Return a function that converts textual numbers
        in the request to the correct data type"""
        out_map = LSout_map[self.out_map_name]
        if attribute in out_map and "type" in out_map[attribute]:
            if out_map[attribute]["type"] == "int":
                return int
            elif out_map[attribute]["type"] == "float":
                return float
        return None

    def set_default_out_map_name(self):
        """Translate the table name to the corresponding out_map key."""
        try:
            self.out_map_name = {
                "hosts": "Host",
                "services": "Service",
                "hostgroups": "Hostgroup",
                "servicegroups": "Servicegroup",
                "contacts": "Contact",
                "contactgroups": "Contactgroup",
                "comments": "Comment",
                "downtimes": "Downtime",
                "commands": "Command",
                "timeperiods": "Timeperiod",
                "hostsbygroup": "Hostsbygroup",
                "servicesbygroup": "Servicesbygroup",
                "servicesbyhostgroup": "Servicesbyhostgroup",
                "status": "Config",
                "log": "Logline",
                "schedulers": "SchedulerLink",
                "pollers": "PollerLink",
                "reactionners": "ReactionnerLink",
                "brokers": "BrokerLink",
                "problems": "Problem",
                "columns": "Config",  # just a dummy
            }[self.table]
        except:
            self.out_map_name = "hosts"

    def copy_out_map_hooks(self):
        """Update the hooks for some out_map entries.
        
        Livestatus columns which have a fulldepythonize postprocessor
        need an updated argument list. The third argument needs to
        be the request object. (When the out_map is first supplied
        with hooks, the third argument is the Livestatus object.)
        
        """
        new_map = {}
        for objtype in LSout_map:
            new_map[objtype] = {}
            for attribute in LSout_map[objtype]:
                new_map[objtype][attribute] = {}
                entry = LSout_map[objtype][attribute]
                if "hooktype" in entry:
                    if "prop" not in entry or entry["prop"] is None:
                        prop = attribute
                    else:
                        prop = entry["prop"]
                    if "default" in entry:
                        default = entry["default"]
                    else:
                        if entry["type"] == "int" or entry["type"] == "float":
                            default = 0
                        else:
                            default = ""
                    func = entry["fulldepythonize"]
                    new_map[objtype][attribute]["hook"] = self.make_hook(
                        "get_prop_full_depythonize", prop, default, func, None
                    )
                else:
                    new_map[objtype][attribute]["hook"] = entry["hook"]
        return new_map

    def __str__(self):
        output = "LiveStatusRequest:\n"
        for attr in [
            "table",
            "columns",
            "filtercolumns",
            "prefiltercolumns",
            "aliases",
            "stats_group_by",
            "stats_request",
        ]:
            output += "request %s: %s\n" % (attr, getattr(self, attr))
        return output

    def split_command(self, line, splits=1):
        """Create a list from the words of a line"""
        return line.split(" ", splits)

    def split_option(self, line, splits=1):
        """Like split_commands, but converts numbers to int data type"""
        # x = [int(i) if i.isdigit() else i for i in [token.strip() for token in re.split(r"[\s]+", line, splits)]]
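        # Note: unlike the comprehension above, the and/or idiom below keeps the
        # token "0" as a string, because int("0") is falsy.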
        x = map(lambda i: (i.isdigit() and int(i)) or i, [token.strip() for token in re.split(r"[\s]+", line, splits)])
        return x

    def split_option_with_columns(self, line):
        """Split a line in a command and a list of words"""
        cmd, columns = self.split_option(line)
        return cmd, [self.strip_table_from_column(c) for c in re.compile(r"\s+").split(columns)]

    def strip_table_from_column(self, column):
        """Cut off the table name, because it is possible 
        to say service_state instead of state"""
        bygroupmatch = re.compile(r"(\w+)by.*group").search(self.table)
        if bygroupmatch:
            return re.sub(re.sub("s$", "", bygroupmatch.group(1)) + "_", "", column, 1)
        else:
            return re.sub(re.sub("s$", "", self.table) + "_", "", column, 1)

    def parse_input(self, data):
        """Parse the lines of a livestatus request.
        
        This function looks for keywords in input lines and
        sets the attributes of the request object
        
        """
        for line in data.splitlines():
            line = line.strip()
            # Tools like NagVis send KEYWORD:option, and we prefer to have
            # a space following the :
            if ":" in line and not " " in line:
                line = line.replace(":", ": ")
            keyword = line.split(" ")[0].rstrip(":")
            if keyword == "GET":  # Get the name of the base table
                cmd, self.table = self.split_command(line)
                self.set_default_out_map_name()
            elif keyword == "Columns":  # Get the names of the desired columns
                cmd, self.columns = self.split_option_with_columns(line)
                self.response.columnheaders = "off"
            elif keyword == "ResponseHeader":
                cmd, responseheader = self.split_option(line)
                self.response.responseheader = responseheader
            elif keyword == "OutputFormat":
                cmd, outputformat = self.split_option(line)
                self.response.outputformat = outputformat
            elif keyword == "KeepAlive":
                cmd, keepalive = self.split_option(line)
                self.response.keepalive = keepalive
            elif keyword == "ColumnHeaders":
                cmd, columnheaders = self.split_option(line)
                self.response.columnheaders = columnheaders
            elif keyword == "Limit":
                cmd, self.limit = self.split_option(line)
            elif keyword == "Filter":
                try:
                    cmd, attribute, operator, reference = self.split_option(line, 3)
                except:
                    cmd, attribute, operator, reference = self.split_option(line, 2) + [""]
                if operator in ["=", ">", ">=", "<", "<=", "=~", "~", "~~", "!=", "!>", "!>=", "!<", "!<="]:
                    # Cut off the table name
                    attribute = self.strip_table_from_column(attribute)
                    # Some operators can simply be negated
                    if operator in ["!>", "!>=", "!<", "!<="]:
                        operator = {"!>": "<=", "!>=": "<", "!<": ">=", "!<=": ">"}[operator]
                    # Put a function on top of the filter_stack which implements
                    # the desired operation
                    self.filtercolumns.append(attribute)
                    self.prefiltercolumns.append(attribute)
                    self.filter_stack.put(self.make_filter(operator, attribute, reference))
                    if self.table == "log":
                        if attribute == "time":
                            self.sql_filter_stack.put(self.make_sql_filter(operator, attribute, reference))
                else:
                    print "illegal operation", operator
                    pass  # illegal operation
            elif keyword == "And":
                cmd, andnum = self.split_option(line)
                # Take the last andnum functions from the stack
                # Construct a new function which makes a logical and
                # Put the function back onto the stack
                self.filter_stack.and_elements(andnum)
            elif keyword == "Or":
                cmd, ornum = self.split_option(line)
                # Take the last ornum functions from the stack
                # Construct a new function which makes a logical or
                # Put the function back onto the stack
                self.filter_stack.or_elements(ornum)
            elif keyword == "StatsGroupBy":
                cmd, stats_group_by = self.split_option_with_columns(line)
                self.filtercolumns.extend(stats_group_by)
                self.stats_group_by.extend(stats_group_by)
                # Deprecated. If your query contains at least one Stats:-header
                # then Columns: has the meaning of the old StatsGroupBy: header
            elif keyword == "Stats":
                self.stats_request = True
                try:
                    cmd, attribute, operator, reference = self.split_option(line, 3)
                    if attribute in ["sum", "min", "max", "avg", "std"] and reference.find("as ", 3) != -1:
                        attribute, operator = operator, attribute
                        asas, alias = reference.split(" ")
                        self.aliases.append(alias)
                    elif attribute in ["sum", "min", "max", "avg", "std"] and reference == "=":
                        # Workaround for thruk-cmds like: Stats: sum latency =
                        attribute, operator = operator, attribute
                        reference = ""
                except:
                    cmd, attribute, operator = self.split_option(line, 3)
                    if attribute in ["sum", "min", "max", "avg", "std"]:
                        attribute, operator = operator, attribute
                    reference = ""
                attribute = self.strip_table_from_column(attribute)
                if operator in ["=", ">", ">=", "<", "<=", "=~", "~", "~~", "!=", "!>", "!>=", "!<", "!<="]:
                    if operator in ["!>", "!>=", "!<", "!<="]:
                        operator = {"!>": "<=", "!>=": "<", "!<": ">=", "!<=": ">"}[operator]
                    self.filtercolumns.append(attribute)
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put(self.make_filter(operator, attribute, reference))
                    self.stats_postprocess_stack.put(self.make_filter("count", attribute, None))
                elif operator in ["sum", "min", "max", "avg", "std"]:
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put(self.make_filter("dummy", attribute, None))
                    self.stats_postprocess_stack.put(self.make_filter(operator, attribute, None))
                else:
                    print "illegal operation", operator
                    pass  # illegal operation
            elif keyword == "StatsAnd":
                cmd, andnum = self.split_option(line)
                self.stats_filter_stack.and_elements(andnum)
            elif keyword == "StatsOr":
                cmd, ornum = self.split_option(line)
                self.stats_filter_stack.or_elements(ornum)
            elif keyword == "Separators":
                cmd, sep1, sep2, sep3, sep4 = line.split(" ", 5)
                self.response.separators = map(lambda x: chr(int(x)), [sep1, sep2, sep3, sep4])
            elif keyword == "Localtime":
                cmd, self.client_localtime = self.split_option(line)
            elif keyword == "COMMAND":
                cmd, self.extcmd = line.split(" ", 1)
            else:
                # This line is not valid or not implemented
                print "Received a line of input which i can't handle : '%s'" % line
                pass

    def launch_query(self):
        """ Prepare the request object's filter stacks """

        # A minimal integrity check
        if not self.table:
            return []

        # Make columns unique
        self.filtercolumns = list(set(self.filtercolumns))
        self.prefiltercolumns = list(set(self.prefiltercolumns))
        self.stats_columns = list(set(self.stats_columns))

        if self.stats_request:
            if len(self.columns) > 0:
                # StatsGroupBy is deprecated. Columns: can be used instead
                self.stats_group_by = self.columns
            elif len(self.stats_group_by) > 0:
                self.columns = self.stats_group_by + self.stats_columns
            # if len(self.stats_columns) > 0 and len(self.columns) == 0:
            if len(self.stats_columns) > 0:
                self.columns = self.stats_columns + self.columns

        # Make one big filter where the single filters are anded
        self.filter_stack.and_elements(self.filter_stack.qsize())
        try:
            # Remember the number of stats filters. We need these numbers as columns later.
            # But we need to ask now, because get_live_data() will empty the stack
            num_stats_filters = self.stats_filter_stack.qsize()
            if self.table == "log":
                self.sql_filter_stack.and_elements(self.sql_filter_stack.qsize())
                result = self.get_live_data_log()
            else:
                # If the pnpgraph_present column is involved, then check
                # with each request if the pnp perfdata path exists
                if (
                    "pnpgraph_present" in self.columns + self.filtercolumns + self.prefiltercolumns
                    and self.pnp_path
                    and os.access(self.pnp_path, os.R_OK)
                ):
                    self.pnp_path_readable = True
                else:
                    self.pnp_path_readable = False
                # Apply the filters on the broker's host/service/etc elements

                result = self.get_live_data()

            if self.stats_request:
                self.columns = range(num_stats_filters)
                if self.stats_group_by:
                    self.columns = tuple(list(self.stats_group_by) + list(self.columns))
                if len(self.aliases) == 0:
                    # If there were Stats: statements without "as", show no column headers at all
                    self.response.columnheaders = "off"
                else:
                    self.response.columnheaders = "on"

        except Exception, e:
            import traceback

            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print e
            traceback.print_exc(32)
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            result = []

        return result
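
For orientation, the request text that parse_input() consumes is a plain sequence of keyword-prefixed lines. The standalone sketch below is illustrative only (the sample_request text is invented, not taken from the source) and mirrors just the keyword extraction performed at the top of parse_input():

# Standalone sketch: pull the leading keyword out of each request line,
# the same way the first few lines of parse_input() do.
sample_request = (
    "GET services\n"
    "Columns: host_name description state\n"
    "Filter: state = 2\n"
    "Filter: in_notification_period = 1\n"
    "And: 2\n"
    "OutputFormat: csv\n"
)

for line in sample_request.splitlines():
    line = line.strip()
    # NagVis-style "KEYWORD:option" lines get a space inserted after the colon
    if ":" in line and " " not in line:
        line = line.replace(":", ": ")
    keyword = line.split(" ")[0].rstrip(":")
    print("%-14s %r" % (keyword, line))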
Example #2
class LiveStatusQuery(Hooker):

    my_type = 'query'

    def __init__(self, configs, hosts, services, contacts, hostgroups, servicegroups, contactgroups, timeperiods, commands, schedulers, pollers, reactionners, brokers, db, use_aggressive_sql, pnp_path, return_queue, counters):
        # Runtime data from the global LiveStatus object
        self.configs = configs
        self.hosts = hosts
        self.services = services
        self.contacts = contacts
        self.hostgroups = hostgroups
        self.servicegroups = servicegroups
        self.contactgroups = contactgroups
        self.timeperiods = timeperiods
        self.commands = commands
        self.schedulers = schedulers
        self.pollers = pollers
        self.reactionners = reactionners
        self.brokers = brokers
        self.db = db
        self.use_aggressive_sql = use_aggressive_sql
        self.pnp_path = pnp_path
        self.return_queue = return_queue
        self.counters = counters

        # Private attributes for this specific request
        self.response = LiveStatusResponse(responseheader = 'off', outputformat = 'csv', keepalive = 'off', columnheaders = 'undef', separators = LiveStatusResponse.separators)
        self.table = None
        self.columns = []
        self.filtercolumns = []
        self.prefiltercolumns = []
        self.stats_group_by = []
        self.stats_columns = []
        self.aliases = []
        self.limit = None
        self.extcmd = False
        self.out_map = self.copy_out_map_hooks()

        # Initialize the stacks which are needed for the Filter: and Stats:
        # filter- and count-operations
        self.filter_stack = LiveStatusStack()
        # This stack is used to create a full-blown select-statement
        self.sql_filter_stack = LiveStatusStack()
        self.sql_filter_stack.type = 'sql'
        # This stack is used to create a minimal select-statement which
        # selects only by time >= and time <=
        self.sql_time_filter_stack = LiveStatusStack()
        self.sql_time_filter_stack.type = 'sql'
        self.stats_filter_stack = LiveStatusStack()
        self.stats_postprocess_stack = LiveStatusStack()
        self.stats_request = False

        # When was this query launched?
        self.tic = time.time()
        # Clients can also send their local time with the request
        self.client_localtime = self.tic


    def find_converter(self, attribute):
        """Return a function that converts textual numbers
        in the request to the correct data type"""
        out_map = LSout_map[self.out_map_name]
        if attribute in out_map and 'type' in out_map[attribute]:
            if out_map[attribute]['type'] == 'int':
                return int
            elif out_map[attribute]['type'] == 'float':
                return float
        return None


    def set_default_out_map_name(self):
        """Translate the table name to the corresponding out_map key."""
        try:
            self.out_map_name = {
                'hosts' : 'Host',
                'services' : 'Service',
                'hostgroups' : 'Hostgroup',
                'servicegroups' : 'Servicegroup',
                'contacts' : 'Contact',
                'contactgroups' : 'Contactgroup',
                'comments' : 'Comment',
                'downtimes' : 'Downtime',
                'commands' : 'Command',
                'timeperiods' : 'Timeperiod',
                'hostsbygroup' : 'Hostsbygroup',
                'servicesbygroup' : 'Servicesbygroup',
                'servicesbyhostgroup' : 'Servicesbyhostgroup',
                'status' : 'Config',
                'log' : 'Logline',
                'schedulers' : 'SchedulerLink',
                'pollers' : 'PollerLink',
                'reactionners' : 'ReactionnerLink',
                'brokers' : 'BrokerLink',
                'problems' : 'Problem',
                'columns' : 'Config', # just a dummy
            }[self.table]
        except:
            self.out_map_name = 'hosts'


    def copy_out_map_hooks(self):
        """Update the hooks for some out_map entries.
        
        Livestatus columns which have a fulldepythonize postprocessor
        need an updated argument list. The third argument needs to
        be the request object. (When the out_map is first supplied
        with hooks, the third argument is the Livestatus object.)
        
        """
        new_map = {}
        for objtype in LSout_map:
            new_map[objtype] = {}
            for attribute in LSout_map[objtype]:
                new_map[objtype][attribute] = {}
                entry = LSout_map[objtype][attribute]
                if 'hooktype' in entry:
                    if 'prop' not in entry or entry['prop'] is None:
                        prop = attribute
                    else:
                        prop = entry['prop']
                    if 'default' in entry:
                        default = entry['default']
                    else:
                        if entry['type'] == 'int' or entry['type'] == 'float':
                            default = 0
                        else:
                            default = ''
                    func = entry['fulldepythonize']
                    new_map[objtype][attribute]['hook'] = self.make_hook('get_prop_full_depythonize', prop, default, func, None)
                else:
                    new_map[objtype][attribute]['hook'] = entry['hook']
        return new_map


    def __str__(self):
        output = "LiveStatusRequest:\n"
        for attr in ["table", "columns", "filtercolumns", "prefiltercolumns", "aliases", "stats_group_by", "stats_request"]:
            output += "request %s: %s\n" % (attr, getattr(self, attr))
        return output
  

    def split_command(self, line, splits=1):
        """Create a list from the words of a line"""
        return line.split(' ', splits)


    def split_option(self, line, splits=1):
        """Like split_commands, but converts numbers to int data type"""
        #x = [int(i) if i.isdigit() else i for i in [token.strip() for token in re.split(r"[\s]+", line, splits)]]
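        # Note: unlike the comprehension above, the and/or idiom below keeps the
        # token "0" as a string, because int("0") is falsy.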
        x = map(lambda i: (i.isdigit() and int(i)) or i, [token.strip() for token in re.split(r"[\s]+", line, splits)])
        return x


    def split_option_with_columns(self, line):
        """Split a line in a command and a list of words"""
        cmd, columns = self.split_option(line)
        return cmd, [self.strip_table_from_column(c) for c in re.compile(r'\s+').split(columns)]


    def strip_table_from_column(self, column):
        """Cut off the table name, because it is possible 
        to say service_state instead of state"""
        bygroupmatch = re.compile(r'(\w+)by.*group').search(self.table)
        if bygroupmatch:
            return re.sub(re.sub('s$', '', bygroupmatch.group(1)) + '_', '', column, 1)
        else:
            return re.sub(re.sub('s$', '', self.table) + '_', '', column, 1)


    def parse_input(self, data):
        """Parse the lines of a livestatus request.
        
        This function looks for keywords in input lines and
        sets the attributes of the request object
        
        """
        for line in data.splitlines():
            line = line.strip()
            # Tools like NagVis send KEYWORD:option, and we prefer to have
            # a space following the :
            if ':' in line and not ' ' in line:
                line = line.replace(':', ': ')
            keyword = line.split(' ')[0].rstrip(':')
            if keyword == 'GET': # Get the name of the base table
                cmd, self.table = self.split_command(line)
                self.set_default_out_map_name()
            elif keyword == 'Columns': # Get the names of the desired columns
                cmd, self.columns = self.split_option_with_columns(line)
                self.response.columnheaders = 'off'
            elif keyword == 'ResponseHeader':
                cmd, responseheader = self.split_option(line)
                self.response.responseheader = responseheader
            elif keyword == 'OutputFormat':
                cmd, outputformat = self.split_option(line)
                self.response.outputformat = outputformat
            elif keyword == 'KeepAlive':
                cmd, keepalive = self.split_option(line)
                self.response.keepalive = keepalive
            elif keyword == 'ColumnHeaders':
                cmd, columnheaders = self.split_option(line)
                self.response.columnheaders = columnheaders
            elif keyword == 'Limit':
                cmd, self.limit = self.split_option(line)
            elif keyword == 'Filter':
                try:
                    cmd, attribute, operator, reference = self.split_option(line, 3)
                except:
                    cmd, attribute, operator, reference = self.split_option(line, 2) + ['']
                if operator in ['=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>', '!>=', '!<', '!<=']:
                    # Cut off the table name
                    attribute = self.strip_table_from_column(attribute)
                    # Some operators can simply be negated
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = { '!>' : '<=', '!>=' : '<', '!<' : '>=', '!<=' : '>' }[operator]
                    # Put a function on top of the filter_stack which implements
                    # the desired operation
                    self.filtercolumns.append(attribute)
                    self.prefiltercolumns.append(attribute)
                    self.filter_stack.put(self.make_filter(operator, attribute, reference))
                    if self.table == 'log':
                        if attribute == 'time':
                            self.sql_time_filter_stack.put(self.make_sql_filter(operator, attribute, reference))
                        self.sql_filter_stack.put(self.make_sql_filter(operator, attribute, reference))
                else:
                    print "illegal operation", operator
                    pass # illegal operation
            elif keyword == 'And':
                cmd, andnum = self.split_option(line)
                # Take the last andnum functions from the stack
                # Construct a new function which makes a logical and
                # Put the function back onto the stack
                self.filter_stack.and_elements(andnum)
                if self.table == 'log':
                    #self.sql_time_filter_stack.and_elements(andnum)
                    self.sql_filter_stack.and_elements(andnum)
            elif keyword == 'Or':
                cmd, ornum = self.split_option(line)
                # Take the last ornum functions from the stack
                # Construct a new function which makes a logical or
                # Put the function back onto the stack
                self.filter_stack.or_elements(ornum)
                if self.table == 'log':
                    #self.sql_time_filter_stack.or_elements(ornum)
                    self.sql_filter_stack.or_elements(ornum)
            elif keyword == 'StatsGroupBy':
                cmd, stats_group_by = self.split_option_with_columns(line)
                self.filtercolumns.extend(stats_group_by)
                self.stats_group_by.extend(stats_group_by)
                # Deprecated. If your query contains at least one Stats:-header
                # then Columns: has the meaning of the old StatsGroupBy: header
            elif keyword == 'Stats':
                self.stats_request = True
                try:
                    cmd, attribute, operator, reference = self.split_option(line, 3)
                    if attribute in ['sum', 'min', 'max', 'avg', 'std'] and reference.find('as ', 3) != -1:
                        attribute, operator = operator, attribute
                        asas, alias = reference.split(' ')
                        self.aliases.append(alias)
                    elif attribute in ['sum', 'min', 'max', 'avg', 'std'] and reference == '=':
                        # Workaround for thruk-cmds like: Stats: sum latency =
                        attribute, operator = operator, attribute
                        reference = ''
                except:
                    cmd, attribute, operator = self.split_option(line, 3)
                    if attribute in ['sum', 'min', 'max', 'avg', 'std']:
                        attribute, operator = operator, attribute
                    reference = ''
                attribute = self.strip_table_from_column(attribute)
                if operator in ['=', '>', '>=', '<', '<=', '=~', '~', '~~', '!=', '!>', '!>=', '!<', '!<=']:
                    if operator in ['!>', '!>=', '!<', '!<=']:
                        operator = { '!>' : '<=', '!>=' : '<', '!<' : '>=', '!<=' : '>' }[operator]
                    self.filtercolumns.append(attribute)
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put(self.make_filter(operator, attribute, reference))
                    self.stats_postprocess_stack.put(self.make_filter('count', attribute, None))
                elif operator in ['sum', 'min', 'max', 'avg', 'std']:
                    self.stats_columns.append(attribute)
                    self.stats_filter_stack.put(self.make_filter('dummy', attribute, None))
                    self.stats_postprocess_stack.put(self.make_filter(operator, attribute, None))
                else:
                    print "illegal operation", operator
                    pass # illegal operation
            elif keyword == 'StatsAnd':
                cmd, andnum = self.split_option(line)
                self.stats_filter_stack.and_elements(andnum)
            elif keyword == 'StatsOr':
                cmd, ornum = self.split_option(line)
                self.stats_filter_stack.or_elements(ornum)
            elif keyword == 'Separators':
                cmd, sep1, sep2, sep3, sep4 = line.split(' ', 5)
                self.response.separators = map(lambda x: chr(int(x)), [sep1, sep2, sep3, sep4])
            elif keyword == 'Localtime':
                cmd, self.client_localtime = self.split_option(line)
            elif keyword == 'COMMAND':
                cmd, self.extcmd = line.split(' ', 1)
            else:
                # This line is not valid or not implemented
                print "Received a line of input which i can't handle : '%s'" % line
                pass


    def launch_query(self):
        """ Prepare the request object's filter stacks """
        
        # A minimal integrity check
        if not self.table:
            return []

        # Make columns unique
        self.filtercolumns = list(set(self.filtercolumns))
        self.prefiltercolumns = list(set(self.prefiltercolumns))
        self.stats_columns = list(set(self.stats_columns))

        if self.stats_request:
            if len(self.columns) > 0:
                # StatsGroupBy is deprecated. Columns: can be used instead
                self.stats_group_by = self.columns
            elif len(self.stats_group_by) > 0:
                self.columns = self.stats_group_by + self.stats_columns
            #if len(self.stats_columns) > 0 and len(self.columns) == 0:
            if len(self.stats_columns) > 0:
                self.columns = self.stats_columns + self.columns

        # Make one big filter where the single filters are anded
        self.filter_stack.and_elements(self.filter_stack.qsize())
        try:
            # Remember the number of stats filters. We need these numbers as columns later.
            # But we need to ask now, because get_live_data() will empty the stack
            num_stats_filters = self.stats_filter_stack.qsize()
            if self.table == 'log':
                self.sql_time_filter_stack.and_elements(self.sql_time_filter_stack.qsize())
                self.sql_filter_stack.and_elements(self.sql_filter_stack.qsize())
                result = self.get_live_data_log()
            else:
                # If the pnpgraph_present column is involved, then check
                # with each request if the pnp perfdata path exists
                if 'pnpgraph_present' in self.columns + self.filtercolumns + self.prefiltercolumns and self.pnp_path and os.access(self.pnp_path, os.R_OK):
                    self.pnp_path_readable = True
                else:
                    self.pnp_path_readable = False
                # Apply the filters on the broker's host/service/etc elements
          
                result = self.get_live_data()
                
            if self.stats_request:
                self.columns = range(num_stats_filters)
                if self.stats_group_by:
                    self.columns = tuple(list(self.stats_group_by) + list(self.columns))
                if len(self.aliases) == 0:
                    # If there were Stats: statements without "as", show no column headers at all
                    self.response.columnheaders = 'off'
                else:
                    self.response.columnheaders = 'on'

        except Exception, e:
            import traceback
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print e
            traceback.print_exc(32) 
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            result = []
        
        return result
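
Compared with Example #1, this version also threads a separate sql_time_filter_stack through parse_input() and launch_query(), holding only the time >= / time <= constraints for the log table, alongside the full sql_filter_stack. Both versions share the strip_table_from_column() prefix-stripping logic, whose nested re.sub calls are easy to misread; the standalone sketch below reproduces it outside the class (the strip_table_prefix name and the sample calls are illustrative, not part of the source):

import re

def strip_table_prefix(table, column):
    # Mirror of strip_table_from_column(): for "...bygroup" tables the captured
    # group supplies the prefix, otherwise the table name does; the trailing "s"
    # is dropped and the resulting "<singular>_" prefix is removed once.
    bygroupmatch = re.compile(r"(\w+)by.*group").search(table)
    prefix = bygroupmatch.group(1) if bygroupmatch else table
    return re.sub(re.sub("s$", "", prefix) + "_", "", column, count=1)

print(strip_table_prefix("services", "service_state"))  # -> state
print(strip_table_prefix("hostsbygroup", "host_name"))   # -> name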