Ejemplo n.º 1
0
class EventletInbox(object):
    """Message inbox backed by an eventlet queue with non-blocking reads."""

    def __init__(self, logger=None):
        """Create an empty inbox.

        :param logger: optional logger; when omitted, a module-scoped
                       default logger is created.
        """
        self.__inbox = EventletQueue()
        if logger is not None:
            self._logger = logger
        else:
            self._logger = getLogger('%s.EventletInbox' % __name__)

    def get(self):
        """Pop the next message without blocking.

        :raises EmptyInboxException: when no message is queued.
        """
        try:
            return self.__inbox.get_nowait()
        except EventletEmpty:
            raise EmptyInboxException

    def put(self, message):
        """Append *message* to the inbox."""
        self.__inbox.put(message)

    def __len__(self):
        """Return the number of queued messages."""
        return self.__inbox.qsize()
Ejemplo n.º 2
0
class StatsdLog(object):
    """Simple server to monitor a syslog udp stream for statsd events"""

    def __init__(self, conf):
        """Configure logging, counters, and search patterns from *conf*.

        :param conf: dict-like configuration mapping
        Exits the process (sys.exit(1)) if the patterns file cannot load.
        """
        TRUE_VALUES = set(('true', '1', 'yes', 'on', 't', 'y'))
        self.conf = conf
        self.logger = logging.getLogger('statsdlogd')
        self.logger.setLevel(logging.INFO)
        self.syslog = SysLogHandler(address='/dev/log')
        self.formatter = logging.Formatter('%(name)s: %(message)s')
        self.syslog.setFormatter(self.formatter)
        self.logger.addHandler(self.syslog)
        self.debug = conf.get('debug', 'false').lower() in TRUE_VALUES
        self.statsd_host = conf.get('statsd_host', '127.0.0.1')
        self.statsd_port = int(conf.get('statsd_port', '8125'))
        self.listen_addr = conf.get('listen_addr', '127.0.0.1')
        self.listen_port = int(conf.get('listen_port', 8126))
        self.report_internal_stats = conf.get('report_internal_stats',
                                              'true').lower() in TRUE_VALUES
        self.int_stats_interval = int(conf.get('internal_stats_interval', 5))
        self.buff = int(conf.get('buffer_size', 8192))
        self.max_q_size = int(conf.get('max_line_backlog', 512))
        self.statsd_sample_rate = float(conf.get('statsd_sample_rate', '.5'))
        self.counter = 0
        self.skip_counter = 0
        self.hits = 0
        self.q = Queue(maxsize=self.max_q_size)
        # patterns maps stat name -> regex source string
        self.patterns_file = conf.get('patterns_file',
                                      '/etc/statsdlog/patterns.json')
        self.json_patterns = conf.get('json_pattern_file',
                                      'true').lower() in TRUE_VALUES
        try:
            self.patterns = self.load_patterns()
        except Exception as err:
            self.logger.exception(err)
            # print() form works on both Python 2 and 3 (was a py2-only
            # print statement)
            print("Encountered exception at startup: %s" % err)
            sys.exit(1)
        self.statsd_addr = (self.statsd_host, self.statsd_port)
        self.comp_patterns = {}
        for item in self.patterns:
            self.comp_patterns[item] = re.compile(self.patterns[item])

    def load_patterns(self):
        """Load patterns from a json file or a plain "name = regex" file.

        :returns: dict mapping stat name -> regex source string
        """
        if self.json_patterns:
            self.logger.info("Using json based patterns file: %s" %
                             self.patterns_file)
            with open(self.patterns_file) as pfile:
                return json.loads(pfile.read())
        else:
            self.logger.info("Using plain text patterns file: %s" %
                             self.patterns_file)
        patterns = {}
        with open(self.patterns_file) as f:
            for line in f:
                # Skip blank lines up front.  The original set
                # pattern = None for an empty line and then crashed on
                # len(None); this path is now handled safely.
                if not line.strip():
                    continue
                pattern = [x.strip() for x in line.split("=", 1)]
                if len(pattern) == 2 and pattern[0] and pattern[1]:
                    patterns[pattern[0]] = pattern[1]
                else:
                    self.logger.error("Skipping pattern. Unable to parse: %s" %
                                      line)
        return patterns

    def check_line(self, line):
        """
        Check if a line matches our search patterns.

        :param line: The string to check
        :returns: List of regex entries that matched (or empty list if none)
        """
        matches = []
        for entry in self.comp_patterns:
            if self.comp_patterns[entry].match(line):
                matches.append(entry)
        return matches

    def internal_stats(self):
        """
        Periodically send our own stats to statsd.
        """
        lastcount = 0
        lasthit = 0
        while True:
            eventlet.sleep(self.int_stats_interval)
            self.send_event("statsdlog.lines:%s|c" %
                            (self.counter - lastcount))
            lastcount = self.counter
            self.send_event("statsdlog.hits:%s|c" % (self.hits - lasthit))
            lasthit = self.hits

    def stats_print(self):
        """
        Periodically dump some stats to the logs.
        """
        interval = 2  # seconds between samples
        lastcount = 0
        lasthit = 0
        while True:
            eventlet.sleep(interval)
            # Divide the delta by the actual sample interval (was /60 with
            # a 2-second sleep, which under-reported the per-second rates).
            lps = (self.counter - lastcount) / interval
            hps = (self.hits - lasthit) / interval
            lastcount = self.counter
            lasthit = self.hits
            self.logger.info('per second: %d lines - hits %d' % (lps, hps))
            self.logger.info('totals: %d hits - %d lines' %
                             (self.hits, self.counter))
            # != instead of "is not": identity comparison with an int
            # literal is unreliable and a SyntaxWarning on modern Pythons.
            if self.skip_counter != 0:
                self.logger.info('Had to skip %d log lines so far' %
                                 self.skip_counter)

    def send_event(self, payload):
        """
        Fire event to statsd

        :param payload: The payload of the udp packet to send.
        """
        try:
            udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            try:
                udp_socket.sendto(payload, self.statsd_addr)
            finally:
                # Close the socket; the original leaked one fd per event.
                udp_socket.close()
        except Exception:
            # udp sendto failed (socket already in use?), but thats ok
            self.logger.error("Error trying to send statsd event")

    def statsd_counter_increment(self, stats, delta=1):
        """
        Increment multiple statsd stats counters

        :param stats: list of stats items to package and send
        :param delta: delta of stats items
        """
        if self.statsd_sample_rate < 1:
            # Sampled mode: only send with probability statsd_sample_rate,
            # tagging the payload so statsd can scale the count back up.
            if random() <= self.statsd_sample_rate:
                for item in stats:
                    payload = "%s:%s|c|@%s" % (item, delta,
                                               self.statsd_sample_rate)
                    self.send_event(payload)
        else:
            for item in stats:
                payload = "%s:%s|c" % (item, delta)
                self.send_event(payload)

    def worker(self):
        """
        Check for and process log lines in queue
        """
        while True:
            msg = self.q.get()
            matches = self.check_line(msg)
            for match in matches:
                self.statsd_counter_increment([match])
                if self.hits >= maxint:
                    self.logger.info("hit maxint, reset hits counter")
                    self.hits = 0
                self.hits += 1

    def listener(self):
        """
        syslog udp listener
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        bind_addr = (self.listen_addr, self.listen_port)
        sock.bind(bind_addr)
        self.logger.info("listening on %s:%d" % bind_addr)
        while 1:
            data, addr = sock.recvfrom(self.buff)
            if not data:
                break
            else:
                if self.q.qsize() < self.max_q_size:
                    self.q.put(data)
                    if self.counter >= maxint:
                        self.logger.info("hit maxint, reset seen counter")
                        self.counter = 0
                    self.counter += 1
                else:
                    if self.debug:
                        # logging.Logger has no notice(); the original
                        # raised AttributeError here when the queue filled.
                        self.logger.warning("max log lines in queue, "
                                            "skipping")
                    if self.skip_counter >= maxint:
                        self.logger.info("hit maxint, reset skip counter")
                        self.skip_counter = 0
                    self.skip_counter += 1

    def start(self):
        """
        Start the listener, worker, and mgmt server.
        """
        eventlet.spawn_n(self.worker)
        if self.debug:
            eventlet.spawn_n(self.stats_print)
        if self.report_internal_stats:
            eventlet.spawn_n(self.internal_stats)
        while True:
            try:
                self.listener()
            except Exception as err:
                self.logger.error(err)
Ejemplo n.º 3
0
class AsyncClient(baseasync.BaseAsync):
    """Asynchronous client built on eventlet greenthreads.

    Outgoing messages are buffered in a bounded queue by sendAsync() and
    drained by a writer greenthread; a reader greenthread consumes
    responses.  Completion callbacks are dispatched on a shared GreenPool.
    """

    def __init__(self, *args, **kwargs):
        super(AsyncClient, self).__init__(*args, **kwargs)
        # Pool used by dispatch() to run callbacks concurrently.
        self.pool = eventlet.greenpool.GreenPool(DEFAULT_POOL_SIZE)
        self.reader_thread = None
        self.writer_thread = None
        # Outgoing (header, value, onSuccess, onError) tuples.
        self.queue = Queue(DEFAULT_MAX_QUEUE_SIZE)
        # Writer backs off while more than this many operations are pending.
        self.max_pending = MAX_PENDING
        # True once shutdown() has been requested; rejects new sends.
        self.closing = False

    def build_socket(self, family=socket.AF_INET):
        """Create the (eventlet-green) socket used for the connection."""
        return socket.socket(family)

    def connect(self):
        """Connect, then start the reader and writer greenthreads."""
        super(AsyncClient, self).connect()
        self.closing = False
        self.reader_thread = eventlet.greenthread.spawn(self._reader_run)
        self.writer_thread = eventlet.greenthread.spawn(self._writer_run)

    def dispatch(self, fn, *args, **kwargs):
        """Run *fn* asynchronously on the green pool."""
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug("Dispatching: Pending {0}".format(len(self._pending)))
        self.pool.spawn_n(fn, *args, **kwargs)

    def shutdown(self):
        """Begin a graceful close.

        If nothing is pending or queued the connection is torn down now;
        otherwise the reader finishes the close once work drains.
        """
        self.closing = True
        if len(self._pending) + self.queue.qsize() == 0:
            self._end_close()

    def close(self):
        """Gracefully close and block until queued work completes."""
        self.shutdown()
        self.wait()

    def _end_close(self):
        """Kill the I/O greenthreads and close the underlying connection."""
        self.writer_thread.kill()
        self.reader_thread.kill()

        super(AsyncClient, self).close()

        self.writer_thread = None
        self.reader_thread = None

    def sendAsync(self, header, value, onSuccess, onError):
        """Queue a message for the writer greenthread.

        :param onSuccess: callback invoked with the response
        :param onError: callback invoked with the failure exception
        :raises common.ConnectionClosed: if the client is closing
        """
        if self.closing:
            raise common.ConnectionClosed(
                "Client is closing, can't queue more operations.")

        if self.faulted:
            self._raise(
                common.ConnectionFaulted(
                    "Can't send message when connection is on a faulted state."
                ), onError)
            return  # skip the rest

        # fail fast on NotConnected
        if not self.isConnected:
            self._raise(common.NotConnected("Not connected."), onError)
            return  # skip the rest

        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug("Queue: {0}".format(self.queue.qsize()))
        self.queue.put((header, value, onSuccess, onError))
        # Yield so the writer greenthread gets a chance to run.
        eventlet.sleep(0)

    def wait(self):
        """Block until every queued message has been fully processed."""
        self.queue.join()

    def send(self, header, value):
        """Synchronous send: queue via sendAsync() and wait for the result.

        :returns: the (m, r, value) tuple passed to the success callback
        :raises: whatever error was passed to the error callback
        """
        done = eventlet.event.Event()

        # Mutable holder so the inner callbacks can store the outcome.
        class Dummy:
            pass

        d = Dummy()
        d.error = None
        d.result = None

        def innerSuccess(m, r, value):
            d.result = (m, r, value)
            done.send()

        def innerError(e):
            d.error = e
            done.send()

        self.sendAsync(header, value, innerSuccess, innerError)

        done.wait()  # TODO(Nacho): should be add a default timeout?
        if d.error: raise d.error
        return d.result

    def _writer_run(self):
        """Writer greenthread: drain the queue into the base transport."""
        while self.isConnected and not self.faulted:
            try:
                # Back off while too many operations are in flight.
                while len(self._pending) > self.max_pending:
                    eventlet.sleep(0)
                (header, value, onSuccess, onError) = self.queue.get()
                super(AsyncClient, self).sendAsync(header, value, onSuccess,
                                                   onError)
            except common.ConnectionFaulted:
                pass
            except common.ConnectionClosed:
                pass
            except Exception as ex:
                self._fault_client(ex)

            # Yield execution, don't starve the reader
            eventlet.sleep(0)

    def _reader_run(self):
        """Reader greenthread: receive responses and finish graceful close."""
        while self.isConnected and not self.faulted:
            try:
                self._async_recv()
                self.queue.task_done()
                # Finish a pending graceful close once all work has drained.
                if self.closing and len(
                        self._pending) + self.queue.qsize() == 0:
                    self._end_close()
            except common.ConnectionFaulted:
                pass
            except Exception as ex:
                self._fault_client(ex)
Ejemplo n.º 4
0
class Client(baseasync.BaseAsync):
    """Asynchronous client built on eventlet greenthreads.

    Like AsyncClient, but sendAsync() additionally supports a no_ack flag
    and SSL sockets can be wrapped via wrap_secure_socket().
    """

    def __init__(self, *args, **kwargs):
        super(Client, self).__init__(*args, **kwargs)
        # Pool used by dispatch() to run callbacks concurrently.
        self.pool = eventlet.greenpool.GreenPool(DEFAULT_POOL_SIZE)
        self.reader_thread = None
        self.writer_thread = None
        # Outgoing (header, value, onSuccess, onError, no_ack) tuples.
        self.queue = Queue(DEFAULT_MAX_QUEUE_SIZE)
        # Writer backs off while more than this many operations are pending.
        self.max_pending = MAX_PENDING
        # True once shutdown() has been requested; rejects new sends.
        self.closing = False

    def build_socket(self, family=socket.AF_INET):
        """Create the (eventlet-green) socket used for the connection."""
        return socket.socket(family)

    def wrap_secure_socket(self, s, ssl_version):
        """Wrap socket *s* in an eventlet green SSL socket."""
        return GreenSSLSocket(s, ssl_version=ssl_version)

    def connect(self):
        """Connect, then start the reader and writer greenthreads."""
        super(Client, self).connect()
        self.closing = False
        self.reader_thread = eventlet.greenthread.spawn(self._reader_run)
        self.writer_thread = eventlet.greenthread.spawn(self._writer_run)

    def dispatch(self, fn, *args, **kwargs):
        """Run *fn* asynchronously on the green pool."""
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug("Dispatching: Pending {0}".format(len(self._pending)))
        self.pool.spawn_n(fn, *args, **kwargs)

    def shutdown(self):
        """Begin a graceful close.

        If nothing is pending or queued the connection is torn down now;
        otherwise the reader finishes the close once work drains.
        """
        self.closing = True
        if len(self._pending) + self.queue.qsize() == 0:
            self._end_close()

    def close(self):
        """Gracefully close and block until queued work completes."""
        self.shutdown()
        self.wait()

    def _end_close(self):
        """Kill the I/O greenthreads and close the underlying connection."""
        self.writer_thread.kill()
        self.reader_thread.kill()

        super(Client, self).close()

        self.writer_thread = None
        self.reader_thread = None

    def sendAsync(self, header, value, onSuccess, onError, no_ack=False):
        """Queue a message for the writer greenthread.

        :param onSuccess: callback invoked with the response
        :param onError: callback invoked with the failure exception
        :param no_ack: forwarded to the base transport's sendAsync
        :raises common.ConnectionClosed: if the client is closing
        """
        if self.closing:
            raise common.ConnectionClosed("Client is closing, can't queue more operations.")

        if self.faulted:
            self._raise(common.ConnectionFaulted("Can't send message when connection is on a faulted state."), onError)
            return  # skip the rest

        # fail fast on NotConnected
        if not self.isConnected:
            self._raise(common.NotConnected("Not connected."), onError)
            return  # skip the rest

        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug("Queue: {0}".format(self.queue.qsize()))
        self.queue.put((header, value, onSuccess, onError, no_ack))
        # Yield so the writer greenthread gets a chance to run.
        eventlet.sleep(0)

    def wait(self):
        """Block until every queued message has been fully processed."""
        self.queue.join()

    def send(self, header, value):
        """Synchronous send: queue via sendAsync() and wait for the result.

        :returns: the (m, r, value) tuple passed to the success callback
        :raises: whatever error was passed to the error callback
        """
        done = eventlet.event.Event()

        # Mutable holder so the inner callbacks can store the outcome.
        class Dummy:
            pass

        d = Dummy()
        d.error = None
        d.result = None

        def innerSuccess(m, r, value):
            d.result = (m, r, value)
            done.send()

        def innerError(e):
            d.error = e
            done.send()

        self.sendAsync(header, value, innerSuccess, innerError)

        done.wait()  # TODO(Nacho): should be add a default timeout?
        if d.error:
            raise d.error
        return d.result

    def _writer_run(self):
        """Writer greenthread: drain the queue into the base transport."""
        while self.isConnected and not self.faulted:
            try:
                # Back off while too many operations are in flight.
                while len(self._pending) > self.max_pending:
                    eventlet.sleep(0)
                (header, value, onSuccess, onError, no_ack) = self.queue.get()
                super(Client, self).sendAsync(header, value, onSuccess, onError, no_ack)
            except common.ConnectionFaulted:
                pass
            except common.ConnectionClosed:
                pass
            except Exception as ex:
                self._fault_client(ex)

            # Yield execution, don't starve the reader
            eventlet.sleep(0)

    def _reader_run(self):
        """Reader greenthread: receive responses and finish graceful close."""
        while self.isConnected and not self.faulted:
            try:
                self._async_recv()
                self.queue.task_done()
                # Finish a pending graceful close once all work has drained.
                if self.closing and len(self._pending) + self.queue.qsize() == 0:
                    self._end_close()
            except common.ConnectionFaulted:
                pass
            except Exception as ex:
                self._fault_client(ex)
Ejemplo n.º 5
0
class StatsdLog(object):
    """Simple server to monitor a syslog udp stream for statsd events."""

    def __init__(self, conf):
        """Configure logging, counters, and search patterns from *conf*.

        :param conf: dict-like configuration mapping
        Exits the process (sys.exit(1)) if the patterns file cannot load.
        """
        TRUE_VALUES = set(("true", "1", "yes", "on", "t", "y"))
        self.conf = conf
        self.logger = logging.getLogger("statsdlogd")
        self.logger.setLevel(logging.INFO)
        self.syslog = SysLogHandler(address="/dev/log")
        self.formatter = logging.Formatter("%(name)s: %(message)s")
        self.syslog.setFormatter(self.formatter)
        self.logger.addHandler(self.syslog)

        # Normalize through str().lower() so "True"/"YES" etc. are
        # recognized (the original compared the raw value, so any
        # non-lowercase truthy string silently disabled the flag).
        self.debug = str(conf.get("debug", "false")).lower() in TRUE_VALUES

        self.statsd_host = conf.get("statsd_host", "127.0.0.1")
        self.statsd_port = int(conf.get("statsd_port", "8125"))
        self.listen_addr = conf.get("listen_addr", "127.0.0.1")
        self.listen_port = int(conf.get("listen_port", 8126))
        self.report_internal_stats = str(
            conf.get("report_internal_stats", "false")).lower() in TRUE_VALUES
        self.int_stats_interval = int(conf.get("internal_stats_interval", 5))
        self.buff = int(conf.get("buffer_size", 8192))
        self.max_q_size = int(conf.get("max_line_backlog", 512))
        self.statsd_sample_rate = float(conf.get("statsd_sample_rate", ".5"))
        self.counter = 0
        self.skip_counter = 0
        self.hits = 0
        self.q = Queue(maxsize=self.max_q_size)
        # patterns maps stat name -> regex source string
        self.patterns_file = conf.get("patterns_file", "patterns.json")
        try:
            with open(self.patterns_file) as pfile:
                self.patterns = json.loads(pfile.read())
        except Exception as err:
            self.logger.critical(err)
            # print() form works on both Python 2 and 3 (was a py2-only
            # print statement)
            print(err)
            sys.exit(1)
        self.statsd_addr = (self.statsd_host, self.statsd_port)
        self.comp_patterns = {}
        for item in self.patterns:
            self.comp_patterns[item] = re.compile(self.patterns[item])

    def check_line(self, line):
        """
        Check if a line matches our search patterns.

        :param line: The string to check
        :returns: None or regex entry that matched
        """
        for entry in self.comp_patterns:
            if self.comp_patterns[entry].match(line):
                return entry
        return None

    def internal_stats(self):
        """
        Periodically send our own stats to statsd.
        """
        lastcount = 0
        lasthit = 0
        while True:
            eventlet.sleep(self.int_stats_interval)
            self.send_event("statsdlog.lines:%s|c" % (self.counter - lastcount))
            lastcount = self.counter
            self.send_event("statsdlog.hits:%s|c" % (self.hits - lasthit))
            lasthit = self.hits

    def stats_print(self):
        """
        Periodically dump some stats to the logs.
        """
        interval = 2  # seconds between samples
        lastcount = 0
        lasthit = 0
        while True:
            eventlet.sleep(interval)
            # Divide the delta by the actual sample interval (was /60 with
            # a 2-second sleep, which under-reported the per-second rates).
            lps = (self.counter - lastcount) / interval
            hps = (self.hits - lasthit) / interval
            lastcount = self.counter
            lasthit = self.hits
            self.logger.info("per second: %d lines - hits %d" % (lps, hps))
            self.logger.info("totals: %d hits - %d lines" % (self.hits, self.counter))
            # != instead of "is not": identity comparison with an int
            # literal is unreliable and a SyntaxWarning on modern Pythons.
            if self.skip_counter != 0:
                self.logger.info("Had to skip %d log lines so far" % self.skip_counter)

    def send_event(self, payload):
        """
        Fire event to statsd

        :param payload: The payload of the udp packet to send.
        """
        try:
            udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            try:
                udp_socket.sendto(payload, self.statsd_addr)
            finally:
                # Close the socket; the original leaked one fd per event.
                udp_socket.close()
        except Exception:
            # udp sendto failed (socket already in use?), but thats ok
            self.logger.error("Error trying to send statsd event")

    def statsd_counter_increment(self, stats, delta=1):
        """
        Increment multiple statsd stats counters

        :param stats: list of stats items to package and send
        :param delta: delta of stats items
        """
        if self.statsd_sample_rate < 1:
            # Sampled mode: only send with probability statsd_sample_rate,
            # tagging the payload so statsd can scale the count back up.
            if random() <= self.statsd_sample_rate:
                for item in stats:
                    payload = "%s:%s|c|@%s" % (item, delta, self.statsd_sample_rate)
                    self.send_event(payload)
        else:
            for item in stats:
                payload = "%s:%s|c" % (item, delta)
                self.send_event(payload)

    def worker(self):
        """
        Check for and process log lines in queue
        """
        while True:
            msg = self.q.get()
            matched = self.check_line(msg)
            if matched:
                self.statsd_counter_increment([matched])
                if self.hits >= maxint:
                    self.logger.info("hit maxint, reset hits counter")
                    self.hits = 0
                self.hits += 1

    def listener(self):
        """
        syslog udp listener
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        bind_addr = (self.listen_addr, self.listen_port)
        sock.bind(bind_addr)
        self.logger.info("listening on %s:%d" % bind_addr)
        while 1:
            data, addr = sock.recvfrom(self.buff)
            if not data:
                break
            else:
                if self.q.qsize() < self.max_q_size:
                    self.q.put(data)
                    if self.counter >= maxint:
                        self.logger.info("hit maxint, reset seen counter")
                        self.counter = 0
                    self.counter += 1
                else:
                    if self.debug:
                        # logging.Logger has no notice(); the original
                        # raised AttributeError here when the queue filled.
                        self.logger.warning("max log lines in queue, "
                                            "skipping")
                    if self.skip_counter >= maxint:
                        self.logger.info("hit maxint, reset skip counter")
                        self.skip_counter = 0
                    self.skip_counter += 1

    def start(self):
        """
        Start the listener, worker, and mgmt server.
        """
        eventlet.spawn_n(self.worker)
        if self.debug:
            eventlet.spawn_n(self.stats_print)
        if self.report_internal_stats:
            eventlet.spawn_n(self.internal_stats)
        while True:
            try:
                self.listener()
            except Exception as err:
                self.logger.error(err)
Ejemplo n.º 6
0
class StatsdLog(object):

    """Simple server to monitor a syslog udp stream for statsd events"""

    def __init__(self, conf):
        """Configure logging, counters, and search patterns from *conf*.

        :param conf: dict-like configuration mapping
        Exits the process (sys.exit(1)) if the patterns file cannot load.
        """
        TRUE_VALUES = set(('true', '1', 'yes', 'on', 't', 'y'))
        self.conf = conf
        self.logger = logging.getLogger('statsdlogd')
        self.logger.setLevel(logging.INFO)
        self.syslog = SysLogHandler(address='/dev/log')
        self.formatter = logging.Formatter('%(name)s: %(message)s')
        self.syslog.setFormatter(self.formatter)
        self.logger.addHandler(self.syslog)
        self.debug = conf.get('debug', 'false').lower() in TRUE_VALUES
        self.statsd_host = conf.get('statsd_host', '127.0.0.1')
        self.statsd_port = int(conf.get('statsd_port', '8125'))
        self.listen_addr = conf.get('listen_addr', '127.0.0.1')
        self.listen_port = int(conf.get('listen_port', 8126))
        self.report_internal_stats = conf.get('report_internal_stats',
                                              'true').lower() in TRUE_VALUES
        self.int_stats_interval = int(conf.get('internal_stats_interval', 5))
        self.buff = int(conf.get('buffer_size', 8192))
        self.max_q_size = int(conf.get('max_line_backlog', 512))
        self.statsd_sample_rate = float(conf.get('statsd_sample_rate', '.5'))
        self.counter = 0
        self.skip_counter = 0
        self.hits = 0
        self.q = Queue(maxsize=self.max_q_size)
        # patterns maps stat name -> regex source string
        self.patterns_file = conf.get('patterns_file',
                                      '/etc/statsdlog/patterns.json')
        self.json_patterns = conf.get('json_pattern_file',
                                      'true').lower() in TRUE_VALUES
        try:
            self.patterns = self.load_patterns()
        except Exception as err:
            self.logger.exception(err)
            # print() form works on both Python 2 and 3 (was a py2-only
            # print statement)
            print("Encountered exception at startup: %s" % err)
            sys.exit(1)
        self.statsd_addr = (self.statsd_host, self.statsd_port)
        self.comp_patterns = {}
        for item in self.patterns:
            self.comp_patterns[item] = re.compile(self.patterns[item])

    def load_patterns(self):
        """Load patterns from a json file or a plain "name = regex" file.

        :returns: dict mapping stat name -> regex source string
        """
        if self.json_patterns:
            self.logger.info("Using json based patterns file: %s" %
                             self.patterns_file)
            with open(self.patterns_file) as pfile:
                return json.loads(pfile.read())
        else:
            self.logger.info("Using plain text patterns file: %s" %
                             self.patterns_file)
        patterns = {}
        with open(self.patterns_file) as f:
            for line in f:
                # Skip blank lines up front.  The original set
                # pattern = None for an empty line and then crashed on
                # len(None); this path is now handled safely.
                if not line.strip():
                    continue
                pattern = [x.strip() for x in line.split("=", 1)]
                if len(pattern) == 2 and pattern[0] and pattern[1]:
                    patterns[pattern[0]] = pattern[1]
                else:
                    self.logger.error(
                        "Skipping pattern. Unable to parse: %s" % line)
        return patterns

    def check_line(self, line):
        """
        Check if a line matches our search patterns.

        :param line: The string to check
        :returns: List of regex entries that matched (or empty list if none)
        """
        matches = []
        for entry in self.comp_patterns:
            if self.comp_patterns[entry].match(line):
                matches.append(entry)
        return matches

    def internal_stats(self):
        """
        Periodically send our own stats to statsd.
        """
        lastcount = 0
        lasthit = 0
        while True:
            eventlet.sleep(self.int_stats_interval)
            self.send_event("statsdlog.lines:%s|c" %
                            (self.counter - lastcount))
            lastcount = self.counter
            self.send_event("statsdlog.hits:%s|c" % (self.hits - lasthit))
            lasthit = self.hits

    def stats_print(self):
        """
        Periodically dump some stats to the logs.
        """
        interval = 2  # seconds between samples
        lastcount = 0
        lasthit = 0
        while True:
            eventlet.sleep(interval)
            # Divide the delta by the actual sample interval (was /60 with
            # a 2-second sleep, which under-reported the per-second rates).
            lps = (self.counter - lastcount) / interval
            hps = (self.hits - lasthit) / interval
            lastcount = self.counter
            lasthit = self.hits
            self.logger.info('per second: %d lines - hits %d' % (lps, hps))
            self.logger.info('totals: %d hits - %d lines' %
                             (self.hits, self.counter))
            # != instead of "is not": identity comparison with an int
            # literal is unreliable and a SyntaxWarning on modern Pythons.
            if self.skip_counter != 0:
                self.logger.info('Had to skip %d log lines so far' %
                                 self.skip_counter)

    def send_event(self, payload):
        """
        Fire event to statsd

        :param payload: The payload of the udp packet to send.
        """
        try:
            udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            try:
                udp_socket.sendto(payload, self.statsd_addr)
            finally:
                # Close the socket; the original leaked one fd per event.
                udp_socket.close()
        except Exception:
            # udp sendto failed (socket already in use?), but thats ok
            self.logger.error("Error trying to send statsd event")

    def statsd_counter_increment(self, stats, delta=1):
        """
        Increment multiple statsd stats counters

        :param stats: list of stats items to package and send
        :param delta: delta of stats items
        """
        if self.statsd_sample_rate < 1:
            # Sampled mode: only send with probability statsd_sample_rate,
            # tagging the payload so statsd can scale the count back up.
            if random() <= self.statsd_sample_rate:
                for item in stats:
                    payload = "%s:%s|c|@%s" % (item, delta,
                                               self.statsd_sample_rate)
                    self.send_event(payload)
        else:
            for item in stats:
                payload = "%s:%s|c" % (item, delta)
                self.send_event(payload)

    def worker(self):
        """
        Check for and process log lines in queue
        """
        while True:
            msg = self.q.get()
            matches = self.check_line(msg)
            for match in matches:
                self.statsd_counter_increment([match])
                if self.hits >= maxint:
                    self.logger.info("hit maxint, reset hits counter")
                    self.hits = 0
                self.hits += 1

    def listener(self):
        """
        syslog udp listener
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        bind_addr = (self.listen_addr, self.listen_port)
        sock.bind(bind_addr)
        self.logger.info("listening on %s:%d" % bind_addr)
        while 1:
            data, addr = sock.recvfrom(self.buff)
            if not data:
                break
            else:
                if self.q.qsize() < self.max_q_size:
                    self.q.put(data)
                    if self.counter >= maxint:
                        self.logger.info("hit maxint, reset seen counter")
                        self.counter = 0
                    self.counter += 1
                else:
                    if self.debug:
                        # logging.Logger has no notice(); the original
                        # raised AttributeError here when the queue filled.
                        self.logger.warning("max log lines in queue, "
                                            "skipping")
                    if self.skip_counter >= maxint:
                        self.logger.info("hit maxint, reset skip counter")
                        self.skip_counter = 0
                    self.skip_counter += 1

    def start(self):
        """
        Start the listener, worker, and mgmt server.
        """
        eventlet.spawn_n(self.worker)
        if self.debug:
            eventlet.spawn_n(self.stats_print)
        if self.report_internal_stats:
            eventlet.spawn_n(self.internal_stats)
        while True:
            try:
                self.listener()
            except Exception as err:
                self.logger.error(err)