def __init__(self, next_amqp_queues=None, next_balanced=False, name="worker1", beat_interval=60, logging_level=logging.INFO, exchange_name='amq.direct', routing_keys=None):
    """Initialise the engine process.

    :param next_amqp_queues: AMQP queue names events are forwarded to
        (default: empty list).
    :param next_balanced: if True, cycle events across the next queues
        instead of using them all.
    :param name: engine name; used for the AMQP queue name and log file.
    :param beat_interval: seconds between periodic beats.
    :param logging_level: verbosity for this engine's logger.
    :param exchange_name: AMQP exchange to bind to.
    :param routing_keys: routing keys consumed from the exchange
        (default: empty list).
    """
    multiprocessing.Process.__init__(self)

    # Bug fix: a mutable default argument ([]) is shared between every
    # instance; use a None sentinel and build a fresh list per instance.
    if next_amqp_queues is None:
        next_amqp_queues = []
    if routing_keys is None:
        routing_keys = []

    self.logging_level = logging_level

    self.signal_queue = multiprocessing.Queue(maxsize=5)

    self.RUN = True
    self.name = name
    self.amqp_queue = "Engine_%s" % name
    self.routing_keys = routing_keys
    self.exchange_name = exchange_name

    self.perfdata_retention = 3600

    self.next_amqp_queues = next_amqp_queues
    ## Get from internal or external queue
    self.get_amqp_queue = itertools.cycle(self.next_amqp_queues)
    self.next_balanced = next_balanced

    init = cinit()
    self.logger = init.getLogger(name, logging_level=self.logging_level)

    # Log in file
    logHandler = logging.FileHandler(filename=os.path.expanduser("~/var/log/engines/%s.log" % name))
    logHandler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s"))
    self.logger.addHandler(logHandler)

    # Beat/health counters.
    self.counter_error = 0
    self.counter_event = 0
    self.counter_worktime = 0

    # Per-event work-time thresholds (seconds) for warning/critical state.
    self.thd_warn_sec_per_evt = 0.6
    self.thd_crit_sec_per_evt = 0.9

    self.beat_interval = beat_interval
    self.beat_last = time.time()

    self.create_queue = True
    self.send_stats_event = True

    # Routing keys that triggered a processing error.
    self.rk_on_error = []

    self.last_stat = int(time.time())

    self.logger.info("Engine initialised")
def __init__(self, next_amqp_queues=None, next_balanced=False, name="worker1", beat_interval=60, logging_level=logging.INFO, exchange_name='amq.direct', routing_keys=None):
    """Initialise the engine process.

    :param next_amqp_queues: AMQP queue names events are forwarded to
        (default: empty list).
    :param next_balanced: if True, cycle events across the next queues
        instead of using them all.
    :param name: engine name; used for the AMQP queue name and log file.
    :param beat_interval: seconds between periodic beats.
    :param logging_level: verbosity for this engine's logger.
    :param exchange_name: AMQP exchange to bind to.
    :param routing_keys: routing keys consumed from the exchange
        (default: empty list).
    """
    multiprocessing.Process.__init__(self)

    # Bug fix: a mutable default argument ([]) is shared between every
    # instance; use a None sentinel and build a fresh list per instance.
    if next_amqp_queues is None:
        next_amqp_queues = []
    if routing_keys is None:
        routing_keys = []

    self.logging_level = logging_level

    self.signal_queue = multiprocessing.Queue(maxsize=5)

    self.RUN = True
    self.name = name
    self.amqp_queue = "Engine_%s" % name
    self.routing_keys = routing_keys
    self.exchange_name = exchange_name

    self.perfdata_retention = 3600

    self.next_amqp_queues = next_amqp_queues
    ## Get from internal or external queue
    self.get_amqp_queue = itertools.cycle(self.next_amqp_queues)
    self.next_balanced = next_balanced

    init = cinit()
    self.logger = init.getLogger(name, logging_level=self.logging_level)

    # Log in file
    self.logger.addHandler(
        logging.FileHandler(
            filename=os.path.expanduser("~/var/log/engines/%s.log" % name)))

    # Beat/health counters.
    self.counter_error = 0
    self.counter_event = 0
    self.counter_worktime = 0

    # Per-event work-time thresholds (seconds) for warning/critical state.
    self.thd_warn_sec_per_evt = 0.6
    self.thd_crit_sec_per_evt = 0.9

    self.beat_interval = beat_interval
    self.beat_last = time.time()

    self.create_queue = True
    self.send_stats_event = True

    # Routing keys that triggered a processing error.
    self.rk_on_error = []

    self.last_stat = int(time.time())

    self.logger.info("Engine initialised")
def __init__(self, next_engines=None, next_balanced=True, name="worker1", beat_interval=60, use_internal_queue=True, queue_maxsize=1000, logging_level=logging.INFO):
    """Initialise the engine process.

    :param next_engines: engines events are forwarded to
        (default: empty list).
    :param next_balanced: if True, cycle events across the next engines
        instead of using them all.
    :param name: engine name; used for the AMQP queue name and log file.
    :param beat_interval: seconds between periodic beats.
    :param use_internal_queue: if False, use only the AMQP queue.
    :param queue_maxsize: bound on the internal input queue.
    :param logging_level: verbosity for this engine's logger.
    """
    multiprocessing.Process.__init__(self)

    # Bug fix: a mutable default argument ([]) is shared between every
    # instance; use a None sentinel and build a fresh list per instance.
    if next_engines is None:
        next_engines = []

    self.logging_level = logging_level

    self.signal_queue = multiprocessing.Queue(maxsize=5)
    self.input_queue = multiprocessing.Queue(maxsize=queue_maxsize)
    self.queue_maxsize = queue_maxsize

    self.RUN = True
    self.name = name
    self.amqp_queue = "Engine_%s" % name

    self.perfdata_retention = 3600

    ## Get from internal or external queue
    self.next_balanced = next_balanced
    self.next_engines = next_engines
    self.get_next_engine = itertools.cycle(self.next_engines)

    init = cinit()
    self.logger = init.getLogger(name, logging_level=self.logging_level)

    # Log in file
    self.logger.addHandler(
        logging.FileHandler(
            filename=os.path.expanduser("~/var/log/engines/%s.log" % name)))

    # Beat/health counters.
    self.counter_error = 0
    self.counter_event = 0
    self.counter_worktime = 0

    # Per-event work-time thresholds (seconds) for warning/critical state.
    self.thd_warn_sec_per_evt = 0.6
    self.thd_crit_sec_per_evt = 0.9

    self.beat_interval = beat_interval
    self.beat_last = time.time()

    self.create_queue = True

    # If use_internal_queue is false, use only AMQP Queue
    self.use_internal_queue = use_internal_queue

    self.send_stats_event = True

    # Routing keys that triggered a processing error.
    self.rk_on_error = []

    self.logger.info("Engine initialised")
def __init__(self, next_engines=None, next_balanced=True, name="worker1", beat_interval=60, use_internal_queue=True, queue_maxsize=1000, logging_level=logging.INFO):
    """Initialise the engine process.

    :param next_engines: engines events are forwarded to
        (default: empty list).
    :param next_balanced: if True, cycle events across the next engines
        instead of using them all.
    :param name: engine name; used for the AMQP queue name and log file.
    :param beat_interval: seconds between periodic beats.
    :param use_internal_queue: if False, use only the AMQP queue.
    :param queue_maxsize: bound on the internal input queue.
    :param logging_level: verbosity for this engine's logger.
    """
    multiprocessing.Process.__init__(self)

    # Bug fix: a mutable default argument ([]) is shared between every
    # instance; use a None sentinel and build a fresh list per instance.
    if next_engines is None:
        next_engines = []

    self.logging_level = logging_level

    self.signal_queue = multiprocessing.Queue(maxsize=5)
    self.input_queue = multiprocessing.Queue(maxsize=queue_maxsize)
    self.queue_maxsize = queue_maxsize

    self.RUN = True
    self.name = name
    self.amqp_queue = "Engine_%s" % name

    self.perfdata_retention = 3600

    ## Get from internal or external queue
    self.next_balanced = next_balanced
    self.next_engines = next_engines
    self.get_next_engine = itertools.cycle(self.next_engines)

    init = cinit()
    self.logger = init.getLogger(name, logging_level=self.logging_level)

    # Log in file
    self.logger.addHandler(logging.FileHandler(filename=os.path.expanduser("~/var/log/engines/%s.log" % name)))

    # Beat/health counters.
    self.counter_error = 0
    self.counter_event = 0
    self.counter_worktime = 0

    # Per-event work-time thresholds (seconds) for warning/critical state.
    self.thd_warn_sec_per_evt = 0.6
    self.thd_crit_sec_per_evt = 0.9

    self.beat_interval = beat_interval
    self.beat_last = time.time()

    self.create_queue = True

    # If use_internal_queue is false, use only AMQP Queue
    self.use_internal_queue = use_internal_queue

    self.send_stats_event = True

    # Routing keys that triggered a processing error.
    self.rk_on_error = []

    self.logger.info("Engine initialised")
def __init__(self, next_engines=None, name="worker1", beat_interval=60, use_internal_queue=True, queue_maxsize=1000, logging_level=logging.INFO):
    """Initialise the engine process.

    :param next_engines: engines events are forwarded to
        (default: empty list).
    :param name: engine name; used for the AMQP queue name.
    :param beat_interval: seconds between periodic beats.
    :param use_internal_queue: if False, use only the AMQP queue.
    :param queue_maxsize: bound on the internal input queue.
    :param logging_level: verbosity for this engine's logger.
    """
    multiprocessing.Process.__init__(self)

    # Bug fix: a mutable default argument ([]) is shared between every
    # instance; use a None sentinel and build a fresh list per instance.
    if next_engines is None:
        next_engines = []

    self.logging_level = logging_level

    self.signal_queue = multiprocessing.Queue(maxsize=5)
    self.input_queue = multiprocessing.Queue(maxsize=queue_maxsize)

    self.RUN = True
    self.name = name
    self.amqp_queue = "Engine_%s" % name

    self.perfdata_retention = 3600

    ## Get from internal or external queue
    self.next_engines = next_engines

    init = cinit()
    self.logger = init.getLogger(name, logging_level=self.logging_level)

    # Beat/health counters.
    self.counter_error = 0
    self.counter_event = 0
    self.counter_worktime = 0

    # Per-event work-time thresholds (seconds) for warning/critical state.
    self.thd_warn_sec_per_evt = 0.6
    self.thd_crit_sec_per_evt = 0.9

    self.beat_interval = beat_interval
    self.beat_last = time.time()

    self.create_queue = True

    # If use_internal_queue is false, use only AMQP Queue
    self.use_internal_queue = use_internal_queue

    self.amqp_flow = True
    self.send_stats_event = True

    self.logger.info("Engine initialised")
# You should have received a copy of the GNU Affero General Public License
# along with Canopsis. If not, see <http://www.gnu.org/licenses/>.
# ---------------------------------

import socket, zlib, json, time

from pyparsing import Word, alphas, Suppress, Combine, nums, string, Optional, Regex

from camqp import camqp
from cinit import cinit
import cevent

DAEMON_NAME='gelf2amqp'

init = cinit()
logger = init.getLogger(DAEMON_NAME)
handler = init.getHandler(logger)

# GELF listener endpoint.
gelf_port = 5555
gelf_interface = "0.0.0.0"

# AMQP connection, set up later at daemon start.
myamqp = None

#sys.path.append(os.path.expanduser("~/opt/event-brokers/nagios/api"))

## Init parser
integer = Word(nums)
# Fix: raw string for the regex — "\S" and "\d" are regex escapes, not
# Python string escapes (non-raw form triggers invalid-escape warnings).
# Matches a syslog-style timestamp, e.g. "Jan  1 12:34:56".
serverDateTime = Regex(r"\S\S\S\s*\d\d?\s*\d\d:\d\d:\d\d")
hostname = Word(alphas + nums + "_" + "-")
# Daemon name with optional "[pid]" suffix, terminated by ":".
daemon = Word(alphas + "/" + "-" + "_") + Optional(Suppress("[") + integer + Suppress("]")) + Suppress(":")
# the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Canopsis is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Canopsis. If not, see <http://www.gnu.org/licenses/>. # --------------------------------- import sys from ctools import dynmodloads from cinit import cinit init = cinit() if len(sys.argv) != 2: print "Usage: %s [init|update]" % sys.argv[0] sys.exit(1) action = sys.argv[1].lower() if action != "update" and action != "init": print "Invalid option" sys.exit(1) ## Logger logger = init.getLogger("mongodb-conf", "INFO") ## Load