def test_01_Init(self):
    """Exercise the selector engine end to end: feed it a non-matching and
    a matching event, verify the refresh bookkeeping, then wire a real AMQP
    client, run several beats and shut the engine down.
    """
    self.engine.pre_run()

    selectorTest = cselector(self.storage, name='selectorTest')
    selectorTest.mfilter = {'test_key': 'value'}
    # Round-trip through dump/load to mimic a selector read back from storage.
    selectorTest.load(selectorTest.dump())

    # An event that does NOT match the filter must not schedule any refresh.
    # (assertEqual instead of assertTrue(a == b): reports both values on failure.)
    self.engine.selectors = [selectorTest]
    self.engine.work({'test_key': 'not a value'})
    self.assertEqual(self.engine.selector_refresh, {})

    # A matching event must flag the selector for refresh.
    self.engine.selectors = [selectorTest]
    self.engine.work({'test_key': 'value'})
    self.assertEqual(
        self.engine.selector_refresh,
        {'selector.account.root.selectorTest': True}
    )

    from camqp import camqp
    self.engine.amqp = camqp(logging_level=logging.INFO,
                             logging_name="test selector engine")

    # Several beats so the engine processes the pending refresh.
    # Loop variable is unused: name it `_`.
    for _ in xrange(10):
        self.engine.beat()
    self.engine.beat()

    self.engine.post_run()
def main():
    """Daemon entry point: attach the event and alert queues to the bus,
    run until the handler signals shutdown, then tear everything down.
    """
    global amqp, ready

    logger.info("Initialyze process")
    handler.run()

    # Bring the AMQP connection up with both consumer queues attached.
    amqp = camqp(on_ready=amqp2engines_ready,
                 logging_name="%s-amqp" % DAEMON_NAME)
    amqp.add_queue(DAEMON_NAME, ['#'], on_event,
                   amqp.exchange_name_events, auto_delete=False)
    amqp.add_queue("%s_alerts" % DAEMON_NAME, ['#'], on_alert,
                   amqp.exchange_name_alerts, auto_delete=False)
    amqp.start()

    # Safety wait: let the connection settle before declaring readiness.
    time.sleep(3)
    ready = True

    logger.info("Wait")
    handler.wait()

    # Shutdown: stop the bus, join its thread, then kill the engines.
    amqp.stop()
    amqp.join()
    stop_engines()

    logger.info("Process finished")
def test_01_Init(self):
    """Exercise the selector engine end to end: feed it a non-matching and
    a matching event, verify the refresh bookkeeping, then wire a real AMQP
    client, run several beats and shut the engine down.
    """
    self.engine.pre_run()

    selectorTest = cselector(self.storage, name='selectorTest')
    selectorTest.mfilter = {'test_key': 'value'}
    # Round-trip through dump/load to mimic a selector read back from storage.
    selectorTest.load(selectorTest.dump())

    # An event that does NOT match the filter must not schedule any refresh.
    # (assertEqual instead of assertTrue(a == b): reports both values on failure.)
    self.engine.selectors = [selectorTest]
    self.engine.work({'test_key': 'not a value'})
    self.assertEqual(self.engine.selector_refresh, {})

    # A matching event must flag the selector for refresh.
    self.engine.selectors = [selectorTest]
    self.engine.work({'test_key': 'value'})
    self.assertEqual(
        self.engine.selector_refresh,
        {'selector.account.root.selectorTest': True}
    )

    from camqp import camqp
    self.engine.amqp = camqp(logging_level=logging.INFO,
                             logging_name="test selector engine")

    # Several beats so the engine processes the pending refresh.
    # Loop variable is unused: name it `_`.
    for _ in xrange(10):
        self.engine.beat()
    self.engine.beat()

    self.engine.post_run()
# Scheduler hook: runs a named Celery task described by **kwargs ('task' is a
# module name resolved via __import__, 'method' an attribute resolved via the
# exec statement) after opening an AMQP connection; collects success/aps_error
# state and then resolves a caccount and a cstorage for the result.
# NOTE(review): this chunk is truncated — the outer `try:` has no visible
# matching `except`; the remainder of the function lies outside this view.
# NOTE(review): `exec "task = module.%s" % kwargs['method']` and
# `__import__(kwargs['task'])` execute caller-supplied strings — confirm the
# scheduler is the only caller and that these values are trusted.
# NOTE(review): `methodargs = kwargs` aliases (does not copy) kwargs, so the
# `del` statements mutate both names — presumably intentional; verify.
def launch_celery_task(*args, **kwargs): if kwargs.has_key('task') and kwargs.has_key('method'): try: amqp = camqp(logging_name="aps-amqp") amqp.start() timer_begin = int(time.time()) #----------Get task informations task_name = kwargs['_scheduled'] celery_task_name = kwargs['task'] module = __import__(kwargs['task']) exec "task = module.%s" % kwargs['method'] #-------------Clear arguments methodargs = kwargs del methodargs['task'] del methodargs['method'] del kwargs['_scheduled'] #-------------execute task success = True try: result = task.delay(*args, **methodargs) result.get() result = result.result if not result['success']: raise Exception('Celery task failed') except Exception, err: success = False aps_error = str(err) logger.error(err) #------------Get account and storage try: if isinstance(kwargs['account'], unicode) or isinstance( kwargs['account'], str): account = caccount(user=kwargs['account']) else: account = kwargs['account'] #logger.error(account) logger.info('Caccount create from passed arguments') except Exception, err: logger.info('No account specified in the task') account = caccount() try: storage = cstorage(account=account, namespace='object') except Exception, err: logger.info('Error while fecthing storages : %s' % err) success = False aps_error = str(err)
# Duplicate variant of launch_celery_task (differs from the other copy only
# in whitespace). Runs a named Celery task described by **kwargs ('task' is a
# module name resolved via __import__, 'method' an attribute resolved via the
# exec statement) after opening an AMQP connection.
# NOTE(review): truncated chunk — the outer `try:` has no visible matching
# `except`; the remainder of the function lies outside this view.
# NOTE(review): exec/__import__ on kwargs values execute caller-supplied
# strings — confirm the scheduler is the only caller.
def launch_celery_task(*args,**kwargs): if kwargs.has_key('task') and kwargs.has_key('method'): try: amqp = camqp(logging_name="aps-amqp") amqp.start() timer_begin = int(time.time()) #----------Get task informations task_name = kwargs['_scheduled'] celery_task_name = kwargs['task'] module = __import__(kwargs['task']) exec "task = module.%s" % kwargs['method'] #-------------Clear arguments methodargs = kwargs del methodargs['task'] del methodargs['method'] del kwargs['_scheduled'] #-------------execute task success = True try: result = task.delay(*args,**methodargs) result.get() result = result.result if not result['success']: raise Exception('Celery task failed') except Exception, err: success = False aps_error = str(err) logger.error(err) #------------Get account and storage try: if isinstance(kwargs['account'],unicode) or isinstance(kwargs['account'],str): account = caccount(user=kwargs['account']) else: account = kwargs['account'] #logger.error(account) logger.info('Caccount create from passed arguments') except Exception, err: logger.info('No account specified in the task') account = caccount() try: storage = cstorage(account=account, namespace='object') except Exception, err: logger.info('Error while fecthing storages : %s' % err) success = False aps_error = str(err)
# Engine main loop: connects a camqp client (optionally declaring its queue),
# calls pre_run(), then loops while self.RUN, each pass draining one STOP
# signal from signal_queue, firing _beat() when beat_interval has elapsed,
# and pumping events from input_queue into _work(); on exit it runs
# post_run(), stop() and logs shutdown.
# NOTE(review): the original nesting of the inner input-queue drain loop —
# where exactly `break` and `time.sleep(0.5)` sit relative to the
# `Queue.Empty` handler and the outer loop — cannot be recovered with
# certainty from this collapsed line; left byte-identical rather than
# reconstructed. Confirm against version control before reformatting.
def run(self): def ready(): self.logger.info(" + Ready!") self.logger.info("Start Engine with pid %s" % (os.getpid())) from camqp import camqp self.amqp = camqp(logging_level=logging.INFO, logging_name="%s-amqp" % self.name, on_ready=ready) if self.create_queue: self.create_amqp_queue() self.amqp.start() self.pre_run() while self.RUN: # Internal signals try: signal = self.signal_queue.get_nowait() self.logger.debug("Signal: %s" % signal) if signal == "STOP": self.RUN = False except Queue.Empty: pass # Beat if self.beat_interval: now = time.time() if now > (self.beat_last + self.beat_interval): self._beat() self.beat_last = now # Input Queue if not self.input_queue.empty(): while self.RUN : try: event = self.input_queue.get_nowait() self._work(event) except Queue.Empty: if self.amqp.paused and self.RUN: self.logger.info("Re-start AMQP Flow") self.amqp.paused = False break time.sleep(0.5) self.post_run() self.logger.info("Stop Engine") self.stop() self.logger.info("End of Engine")
# Variant of the engine main loop: same AMQP setup, signal drain and beat
# logic as the other run() copy, but the inner input-queue loop is entered
# unconditionally (no input_queue.empty() guard) and the sleep(0.5) appears
# before the break in the Queue.Empty handler.
# NOTE(review): the exact nesting of `time.sleep(0.5)` and `break` inside the
# Queue.Empty handler cannot be recovered with certainty from this collapsed
# line; left byte-identical rather than reconstructed. Confirm against
# version control before reformatting.
def run(self): def ready(): self.logger.info(" + Ready!") self.logger.info("Start Engine with pid %s" % (os.getpid())) from camqp import camqp self.amqp = camqp(logging_level=logging.INFO, logging_name="%s-amqp" % self.name, on_ready=ready) if self.create_queue: self.create_amqp_queue() self.amqp.start() self.pre_run() while self.RUN: # Internal signals try: signal = self.signal_queue.get_nowait() self.logger.debug("Signal: %s" % signal) if signal == "STOP": self.RUN = False except Queue.Empty: pass # Beat if self.beat_interval: now = time.time() if now > (self.beat_last + self.beat_interval): self._beat() self.beat_last = now while self.RUN: try: event = self.input_queue.get_nowait() self._work(event) except Queue.Empty: if self.amqp.paused and self.RUN: self.logger.info("Re-start AMQP Flow") self.amqp.paused = False time.sleep(0.5) break self.post_run() self.logger.info("Stop Engine") self.stop() self.logger.info("End of Engine")
def pre_run(self):
    """Prepare the engine: create the perfstore manager and start a private
    AMQP connection that consumes every routing key on the internal queue.
    """
    import logging

    self.manager = pyperfstore2.manager(logging_level=logging.INFO)

    # Dedicated internal bus, named after this engine.
    internal_name = "%s-internal-amqp" % self.name
    self.internal_amqp = camqp(logging_level=logging.INFO,
                               logging_name=internal_name)
    self.internal_amqp.add_queue(
        queue_name=INTERNAL_QUEUE,
        routing_keys=["#"],          # catch-all binding
        callback=self.on_internal_event,
        no_ack=True,
        exclusive=False,
        auto_delete=False,
    )
    self.internal_amqp.start()
def pre_run(self):
    """Prepare the engine: create the perfstore manager and start a private
    AMQP connection that consumes every routing key on the internal queue.
    """
    import logging

    self.manager = pyperfstore2.manager(logging_level=logging.INFO)

    # Dedicated internal bus, named after this engine; the "#" binding is a
    # catch-all, and no_ack avoids per-message acknowledgements.
    self.internal_amqp = camqp(
        logging_level=logging.INFO,
        logging_name="%s-internal-amqp" % self.name,
    )
    self.internal_amqp.add_queue(
        queue_name=INTERNAL_QUEUE,
        routing_keys=["#"],
        callback=self.on_internal_event,
        no_ack=True,
        exclusive=False,
        auto_delete=False,
    )
    self.internal_amqp.start()
def test_1_Init(self):
    """Bootstrap the shared test fixtures: an AMQP alert consumer, the
    'events' storage and the perfstore manager, then wipe leftover state.
    """
    global myamqp, storage, perfstore

    myamqp = camqp()
    myamqp.add_queue(
        queue_name="unittest_alerts",
        routing_keys="#",
        callback=on_alert,
        exchange_name=myamqp.exchange_name_alerts,
    )
    myamqp.start()

    # Give the consumer thread a moment before tests publish anything.
    time.sleep(1)

    root = caccount(user="******", group="root")
    storage = cstorage(root, namespace="events",
                       logging_level=logging.DEBUG)

    perfstore = pyperfstore2.manager(logging_level=logging.DEBUG)

    clean()
def run(self):
    """Engine main loop (beat-only variant): connect to AMQP, then once per
    second drain a pending internal signal and fire the periodic beat, until
    a STOP signal — or a failing sleep — clears self.RUN.
    """
    def ready():
        self.logger.info(" + Ready!")

    self.logger.info("Start Engine with pid %s" % (os.getpid()))

    from camqp import camqp
    self.amqp = camqp(logging_level=logging.INFO,
                      logging_name="%s-amqp" % self.name,
                      on_ready=ready)
    if self.create_queue:
        self.create_amqp_queue()
    self.amqp.start()

    self.pre_run()

    while self.RUN:
        # Drain at most one pending internal signal per iteration.
        try:
            sig = self.signal_queue.get_nowait()
            self.logger.debug("Signal: %s" % sig)
            if sig == "STOP":
                self.RUN = False
        except Queue.Empty:
            pass

        # Fire the periodic beat when its interval has elapsed.
        if self.beat_interval:
            ts = time.time()
            if ts > (self.beat_last + self.beat_interval):
                self._beat()
                self.beat_last = ts

        # Idle between iterations; an interrupted sleep stops the engine.
        try:
            time.sleep(1)
        except Exception as err:
            self.logger.error("Error in break time: %s" % err)
            self.RUN = False

    self.post_run()

    self.logger.info("Stop Engine")
    self.stop()
    self.logger.info("End of Engine")
def main():
    """Bridge entry point: start AMQP, pump GELF UDP messages through
    on_log, then stop the bus when the listener returns.
    """
    global myamqp

    handler.run()

    logger.debug("Start AMQP ...")
    myamqp = camqp()
    myamqp.start()

    # Blocks here, forwarding each received GELF message to on_log.
    wait_gelf_udp(on_log)

    logger.debug("Stop AMQP ...")
    myamqp.stop()
    myamqp.join()
def test_1_Init(self):
    """Bootstrap the shared test fixtures: an AMQP alert consumer, the
    'events' storage and the perfstore manager, then wipe leftover state.
    """
    global myamqp, storage, perfstore

    myamqp = camqp()
    myamqp.add_queue(
        queue_name="unittest_alerts",
        routing_keys="#",
        callback=on_alert,
        exchange_name=myamqp.exchange_name_alerts,
    )
    myamqp.start()

    # Give the consumer thread a moment before tests publish anything.
    time.sleep(1)

    root = caccount(user="******", group="root")
    storage = cstorage(root, namespace='events',
                       logging_level=logging.DEBUG)

    perfstore = pyperfstore2.manager(logging_level=logging.DEBUG)

    clean()
def main():
    """snmp2amqp entry point: install signal handlers, load the configured
    MIBs, open the SNMP trap listener, drop privileges and relay traps onto
    the AMQP bus until the dispatcher is interrupted.
    """
    global myamqp, transportDispatcher

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    logger.debug("Start AMQP ...")
    myamqp = camqp()

    logger.info("Load all MIBs ...")
    for oid, mib_path in snmp2amqp_conf.mibs.items():
        mibs[oid] = mib(mib_path)

    logger.info("Init SNMP listenner ...")
    transportDispatcher = AsynsockDispatcher()
    server_transport = udp.UdpSocketTransport().openServerMode(
        (snmp2amqp_conf.interface, snmp2amqp_conf.port))
    transportDispatcher.registerTransport(udp.domainName, server_transport)
    transportDispatcher.registerRecvCbFun(cbFun)
    transportDispatcher.jobStarted(1)  # this job would never finish

    # Drop privileges to the 'canopsis' user before consuming traps.
    os.setuid(getpwnam('canopsis')[2])

    myamqp.start()

    logger.info("Wait SNMP traps ...")
    try:
        transportDispatcher.runDispatcher()
    except Exception as err:
        # Impossible to stop transportDispatcher properly ...
        logger.error(err)
def main():
    """snmp2amqp entry point (duplicate variant): install signal handlers,
    load the configured MIBs, open the SNMP trap listener, drop privileges
    and relay traps onto the AMQP bus until the dispatcher is interrupted.
    """
    global myamqp, transportDispatcher

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    logger.debug("Start AMQP ...")
    myamqp = camqp()

    logger.info("Load all MIBs ...")
    for oid, mib_path in snmp2amqp_conf.mibs.items():
        mibs[oid] = mib(mib_path)

    logger.info("Init SNMP listenner ...")
    transportDispatcher = AsynsockDispatcher()
    listen_addr = (snmp2amqp_conf.interface, snmp2amqp_conf.port)
    transportDispatcher.registerTransport(
        udp.domainName,
        udp.UdpSocketTransport().openServerMode(listen_addr),
    )
    transportDispatcher.registerRecvCbFun(cbFun)
    transportDispatcher.jobStarted(1)  # this job would never finish

    # Drop privileges to the 'canopsis' user before consuming traps.
    os.setuid(getpwnam('canopsis')[2])

    myamqp.start()

    logger.info("Wait SNMP traps ...")
    try:
        transportDispatcher.runDispatcher()
    except Exception as err:
        # Impossible to stop transportDispatcher properly ...
        logger.error(err)
def main():
    """Daemon entry point: start the engines first, then attach the event
    and alert queues to the bus and run until the handler asks to stop.
    """
    global amqp

    logger.info("Initialyze process")
    handler.run()

    logger.info("Start Engines")
    start_engines()

    # Safety wait: give the engine processes time to come up.
    time.sleep(3)

    amqp = camqp(logging_name="%s-amqp" % DAEMON_NAME)
    amqp.add_queue(DAEMON_NAME, ['#'], on_event,
                   amqp.exchange_name_events, auto_delete=False)
    amqp.add_queue("%s_alerts" % DAEMON_NAME, ['#'], on_alert,
                   amqp.exchange_name_alerts, auto_delete=False)
    amqp.start()

    logger.info("Wait")
    handler.wait()

    # Shutdown: stop the bus, join its thread, then kill the engines.
    amqp.stop()
    amqp.join()
    stop_engines()

    logger.info("Process finished")
# Bottle webservice module for publishing events: imports, a module-level
# camqp connection started at import time, a module logger, and the
# group.CPS_event_admin access constant, followed by the POST /event/
# handler which rejects accounts lacking that group with HTTP 403.
# NOTE(review): this chunk is truncated — send_event's body continues past
# the visible 403 check; left byte-identical rather than reconstructed.
# NOTE(review): starting an AMQP connection as a module import side effect
# makes this module unimportable without a broker — confirm intentional.
import bottle from bottle import route, get, put, delete, request, HTTPError, post, response ## Canopsis from caccount import caccount from cstorage import cstorage from cstorage import get_storage from crecord import crecord from camqp import camqp import cevent #import protection function from libexec.auth import check_auth, get_account, check_group_rights amqp = camqp(logging_name="Event-amqp") amqp.start() logger = logging.getLogger('Event') group_managing_access = 'group.CPS_event_admin' ################################################################################## @post('/event/',apply=[check_auth]) @post('/event/:routing_key',apply=[check_auth]) def send_event( routing_key=None): account = get_account() if not check_group_rights(account,group_managing_access): return HTTPError(403, 'Insufficient rights')
# Benchmark script configuration block: logging setup, a module-level camqp
# connection, the 'events' storage handle, a perfstore manager, and the start
# of a template check event built with cevent.forger.
# NOTE(review): truncated chunk — the cevent.forger(...) call is not closed
# here; its remaining arguments lie outside this view. Left byte-identical.
######################################################## # # Configuration # ######################################################## AMQP_HOST = "localhost" logging.basicConfig( level=logging.INFO, format='%(asctime)s %(name)s %(levelname)s %(message)s', ) logger = logging.getLogger("bench") amqp = camqp() storage = get_storage(namespace='events', account=caccount(user="******", group="root")) manager = pyperfstore2.manager(logging_level=logging.INFO) base_component_event = cevent.forger( connector='bench', connector_name="engine", event_type="check", source_type="component", component="component-", state=0, state_type=1, output="Output", long_output="",
def test_1_Init(self):
    """Create the module-wide AMQP client used by the remaining tests."""
    global myamqp

    myamqp = camqp()
# aps (scheduler) module header: tail of the AGPL license notice, imports, a
# module-level camqp connection started at import time, the cinit-based
# logger, and the start of launch_celery_task.
# NOTE(review): truncated chunk — launch_celery_task's body is cut off after
# the __import__ call; the rest of the function lies outside this view.
# Left byte-identical rather than reconstructed.
# GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Canopsis. If not, see <http://www.gnu.org/licenses/>. import logging from cinit import cinit from caccount import caccount from cstorage import cstorage from crecord import crecord import time from camqp import camqp import cevent amqp = camqp(logging_name="aps-amqp") amqp.start() init = cinit() logger = init.getLogger('aps') def launch_celery_task(*args,**kwargs): if kwargs.has_key('task') and kwargs.has_key('method'): try: timer_begin = int(time.time()) #----------Get task informations task_name = kwargs['_scheduled'] celery_task_name = kwargs['task'] module = __import__(kwargs['task'])
def load():
    """Create and start the module-level AMQP connection for this service."""
    global amqp

    amqp = camqp(logging_name="Event-amqp")
    amqp.start()
# Benchmark script configuration block (variant without perfstore): the
# traceback import, logging setup, a module-level camqp connection, the
# 'events' storage handle, and the start of a template check event built
# with cevent.forger.
# NOTE(review): truncated chunk — the cevent.forger(...) call is not closed
# here (it ends inside commented-out perf_data arguments); its remaining
# arguments lie outside this view. Left byte-identical.
import traceback ######################################################## # # Configuration # ######################################################## AMQP_HOST = "localhost" logging.basicConfig(level=logging.INFO, format='%(asctime)s %(name)s %(levelname)s %(message)s', ) logger = logging.getLogger("bench") amqp = camqp() storage = get_storage(namespace='events', account=caccount(user="******", group="root")) base_component_event = cevent.forger( connector = 'bench', connector_name = "engine", event_type = "check", source_type = "component", component = "component-", state = 0, state_type = 1, output = "Output", long_output = "", #perf_data = None, #perf_data_array = [
# Connector-side publisher: builds a camqp connection from optional params
# (amqp_host/port/userid/password/virtualhost, defaulting to a localhost
# 'canopsis' vhost), replays any publications cached in
# cache/tmp/canopsis.json, then reshapes each result record — expanding
# 'metric.*' dotted fields into nested dicts via build_arbo/combine and
# collecting them into perf_data_array — before publishing; failed records
# are appended to record_fail and persisted via recordFailedBusPublication.
# NOTE(review): this block is stored as three physical lines broken
# MID-EXPRESSION (a line ends at `if 'metric.'` and the next continues
# `in field:`), so it is not valid Python as stored; left byte-identical
# rather than reconstructed. Recover the original from version control.
# NOTE(review): `inserted = True#amqp_bus.publish(...)` — the actual publish
# calls are commented out, so "INSERTED !" is printed unconditionally;
# confirm whether this is leftover debug scaffolding.
# NOTE(review): the first loop pops from canop_content while iterating over
# range(len(canop_content)) — index skipping/IndexError risk; verify.
def publishInCanopsis( resultats, settings, params ): print os.environ #you can add host, port, userid, password, virtualhost, exchange_name, read_conf_file, auto_connect, on_ready if ( params.has_key('amqp_host') ): amqp_host = params['amqp_host'] else: amqp_host = "localhost" if ( params.has_key('amqp_port') ): amqp_port = params['amqp_port'] else: amqp_port = 5672 if ( params.has_key('amqp_userid') ): amqp_userid = params['amqp_userid'] else: amqp_userid = "guest" if ( params.has_key('amqp_password') ): amqp_password = params['amqp_password'] else: amqp_password = "******" if ( params.has_key('amqp_virtualhost') ) : amqp_virtualhost = params['amqp_virtualhost'] else: amqp_virtualhost = "canopsis" if ( params.has_key('amqp_exchange') ): amqp_exchange = params['amqp_exchange'] else: amqp_exchange = "canopsis.events" amqp_bus = camqp(host=amqp_host, port=amqp_port, userid=amqp_userid, password=amqp_password, virtual_host=amqp_virtualhost, exchange_name="canopsis.events" ) record_fail = [] num_del_record = 0 if amqp_bus: # We treat the waiting publications canop_content = False try: canop_file = "%s/cache/tmp/canopsis.json" % (base) canop_content = getJsonToArray(canop_file) except: pass if canop_content: for i in range(len(canop_content)): record = canop_content[i] curr_key = record.keys()[0] inserted = True#amqp_bus.publish(json.dumps(record[curr_key]),curr_key) if ( inserted ): print "INSERTED !" canop_content.pop(i) num_del_record += 1 else: print "NOT INSERTED!" if num_del_record > 0: recordProcessedBusPublication(canop_content) # We now send the new publications newresultat = {} for source,query in resultats.items(): for q_name,records in query.items(): newdata = [] for record in records['data']: newrecord = {} tmplevel = {} remove = [] for field, value in record.items(): if 'metric.' 
in field: if record.has_key( field.split('.')[1] ): print field.split('.')[0] print record[field.split('.')[1]] print field.split('.')[2] record[ field.split('.')[0]+"."+record[field.split('.')[1]]+"."+field.split('.')[2] ] = value remove.append(field) for field in remove: del record[field] for field, value in record.items(): if "." in field: tmplevel = combine( tmplevel, build_arbo( field.split('.'), value) ) else: newrecord[field] = value metric = [] if ( tmplevel.has_key('metric') ): for metricname, value in tmplevel['metric'].items(): value["metric"] = metricname if ( 'value' in value.keys() ): metric.append(value) newrecord['perf_data_array'] = metric del (tmplevel['metric']) if ( len(tmplevel.keys()) > 0 ): newrecord = combine(newrecord, tmplevel) newdata.append(newrecord) inserted = True #amqp_bus.publish(json.dumps(newrecord), formatRoutingKey(record) ) print "insert record : "+formatRoutingKey(record) if ( inserted ): print json.dumps(newrecord, indent=4 ) print "INSERTED !" else: record_fail.append({formatRoutingKey(record) : newrecord}) print "NOT INSERTED!" records['data'] = newdata else: # We report the records to a "pile" file to be executed when published newresultat = {} for source,tableau in resultats.items(): for records in tableau: newdata = [] for record in records['data']: newrecord = {} tmplevel = {} remove = [] for field, value in record.items(): if 'metric.' in field: if record.has_key( field.split('.')[1] ): record[ str(field.split('.')[0])+"."+str(record[field.split('.')[1]])+"."+str(field.split('.')[2]) ] = value remove.append(field) for field in remove: del record[field] for field, value in record.items(): if "." 
in field: tmplevel = combine( tmplevel, build_arbo( field.split('.'), value) ) else: newrecord[field] = value metric = [] if ( tmplevel.has_key('metric') ): for metricname, value in tmplevel['metric'].items(): value["metric"] = metricname if ( 'value' in value.keys() ): metric.append(value) newrecord['perf_data_array'] = metric del (tmplevel['metric']) if ( len(tmplevel.keys()) > 0 ): newrecord = combine(newrecord, tmplevel) newdata.append(newrecord) # We directly go to the temp file record_fail.append({formatRoutingKey(record) : newrecord}) print "NOT INSERTED!" records['data'] = newdata recordFailedBusPublication(record_fail) return newdata