def hook_late_configuration(self, arb):
    """ Read config and fill database """
    resolver = MacroResolver()
    resolver.init(arb.conf)
    for service in arb.conf.services:
        # Only services checked through this module are stored.
        if service.check_command.command.module_type != 'snmp_booster':
            continue

        # Serialize service
        try:
            serialized = dict_serialize(service, resolver, self.datasource)
        except Exception as exp:
            msg = "[SnmpBooster] [code 0907] [%s,%s] %s" % (
                service.host.get_name(), service.get_name(), exp)
            logger.error(msg)
            service.configuration_errors.append(msg)
            continue

        # We want to make a diff between arbiter insert and poller insert.
        # Some backend may need it.
        try:
            self.db_client.update_service_init(serialized['host'],
                                               serialized['service'],
                                               serialized)
        except Exception as exp:
            logger.error(
                "[SnmpBooster] [code 0909] [%s,%s] "
                "%s" %
                (serialized['host'], serialized['service'], str(exp)))
            continue

    logger.info("[SnmpBooster] [code 0908] Done parsing")

    # Disconnect from database
    self.db_client.disconnect()
Example #2
0
    def init(self):
        """Initialise the livestatus broker module: open the log database
        and PNP path, then build the LiveStatus query object from the
        already-loaded configuration."""
        print "Initialisation of the livestatus broker"

        #to_queue is where we get broks from Broker
        #self.to_q = self.properties['to_queue']

        #from_queue is where we push back objects like
        #external commands to the broker
        #self.from_q = self.properties['from_queue']

        self.prepare_log_db()
        self.prepare_pnp_path()

        self.livestatus = LiveStatus(
            self.configs, self.hostname_lookup_table,
            self.servicename_lookup_table, self.hosts, self.services,
            self.contacts, self.hostgroups, self.servicegroups,
            self.contactgroups, self.timeperiods, self.commands,
            self.schedulers, self.pollers, self.reactionners, self.brokers,
            self.dbconn, self.pnp_path, self.from_q)

        # NOTE(review): 'm' is unused after this assignment — presumably
        # MacroResolver is a process-wide singleton and this selects which
        # macros get expanded in output strings; confirm.
        m = MacroResolver()
        m.output_macros = [
            'HOSTOUTPUT', 'HOSTPERFDATA', 'HOSTACKAUTHOR', 'HOSTACKCOMMENT',
            'SERVICEOUTPUT', 'SERVICEPERFDATA', 'SERVICEACKAUTHOR',
            'SERVICEACKCOMMENT'
        ]
    def hook_late_configuration(self, arb):
        """ Read config and fill database """
        mac_resol = MacroResolver()
        mac_resol.init(arb.conf)
        for serv in arb.conf.services:
            # Only services checked through this module are stored.
            if serv.check_command.command.module_type == 'snmp_booster':
                try:
                    # Serialize service
                    dict_serv = dict_serialize(serv,
                                               mac_resol,
                                               self.datasource)
                except Exception as exp:
                    # Serialization failed: log and skip this service.
                    logger.error("[SnmpBooster] [code 0907] [%s,%s] "
                                 "%s" % (serv.host.get_name(),
                                         serv.get_name(),
                                         str(exp)))
                    continue

                # We want to make a diff between arbiter insert and poller insert. Some backend may need it.
                try:
                    self.db_client.update_service_init(dict_serv['host'],
                                                       dict_serv['service'],
                                                       dict_serv)
                except Exception as exp:
                    # Database write failed: log and move on to the next one.
                    logger.error("[SnmpBooster] [code 0909] [%s,%s] "
                                 "%s" % (dict_serv['host'],
                                         dict_serv['service'],
                                         str(exp)))
                    continue

        logger.info("[SnmpBooster] [code 0908] Done parsing")

        # Disconnect from database
        self.db_client.disconnect()
 def setup_with_file(self, path):
     """Arbiter-like test setup: load the configuration file at *path*,
     run it through the parsing pipeline and build a scheduler plus
     external-command managers around it.

     NOTE(review): the pipeline call order below mirrors the arbiter's
     own loading sequence and appears order-sensitive; do not reorder.
     """
     # i am arbiter-like
     self.broks = {}
     self.me = None
     self.log = logger
     self.log.load_obj(self)
     self.config_files = [path]
     self.conf = Config()
     buf = self.conf.read_config(self.config_files)
     raw_objects = self.conf.read_config_buf(buf)
     self.conf.create_objects_for_type(raw_objects, 'arbiter')
     self.conf.create_objects_for_type(raw_objects, 'module')
     self.conf.early_arbiter_linking()
     self.conf.create_objects(raw_objects)
     self.conf.old_properties_names_to_new()
     self.conf.instance_id = 0
     self.conf.instance_name = 'test'
     # Hack push_flavor, that is set by the dispatcher
     self.conf.push_flavor = 0
     self.conf.linkify_templates()
     self.conf.apply_inheritance()
     self.conf.explode()
     print "Aconf.services has %d elements" % len(self.conf.services)
     self.conf.create_reversed_list()
     self.conf.remove_twins()
     self.conf.apply_implicit_inheritance()
     self.conf.fill_default()
     self.conf.remove_templates()
     print "conf.services has %d elements" % len(self.conf.services)
     self.conf.create_reversed_list()
     self.conf.pythonize()
     self.conf.linkify()
     self.conf.apply_dependencies()
     self.conf.explode_global_conf()
     self.conf.propagate_timezone_option()
     self.conf.create_business_rules()
     self.conf.create_business_rules_dependencies()
     self.conf.is_correct()
     self.confs = self.conf.cut_into_parts()
     self.conf.show_errors()
     self.dispatcher = Dispatcher(self.conf, self.me)

     scheddaemon = Shinken(None, False, False, False, None)
     self.sched = Scheduler(scheddaemon)

     scheddaemon.sched = self.sched

     m = MacroResolver()
     m.init(self.conf)
     self.sched.load_conf(self.conf)
     # 'applyer' executes external commands inside the scheduler,
     # 'dispatcher' routes them from the arbiter side.
     e = ExternalCommandManager(self.conf, 'applyer')
     self.sched.external_command = e
     e.load_scheduler(self.sched)
     e2 = ExternalCommandManager(self.conf, 'dispatcher')
     e2.load_arbiter(self)
     self.external_command_dispatcher = e2
     self.sched.schedule()
Example #5
0
    def get_obsessive_compulsive_processor_command(self):
        """Queue the OCHP event handler for this host when obsessing is
        enabled both globally and on this host."""
        klass = self.__class__
        # Nothing to do unless obsessing is switched on at both levels.
        if not (klass.obsess_over and self.obsess_over_host):
            return

        resolver = MacroResolver()
        event_data = self.get_data_for_event_handler()
        command_line = resolver.resolve_command(klass.ochp_command, event_data)
        handler = EventHandler(command_line, timeout=klass.ochp_timeout)

        # ok we can put it in our temp action queue
        self.actions.append(handler)
Example #6
0
    def setup_with_file(self, path):
        """Arbiter-like test setup: load the configuration file at *path*,
        run it through the full parsing pipeline, then attach a scheduler
        and external-command managers.

        :param path: path to the main shinken configuration file
        """
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        # Read the configuration exactly once (the previous version called
        # read_config twice and discarded the first result).
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # NOTE(review): pipeline order mirrors the arbiter's own loading
        # sequence and appears order-sensitive; do not reorder.
        self.conf.linkify_templates()
        self.conf.apply_inheritance()
        self.conf.explode()
        self.conf.create_reversed_list()
        self.conf.remove_twins()
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.clean_useless()
        self.conf.pythonize()
        self.conf.linkify()
        self.conf.apply_dependancies()  # (sic) historical Config API name
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        self.confs = self.conf.cut_into_parts()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Shinken(None, False, False, False, None)
        self.sched = Scheduler(scheddaemon)

        scheddaemon.sched = self.sched

        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf)
        # 'applyer' executes external commands inside the scheduler,
        # 'dispatcher' routes them from the arbiter side.
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.schedule()
Example #7
0
    def __init__(self, path, macros, overwrite, runners, output_dir=None, dbmod='', db_direct_insert=False):
        """Arbiter-like discovery loader.

        Reads the configuration at *path*, appends *macros* to the raw
        config buffer (so they override values from resource.cfg), runs
        the parsing pipeline and finally opens the discovery database.
        """
        # i am arbiter-like
        self.log = logger
        self.overwrite = overwrite
        self.runners = runners
        self.output_dir = output_dir
        self.dbmod = dbmod
        self.db_direct_insert = db_direct_insert
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()

        buf = self.conf.read_config(self.config_files)

        # Add macros on the end of the buf so they will
        # overwrite the resource.cfg ones
        for (m, v) in macros:
            buf += '\n$%s$=%s\n' % (m, v)

        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        # NOTE(review): pipeline order mirrors the arbiter's own loading
        # sequence and appears order-sensitive; do not reorder.
        self.conf.linkify_templates()
        self.conf.apply_inheritance()
        self.conf.explode()
        self.conf.create_reversed_list()
        self.conf.remove_twins()
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.pythonize()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.is_correct()

        self.discoveryrules = self.conf.discoveryrules
        self.discoveryruns = self.conf.discoveryruns

        m = MacroResolver()
        m.init(self.conf)

        # Hash = name, and in it (key, value)
        self.disco_data = {}
        # Hash = name, and in it rules that apply
        self.disco_matches = {}

        self.init_database()
Example #8
0
    def init(self):
        """Initialise the livestatus broker module: open the log database
        and PNP path, then build the LiveStatus query object from the
        already-loaded configuration."""
        print "Initialisation of the livestatus broker"

        #to_queue is where we get broks from Broker
        #self.to_q = self.properties['to_queue']

        #from_queue is where we push back objects like
        #external commands to the broker
        #self.from_q = self.properties['from_queue']

        self.prepare_log_db()
        self.prepare_pnp_path()


        self.livestatus = LiveStatus(self.configs, self.hostname_lookup_table, self.servicename_lookup_table, self.hosts, self.services, self.contacts, self.hostgroups, self.servicegroups, self.contactgroups, self.timeperiods, self.commands, self.schedulers, self.pollers, self.reactionners, self.brokers, self.dbconn, self.pnp_path, self.from_q)

        # NOTE(review): 'm' is unused after this assignment — presumably
        # MacroResolver is a process-wide singleton and this selects which
        # macros get expanded in output strings; confirm.
        m = MacroResolver()
        m.output_macros = ['HOSTOUTPUT', 'HOSTPERFDATA', 'HOSTACKAUTHOR', 'HOSTACKCOMMENT', 'SERVICEOUTPUT', 'SERVICEPERFDATA', 'SERVICEACKAUTHOR', 'SERVICEACKCOMMENT']
    def init(self):
        """Initialise the thrift broker module: open the log database and
        PNP path, then build the Thrift status object from the
        already-loaded configuration."""
        print "Initialisation of the thrift broker"

        # to_queue is where we get broks from Broker
        # self.to_q = self.properties['to_queue']

        # from_queue is where we push back objects like
        # external commands to the broker
        # self.from_q = self.properties['from_queue']

        # db has to be opened in the manage_brok thread
        self.prepare_log_db()
        self.prepare_pnp_path()

        self.thrift = Thrift_status(
            self.configs,
            self.hosts,
            self.services,
            self.contacts,
            self.hostgroups,
            self.servicegroups,
            self.contactgroups,
            self.timeperiods,
            self.commands,
            self.schedulers,
            self.pollers,
            self.reactionners,
            self.brokers,
            self.dbconn,
            self.pnp_path,
            self.from_q,
        )

        # NOTE(review): 'm' is unused after this assignment — presumably
        # MacroResolver is a process-wide singleton and this selects which
        # macros get expanded in output strings; confirm.
        m = MacroResolver()
        m.output_macros = [
            "HOSTOUTPUT",
            "HOSTPERFDATA",
            "HOSTACKAUTHOR",
            "HOSTACKCOMMENT",
            "SERVICEOUTPUT",
            "SERVICEPERFDATA",
            "SERVICEACKAUTHOR",
            "SERVICEACKCOMMENT",
        ]
    def test_bprule_expand_template_macros(self):
        """Drive a formatted business rule to CRITICAL and check that the
        output template macros expand correctly for every underlying
        element and for the rule service itself."""
        svc_cor = self.sched.services.find_srv_by_name_and_hostname(
            "dummy", "formatted_bp_rule_output")
        self.assertIs(True, svc_cor.got_business_rule)
        self.assertIsNot(svc_cor.business_rule, None)

        svc1 = self.sched.services.find_srv_by_name_and_hostname(
            "test_host_01", "srv1")
        svc2 = self.sched.services.find_srv_by_name_and_hostname(
            "test_host_02", "srv2")
        svc3 = self.sched.services.find_srv_by_name_and_hostname(
            "test_host_03", "srv3")
        hst4 = self.sched.hosts.find_by_name("test_host_04")

        # Push two rounds of results so states become hard.
        for _ in range(2):
            self.scheduler_loop(1, [[svc1, 0, 'OK test_host_01/srv1'],
                                    [svc2, 1, 'WARNING test_host_02/srv2'],
                                    [svc3, 2, 'CRITICAL test_host_03/srv3'],
                                    [hst4, 2, 'DOWN test_host_04']])

        time.sleep(61)
        self.sched.manage_internal_checks()
        self.sched.consume_results()

        # Performs checks: each element expands the template to its own
        # status and identity.
        m = MacroResolver()
        template = "$STATUS$,$SHORTSTATUS$,$HOSTNAME$,$SERVICEDESC$,$FULLNAME$"
        cases = [
            (svc1, "OK,O,test_host_01,srv1,test_host_01/srv1"),
            (svc2, "WARNING,W,test_host_02,srv2,test_host_02/srv2"),
            (svc3, "CRITICAL,C,test_host_03,srv3,test_host_03/srv3"),
            (hst4, "DOWN,D,test_host_04,,test_host_04"),
            (svc_cor,
             "CRITICAL,C,dummy,formatted_bp_rule_output,dummy/formatted_bp_rule_output"),
        ]
        for element, wanted in cases:
            data = element.get_data_for_checks()
            output = m.resolve_simple_macros_in_string(template, data)
            self.assertEqual(wanted, output)
    def test_bprule_expand_template_macros(self):
        """Drive a formatted business rule to CRITICAL and check that the
        output template macros expand correctly for every underlying
        element and for the rule service itself (legacy assert_ style)."""
        svc_cor = self.sched.services.find_srv_by_name_and_hostname("dummy", "formatted_bp_rule_output")
        self.assert_(svc_cor.got_business_rule is True)
        self.assert_(svc_cor.business_rule is not None)

        svc1 = self.sched.services.find_srv_by_name_and_hostname("test_host_01", "srv1")
        svc2 = self.sched.services.find_srv_by_name_and_hostname("test_host_02", "srv2")
        svc3 = self.sched.services.find_srv_by_name_and_hostname("test_host_03", "srv3")
        hst4 = self.sched.hosts.find_by_name("test_host_04")

        # Push two rounds of results so states become hard.
        for i in range(2):
            self.scheduler_loop(
                1,
                [
                    [svc1, 0, "OK test_host_01/srv1"],
                    [svc2, 1, "WARNING test_host_02/srv2"],
                    [svc3, 2, "CRITICAL test_host_03/srv3"],
                    [hst4, 2, "DOWN test_host_04"],
                ],
            )

        # Let the internal business-rule check become due.
        # NOTE(review): presumably tied to a 60s check interval — confirm.
        time.sleep(61)
        self.sched.manage_internal_checks()
        self.sched.consume_results()

        # Performs checks
        m = MacroResolver()
        template = "$STATUS$,$SHORTSTATUS$,$HOSTNAME$,$SERVICEDESC$,$FULLNAME$"
        data = svc1.get_data_for_checks()
        output = m.resolve_simple_macros_in_string(template, data)
        self.assert_(output == "OK,O,test_host_01,srv1,test_host_01/srv1")
        data = svc2.get_data_for_checks()
        output = m.resolve_simple_macros_in_string(template, data)
        self.assert_(output == "WARNING,W,test_host_02,srv2,test_host_02/srv2")
        data = svc3.get_data_for_checks()
        output = m.resolve_simple_macros_in_string(template, data)
        self.assert_(output == "CRITICAL,C,test_host_03,srv3,test_host_03/srv3")
        data = hst4.get_data_for_checks()
        output = m.resolve_simple_macros_in_string(template, data)
        self.assert_(output == "DOWN,D,test_host_04,,test_host_04")
        data = svc_cor.get_data_for_checks()
        output = m.resolve_simple_macros_in_string(template, data)
        self.assert_(output == "CRITICAL,C,dummy,formatted_bp_rule_output,dummy/formatted_bp_rule_output")
Example #12
0
 def launch(self):
     """Build the discovery-run command line (no macro data) and start it
     as an event handler with a fixed 5-minute timeout."""
     resolver = MacroResolver()
     command_line = resolver.resolve_command(self.discoveryrun_command, [])
     self.current_launch = EventHandler(command_line, timeout=300)
     self.current_launch.execute()
Example #13
0
def expand_with_macros(ref, value):
    """Expand shinken macros found in *value* using *ref*'s check data."""
    resolver = MacroResolver()
    data = ref.get_data_for_checks()
    return resolver.resolve_simple_macros_in_string(value, data)
Example #14
0
    def setup_with_file(self, path):
        """Arbiter-like test setup: load the configuration file at *path*,
        run it through the full parsing pipeline and attach a scheduler
        plus external-command managers.

        NOTE(review): pipeline call order below mirrors the arbiter's own
        loading sequence and appears order-sensitive; do not reorder.
        """
        time_hacker.set_my_time()
        self.print_header()
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        #import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        #import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        #import pdb;pdb.set_trace()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.compute_hash()
        #print "conf.services has %d elements" % len(self.conf.services)
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        # Bail out early when the configuration did not validate.
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Shinken(None, False, False, False, None, None)
        self.sched = Scheduler(scheddaemon)

        scheddaemon.sched = self.sched
        scheddaemon.modules_dir = modules_dir
        scheddaemon.load_modules_manager()
        # Remember to clean the logs we just created before launching tests
        self.clear_logs()
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf, in_test=True)
        # 'applyer' executes external commands inside the scheduler,
        # 'dispatcher' routes them from the arbiter side.
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.conf.accept_passive_unknown_check_results = False

        self.sched.schedule()
Example #15
0
    def __init__(self, path, output_dir, macros, overright, runners):
        """Arbiter-like discovery loader.

        Reads the configuration at *path*, appends *macros* to the raw
        config buffer (so they override values from resource.cfg), runs
        the parsing pipeline and prepares the discovery rule/run tables.

        :param path: main configuration file
        :param output_dir: directory for discovery output
        :param macros: iterable of (name, value) macro pairs
        :param overright: overwrite flag, stored for later discovery steps
        :param runners: runners allowed to launch discovery
        """
        # i am arbiter-like
        self.log = logger
        self.overright = overright
        self.runners = runners
        self.output_dir = output_dir
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        # Read the configuration exactly once (the previous version called
        # read_config twice and discarded the first result).
        buf = self.conf.read_config(self.config_files)

        # Add macros on the end of the buf so they will
        # override the resource.cfg ones
        for (m, v) in macros:
            buf += '\n$%s$=%s\n' % (m, v)

        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        # NOTE(review): pipeline order mirrors the arbiter's own loading
        # sequence and appears order-sensitive; do not reorder.
        self.conf.linkify_templates()
        self.conf.apply_inheritance()
        self.conf.explode()
        self.conf.create_reversed_list()
        self.conf.remove_twins()
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.clean_useless()
        self.conf.pythonize()
        self.conf.linkify()
        self.conf.apply_dependancies()  # (sic) historical Config API name
        self.conf.is_correct()

        self.discoveryrules = self.conf.discoveryrules
        self.discoveryruns = self.conf.discoveryruns

        # Unlike the full arbiter setup, no scheduler/dispatcher is built
        # here: discovery only needs the macros resolved.
        m = MacroResolver()
        m.init(self.conf)

        # Hash = name, and in it (key, value)
        self.disco_data = {}
        # Hash = name, and in it rules that apply
        self.disco_matches = {}
Example #16
0
 def launch(self):
     """Resolve the discovery-run command line and execute it as an
     event handler with a fixed 5-minute timeout."""
     m = MacroResolver()
     # No extra macro data for a plain discovery run.
     data = []
     cmd = m.resolve_command(self.discoveryrun_command, data)
     self.current_launch = EventHandler(cmd, timeout=300)
     self.current_launch.execute()
Example #17
0
    def setup_with_file(self, path):
        """Arbiter-like test setup: load the configuration file at *path*,
        run it through the parsing pipeline and attach a scheduler plus
        external-command managers.

        NOTE(review): pipeline call order below mirrors the arbiter's own
        loading sequence and appears order-sensitive; do not reorder.
        """
        time_hacker.set_my_time()
        self.print_header()
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        #import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        #import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        #import pdb;pdb.set_trace()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.compute_hash()
        #print "conf.services has %d elements" % len(self.conf.services)
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        # Bail out early when the configuration did not validate.
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Shinken(None, False, False, False, None, None)
        self.sched = Scheduler(scheddaemon)

        scheddaemon.sched = self.sched
        scheddaemon.modules_dir = modules_dir
        scheddaemon.load_modules_manager()
        # Remember to clean the logs we just created before launching tests
        self.clear_logs()
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf, in_test=True)
        # 'applyer' executes external commands inside the scheduler,
        # 'dispatcher' routes them from the arbiter side.
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.conf.accept_passive_unknown_check_results = False

        self.sched.schedule()
Example #18
0
    def get_urls(self,
                 obj,
                 url,
                 default_title="Url",
                 default_icon="globe",
                 popover=False):
        '''
        Returns formatted HTML for an element URL

        url string may contain a list of urls separated by |

        Each url may be formatted as:
            - url,,description
            - title::description,,url
            - title,,icon::description,,url

        description is optional

        If title is not specified, default_title is used as title
        If icon is not specified, default_icon is used as icon

        If popover is true, a bootstrap popover is built, else a standard link ...
        '''
        logger.debug("[WebUI] get_urls: %s / %s / %s / %d", url, default_title,
                     default_icon, popover)

        result = []
        for item in url.split('|'):
            # Optional fields are parsed EAFP-style: a failed tuple unpack
            # raises ValueError, meaning the field separator was absent.
            # Only ValueError is caught so real errors still propagate
            # (the previous bare 'except:' clauses hid them).
            try:
                (title, url) = item.split('::')
            except ValueError:
                title = "%s,,%s" % (default_title, default_icon)
                url = item

            try:
                (title, icon) = title.split(',,')
            except ValueError:
                icon = default_icon

            try:
                (description, real_url) = url.split(',,')
            except ValueError:
                description = 'No description provided'
                real_url = url

            # Replace MACROS in url and description
            if hasattr(obj, 'get_data_for_checks'):
                url = MacroResolver().resolve_simple_macros_in_string(
                    real_url, obj.get_data_for_checks())
                description = MacroResolver().resolve_simple_macros_in_string(
                    description, obj.get_data_for_checks())

            logger.debug("[WebUI] get_urls, found: %s / %s / %s / %s", title,
                         icon, url, description)

            if popover:
                if url != '':
                    result.append(
                        '''<a href="%s" target="_blank" role="button" data-toggle="popover medium" data-html="true" data-content="%s" data-trigger="hover focus" data-placement="bottom"><i class="fa fa-%s"></i>&nbsp;%s</a>'''
                        % (url, description, icon, title))
                else:
                    result.append(
                        '''<span data-toggle="popover medium" data-html="true" data-content="%s" data-trigger="hover focus" data-placement="bottom"><i class="fa fa-%s"></i>&nbsp;%s</span>'''
                        % (description, icon, title))
            else:
                if url != '':
                    result.append(
                        '''<a href="%s" target="_blank" title="%s"><i class="fa fa-%s"></i>&nbsp;%s</a>'''
                        % (url, description, icon, title))
                else:
                    result.append(
                        '''<span title="%s"><i class="fa fa-%s"></i>&nbsp;%s</span>'''
                        % (description, icon, title))

        return result
Example #19
0
    def setup_with_file(self, path):
        """Arbiter-like test setup: load the configuration file at *path*,
        optionally simulate the arbiter module-loading phase, then run the
        parsing pipeline and attach a scheduler plus external-command
        managers.

        NOTE(review): pipeline call order below mirrors the arbiter's own
        loading sequence and appears order-sensitive; do not reorder.
        """
        time_hacker.set_my_time()
        self.print_header()
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()

        # If we got one arbiter defined here (before default) we should be in a case where
        # the tester want to load/test a module, so we simulate an arbiter daemon
        # and the modules loading phase. As it has its own modulesmanager, should
        # not impact scheduler modules ones, especially we are asking for arbiter type :)
        if len(self.conf.arbiters) == 1:
            arbdaemon = Arbiter([''],[''], False, False, None, None)
            # only load if the module_dir is really existing, so was set explicitly
            # in the test configuration
            if os.path.exists(getattr(self.conf, 'modules_dir', '')):
                arbdaemon.modules_dir = self.conf.modules_dir
                arbdaemon.load_modules_manager()

                # we request the instances without them being *started*
                # (for those that are concerned ("external" modules):
                # we will *start* these instances after we have been daemonized (if requested)
                me = None
                for arb in self.conf.arbiters:
                    me = arb
                    arbdaemon.modules_manager.set_modules(arb.modules)
                    arbdaemon.do_load_modules()
                    arbdaemon.load_modules_configuration_objects(raw_objects)

        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        #import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        #import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        #import pdb;pdb.set_trace()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.compute_hash()
        #print "conf.services has %d elements" % len(self.conf.services)
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.set_initial_state()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        # Bail out early when the configuration did not validate.
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Shinken(None, False, False, False, None, None)
        self.scheddaemon = scheddaemon
        self.sched = scheddaemon.sched
        scheddaemon.modules_dir = modules_dir
        scheddaemon.load_modules_manager()
        # Remember to clean the logs we just created before launching tests
        self.clear_logs()
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf, in_test=True)
        # 'applyer' executes external commands inside the scheduler,
        # 'dispatcher' routes them from the arbiter side.
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.conf.accept_passive_unknown_check_results = False

        self.sched.schedule()
Example #20
0
 def launch(self, ctx=None, timeout=300):
     """Resolve the discovery-run command and execute it.

     :param ctx: optional list of context objects passed to the macro
         resolver (defaults to an empty list).
     :param timeout: seconds before the event handler gives up.
     """
     # A literal [] default would be evaluated once at def time and
     # shared across all calls; build a fresh list per call instead.
     if ctx is None:
         ctx = []
     m = MacroResolver()
     cmd = m.resolve_command(self.discoveryrun_command, ctx)
     self.current_launch = EventHandler(cmd, timeout=timeout)
     self.current_launch.execute()
Example #21
0
    def setup_with_file(self, path):
        """Load the Shinken configuration at *path* and boot an
        in-process, arbiter-like test environment: parse the config,
        run every compilation pass in order, then wire up a scheduler
        and the external-command managers.
        """
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        # Raw file text -> parsed buffer -> typed config objects
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        self.conf.old_properties_names_to_new()
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        # Template/inheritance compilation passes — the order matters.
        self.conf.linkify_templates()
        self.conf.apply_inheritance()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.create_reversed_list()
        self.conf.remove_twins()
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.compute_hash()
        #print "conf.services has %d elements" % len(self.conf.services)
        self.conf.create_reversed_list()
        self.conf.pythonize()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            return

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        # Build a scheduler daemon around the freshly compiled conf.
        scheddaemon = Shinken(None, False, False, False, None)
        self.sched = Scheduler(scheddaemon)

        scheddaemon.sched = self.sched

        # Init the (unique-instance) macro resolver with our conf.
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf, in_test=True)
        # 'applyer' manager executes external commands on the scheduler.
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        # 'dispatcher' manager routes commands at the arbiter level.
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2

        self.sched.schedule()
Example #22
0
 def launch(self, ctx=None, timeout=300):
     """Resolve the discovery-run command and execute it.

     :param ctx: optional list of context objects passed to the macro
         resolver (defaults to an empty list).
     :param timeout: seconds before the event handler gives up.
     """
     # A literal [] default would be evaluated once at def time and
     # shared across all calls; build a fresh list per call instead.
     if ctx is None:
         ctx = []
     m = MacroResolver()
     cmd = m.resolve_command(self.discoveryrun_command, ctx)
     self.current_launch = EventHandler(cmd, timeout=timeout)
     self.current_launch.execute()
Example #23
0
 def get_mr(self):
     """Return a MacroResolver initialised against our configuration."""
     resolver = MacroResolver()
     resolver.init(self.conf)
     return resolver
Example #24
0
    def setup_new_conf(self):
        """Install a configuration freshly pushed by the arbiter.

        Unpacks ``self.new_conf``, registers the pollers' Pyro URIs,
        applies overrides, (re)loads modules and the Pyro interfaces,
        then hands the conf to the scheduler and plugs in the external
        command applyer.
        """
        #self.use_ssl = self.app.use_ssl
        (conf, override_conf, modules, satellites) = self.new_conf
        self.new_conf = None

        # In fact it make the scheduler just DIE as a bad guy. 
        # Must manage it better or not manage it at all!
        #if self.cur_conf and self.cur_conf.magic_hash == conf.magic_hash:
        #    print("I received a conf with same hash than me, I skip it.")
        #    return

        self.conf = conf
        self.cur_conf = conf
        self.override_conf = override_conf
        self.modules = modules
        self.satellites = satellites
        #self.pollers = self.app.pollers

        # Now We create our pollers
        for pol_id in satellites['pollers']:
            # Must look if we already have it
            already_got = pol_id in self.pollers
            p = satellites['pollers'][pol_id]
            self.pollers[pol_id] = p
            uri = pyro.create_uri(p['address'], p['port'], 'Schedulers', self.use_ssl)
            self.pollers[pol_id]['uri'] = uri
            # last_connexion = 0 forces a (re)connection attempt.
            self.pollers[pol_id]['last_connexion'] = 0
            print "Got a poller", p

        #First mix conf and override_conf to have our definitive conf
        for prop in self.override_conf:
            print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
            val = self.override_conf[prop]
            setattr(self.conf, prop, val)

        if self.conf.use_timezone != 'NOTSET':
            print "Setting our timezone to", self.conf.use_timezone
            os.environ['TZ'] = self.conf.use_timezone
            time.tzset()

        print "I've got modules", self.modules

        # TODO: if scheduler had previous modules instanciated it must clean them !
        self.modules_manager.set_modules(self.modules)
        self.do_load_modules()
        # And start external ones too
        self.modules_manager.start_external_instances()

        # give it an interface
        # But first remove previous interface if exists
        if self.ichecks is not None:
            print "Deconnecting previous Check Interface from pyro_daemon"
            self.pyro_daemon.unregister(self.ichecks)
        #Now create and connect it
        self.ichecks = IChecks(self.sched)
        self.uri = self.pyro_daemon.register(self.ichecks, "Checks")
        print "The Checks Interface uri is:", self.uri

        #Same for Broks
        if self.ibroks is not None:
            print "Deconnecting previous Broks Interface from pyro_daemon"
            self.pyro_daemon.unregister(self.ibroks)
        #Create and connect it
        self.ibroks = IBroks(self.sched)
        self.uri2 = self.pyro_daemon.register(self.ibroks, "Broks")
        print "The Broks Interface uri is:", self.uri2

        print("Loading configuration..")
        self.conf.explode_global_conf()

        #we give sched it's conf
        self.sched.reset()
        self.sched.load_conf(self.conf)
        self.sched.load_satellites(self.pollers, self.reactionners)

        #We must update our Config dict macro with good value
        #from the config parameters
        self.sched.conf.fill_resource_macros_names_macros()
        #print "DBG: got macors", self.sched.conf.macros

        #Creating the Macroresolver Class & unique instance
        m = MacroResolver()
        m.init(self.conf)

        #self.conf.dump()
        #self.conf.quick_debug()

        #Now create the external commander
        #it's a applyer : it role is not to dispatch commands,
        #but to apply them
        e = ExternalCommandManager(self.conf, 'applyer')

        #Scheduler need to know about external command to
        #activate it if necessery
        self.sched.load_external_command(e)

        #External command need the sched because he can raise checks
        e.load_scheduler(self.sched)
Example #25
0
    def setup_new_conf(self):
        """Install a configuration freshly pushed by the arbiter.

        Unpacks ``self.new_conf``, registers this instance with the
        stats manager, unpickles the conf and tags it, builds HTTP URIs
        for pollers and reactionners, applies property overrides,
        (re)loads modules and HTTP interfaces, then hands the conf to
        the scheduler and plugs in the external command applyer.
        """
        pk = self.new_conf
        conf_raw = pk["conf"]
        override_conf = pk["override_conf"]
        modules = pk["modules"]
        satellites = pk["satellites"]
        instance_name = pk["instance_name"]
        push_flavor = pk["push_flavor"]
        skip_initial_broks = pk["skip_initial_broks"]
        accept_passive_unknown_check_results = pk["accept_passive_unknown_check_results"]
        api_key = pk["api_key"]
        secret = pk["secret"]
        http_proxy = pk["http_proxy"]
        statsd_host = pk["statsd_host"]
        statsd_port = pk["statsd_port"]
        statsd_prefix = pk["statsd_prefix"]
        statsd_enabled = pk["statsd_enabled"]
        statsd_interval = pk["statsd_interval"]
        statsd_types = pk["statsd_types"]
        statsd_pattern = pk["statsd_pattern"]

        # horay, we got a name, we can set it in our stats objects
        statsmgr.register(
            self.sched,
            instance_name,
            "scheduler",
            api_key=api_key,
            secret=secret,
            http_proxy=http_proxy,
            statsd_host=statsd_host,
            statsd_port=statsd_port,
            statsd_prefix=statsd_prefix,
            statsd_enabled=statsd_enabled,
            statsd_interval=statsd_interval,
            statsd_types=statsd_types,
            statsd_pattern=statsd_pattern,
        )

        t0 = time.time()
        conf = cPickle.loads(conf_raw)
        logger.debug("Conf received at %d. Unserialized in %d secs", t0, time.time() - t0)
        self.new_conf = None

        # Tag the conf with our data
        self.conf = conf
        self.conf.push_flavor = push_flavor
        self.conf.instance_name = instance_name
        self.conf.skip_initial_broks = skip_initial_broks
        self.conf.accept_passive_unknown_check_results = accept_passive_unknown_check_results

        self.cur_conf = conf
        self.override_conf = override_conf
        self.modules = modules
        self.satellites = satellites
        # self.pollers = self.app.pollers

        if self.conf.human_timestamp_log:
            logger.set_human_format()

        # Now We create our pollers
        for pol_id in satellites["pollers"]:
            p = satellites["pollers"][pol_id]
            self.pollers[pol_id] = p

            if p["name"] in override_conf["satellitemap"]:
                p = dict(p)  # make a copy
                p.update(override_conf["satellitemap"][p["name"]])

            proto = "http"
            if p["use_ssl"]:
                proto = "https"
            uri = "%s://%s:%s/" % (proto, p["address"], p["port"])
            self.pollers[pol_id]["uri"] = uri
            # last_connection = 0 forces a (re)connection attempt.
            self.pollers[pol_id]["last_connection"] = 0

        # Now We create our reactionners
        for reac_id in satellites["reactionners"]:
            reac = satellites["reactionners"][reac_id]
            self.reactionners[reac_id] = reac

            if reac["name"] in override_conf["satellitemap"]:
                reac = dict(reac)  # make a copy
                reac.update(override_conf["satellitemap"][reac["name"]])

            proto = "http"
            # BUGFIX: test the reactionner's own ssl flag, not 'p' (the
            # stale loop variable left over from the pollers loop above,
            # which was also a NameError when there were no pollers).
            if reac["use_ssl"]:
                proto = "https"
            uri = "%s://%s:%s/" % (proto, reac["address"], reac["port"])
            self.reactionners[reac_id]["uri"] = uri
            self.reactionners[reac_id]["last_connection"] = 0

        # First mix conf and override_conf to have our definitive conf
        for prop in self.override_conf:
            # print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
            val = self.override_conf[prop]
            setattr(self.conf, prop, val)

        if self.conf.use_timezone != "":
            logger.debug("Setting our timezone to %s", str(self.conf.use_timezone))
            os.environ["TZ"] = self.conf.use_timezone
            time.tzset()

        if len(self.modules) != 0:
            logger.debug("I've got %s modules", str(self.modules))

        # TODO: if scheduler had previous modules instanciated it must clean them!
        self.modules_manager.set_modules(self.modules)
        self.do_load_modules()

        # give it an interface
        # But first remove previous interface if exists
        if self.ichecks is not None:
            logger.debug("Deconnecting previous Check Interface")
            self.http_daemon.unregister(self.ichecks)
        # Now create and connect it
        self.ichecks = IChecks(self.sched)
        self.http_daemon.register(self.ichecks)
        logger.debug("The Scheduler Interface uri is: %s", self.uri)

        # Same for Broks
        if self.ibroks is not None:
            logger.debug("Deconnecting previous Broks Interface")
            self.http_daemon.unregister(self.ibroks)
        # Create and connect it
        self.ibroks = IBroks(self.sched)
        self.http_daemon.register(self.ibroks)

        logger.info("Loading configuration.")
        self.conf.explode_global_conf()

        # we give sched it's conf
        self.sched.reset()
        self.sched.load_conf(self.conf)
        self.sched.load_satellites(self.pollers, self.reactionners)

        # We must update our Config dict macro with good value
        # from the config parameters
        self.sched.conf.fill_resource_macros_names_macros()
        # print "DBG: got macros", self.sched.conf.macros

        # Creating the Macroresolver Class & unique instance
        m = MacroResolver()
        m.init(self.conf)

        # self.conf.dump()
        # self.conf.quick_debug()

        # Now create the external commander
        # it's a applyer: it role is not to dispatch commands,
        # but to apply them
        e = ExternalCommandManager(self.conf, "applyer")

        # Scheduler need to know about external command to
        # activate it if necessary
        self.sched.load_external_command(e)

        # External command need the sched because he can raise checks
        e.load_scheduler(self.sched)

        # We clear our schedulers managed (it's us :) )
        # and set ourself in it
        self.schedulers = {self.conf.instance_id: self.sched}
    def hook_late_configuration(self, arb):
        """ Read config and fill memcached """
        # The resolver's state depends only on arb.conf, so build and
        # init it once instead of once per service (loop-invariant).
        mac_resol = MacroResolver()
        mac_resol.init(arb.conf)
        for serv in arb.conf.services:
            if serv.check_command.command.module_type == 'snmp_booster':
                chk = serv.check_command.command
                data = serv.get_data_for_checks()
                command_line = mac_resol.resolve_command(serv.check_command,
                                                         data)

                # Clean command
                clean_command = shlex.split(command_line.encode('utf8',
                                                                'ignore'))
                # If the command doesn't seem good
                if len(clean_command) <= 1:
                    logger.error("[SnmpBooster] Bad command "
                                 "detected: %s" % chk.command)
                    continue

                # we do not want the first member, check_snmp thing
                args = parse_args(clean_command[1:])
                (host, community, version,
                 triggergroup, dstemplate, instance, instance_name) = args

                # Get key from memcached
                obj_key = str(host)
                # looking for old datas
                obj = self.memcached.get(obj_key)

                # Don't force check on first launch
                try:
                    if obj is not None:
                        # Host found
                        new_obj = SNMPHost(host, community, version)
                        if not obj == new_obj:
                            # Update host
                            obj.community = new_obj.community
                            obj.version = new_obj.version
                        new_serv = SNMPService(serv, obj, triggergroup,
                                               dstemplate, instance,
                                               instance_name,
                                               serv.service_description)
                        new_serv.set_oids(self.datasource)
                        new_serv.set_triggers(self.datasource)
                        obj.update_service(new_serv)
                        obj.frequences[serv.check_interval].forced = False
                        # Keep the entry for one week (604800 s).
                        self.memcached.set(obj_key, obj, time=604800)
                    else:
                        # No old datas for this host
                        new_obj = SNMPHost(host, community, version)
                        new_serv = SNMPService(serv, new_obj, triggergroup,
                                               dstemplate, instance,
                                               instance_name,
                                               serv.service_description)
                        new_serv.set_oids(self.datasource)
                        new_serv.set_triggers(self.datasource)
                        new_obj.update_service(new_serv)
                        # Save new host in memcache
                        self.memcached.set(obj_key, new_obj, time=604800)
                except Exception as e:
                    # Best-effort: log and continue with the next service.
                    message = ("[SnmpBooster] Error adding : "
                               "Host %s - Service %s - Error related "
                               "to: %s" % (obj_key,
                                           serv.service_description,
                                           str(e)))
                    logger.error(message)
Example #27
0
    def setup_new_conf(self):
        """Install a configuration freshly pushed by the arbiter.

        Unpacks ``self.new_conf``, registers with the stats manager,
        unpickles and tags the conf, builds poller HTTP URIs, applies
        property overrides, (re)loads modules and HTTP interfaces, then
        hands the conf to the scheduler and plugs in the external
        command applyer.
        """
        pk = self.new_conf
        conf_raw = pk['conf']
        override_conf = pk['override_conf']
        modules = pk['modules']
        satellites = pk['satellites']
        instance_name = pk['instance_name']
        push_flavor = pk['push_flavor']
        skip_initial_broks = pk['skip_initial_broks']
        accept_passive_unknown_check_results = pk['accept_passive_unknown_check_results']

        # horay, we got a name, we can set it in our stats objects
        statsmgr.register(instance_name, 'scheduler')

        t0 = time.time()
        conf = cPickle.loads(conf_raw)
        logger.debug("Conf received at %d. Unserialized in %d secs" % (t0, time.time() - t0))
        self.new_conf = None

        # Tag the conf with our data
        self.conf = conf
        self.conf.push_flavor = push_flavor
        self.conf.instance_name = instance_name
        self.conf.skip_initial_broks = skip_initial_broks
        self.conf.accept_passive_unknown_check_results = accept_passive_unknown_check_results

        self.cur_conf = conf
        self.override_conf = override_conf
        self.modules = modules
        self.satellites = satellites
        #self.pollers = self.app.pollers

        if self.conf.human_timestamp_log:
            logger.set_human_format()

        # Now We create our pollers
        for pol_id in satellites['pollers']:
            # Must look if we already have it
            already_got = pol_id in self.pollers
            p = satellites['pollers'][pol_id]
            self.pollers[pol_id] = p

            if p['name'] in override_conf['satellitemap']:
                p = dict(p)  # make a copy
                p.update(override_conf['satellitemap'][p['name']])

            proto = 'http'
            if p['use_ssl']:
                proto = 'https'
            uri = '%s://%s:%s/' % (proto, p['address'], p['port'])
            self.pollers[pol_id]['uri'] = uri
            # last_connection = 0 forces a (re)connection attempt.
            self.pollers[pol_id]['last_connection'] = 0

        # First mix conf and override_conf to have our definitive conf
        for prop in self.override_conf:
            #print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
            val = self.override_conf[prop]
            setattr(self.conf, prop, val)

        if self.conf.use_timezone != '':
            logger.debug("Setting our timezone to %s" % str(self.conf.use_timezone))
            os.environ['TZ'] = self.conf.use_timezone
            time.tzset()

        if len(self.modules) != 0:
            logger.debug("I've got %s modules" % str(self.modules))

        # TODO: if scheduler had previous modules instanciated it must clean them!
        self.modules_manager.set_modules(self.modules)
        self.do_load_modules()

        # give it an interface
        # But first remove previous interface if exists
        if self.ichecks is not None:
            logger.debug("Deconnecting previous Check Interface")
            self.http_daemon.unregister(self.ichecks)
        # Now create and connect it
        self.ichecks = IChecks(self.sched)
        self.http_daemon.register(self.ichecks)
        logger.debug("The Scheduler Interface uri is: %s" % self.uri)

        # Same for Broks
        if self.ibroks is not None:
            logger.debug("Deconnecting previous Broks Interface")
            self.http_daemon.unregister(self.ibroks)
        # Create and connect it
        self.ibroks = IBroks(self.sched)
        self.http_daemon.register(self.ibroks)

        logger.info("Loading configuration.")
        self.conf.explode_global_conf()

        # we give sched it's conf
        self.sched.reset()
        self.sched.load_conf(self.conf)
        self.sched.load_satellites(self.pollers, self.reactionners)

        # We must update our Config dict macro with good value
        # from the config parameters
        self.sched.conf.fill_resource_macros_names_macros()
        #print "DBG: got macros", self.sched.conf.macros

        # Creating the Macroresolver Class & unique instance
        m = MacroResolver()
        m.init(self.conf)

        #self.conf.dump()
        #self.conf.quick_debug()

        # Now create the external commander
        # it's a applyer: it role is not to dispatch commands,
        # but to apply them
        e = ExternalCommandManager(self.conf, 'applyer')

        # Scheduler need to know about external command to
        # activate it if necessary
        self.sched.load_external_command(e)

        # External command need the sched because he can raise checks
        e.load_scheduler(self.sched)

        # We clear our schedulers managed (it's us :) )
        # and set ourself in it
        self.schedulers = {self.conf.instance_id: self.sched}
 def get_mr(self):
     """Return a MacroResolver initialised against our configuration."""
     resolver = MacroResolver()
     resolver.init(self.conf)
     return resolver
    def hook_late_configuration(self, arb):
        """ Read config and fill memcached """
        # The resolver's state depends only on arb.conf, so build and
        # init it once instead of once per service (loop-invariant).
        mac_resol = MacroResolver()
        mac_resol.init(arb.conf)
        for serv in arb.conf.services:
            if serv.check_command.command.module_type == 'snmp_booster':
                chk = serv.check_command.command
                data = serv.get_data_for_checks()
                command_line = mac_resol.resolve_command(
                    serv.check_command, data)

                # Clean command
                clean_command = shlex.split(
                    command_line.encode('utf8', 'ignore'))
                # If the command doesn't seem good
                if len(clean_command) <= 1:
                    logger.error("[SnmpBooster] Bad command "
                                 "detected: %s" % chk.command)
                    continue

                # we do not want the first member, check_snmp thing
                args = parse_args(clean_command[1:])
                (host, community, version, triggergroup, dstemplate, instance,
                 instance_name) = args

                # Get key from memcached
                obj_key = str(host)
                # looking for old datas
                obj = self.memcached.get(obj_key)

                # Don't force check on first launch
                try:
                    if obj is not None:
                        # Host found
                        new_obj = SNMPHost(host, community, version)
                        if not obj == new_obj:
                            # Update host
                            obj.community = new_obj.community
                            obj.version = new_obj.version
                        new_serv = SNMPService(serv, obj, triggergroup,
                                               dstemplate, instance,
                                               instance_name,
                                               serv.service_description)
                        new_serv.set_oids(self.datasource)
                        new_serv.set_triggers(self.datasource)
                        obj.update_service(new_serv)
                        obj.frequences[serv.check_interval].forced = False
                        # Keep the entry for one week (604800 s).
                        self.memcached.set(obj_key, obj, time=604800)
                    else:
                        # No old datas for this host
                        new_obj = SNMPHost(host, community, version)
                        new_serv = SNMPService(serv, new_obj, triggergroup,
                                               dstemplate, instance,
                                               instance_name,
                                               serv.service_description)
                        new_serv.set_oids(self.datasource)
                        new_serv.set_triggers(self.datasource)
                        new_obj.update_service(new_serv)
                        # Save new host in memcache
                        self.memcached.set(obj_key, new_obj, time=604800)
                except Exception as e:
                    # Best-effort: log and continue with the next service.
                    message = ("[SnmpBooster] Error adding : "
                               "Host %s - Service %s - Error related "
                               "to: %s" %
                               (obj_key, serv.service_description, str(e)))
                    logger.error(message)
Example #30
0
    def setup_with_file(self, path):
        """Load the Shinken configuration at *path* and boot an
        in-process, arbiter-like test environment: parse the config,
        optionally simulate the arbiter module-loading phase, run every
        compilation pass, then wire up a scheduler daemon and the
        external-command managers.
        """
        time_hacker.set_my_time()
        self.print_header()
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        # Raw file text -> parsed buffer -> typed config objects
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()

        # If we got one arbiter defined here (before default) we should be in a case where
        # the tester want to load/test a module, so we simulate an arbiter daemon
        # and the modules loading phase. As it has its own modulesmanager, should
        # not impact scheduler modules ones, especially we are asking for arbiter type :)
        if len(self.conf.arbiters) == 1:
            arbdaemon = Arbiter([''], [''], False, False, None, None)
            # only load if the module_dir is reallyexisting, so was set explicitly
            # in the test configuration
            if os.path.exists(getattr(self.conf, 'modules_dir', '')):
                arbdaemon.modules_dir = self.conf.modules_dir
                arbdaemon.load_modules_manager()

                # we request the instances without them being *started*
                # (for those that are concerned ("external" modules):
                # we will *start* these instances after we have been daemonized (if requested)
                me = None
                for arb in self.conf.arbiters:
                    me = arb
                    arbdaemon.modules_manager.set_modules(arb.modules)
                    arbdaemon.do_load_modules()
                    arbdaemon.load_modules_configuration_objects(raw_objects)

        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        #import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        #import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        #import pdb;pdb.set_trace()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.set_initial_state()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Shinken(None, False, False, False, None, None)
        self.scheddaemon = scheddaemon
        self.sched = scheddaemon.sched
        # NOTE(review): 'modules_dir' here is a bare name, not an
        # attribute — presumably a module-level global; confirm it is
        # defined wherever this snippet lives.
        scheddaemon.modules_dir = modules_dir
        scheddaemon.load_modules_manager()
        # Remember to clean the logs we just created before launching tests
        self.clear_logs()
        # Init the (unique-instance) macro resolver with our conf.
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf, in_test=True)
        # 'applyer' manager executes external commands on the scheduler.
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        # 'dispatcher' manager routes commands at the arbiter level.
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.conf.accept_passive_unknown_check_results = False

        self.sched.schedule()
Example #31
0
    def __init__(self, path, output_dir, macros, overright, runners):
        """Arbiter-like discovery bootstrap: parse the configuration at
        *path*, append command-line *macros* so they override the
        resource-file ones, run the compilation passes needed for
        discovery, and prepare the discovery rule/run tables.

        :param path: main configuration file to read.
        :param output_dir: directory where discovery output is written.
        :param macros: iterable of (name, value) macro pairs.
        :param overright: whether existing output may be overwritten.
        :param runners: discovery runners to use.
        """
        # i am arbiter-like
        self.log = logger
        self.overright = overright
        self.runners = runners
        self.output_dir = output_dir
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        # NOTE(review): the config is read twice and the first result is
        # discarded — looks redundant; confirm the first call has no
        # needed side effect before removing it.
        self.conf.read_config(self.config_files)
        buf = self.conf.read_config(self.config_files)

        # Add macros on the end of the buf so they will
        # overright the resource.cfg ones
        for (m, v) in macros:
            buf += '\n$%s$=%s\n' % (m, v)

        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        #self.conf.instance_id = 0
        #self.conf.instance_name = ''
        # Compilation passes (subset of the full arbiter pipeline).
        self.conf.linkify_templates()
        self.conf.apply_inheritance()
        self.conf.explode()
        self.conf.create_reversed_list()
        self.conf.remove_twins()
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.clean_useless()
        self.conf.pythonize()
        self.conf.linkify()
        self.conf.apply_dependancies()
        #self.conf.explode_global_conf()
        #self.conf.propagate_timezone_option()
        #self.conf.create_business_rules()
        #self.conf.create_business_rules_dependencies()
        self.conf.is_correct()

        self.discoveryrules = self.conf.discoveryrules
        self.discoveryruns = self.conf.discoveryruns

        #self.confs = self.conf.cut_into_parts()
        #self.dispatcher = Dispatcher(self.conf, self.me)

        #scheddaemon = Shinken(None, False, False, False, None)
        #self.sched = Scheduler(scheddaemon)

        #scheddaemon.sched = self.sched

        # Init the (unique-instance) macro resolver with our conf.
        m = MacroResolver()
        m.init(self.conf)
        #self.sched.load_conf(self.conf)
        #e = ExternalCommandManager(self.conf, 'applyer')
        #self.sched.external_command = e
        #e.load_scheduler(self.sched)
        #e2 = ExternalCommandManager(self.conf, 'dispatcher')
        #e2.load_arbiter(self)
        #self.external_command_dispatcher = e2
        #self.sched.schedule()

        # Hash = name, and in it (key, value)
        self.disco_data = {}
        # Hash = name, and in it rules that apply
        self.disco_matches = {}