def __init__(self, config_file, is_daemon, do_replace, debug, debug_file):
    super(Receiver, self).__init__(
        'receiver', config_file, is_daemon, do_replace, debug, debug_file)

    # Our arbiters
    self.arbiters = {}

    # Our pollers and reactionners
    self.pollers = {}
    self.reactionners = {}

    # Modules are loaded only once
    self.have_modules = False

    # Can have a queue of external_commands given by modules;
    # they will be taken by the arbiter to process
    self.external_commands = []
    # and the unprocessed ones, a buffer
    self.unprocessed_external_commands = []

    self.host_assoc = {}
    self.direct_routing = False
    self.accept_passive_unknown_check_results = False

    self.istats = IStats(self)
    self.ibroks = IBroks(self)

    # Now create the external commander. It's just here to dispatch
    # the commands to schedulers
    e = ExternalCommandManager(None, 'receiver')
    e.load_receiver(self)
    self.external_command = e
def test_unknown_check_result_command_receiver(self):
    receiverdaemon = Receiver(None, False, False, False, None)
    receiverdaemon.direct_routing = True
    receiverdaemon.accept_passive_unknown_check_results = True
    # Now create the external commander. It's just here to dispatch
    # the commands to schedulers. Pasted from setup_new_conf()
    e = ExternalCommandManager(None, 'receiver')
    e.load_receiver(receiverdaemon)
    receiverdaemon.external_command = e

    # Receiver receives an external command for an unknown service
    excmd = ExternalCommand('[%d] PROCESS_SERVICE_CHECK_RESULT;test_host_0;unknownservice;1;Bobby is not happy|rtt=9999;5;10;0;10000' % time.time())
    receiverdaemon.unprocessed_external_commands.append(excmd)
    receiverdaemon.push_external_commands_to_schedulers()
    broks = [b for b in receiverdaemon.broks.values()
             if b.type == 'unknown_service_check_result']
    self.assertEqual(len(broks), 1)

    # now turn it off...
    receiverdaemon.accept_passive_unknown_check_results = False
    excmd = ExternalCommand('[%d] PROCESS_SERVICE_CHECK_RESULT;test_host_0;unknownservice;1;Bobby is not happy|rtt=9999;5;10;0;10000' % time.time())
    receiverdaemon.unprocessed_external_commands.append(excmd)
    # Clear the broks raised by the first push, then push again:
    # no new unknown_service_check_result brok must show up this time
    receiverdaemon.broks.clear()
    receiverdaemon.push_external_commands_to_schedulers()
    broks = [b for b in receiverdaemon.broks.values()
             if b.type == 'unknown_service_check_result']
    self.assertEqual(len(broks), 0)
def __init__(self, config_file, is_daemon, do_replace, debug, debug_file):
    super(Receiver, self).__init__('receiver', config_file, is_daemon,
                                   do_replace, debug, debug_file)

    # Our arbiters
    self.arbiters = {}

    # Our pollers and reactionners
    self.pollers = {}
    self.reactionners = {}

    # Modules are loaded only once
    self.have_modules = False

    # Can have a queue of external_commands given by modules;
    # they will be taken by the arbiter to process
    self.external_commands = []
    # and the unprocessed ones, a buffer
    self.unprocessed_external_commands = []

    self.host_assoc = {}
    self.direct_routing = False
    self.accept_passive_unknown_check_results = False

    self.istats = IStats(self)
    self.ibroks = IBroks(self)

    # Now create the external commander. It's just here to dispatch
    # the commands to schedulers
    e = ExternalCommandManager(None, 'receiver')
    e.load_receiver(self)
    self.external_command = e
def test_unknown_check_result_brok(self):
    # unknown_host_check_result_brok
    excmd = '[1234567890] PROCESS_HOST_CHECK_RESULT;test_host_0;2;Bob is not happy'
    expected = {
        'time_stamp': 1234567890,
        'return_code': '2',
        'host_name': 'test_host_0',
        'output': 'Bob is not happy',
        'perf_data': None
    }
    result = cPickle.loads(
        ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)

    # unknown_host_check_result_brok with perfdata
    excmd = '[1234567890] PROCESS_HOST_CHECK_RESULT;test_host_0;2;Bob is not happy|rtt=9999'
    expected = {
        'time_stamp': 1234567890,
        'return_code': '2',
        'host_name': 'test_host_0',
        'output': 'Bob is not happy',
        'perf_data': 'rtt=9999'
    }
    result = cPickle.loads(
        ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)

    # unknown_host_check_result_brok for another host, no perfdata
    excmd = '[1234567890] PROCESS_HOST_CHECK_RESULT;host-checked;0;Everything OK'
    expected = {
        'time_stamp': 1234567890,
        'return_code': '0',
        'host_name': 'host-checked',
        'output': 'Everything OK',
        'perf_data': None
    }
    result = cPickle.loads(
        ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)

    # unknown_service_check_result_brok with perfdata
    excmd = '[1234567890] PROCESS_SERVICE_CHECK_RESULT;test_host_0;test_ok_0;1;Bobby is not happy|rtt=9999;5;10;0;10000'
    expected = {
        'host_name': 'test_host_0',
        'time_stamp': 1234567890,
        'service_description': 'test_ok_0',
        'return_code': '1',
        'output': 'Bobby is not happy',
        'perf_data': 'rtt=9999;5;10;0;10000'
    }
    result = cPickle.loads(
        ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)
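# A minimal standalone sketch (not part of the test suite above): it shows how a
# PROCESS_SERVICE_CHECK_RESULT line of the shape parsed by
# get_unknown_check_result_brok() is usually assembled. The helper name
# build_service_check_result is hypothetical; only the command syntax is taken
# from the tests above.
import time

def build_service_check_result(host, service, return_code, output, perf_data=None):
    # '[timestamp] PROCESS_SERVICE_CHECK_RESULT;host;service;return_code;output|perfdata'
    line = '[%d] PROCESS_SERVICE_CHECK_RESULT;%s;%s;%d;%s' % (
        int(time.time()), host, service, return_code, output)
    if perf_data:
        line += '|%s' % perf_data
    return line

# Example: reproduces the shape of the command used in the tests
print build_service_check_result('test_host_0', 'test_ok_0', 1,
                                 'Bobby is not happy', 'rtt=9999;5;10;0;10000')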
def setup_with_file(self, path):
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    self.conf.linkify_templates()
    self.conf.apply_inheritance()
    self.conf.explode()
    self.conf.create_reversed_list()
    self.conf.remove_twins()
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.clean_useless()
    self.conf.pythonize()
    self.conf.linkify()
    self.conf.apply_dependancies()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    self.confs = self.conf.cut_into_parts()
    self.dispatcher = Dispatcher(self.conf, self.me)

    scheddaemon = Shinken(None, False, False, False, None)
    self.sched = Scheduler(scheddaemon)

    scheddaemon.sched = self.sched

    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2
    self.sched.schedule()
def setup_with_file(self, path):
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    self.conf.old_properties_names_to_new()
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.linkify_templates()
    self.conf.apply_inheritance()
    self.conf.explode()
    print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.create_reversed_list()
    self.conf.remove_twins()
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    print "conf.services has %d elements" % len(self.conf.services)
    self.conf.create_reversed_list()
    self.conf.pythonize()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    self.confs = self.conf.cut_into_parts()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)

    scheddaemon = Shinken(None, False, False, False, None)
    self.sched = Scheduler(scheddaemon)

    scheddaemon.sched = self.sched

    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2
    self.sched.schedule()
def test_unknown_check_result_brok(self):
    # unknown_host_check_result_brok
    excmd = '[1234567890] PROCESS_HOST_CHECK_RESULT;test_host_0;2;Bob is not happy'
    expected = {'time_stamp': 1234567890, 'return_code': '2',
                'host_name': 'test_host_0', 'output': 'Bob is not happy',
                'perf_data': None}
    result = cPickle.loads(ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)

    # unknown_host_check_result_brok with perfdata
    excmd = '[1234567890] PROCESS_HOST_CHECK_RESULT;test_host_0;2;Bob is not happy|rtt=9999'
    expected = {'time_stamp': 1234567890, 'return_code': '2',
                'host_name': 'test_host_0', 'output': 'Bob is not happy',
                'perf_data': 'rtt=9999'}
    result = cPickle.loads(ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)

    # unknown_host_check_result_brok for another host, no perfdata
    excmd = '[1234567890] PROCESS_HOST_CHECK_RESULT;host-checked;0;Everything OK'
    expected = {'time_stamp': 1234567890, 'return_code': '0',
                'host_name': 'host-checked', 'output': 'Everything OK',
                'perf_data': None}
    result = cPickle.loads(ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)

    # unknown_service_check_result_brok with perfdata
    excmd = '[1234567890] PROCESS_SERVICE_CHECK_RESULT;test_host_0;test_ok_0;1;Bobby is not happy|rtt=9999;5;10;0;10000'
    expected = {'host_name': 'test_host_0', 'time_stamp': 1234567890,
                'service_description': 'test_ok_0', 'return_code': '1',
                'output': 'Bobby is not happy',
                'perf_data': 'rtt=9999;5;10;0;10000'}
    result = cPickle.loads(ExternalCommandManager.get_unknown_check_result_brok(excmd).data)
    self.assertEqual(expected, result)
def setup_new_conf(self):
    conf = self.new_conf
    self.new_conf = None
    self.cur_conf = conf
    # Got our name from the globals
    if 'receiver_name' in conf['global']:
        name = conf['global']['receiver_name']
    else:
        name = 'Unnamed receiver'
    self.name = name
    logger.load_obj(self, name)
    self.direct_routing = conf['global']['direct_routing']

    g_conf = conf['global']

    # If we've got something in the schedulers, we do not want it anymore
    for sched_id in conf['schedulers']:

        already_got = False

        # We can already have this conf id, but with another address
        if sched_id in self.schedulers:
            new_addr = conf['schedulers'][sched_id]['address']
            old_addr = self.schedulers[sched_id]['address']
            new_port = conf['schedulers'][sched_id]['port']
            old_port = self.schedulers[sched_id]['port']
            # All of them must match to be OK :)
            if new_addr == old_addr and new_port == old_port:
                already_got = True

        if already_got:
            logger.info("[%s] We already got the conf %d (%s)"
                        % (self.name, sched_id, conf['schedulers'][sched_id]['name']))
            wait_homerun = self.schedulers[sched_id]['wait_homerun']
            actions = self.schedulers[sched_id]['actions']
            external_commands = self.schedulers[sched_id]['external_commands']
            con = self.schedulers[sched_id]['con']

        s = conf['schedulers'][sched_id]
        self.schedulers[sched_id] = s

        if s['name'] in g_conf['satellitemap']:
            s.update(g_conf['satellitemap'][s['name']])
        uri = 'http://%s:%s/' % (s['address'], s['port'])

        self.schedulers[sched_id]['uri'] = uri
        if already_got:
            self.schedulers[sched_id]['wait_homerun'] = wait_homerun
            self.schedulers[sched_id]['actions'] = actions
            self.schedulers[sched_id]['external_commands'] = external_commands
            self.schedulers[sched_id]['con'] = con
        else:
            self.schedulers[sched_id]['wait_homerun'] = {}
            self.schedulers[sched_id]['actions'] = {}
            self.schedulers[sched_id]['external_commands'] = []
            self.schedulers[sched_id]['con'] = None
        self.schedulers[sched_id]['running_id'] = 0
        self.schedulers[sched_id]['active'] = s['active']

        # Do not connect if we are a passive satellite
        if self.direct_routing and not already_got:
            # And then we connect to it :)
            self.pynag_con_init(sched_id)

    logger.debug("[%s] Sending us configuration %s" % (self.name, conf))

    if not self.have_modules:
        self.modules = mods = conf['global']['modules']
        self.have_modules = True
        logger.info("We received modules %s " % mods)

    # Set the timezone the arbiter gave us
    use_timezone = conf['global']['use_timezone']
    if use_timezone != 'NOTSET':
        logger.info("Setting our timezone to %s" % use_timezone)
        os.environ['TZ'] = use_timezone
        time.tzset()

    # Now create the external commander. It's just here to dispatch
    # the commands to schedulers
    e = ExternalCommandManager(None, 'receiver')
    e.load_receiver(self)
    self.external_command = e
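# A small standalone illustration (plain dicts instead of the real pushed conf)
# of the satellitemap override done in setup_new_conf() above: the
# arbiter-provided address/port of a scheduler can be replaced per name before
# the receiver builds its URI. build_scheduler_uri and the sample values are
# made up for this sketch; it copies the dict instead of mutating it in place.
def build_scheduler_uri(sched, satellitemap):
    if sched['name'] in satellitemap:
        sched = dict(sched)  # work on a copy
        sched.update(satellitemap[sched['name']])
    return 'http://%s:%s/' % (sched['address'], sched['port'])

# Example usage with hypothetical values
s = {'name': 'scheduler-1', 'address': 'localhost', 'port': 7768}
print build_scheduler_uri(s, {'scheduler-1': {'address': '10.0.0.5'}})
# -> http://10.0.0.5:7768/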
def setup_with_file(self, path):
    time_hacker.set_my_time()
    self.print_header()
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()

    # If we got one arbiter defined here (before default) we should be in a case where
    # the tester wants to load/test a module, so we simulate an arbiter daemon
    # and the modules loading phase. As it has its own modulesmanager, it should
    # not impact the scheduler modules, especially as we are asking for arbiter type :)
    if len(self.conf.arbiters) == 1:
        arbdaemon = Arbiter([''], [''], False, False, None, None)
        # only load if the modules_dir really exists, so it was set explicitly
        # in the test configuration
        if os.path.exists(getattr(self.conf, 'modules_dir', '')):
            arbdaemon.modules_dir = self.conf.modules_dir
            arbdaemon.load_modules_manager()

            # we request the instances without them being *started*
            # (for those that are concerned ("external" modules):
            # we will *start* these instances after we have been daemonized (if requested)
            me = None
            for arb in self.conf.arbiters:
                me = arb
                arbdaemon.modules_manager.set_modules(arb.modules)
                arbdaemon.do_load_modules()
                arbdaemon.load_modules_configuration_objects(raw_objects)

    self.conf.create_objects(raw_objects)
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.load_triggers()
    #import pdb;pdb.set_trace()
    self.conf.linkify_templates()
    #import pdb;pdb.set_trace()
    self.conf.apply_inheritance()
    #import pdb;pdb.set_trace()
    self.conf.explode()
    #print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    self.conf.compute_hash()
    #print "conf.services has %d elements" % len(self.conf.services)
    self.conf.override_properties()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.set_initial_state()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    if not self.conf.conf_is_correct:
        print "The conf is not correct, I stop here"
        self.conf.dump()
        return
    self.conf.clean()

    self.confs = self.conf.cut_into_parts()
    self.conf.prepare_for_sending()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)

    scheddaemon = Shinken(None, False, False, False, None, None)
    self.scheddaemon = scheddaemon
    self.sched = scheddaemon.sched
    scheddaemon.modules_dir = modules_dir
    scheddaemon.load_modules_manager()
    # Remember to clean the logs we just created before launching tests
    self.clear_logs()
    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf, in_test=True)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2

    self.sched.conf.accept_passive_unknown_check_results = False

    self.sched.schedule()
def setup_with_file(self, path):
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    self.conf.old_properties_names_to_new()
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.load_triggers()
    self.conf.linkify_templates()
    self.conf.apply_inheritance()
    self.conf.explode()
    #print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.create_reversed_list()
    self.conf.remove_twins()
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    self.conf.compute_hash()
    #print "conf.services has %d elements" % len(self.conf.services)
    self.conf.create_reversed_list()
    self.conf.pythonize()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    if not self.conf.conf_is_correct:
        print "The conf is not correct, I stop here"
        return
    self.confs = self.conf.cut_into_parts()
    self.conf.prepare_for_sending()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)

    scheddaemon = Shinken(None, False, False, False, None)
    self.sched = Scheduler(scheddaemon)

    scheddaemon.sched = self.sched

    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf, in_test=True)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2
    self.sched.schedule()
def setup_with_file(self, path):
    time_hacker.set_my_time()
    self.print_header()
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.load_triggers()
    #import pdb;pdb.set_trace()
    self.conf.linkify_templates()
    #import pdb;pdb.set_trace()
    self.conf.apply_inheritance()
    #import pdb;pdb.set_trace()
    self.conf.explode()
    #print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    self.conf.compute_hash()
    #print "conf.services has %d elements" % len(self.conf.services)
    self.conf.override_properties()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    if not self.conf.conf_is_correct:
        print "The conf is not correct, I stop here"
        self.conf.dump()
        return
    self.conf.clean()

    self.confs = self.conf.cut_into_parts()
    self.conf.prepare_for_sending()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)

    scheddaemon = Shinken(None, False, False, False, None, None)
    self.sched = Scheduler(scheddaemon)

    scheddaemon.sched = self.sched
    scheddaemon.modules_dir = modules_dir
    scheddaemon.load_modules_manager()
    # Remember to clean the logs we just created before launching tests
    self.clear_logs()
    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf, in_test=True)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2

    self.sched.conf.accept_passive_unknown_check_results = False

    self.sched.schedule()
def setup_new_conf(self):
    pk = self.new_conf
    conf_raw = pk['conf']
    override_conf = pk['override_conf']
    modules = pk['modules']
    satellites = pk['satellites']
    instance_name = pk['instance_name']
    push_flavor = pk['push_flavor']
    skip_initial_broks = pk['skip_initial_broks']
    accept_passive_unknown_check_results = pk['accept_passive_unknown_check_results']

    # hooray, we got a name, we can set it in our stats objects
    statsmgr.register(instance_name, 'scheduler')

    t0 = time.time()
    conf = cPickle.loads(conf_raw)
    logger.debug("Conf received at %d. Unserialized in %d secs" % (t0, time.time() - t0))
    self.new_conf = None

    # Tag the conf with our data
    self.conf = conf
    self.conf.push_flavor = push_flavor
    self.conf.instance_name = instance_name
    self.conf.skip_initial_broks = skip_initial_broks
    self.conf.accept_passive_unknown_check_results = accept_passive_unknown_check_results

    self.cur_conf = conf
    self.override_conf = override_conf
    self.modules = modules
    self.satellites = satellites
    #self.pollers = self.app.pollers

    if self.conf.human_timestamp_log:
        logger.set_human_format()

    # Now We create our pollers
    for pol_id in satellites['pollers']:
        # Must look if we already have it
        already_got = pol_id in self.pollers
        p = satellites['pollers'][pol_id]
        self.pollers[pol_id] = p

        if p['name'] in override_conf['satellitemap']:
            p = dict(p)  # make a copy
            p.update(override_conf['satellitemap'][p['name']])

        proto = 'http'
        if p['use_ssl']:
            proto = 'https'
        uri = '%s://%s:%s/' % (proto, p['address'], p['port'])
        self.pollers[pol_id]['uri'] = uri
        self.pollers[pol_id]['last_connection'] = 0

    # First mix conf and override_conf to have our definitive conf
    for prop in self.override_conf:
        #print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
        val = self.override_conf[prop]
        setattr(self.conf, prop, val)

    if self.conf.use_timezone != '':
        logger.debug("Setting our timezone to %s" % str(self.conf.use_timezone))
        os.environ['TZ'] = self.conf.use_timezone
        time.tzset()

    if len(self.modules) != 0:
        logger.debug("I've got %s modules" % str(self.modules))

    # TODO: if the scheduler had previous modules instantiated, it must clean them!
    self.modules_manager.set_modules(self.modules)
    self.do_load_modules()

    # give it an interface
    # But first remove the previous interface if it exists
    if self.ichecks is not None:
        logger.debug("Deconnecting previous Check Interface")
        self.http_daemon.unregister(self.ichecks)
    # Now create and connect it
    self.ichecks = IChecks(self.sched)
    self.http_daemon.register(self.ichecks)
    logger.debug("The Scheduler Interface uri is: %s" % self.uri)

    # Same for Broks
    if self.ibroks is not None:
        logger.debug("Deconnecting previous Broks Interface")
        self.http_daemon.unregister(self.ibroks)
    # Create and connect it
    self.ibroks = IBroks(self.sched)
    self.http_daemon.register(self.ibroks)

    logger.info("Loading configuration.")
    self.conf.explode_global_conf()

    # we give the sched its conf
    self.sched.reset()
    self.sched.load_conf(self.conf)
    self.sched.load_satellites(self.pollers, self.reactionners)

    # We must update our Config dict macro with good values
    # from the config parameters
    self.sched.conf.fill_resource_macros_names_macros()
    #print "DBG: got macros", self.sched.conf.macros

    # Creating the Macroresolver Class & unique instance
    m = MacroResolver()
    m.init(self.conf)

    #self.conf.dump()
    #self.conf.quick_debug()

    # Now create the external commander
    # it's an applyer: its role is not to dispatch commands,
    # but to apply them
    e = ExternalCommandManager(self.conf, 'applyer')

    # The scheduler needs to know about the external command manager
    # to activate it if necessary
    self.sched.load_external_command(e)

    # The external command manager needs the sched because it can raise checks
    e.load_scheduler(self.sched)

    # We clear our managed schedulers (it's us :) )
    # and set ourself in it
    self.schedulers = {self.conf.instance_id: self.sched}
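# A minimal sketch of the override_conf merge used in setup_new_conf() above:
# every key of override_conf is simply written onto the conf object with
# setattr(), so arbiter-side overrides win over the pushed configuration.
# DummyConf and the sample values below are made up for illustration only.
class DummyConf(object):
    use_timezone = ''
    human_timestamp_log = False

conf = DummyConf()
override_conf = {'use_timezone': 'Europe/Paris', 'human_timestamp_log': True}
for prop in override_conf:
    setattr(conf, prop, override_conf[prop])

print conf.use_timezone, conf.human_timestamp_log  # Europe/Paris True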
def setup_with_file(self, path):
    time_hacker.set_my_time()
    self.print_header()
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()

    # If we got one arbiter defined here (before default) we should be in a case where
    # the tester wants to load/test a module, so we simulate an arbiter daemon
    # and the modules loading phase. As it has its own modulesmanager, it should
    # not impact the scheduler modules, especially as we are asking for arbiter type :)
    if len(self.conf.arbiters) == 1:
        arbdaemon = Arbiter([''], [''], False, False, None, None)
        # only load if the modules_dir really exists, so it was set explicitly
        # in the test configuration
        if os.path.exists(getattr(self.conf, 'modules_dir', '')):
            arbdaemon.modules_dir = self.conf.modules_dir
            arbdaemon.load_modules_manager()

            # we request the instances without them being *started*
            # (for those that are concerned ("external" modules):
            # we will *start* these instances after we have been daemonized (if requested)
            me = None
            for arb in self.conf.arbiters:
                me = arb
                arbdaemon.modules_manager.set_modules(arb.modules)
                arbdaemon.do_load_modules()
                arbdaemon.load_modules_configuration_objects(raw_objects)

    self.conf.create_objects(raw_objects)
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.load_triggers()
    #import pdb;pdb.set_trace()
    self.conf.linkify_templates()
    #import pdb;pdb.set_trace()
    self.conf.apply_inheritance()
    #import pdb;pdb.set_trace()
    self.conf.explode()
    #print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    self.conf.override_properties()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.set_initial_state()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    if not self.conf.conf_is_correct:
        print "The conf is not correct, I stop here"
        self.conf.dump()
        return
    self.conf.clean()

    self.confs = self.conf.cut_into_parts()
    self.conf.prepare_for_sending()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)

    scheddaemon = Shinken(None, False, False, False, None, None)
    self.scheddaemon = scheddaemon
    self.sched = scheddaemon.sched
    scheddaemon.modules_dir = modules_dir
    scheddaemon.load_modules_manager()
    # Remember to clean the logs we just created before launching tests
    self.clear_logs()
    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf, in_test=True)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2

    self.sched.conf.accept_passive_unknown_check_results = False

    self.sched.schedule()
def setup_new_conf(self):
    #self.use_ssl = self.app.use_ssl
    (conf, override_conf, modules, satellites) = self.new_conf
    self.new_conf = None

    # In fact it makes the scheduler just DIE as a bad guy.
    # Must manage it better or not manage it at all!
    #if self.cur_conf and self.cur_conf.magic_hash == conf.magic_hash:
    #    print("I received a conf with same hash than me, I skip it.")
    #    return

    self.conf = conf
    self.cur_conf = conf
    self.override_conf = override_conf
    self.modules = modules
    self.satellites = satellites
    #self.pollers = self.app.pollers

    # Now We create our pollers
    for pol_id in satellites['pollers']:
        # Must look if we already have it
        already_got = pol_id in self.pollers
        p = satellites['pollers'][pol_id]
        self.pollers[pol_id] = p
        uri = pyro.create_uri(p['address'], p['port'], 'Schedulers', self.use_ssl)
        self.pollers[pol_id]['uri'] = uri
        self.pollers[pol_id]['last_connexion'] = 0
        print "Got a poller", p

    # First mix conf and override_conf to have our definitive conf
    for prop in self.override_conf:
        print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
        val = self.override_conf[prop]
        setattr(self.conf, prop, val)

    if self.conf.use_timezone != 'NOTSET':
        print "Setting our timezone to", self.conf.use_timezone
        os.environ['TZ'] = self.conf.use_timezone
        time.tzset()

    print "I've got modules", self.modules
    # TODO: if the scheduler had previous modules instantiated, it must clean them!
    self.modules_manager.set_modules(self.modules)
    self.do_load_modules()
    # And start external ones too
    self.modules_manager.start_external_instances()

    # give it an interface
    # But first remove the previous interface if it exists
    if self.ichecks is not None:
        print "Deconnecting previous Check Interface from pyro_daemon"
        self.pyro_daemon.unregister(self.ichecks)
    # Now create and connect it
    self.ichecks = IChecks(self.sched)
    self.uri = self.pyro_daemon.register(self.ichecks, "Checks")
    print "The Checks Interface uri is:", self.uri

    # Same for Broks
    if self.ibroks is not None:
        print "Deconnecting previous Broks Interface from pyro_daemon"
        self.pyro_daemon.unregister(self.ibroks)
    # Create and connect it
    self.ibroks = IBroks(self.sched)
    self.uri2 = self.pyro_daemon.register(self.ibroks, "Broks")
    print "The Broks Interface uri is:", self.uri2

    print("Loading configuration..")
    self.conf.explode_global_conf()

    # we give the sched its conf
    self.sched.reset()
    self.sched.load_conf(self.conf)
    self.sched.load_satellites(self.pollers, self.reactionners)

    # We must update our Config dict macro with good values
    # from the config parameters
    self.sched.conf.fill_resource_macros_names_macros()
    #print "DBG: got macros", self.sched.conf.macros

    # Creating the Macroresolver Class & unique instance
    m = MacroResolver()
    m.init(self.conf)

    #self.conf.dump()
    #self.conf.quick_debug()

    # Now create the external commander
    # it's an applyer: its role is not to dispatch commands,
    # but to apply them
    e = ExternalCommandManager(self.conf, 'applyer')

    # The scheduler needs to know about the external command manager
    # to activate it if necessary
    self.sched.load_external_command(e)

    # The external command manager needs the sched because it can raise checks
    e.load_scheduler(self.sched)
def setup_new_conf(self):
    pk = self.new_conf
    conf_raw = pk["conf"]
    override_conf = pk["override_conf"]
    modules = pk["modules"]
    satellites = pk["satellites"]
    instance_name = pk["instance_name"]
    push_flavor = pk["push_flavor"]
    skip_initial_broks = pk["skip_initial_broks"]
    accept_passive_unknown_check_results = pk["accept_passive_unknown_check_results"]
    api_key = pk["api_key"]
    secret = pk["secret"]
    http_proxy = pk["http_proxy"]
    statsd_host = pk["statsd_host"]
    statsd_port = pk["statsd_port"]
    statsd_prefix = pk["statsd_prefix"]
    statsd_enabled = pk["statsd_enabled"]
    statsd_interval = pk["statsd_interval"]
    statsd_types = pk["statsd_types"]
    statsd_pattern = pk["statsd_pattern"]

    # hooray, we got a name, we can set it in our stats objects
    statsmgr.register(
        self.sched, instance_name, "scheduler",
        api_key=api_key,
        secret=secret,
        http_proxy=http_proxy,
        statsd_host=statsd_host,
        statsd_port=statsd_port,
        statsd_prefix=statsd_prefix,
        statsd_enabled=statsd_enabled,
        statsd_interval=statsd_interval,
        statsd_types=statsd_types,
        statsd_pattern=statsd_pattern,
    )

    t0 = time.time()
    conf = cPickle.loads(conf_raw)
    logger.debug("Conf received at %d. Unserialized in %d secs", t0, time.time() - t0)
    self.new_conf = None

    # Tag the conf with our data
    self.conf = conf
    self.conf.push_flavor = push_flavor
    self.conf.instance_name = instance_name
    self.conf.skip_initial_broks = skip_initial_broks
    self.conf.accept_passive_unknown_check_results = accept_passive_unknown_check_results

    self.cur_conf = conf
    self.override_conf = override_conf
    self.modules = modules
    self.satellites = satellites
    # self.pollers = self.app.pollers

    if self.conf.human_timestamp_log:
        logger.set_human_format()

    # Now We create our pollers
    for pol_id in satellites["pollers"]:
        # Must look if we already have it
        already_got = pol_id in self.pollers
        p = satellites["pollers"][pol_id]
        self.pollers[pol_id] = p

        if p["name"] in override_conf["satellitemap"]:
            p = dict(p)  # make a copy
            p.update(override_conf["satellitemap"][p["name"]])

        proto = "http"
        if p["use_ssl"]:
            proto = "https"
        uri = "%s://%s:%s/" % (proto, p["address"], p["port"])
        self.pollers[pol_id]["uri"] = uri
        self.pollers[pol_id]["last_connection"] = 0

    # Now We create our reactionners
    for reac_id in satellites["reactionners"]:
        # Must look if we already have it
        already_got = reac_id in self.reactionners
        reac = satellites["reactionners"][reac_id]
        self.reactionners[reac_id] = reac

        if reac["name"] in override_conf["satellitemap"]:
            reac = dict(reac)  # make a copy
            reac.update(override_conf["satellitemap"][reac["name"]])

        proto = "http"
        if reac["use_ssl"]:
            proto = "https"
        uri = "%s://%s:%s/" % (proto, reac["address"], reac["port"])
        self.reactionners[reac_id]["uri"] = uri
        self.reactionners[reac_id]["last_connection"] = 0

    # First mix conf and override_conf to have our definitive conf
    for prop in self.override_conf:
        # print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
        val = self.override_conf[prop]
        setattr(self.conf, prop, val)

    if self.conf.use_timezone != "":
        logger.debug("Setting our timezone to %s", str(self.conf.use_timezone))
        os.environ["TZ"] = self.conf.use_timezone
        time.tzset()

    if len(self.modules) != 0:
        logger.debug("I've got %s modules", str(self.modules))

    # TODO: if the scheduler had previous modules instantiated, it must clean them!
    self.modules_manager.set_modules(self.modules)
    self.do_load_modules()

    # give it an interface
    # But first remove the previous interface if it exists
    if self.ichecks is not None:
        logger.debug("Deconnecting previous Check Interface")
        self.http_daemon.unregister(self.ichecks)
    # Now create and connect it
    self.ichecks = IChecks(self.sched)
    self.http_daemon.register(self.ichecks)
    logger.debug("The Scheduler Interface uri is: %s", self.uri)

    # Same for Broks
    if self.ibroks is not None:
        logger.debug("Deconnecting previous Broks Interface")
        self.http_daemon.unregister(self.ibroks)
    # Create and connect it
    self.ibroks = IBroks(self.sched)
    self.http_daemon.register(self.ibroks)

    logger.info("Loading configuration.")
    self.conf.explode_global_conf()

    # we give the sched its conf
    self.sched.reset()
    self.sched.load_conf(self.conf)
    self.sched.load_satellites(self.pollers, self.reactionners)

    # We must update our Config dict macro with good values
    # from the config parameters
    self.sched.conf.fill_resource_macros_names_macros()
    # print "DBG: got macros", self.sched.conf.macros

    # Creating the Macroresolver Class & unique instance
    m = MacroResolver()
    m.init(self.conf)

    # self.conf.dump()
    # self.conf.quick_debug()

    # Now create the external commander
    # it's an applyer: its role is not to dispatch commands,
    # but to apply them
    e = ExternalCommandManager(self.conf, "applyer")

    # The scheduler needs to know about the external command manager
    # to activate it if necessary
    self.sched.load_external_command(e)

    # The external command manager needs the sched because it can raise checks
    e.load_scheduler(self.sched)

    # We clear our managed schedulers (it's us :) )
    # and set ourself in it
    self.schedulers = {self.conf.instance_id: self.sched}
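# A hedged helper sketch mirroring the poller/reactionner URI construction in
# setup_new_conf() above: the satellite's use_ssl flag only switches the
# scheme, while the address and port come straight from the pushed satellite
# dict. satellite_uri is a hypothetical name, not a Shinken API.
def satellite_uri(sat):
    proto = "https" if sat["use_ssl"] else "http"
    return "%s://%s:%s/" % (proto, sat["address"], sat["port"])

print satellite_uri({"use_ssl": False, "address": "127.0.0.1", "port": 7771})
# -> http://127.0.0.1:7771/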
def setup_new_conf(self):
    conf = self.new_conf
    self.new_conf = None
    self.cur_conf = conf
    # Got our name from the globals
    if 'receiver_name' in conf['global']:
        name = conf['global']['receiver_name']
    else:
        name = 'Unnamed receiver'
    self.name = name
    self.log.load_obj(self, name)
    self.direct_routing = conf['global']['direct_routing']

    g_conf = conf['global']

    # If we've got something in the schedulers, we do not want it anymore
    for sched_id in conf['schedulers']:

        already_got = False

        # We can already have this conf id, but with another address
        if sched_id in self.schedulers:
            new_addr = conf['schedulers'][sched_id]['address']
            old_addr = self.schedulers[sched_id]['address']
            new_port = conf['schedulers'][sched_id]['port']
            old_port = self.schedulers[sched_id]['port']
            # All of them must match to be OK :)
            if new_addr == old_addr and new_port == old_port:
                already_got = True

        if already_got:
            logger.info("[%s] We already got the conf %d (%s)"
                        % (self.name, sched_id, conf['schedulers'][sched_id]['name']))
            wait_homerun = self.schedulers[sched_id]['wait_homerun']
            actions = self.schedulers[sched_id]['actions']
            external_commands = self.schedulers[sched_id]['external_commands']
            con = self.schedulers[sched_id]['con']

        s = conf['schedulers'][sched_id]
        self.schedulers[sched_id] = s

        if s['name'] in g_conf['satellitemap']:
            s.update(g_conf['satellitemap'][s['name']])
        uri = pyro.create_uri(s['address'], s['port'], 'ForArbiter', self.use_ssl)

        self.schedulers[sched_id]['uri'] = uri
        if already_got:
            self.schedulers[sched_id]['wait_homerun'] = wait_homerun
            self.schedulers[sched_id]['actions'] = actions
            self.schedulers[sched_id]['external_commands'] = external_commands
            self.schedulers[sched_id]['con'] = con
        else:
            self.schedulers[sched_id]['wait_homerun'] = {}
            self.schedulers[sched_id]['actions'] = {}
            self.schedulers[sched_id]['external_commands'] = []
            self.schedulers[sched_id]['con'] = None
        self.schedulers[sched_id]['running_id'] = 0
        self.schedulers[sched_id]['active'] = s['active']

        # Do not connect if we are a passive satellite
        if self.direct_routing and not already_got:
            # And then we connect to it :)
            self.pynag_con_init(sched_id)

    logger.debug("[%s] Sending us configuration %s" % (self.name, conf))

    if not self.have_modules:
        self.modules = mods = conf['global']['modules']
        self.have_modules = True
        logger.info("We received modules %s " % mods)

    # Set the timezone the arbiter gave us
    use_timezone = conf['global']['use_timezone']
    if use_timezone != 'NOTSET':
        logger.info("Setting our timezone to %s" % use_timezone)
        os.environ['TZ'] = use_timezone
        time.tzset()

    # Now create the external commander. It's just here to dispatch
    # the commands to schedulers
    e = ExternalCommandManager(None, 'receiver')
    e.load_receiver(self)
    self.external_command = e
def setup_new_conf(self):
    conf = self.new_conf
    self.new_conf = None
    self.cur_conf = conf
    # Got our name from the globals
    if "receiver_name" in conf["global"]:
        name = conf["global"]["receiver_name"]
    else:
        name = "Unnamed receiver"
    self.name = name
    logger.load_obj(self, name)
    self.direct_routing = conf["global"]["direct_routing"]

    g_conf = conf["global"]

    # If we've got something in the schedulers, we do not want it anymore
    for sched_id in conf["schedulers"]:

        already_got = False

        # We can already have this conf id, but with another address
        if sched_id in self.schedulers:
            new_addr = conf["schedulers"][sched_id]["address"]
            old_addr = self.schedulers[sched_id]["address"]
            new_port = conf["schedulers"][sched_id]["port"]
            old_port = self.schedulers[sched_id]["port"]
            # All of them must match to be OK :)
            if new_addr == old_addr and new_port == old_port:
                already_got = True

        if already_got:
            logger.info(
                "[%s] We already got the conf %d (%s)"
                % (self.name, sched_id, conf["schedulers"][sched_id]["name"])
            )
            wait_homerun = self.schedulers[sched_id]["wait_homerun"]
            actions = self.schedulers[sched_id]["actions"]
            external_commands = self.schedulers[sched_id]["external_commands"]
            con = self.schedulers[sched_id]["con"]

        s = conf["schedulers"][sched_id]
        self.schedulers[sched_id] = s

        if s["name"] in g_conf["satellitemap"]:
            s.update(g_conf["satellitemap"][s["name"]])
        uri = "http://%s:%s/" % (s["address"], s["port"])

        self.schedulers[sched_id]["uri"] = uri
        if already_got:
            self.schedulers[sched_id]["wait_homerun"] = wait_homerun
            self.schedulers[sched_id]["actions"] = actions
            self.schedulers[sched_id]["external_commands"] = external_commands
            self.schedulers[sched_id]["con"] = con
        else:
            self.schedulers[sched_id]["wait_homerun"] = {}
            self.schedulers[sched_id]["actions"] = {}
            self.schedulers[sched_id]["external_commands"] = []
            self.schedulers[sched_id]["con"] = None
        self.schedulers[sched_id]["running_id"] = 0
        self.schedulers[sched_id]["active"] = s["active"]

        # Do not connect if we are a passive satellite
        if self.direct_routing and not already_got:
            # And then we connect to it :)
            self.pynag_con_init(sched_id)

    logger.debug("[%s] Sending us configuration %s" % (self.name, conf))

    if not self.have_modules:
        self.modules = mods = conf["global"]["modules"]
        self.have_modules = True
        logger.info("We received modules %s " % mods)

    # Set the timezone the arbiter gave us
    use_timezone = conf["global"]["use_timezone"]
    if use_timezone != "NOTSET":
        logger.info("Setting our timezone to %s" % use_timezone)
        os.environ["TZ"] = use_timezone
        time.tzset()

    # Now create the external commander. It's just here to dispatch
    # the commands to schedulers
    e = ExternalCommandManager(None, "receiver")
    e.load_receiver(self)
    self.external_command = e