Code example #1
File: receiverdaemon.py Project: jgmel/alignak
    def __init__(self, config_file, is_daemon, do_replace, debug, debug_file,
                 port=None, local_log=None, daemon_name=None):
        self.daemon_name = 'receiver'
        if daemon_name:
            self.daemon_name = daemon_name

        super(Receiver, self).__init__(self.daemon_name, config_file, is_daemon, do_replace,
                                       debug, debug_file, port, local_log)

        # Our arbiters
        self.arbiters = {}

        # Our pollers and reactionners
        self.pollers = {}
        self.reactionners = {}

        # Modules are loaded only once
        self.have_modules = False

        # Now an external commands manager and a list for the external_commands
        self.external_commands_manager = None
        self.external_commands = []
        # and the unprocessed ones, a buffer
        self.unprocessed_external_commands = []

        self.host_assoc = {}
        self.accept_passive_unknown_check_results = False

        self.http_interface = ReceiverInterface(self)

        # Now create the external commands manager
        # We are a receiver: our role is to get and dispatch commands to the schedulers
        self.external_commands_manager = ExternalCommandManager(None, 'receiver', self)
Code example #2
    def __init__(self, config_file, is_daemon, do_replace, debug, debug_file):

        super(Receiver, self).__init__('receiver', config_file, is_daemon,
                                       do_replace, debug, debug_file)

        # Our arbiters
        self.arbiters = {}

        # Our pollers and reactionners
        self.pollers = {}
        self.reactionners = {}

        # Modules are loaded only once
        self.have_modules = False

        # Can have a queue of external_commands given by modules;
        # it will be taken by the arbiter to process
        self.external_commands = []
        # and the unprocessed ones, a buffer
        self.unprocessed_external_commands = []

        self.host_assoc = {}
        self.direct_routing = False
        self.accept_passive_unknown_check_results = False

        self.http_interface = ReceiverInterface(self)

        # Now create the external command manager. It's just here to dispatch
        # the commands to the schedulers
        ecm = ExternalCommandManager(None, 'receiver')
        ecm.load_receiver(self)
        self.external_command = ecm
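
The two snippets above show the Receiver constructor from two Alignak versions: the newer one (code example #1) builds the external commands manager in a single call, while the older one (code example #2) builds it first and then attaches the receiver. Below is a minimal side-by-side sketch of the two styles; the import path and the my_receiver placeholder are assumptions for illustration, not code taken from the project.

# Sketch only: the two construction styles shown in examples #1 and #2.
# `my_receiver` is a hypothetical Receiver instance; the import path is assumed.
from alignak.external_command import ExternalCommandManager

# Newer style (example #1): the receiver is passed directly to the constructor.
ecm = ExternalCommandManager(None, 'receiver', my_receiver)

# Older style (example #2): construct first, then attach the receiver.
ecm = ExternalCommandManager(None, 'receiver')
ecm.load_receiver(my_receiver)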
Code example #3
    def setup_new_conf(self):
        # pylint: disable=too-many-statements, too-many-branches, too-many-locals
        """Setup new conf received for scheduler

        :return: None
        """
        # Execute the base class treatment...
        super(Alignak, self).setup_new_conf()

        # ...then our own specific treatment!
        with self.conf_lock:
            # self_conf is our own configuration from the alignak environment
            # self_conf = self.cur_conf['self_conf']
            logger.debug("Got config: %s", self.cur_conf)
            if 'conf_part' not in self.cur_conf:
                self.cur_conf['conf_part'] = None
            conf_part = self.cur_conf['conf_part']

            # Ok now we can save the retention data
            if self.sched.pushed_conf is not None:
                self.sched.update_retention()

            # Get the monitored objects configuration
            t00 = time.time()
            received_conf_part = None
            try:
                received_conf_part = unserialize(conf_part)
                assert received_conf_part is not None
            except AssertionError as exp:
                # This to indicate that no configuration is managed by this scheduler...
                logger.warning(
                    "No managed configuration received from arbiter")
            except AlignakClassLookupException as exp:  # pragma: no cover
                # This to indicate that the new configuration is not managed...
                self.new_conf = {
                    "_status":
                    "Cannot un-serialize configuration received from arbiter",
                    "_error": str(exp)
                }
                logger.error(self.new_conf)
                logger.error("Back trace of the error:\n%s",
                             traceback.format_exc())
                return
            except Exception as exp:  # pylint: disable=broad-except
                # This to indicate that the new configuration is not managed...
                self.new_conf = {
                    "_status":
                    "Cannot un-serialize configuration received from arbiter",
                    "_error": str(exp)
                }
                logger.error(self.new_conf)
                self.exit_on_exception(exp, str(self.new_conf))

            # if not received_conf_part:
            #     return

            logger.info(
                "Monitored configuration %s received at %d. Un-serialized in %d secs",
                received_conf_part, t00,
                time.time() - t00)
            logger.info("Scheduler received configuration : %s",
                        received_conf_part)

            # Now we create our pollers, reactionners and brokers
            for link_type in ['pollers', 'reactionners', 'brokers']:
                if link_type not in self.cur_conf['satellites']:
                    logger.error("Missing %s in the configuration!", link_type)
                    continue

                my_satellites = getattr(self, link_type, {})
                received_satellites = self.cur_conf['satellites'][link_type]
                for link_uuid in received_satellites:
                    rs_conf = received_satellites[link_uuid]
                    logger.debug("- received %s - %s: %s",
                                 rs_conf['instance_id'], rs_conf['type'],
                                 rs_conf['name'])

                    # Must look if we already had a configuration and save our broks
                    already_got = rs_conf['instance_id'] in my_satellites
                    broks = []
                    actions = {}
                    wait_homerun = {}
                    external_commands = {}
                    running_id = 0
                    if already_got:
                        logger.warning("I already got: %s",
                                       rs_conf['instance_id'])
                        # Save some information
                        running_id = my_satellites[link_uuid].running_id
                        (broks, actions,
                         wait_homerun, external_commands) = \
                            my_satellites[link_uuid].get_and_clear_context()
                        # Delete the former link
                        del my_satellites[link_uuid]

                    # My new satellite link...
                    new_link = SatelliteLink.get_a_satellite_link(
                        link_type[:-1], rs_conf)
                    my_satellites[new_link.uuid] = new_link
                    logger.info("I got a new %s satellite: %s", link_type[:-1],
                                new_link)

                    new_link.running_id = running_id
                    new_link.external_commands = external_commands
                    new_link.broks = broks
                    new_link.wait_homerun = wait_homerun
                    new_link.actions = actions

                    # Replacing the satellite address and port by those defined in satellite_map
                    if new_link.name in self.cur_conf['override_conf'].get(
                            'satellite_map', {}):
                        override_conf = self.cur_conf['override_conf']
                        overriding = override_conf.get('satellite_map')[
                            new_link.name]
                        logger.warning(
                            "Do not override the configuration for: %s, with: %s. "
                            "Please check whether this is necessary!",
                            new_link.name, overriding)

            # First mix conf and override_conf to have our definitive conf
            for prop in self.cur_conf.get('override_conf', {}):
                logger.debug("Overriden: %s / %s ", prop,
                             getattr(received_conf_part, prop, None))
                logger.debug("Overriding: %s / %s ", prop,
                             self.cur_conf['override_conf'])
                setattr(received_conf_part, prop,
                        self.cur_conf['override_conf'].get(prop, None))

            # Scheduler modules
            if not self.have_modules:
                try:
                    logger.debug("Modules configuration: %s",
                                 self.cur_conf['modules'])
                    self.modules = unserialize(self.cur_conf['modules'],
                                               no_load=True)
                except AlignakClassLookupException as exp:  # pragma: no cover, simple protection
                    logger.error(
                        'Cannot un-serialize modules configuration '
                        'received from arbiter: %s', exp)
                if self.modules:
                    logger.debug("I received some modules configuration: %s",
                                 self.modules)
                    self.have_modules = True

                    self.do_load_modules(self.modules)
                    # and start external modules too
                    self.modules_manager.start_external_instances()
                else:
                    logger.info("I do not have modules")

            if received_conf_part:
                logger.info("Loading configuration...")

                # Propagate the global parameters to the configuration items
                received_conf_part.explode_global_conf()

                # We give the configuration to our scheduler
                self.sched.reset()
                self.sched.load_conf(self.cur_conf['instance_id'],
                                     self.cur_conf['instance_name'],
                                     received_conf_part)

                # Once loaded, the scheduler has an inner pushed_conf object
                logger.info("Loaded: %s", self.sched.pushed_conf)

                # Update the scheduler ticks according to the daemon configuration
                self.sched.update_recurrent_works_tick(self)

                # We must update our pushed configuration macros with correct values
                # from the configuration parameters
                # self.sched.pushed_conf.fill_resource_macros_names_macros()

                # Creating the Macroresolver Class & unique instance
                m_solver = MacroResolver()
                m_solver.init(received_conf_part)

                # Now create the external commands manager
                # We are an applyer: our role is not to dispatch commands, but to apply them
                ecm = ExternalCommandManager(
                    received_conf_part, 'applyer', self.sched,
                    received_conf_part.accept_passive_unknown_check_results,
                    received_conf_part.log_external_commands)

                # Scheduler needs to know about this external command manager to use it if necessary
                self.sched.external_commands_manager = ecm

                # Ok now we can load the retention data
                self.sched.retention_load()

                # Log hosts/services initial states
                self.sched.log_initial_states()

            # Create the 'new_conf' brok
            brok = Brok({'type': 'new_conf', 'data': {}})
            self.sched.add_brok(brok)

            # Initialize connection with all our satellites
            logger.info("Initializing connection with my satellites:")
            my_satellites = self.get_links_of_type(s_type='')
            for satellite in list(my_satellites.values()):
                logger.info("- : %s/%s", satellite.type, satellite.name)
                if not self.daemon_connection_init(satellite):
                    logger.error("Satellite connection failed: %s", satellite)

            if received_conf_part:
                # Enable the scheduling process
                logger.info("Loaded: %s", self.sched.pushed_conf)
                self.sched.start_scheduling()

        # Now I have a configuration!
        self.have_conf = True
Code example #4
File: alignak_test.py Project: jgmel/alignak
    def setup_with_file(self, configuration_file):
        """
        Load alignak with defined configuration file

        If the configuration loading fails, a SystemExit exception is raised to the caller.

        The conf_is_correct property indicates whether the configuration loading succeeded or failed.

        The configuration_errors property contains a list of the error messages that are normally
        logged as ERROR by the arbiter.

        @verified

        :param configuration_file: path + file name of the main configuration file
        :type configuration_file: str
        :return: None
        """
        self.broks = {}
        self.schedulers = {}
        self.brokers = {}
        self.pollers = {}
        self.receivers = {}
        self.reactionners = {}
        self.arbiter = None
        self.conf_is_correct = False
        self.configuration_warnings = []
        self.configuration_errors = []

        # Add a collector for test purposes.
        self.setup_logger()

        # Initialize the Arbiter with no daemon configuration file
        self.arbiter = Arbiter(None, [configuration_file], False, False, False, False,
                              '/tmp/arbiter.log', 'arbiter-master')

        try:
            # The following is copy-pasted from setup_alignak_logger
            # The only difference is that we keep the logger at INFO level to gather messages
            # This is needed to assert later on the logs we received.
            self.logger.setLevel(logging.INFO)
            # Force the debug level if the daemon is said to start with such level
            if self.arbiter.debug:
                self.logger.setLevel(logging.DEBUG)

            # Log lines will be broks
            for line in self.arbiter.get_header():
                self.logger.info(line)

            self.arbiter.load_monitoring_config_file()

            # If this assertion does not match, then there is a bug in the arbiter :)
            self.assertTrue(self.arbiter.conf.conf_is_correct)
            self.conf_is_correct = True
            self.configuration_warnings = self.arbiter.conf.configuration_warnings
            self.configuration_errors = self.arbiter.conf.configuration_errors
        except SystemExit:
            self.configuration_warnings = self.arbiter.conf.configuration_warnings
            print("Configuration warnings:")
            for msg in self.configuration_warnings:
                print(" - %s" % msg)
            self.configuration_errors = self.arbiter.conf.configuration_errors
            print("Configuration errors:")
            for msg in self.configuration_errors:
                print(" - %s" % msg)
            raise

        for arb in self.arbiter.conf.arbiters:
            if arb.get_name() == self.arbiter.arbiter_name:
                self.arbiter.myself = arb
        self.arbiter.dispatcher = Dispatcher(self.arbiter.conf, self.arbiter.myself)
        self.arbiter.dispatcher.prepare_dispatch()

        # Build schedulers dictionary with the schedulers involved in the configuration
        for scheduler in self.arbiter.dispatcher.schedulers:
            sched = Alignak([], False, False, True, '/tmp/scheduler.log')
            sched.load_modules_manager(scheduler.name)
            sched.new_conf = scheduler.conf_package
            if sched.new_conf:
                sched.setup_new_conf()
            self.schedulers[scheduler.scheduler_name] = sched

        # Build pollers dictionary with the pollers involved in the configuration
        for poller in self.arbiter.dispatcher.pollers:
            self.pollers[poller.poller_name] = poller

        # Build receivers dictionary with the receivers involved in the configuration
        for receiver in self.arbiter.dispatcher.receivers:
            self.receivers[receiver.receiver_name] = receiver

        # Build reactionners dictionary with the reactionners involved in the configuration
        for reactionner in self.arbiter.dispatcher.reactionners:
            self.reactionners[reactionner.reactionner_name] = reactionner

        # Build brokers dictionary with the brokers involved in the configuration
        for broker in self.arbiter.dispatcher.brokers:
            self.brokers[broker.broker_name] = broker

        # Initialize the Receiver with no daemon configuration file
        self.receiver = Receiver(None, False, False, False, False)

        # Initialize the Broker with no daemon configuration file
        self.broker = Broker(None, False, False, False, False)

        # External commands manager default mode; default is the applyer (scheduler) mode
        self.ecm_mode = 'applyer'

        # Now we create an external commands manager in dispatcher mode
        self.arbiter.external_commands_manager = ExternalCommandManager(self.arbiter.conf,
                                                                        'dispatcher',
                                                                        self.arbiter,
                                                                        accept_unknown=True)

        # Now we get the external commands manager of our scheduler
        self.eca = None
        if 'scheduler-master' in self.schedulers:
            self._sched = self.schedulers['scheduler-master'].sched
            self.eca = self.schedulers['scheduler-master'].sched.external_commands_manager

        # Now we create an external commands manager in receiver mode
        self.ecr = ExternalCommandManager(self.receiver.cur_conf, 'receiver', self.receiver,
                                          accept_unknown=True)

        # and an external commands manager in dispatcher mode
        self.ecd = ExternalCommandManager(self.arbiter.conf, 'dispatcher', self.arbiter,
                                          accept_unknown=True)
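
Once this test harness is in place, an external command is typically wrapped in an ExternalCommand object and handed to one of the managers created above. The following sketch is not part of alignak_test.py; the ExternalCommand class and the resolve_command call are assumed from the Alignak sources of the same era, so treat it as an illustration only.

# Hedged sketch: push a Nagios-style external command through the
# dispatcher-mode manager (self.ecd) created in the method above.
# The ExternalCommand import and the resolve_command call are assumptions.
import time
from alignak.external_command import ExternalCommand

cmd = '[%d] PROCESS_HOST_CHECK_RESULT;my_host;0;Host is UP' % time.time()
ext_cmd = ExternalCommand(cmd)     # wrap the raw command line
self.ecd.resolve_command(ext_cmd)  # the dispatcher-mode manager routes it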
Code example #5
    def setup_with_file(self, paths, add_default=True):
        self.time_hacker.set_my_time()
        self.print_header()
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        if not isinstance(paths, list):
            paths = [paths]  # Fix for modules tests
            add_default = False # Don't mix config
        if add_default:
            paths.insert(0, 'etc/alignak_1r_1h_1s.cfg')
        self.config_files = paths
        self.conf = Config()
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()

        # If we got one arbiter defined here (before the default) we should be in a case where
        # the tester wants to load/test a module, so we simulate an arbiter daemon
        # and the module loading phase. As it has its own modules manager, it should
        # not impact the scheduler modules, especially since we are asking for the arbiter type :)
        if len(self.conf.arbiters) == 1:
            arbdaemon = Arbiter([''], [''], False, False, None, None)

            arbdaemon.load_modules_manager()

            # we request the instances without them being *started*
            # (for those that are concerned, i.e. the "external" modules):
            # we will *start* these instances after we have been daemonized (if requested)
            me = None
            for arb in self.conf.arbiters:
                me = arb
                arbdaemon.do_load_modules(arb.modules)
                arbdaemon.load_modules_configuration_objects(raw_objects)

        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        #import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        #import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        #import pdb;pdb.set_trace()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        #print "conf.services has %d elements" % len(self.conf.services)
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Alignak(None, False, False, False, None, None)
        self.scheddaemon = scheddaemon
        self.sched = scheddaemon.sched
        scheddaemon.load_modules_manager()
        # Remember to clean the logs we just created before launching tests
        self.clear_logs()
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf)
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.conf.accept_passive_unknown_check_results = False

        self.sched.schedule()
Code example #6
File: schedulerdaemon.py Project: jgmel/alignak
    def setup_new_conf(self):  # pylint: disable=too-many-statements
        """Setup new conf received for scheduler

        :return: None
        """
        with self.conf_lock:
            self.clean_previous_run()
            new_conf = self.new_conf
            logger.info("[%s] Sending us a configuration", self.name)
            conf_raw = new_conf['conf']
            override_conf = new_conf['override_conf']
            modules = new_conf['modules']
            satellites = new_conf['satellites']
            instance_name = new_conf['instance_name']

            # Ok now we can save the retention data
            if hasattr(self.sched, 'conf'):
                self.sched.update_retention_file(forced=True)

            # hooray, we got a name; we can set it in our stats objects
            statsmgr.register(instance_name,
                              'scheduler',
                              statsd_host=new_conf['statsd_host'],
                              statsd_port=new_conf['statsd_port'],
                              statsd_prefix=new_conf['statsd_prefix'],
                              statsd_enabled=new_conf['statsd_enabled'])

            t00 = time.time()
            try:
                conf = unserialize(conf_raw)
            except AlignakClassLookupException as exp:  # pragma: no cover, simple protection
                logger.error(
                    'Cannot un-serialize configuration received from arbiter: %s',
                    exp)
            logger.debug("Conf received at %d. Un-serialized in %d secs", t00,
                         time.time() - t00)
            self.new_conf = None

            if 'scheduler_name' in new_conf:
                name = new_conf['scheduler_name']
            else:
                name = instance_name
            self.name = name

            # Set my own process title
            self.set_proctitle(self.name)

            logger.info("[%s] Received a new configuration, containing: ",
                        self.name)
            for key in new_conf:
                logger.info("[%s] - %s", self.name, key)
            logger.info("[%s] configuration identifiers: %s (%s)", self.name,
                        new_conf['conf_uuid'], new_conf['push_flavor'])

            # Tag the conf with our data
            self.conf = conf
            self.conf.push_flavor = new_conf['push_flavor']
            self.conf.alignak_name = new_conf['alignak_name']
            self.conf.instance_name = instance_name
            self.conf.skip_initial_broks = new_conf['skip_initial_broks']
            self.conf.accept_passive_unknown_check_results = \
                new_conf['accept_passive_unknown_check_results']

            self.cur_conf = conf
            self.override_conf = override_conf
            self.modules = unserialize(modules, True)
            self.satellites = satellites

            # Now We create our pollers, reactionners and brokers
            for sat_type in ['pollers', 'reactionners', 'brokers']:
                if sat_type not in satellites:
                    continue
                for sat_id in satellites[sat_type]:
                    # Must look if we already have it
                    sats = getattr(self, sat_type)
                    sat = satellites[sat_type][sat_id]

                    sats[sat_id] = sat

                    if sat['name'] in override_conf['satellitemap']:
                        sat = dict(sat)  # make a copy
                        sat.update(override_conf['satellitemap'][sat['name']])

                    proto = 'http'
                    if sat['use_ssl']:
                        proto = 'https'
                    uri = '%s://%s:%s/' % (proto, sat['address'], sat['port'])

                    sats[sat_id]['uri'] = uri
                    sats[sat_id]['con'] = None
                    sats[sat_id]['running_id'] = 0
                    sats[sat_id]['last_connection'] = 0
                    sats[sat_id]['connection_attempt'] = 0
                    sats[sat_id]['max_failed_connections'] = 3
                    setattr(self, sat_type, sats)
                logger.debug("We have our %s: %s ", sat_type,
                             satellites[sat_type])
                logger.info("We have our %s:", sat_type)
                for daemon in satellites[sat_type].values():
                    logger.info(" - %s ", daemon['name'])

            # First mix conf and override_conf to have our definitive conf
            for prop in self.override_conf:
                val = self.override_conf[prop]
                setattr(self.conf, prop, val)

            if self.conf.use_timezone != '':
                logger.info("Setting our timezone to %s",
                            str(self.conf.use_timezone))
                os.environ['TZ'] = self.conf.use_timezone
                time.tzset()

            self.do_load_modules(self.modules)

            logger.info("Loading configuration.")
            self.conf.explode_global_conf()  # pylint: disable=E1101

            # we give sched its conf
            self.sched.reset()
            self.sched.load_conf(self.conf)
            self.sched.load_satellites(self.pollers, self.reactionners,
                                       self.brokers)

            # We must update our Config macros dict with the correct values
            # from the config parameters
            self.sched.conf.fill_resource_macros_names_macros()

            # Creating the Macroresolver Class & unique instance
            m_solver = MacroResolver()
            m_solver.init(self.conf)

            # self.conf.dump()
            # self.conf.quick_debug()

            # Now create the external commands manager
            # We are an applyer: our role is not to dispatch commands, but to apply them
            ecm = ExternalCommandManager(self.conf, 'applyer', self.sched)

            # Scheduler needs to know about this external command manager to use it if necessary
            self.sched.set_external_commands_manager(ecm)
            # Update External Commands Manager
            self.sched.external_commands_manager.accept_passive_unknown_check_results = \
                self.sched.conf.accept_passive_unknown_check_results

            # We clear our managed schedulers (it's us :) )
            # and set ourselves in it
            self.schedulers = {self.conf.uuid: self.sched}  # pylint: disable=E1101

            # Ok now we can load the retention data
            self.sched.retention_load()

            # Create the 'new_conf' brok
            brok = Brok({'type': 'new_conf', 'data': {}})
            self.sched.add_brok(brok)
Code example #7
    def setup_new_conf(self):
        """Receiver custom setup_new_conf method

        This function calls the base satellite treatment and manages the configuration needed
        for a receiver daemon:
        - get and configure its satellites
        - configure the modules

        :return: None
        """
        # Execute the base class treatment...
        super(Receiver, self).setup_new_conf()

        # ...then our own specific treatment!
        with self.conf_lock:
            # self_conf is our own configuration from the alignak environment
            # self_conf = self.cur_conf['self_conf']
            logger.debug("Got config: %s", self.cur_conf)

            # Configure and start our modules
            if not self.have_modules:
                try:
                    self.modules = unserialize(self.cur_conf['modules'], no_load=True)
                except AlignakClassLookupException as exp:  # pragma: no cover, simple protection
                    logger.error('Cannot un-serialize modules configuration '
                                 'received from arbiter: %s', exp)
                if self.modules:
                    logger.info("I received some modules configuration: %s", self.modules)
                    self.have_modules = True

                    self.do_load_modules(self.modules)
                    # and start external modules too
                    self.modules_manager.start_external_instances()
                else:
                    logger.info("I do not have modules")

            # Now create the external commands manager
            # We are a receiver: our role is to get and dispatch commands to the schedulers
            global_conf = self.cur_conf.get('global_conf', None)
            if not global_conf:
                logger.error("Received a configuration without any global_conf! "
                             "This may hide a configuration problem with the "
                             "realms and the manage_sub_realms of the satellites!")
                global_conf = {
                    'accept_passive_unknown_check_results': False,
                    'log_external_commands': True
                }
            self.external_commands_manager = \
                ExternalCommandManager(None, 'receiver', self,
                                       global_conf.get(
                                           'accept_passive_unknown_check_results', False),
                                       global_conf.get(
                                           'log_external_commands', False))

            # Initialize connection with all our satellites
            logger.info("Initializing connection with my satellites:")
            my_satellites = self.get_links_of_type(s_type='')
            for satellite in list(my_satellites.values()):
                logger.info("- : %s/%s", satellite.type, satellite.name)
                if not self.daemon_connection_init(satellite):
                    logger.error("Satellite connection failed: %s", satellite)

        # Now I have a configuration!
        self.have_conf = True
Code example #8
    def setup_new_conf(self):
        """Setup new conf received for scheduler

        :return: None
        """
        with self.conf_lock:
            new_c = self.new_conf
            conf_raw = new_c['conf']
            override_conf = new_c['override_conf']
            modules = new_c['modules']
            satellites = new_c['satellites']
            instance_name = new_c['instance_name']
            push_flavor = new_c['push_flavor']
            skip_initial_broks = new_c['skip_initial_broks']
            accept_passive_unknown_chk_res = new_c[
                'accept_passive_unknown_check_results']
            api_key = new_c['api_key']
            secret = new_c['secret']
            http_proxy = new_c['http_proxy']
            statsd_host = new_c['statsd_host']
            statsd_port = new_c['statsd_port']
            statsd_prefix = new_c['statsd_prefix']
            statsd_enabled = new_c['statsd_enabled']

            # hooray, we got a name; we can set it in our stats objects
            statsmgr.register(self.sched,
                              instance_name,
                              'scheduler',
                              api_key=api_key,
                              secret=secret,
                              http_proxy=http_proxy,
                              statsd_host=statsd_host,
                              statsd_port=statsd_port,
                              statsd_prefix=statsd_prefix,
                              statsd_enabled=statsd_enabled)

            t00 = time.time()
            conf = cPickle.loads(conf_raw)
            logger.debug("Conf received at %d. Unserialized in %d secs", t00,
                         time.time() - t00)
            self.new_conf = None

            # Tag the conf with our data
            self.conf = conf
            self.conf.push_flavor = push_flavor
            self.conf.instance_name = instance_name
            self.conf.skip_initial_broks = skip_initial_broks
            self.conf.accept_passive_unknown_check_results = accept_passive_unknown_chk_res

            self.cur_conf = conf
            self.override_conf = override_conf
            self.modules = modules
            self.satellites = satellites
            # self.pollers = self.app.pollers

            if self.conf.human_timestamp_log:
                # pylint: disable=E1101
                logger.set_human_format()

            # Now We create our pollers
            for pol_id in satellites['pollers']:
                # Must look if we already have it
                already_got = pol_id in self.pollers
                poll = satellites['pollers'][pol_id]
                self.pollers[pol_id] = poll

                if poll['name'] in override_conf['satellitemap']:
                    poll = dict(poll)  # make a copy
                    poll.update(override_conf['satellitemap'][poll['name']])

                proto = 'http'
                if poll['use_ssl']:
                    proto = 'https'
                uri = '%s://%s:%s/' % (proto, poll['address'], poll['port'])
                self.pollers[pol_id]['uri'] = uri
                self.pollers[pol_id]['last_connection'] = 0

            # Now We create our reactionners
            for reac_id in satellites['reactionners']:
                # Must look if we already have it
                already_got = reac_id in self.reactionners
                reac = satellites['reactionners'][reac_id]
                self.reactionners[reac_id] = reac

                if reac['name'] in override_conf['satellitemap']:
                    reac = dict(reac)  # make a copy
                    reac.update(override_conf['satellitemap'][reac['name']])

                proto = 'http'
                if reac['use_ssl']:
                    proto = 'https'
                uri = '%s://%s:%s/' % (proto, reac['address'], reac['port'])
                self.reactionners[reac_id]['uri'] = uri
                self.reactionners[reac_id]['last_connection'] = 0

            # First mix conf and override_conf to have our definitive conf
            for prop in self.override_conf:
                val = self.override_conf[prop]
                setattr(self.conf, prop, val)

            if self.conf.use_timezone != '':
                logger.debug("Setting our timezone to %s",
                             str(self.conf.use_timezone))
                os.environ['TZ'] = self.conf.use_timezone
                time.tzset()

            if len(self.modules) != 0:
                logger.debug("I've got %s modules", str(self.modules))

            # TODO: if the scheduler had previous modules instantiated, it must clean them!
            self.do_load_modules(self.modules)

            logger.info("Loading configuration.")
            self.conf.explode_global_conf()

            # we give sched its conf
            self.sched.reset()
            self.sched.load_conf(self.conf)
            self.sched.load_satellites(self.pollers, self.reactionners)

            # We must update our Config macros dict with the correct values
            # from the config parameters
            self.sched.conf.fill_resource_macros_names_macros()
            # print "DBG: got macros", self.sched.conf.macros

            # Creating the Macroresolver Class & unique instance
            m_solver = MacroResolver()
            m_solver.init(self.conf)

            # self.conf.dump()
            # self.conf.quick_debug()

            # Now create the external command manager
            # It's an applyer: its role is not to dispatch commands,
            # but to apply them
            ecm = ExternalCommandManager(self.conf, 'applyer')

            # Scheduler needs to know about the external command manager
            # to activate it if necessary
            self.sched.load_external_command(ecm)

            # External command manager needs the sched because it can raise checks
            ecm.load_scheduler(self.sched)

            # We clear our managed schedulers (it's us :) )
            # and set ourselves in it
            self.schedulers = {self.conf.instance_id: self.sched}
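
Taken together, these examples instantiate the same ExternalCommandManager class in three modes: 'receiver' (collect commands and pass them on to the schedulers), 'applyer' (apply them on a scheduler) and 'dispatcher' (used by the arbiter to route them). The recap sketch below only collects the constructor patterns seen above; the import path and the conf, my_receiver, my_scheduler and my_arbiter placeholders are assumptions, and the exact signature varies between the Alignak versions shown (examples #3 and #7 pass two extra flags).

# Recap sketch only: the three manager modes seen in the examples above.
# All placeholder objects and the import path are assumptions.
from alignak.external_command import ExternalCommandManager

ecm_receiver = ExternalCommandManager(None, 'receiver', my_receiver)   # example #1
ecm_applyer = ExternalCommandManager(conf, 'applyer', my_scheduler)    # example #6
ecm_dispatcher = ExternalCommandManager(conf, 'dispatcher', my_arbiter,
                                        accept_unknown=True)           # example #4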