Code example #1
def main():
    parser = optparse.OptionParser(
        "%prog [options] -c configfile [-c additional_config_file]",
        version="%prog: " + VERSION)
    parser.add_option('-c', '--config', action='append',
                      dest="config_files", metavar="CONFIG-FILE",
                      help=('Config file (your nagios.cfg). Multiple -c options can be '
                            'used; they are read as if all files were one'))
    parser.add_option('-d', '--daemon', action='store_true',
                      dest="is_daemon",
                      help="Run in daemon mode")
    parser.add_option('-r', '--replace', action='store_true',
                      dest="do_replace",
                      help="Replace previous running arbiter")
    parser.add_option('--debugfile', dest='debug_file',
                      help=("Debug file. Default: not used "
                            "(why debug a bug free program? :) )"))
    parser.add_option("-v", "--verify-config",
                      dest="verify_only", action="store_true",
                      help="Verify config file and exit")
    parser.add_option("-p", "--profile",
                      dest="profile",
                      help="Dump a profile file. Needs the Python cProfile library")
    parser.add_option("-a", "--analyse",
                      dest="analyse",
                      help="Dump an analysis statistics file, for support")
    parser.add_option("-m", "--migrate",
                      dest="migrate",
                      help="Migrate the raw configuration read from the arbiter to another "
                           "module. --> VERY EXPERIMENTAL!")
    parser.add_option("-n", "--name",
                      dest="arb_name",
                      help="Give the arbiter name to use. Optional; the host address "
                           "is used to find it if not provided.")

    opts, args = parser.parse_args()


    if not opts.config_files:
        parser.error("Requires at least one config file (option -c/--config)")
    if args:
        parser.error("Does not accept any argument. Use option -c/--config")

    # Protect for windows multiprocessing that will RELAUNCH all
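    # Note: the parsed options are forwarded wholesale to the Arbiter constructor,
    # so every dest defined above (config_files, is_daemon, do_replace, debug_file,
    # verify_only, profile, analyse, migrate, arb_name) is expected to match an
    # Arbiter keyword argument.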
    daemon = Arbiter(debug=opts.debug_file is not None, **opts.__dict__)
    if not opts.profile:
        daemon.main()
    else:
        # For perf tuning:
        import cProfile
        cProfile.run('''daemon.main()''', opts.profile)
Code example #2
    def test_pickle_retention(self):
        # get our modules
        mod = pickle_retention_file_generic.Pickle_retention_generic(
            modconf, 'tmp/retention-test.dat')
        try:
            os.unlink(mod.path)
        except OSError:
            # the retention file may not exist yet
            pass

        sl = get_instance(mod)
        # Hack here :(
        sl.properties = {}
        sl.properties['to_queue'] = None
        sl.init()

        svc = self.sched.services.find_srv_by_name_and_hostname(
            "test_host_0", "test_ok_0")
        self.scheduler_loop(1, [[svc, 2, 'BAD | value1=0 value2=0']])

        self.sched.get_new_broks()

        # Saving the broks we got
        old_broks = copy.copy(self.sched.broks)

        # Now get a real broker object
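        # (assumption: the positional arguments correspond to config_files, is_daemon,
        # do_replace, verify_only, debug, debug_file and arb_name, matching the keyword
        # form used in the last code example)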
        arbiter = Arbiter([''], False, False, False, None, None, None)

        arbiter.broks = self.sched.broks
        sl.hook_save_retention(arbiter)
        # this saves the scheduler's hosts and services state into the retention file

        # Now we clear the source, as if we had restarted
        arbiter.broks.clear()

        self.assertEqual(len(arbiter.broks), 0)

        r = sl.hook_load_retention(arbiter)

        # Check that we loaded them back :)
        for b in old_broks.values():
            found = False
            for b2 in arbiter.broks.values():
                if b2.id == b.id:
                    found = True
                    break
            self.assertTrue(found)

        # Ok, we can delete the retention file
        os.unlink(mod.path)
Code example #3
File: shinken_test.py Project: Aimage/shinken
    def setup_with_file(self, path):
        time_hacker.set_my_time()
        self.print_header()
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()

        # If one arbiter is defined here (before the default one), we are probably in a
        # case where the tester wants to load/test a module, so we simulate an arbiter
        # daemon and the module loading phase. As it has its own modules manager, this
        # should not impact the scheduler modules, especially since we ask for the
        # arbiter type :)
        if len(self.conf.arbiters) == 1:
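            # (assumption: the positional arguments correspond to config_files,
            # is_daemon, do_replace, verify_only, debug and debug_file)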
            arbdaemon = Arbiter([''], [''], False, False, None, None)
            # only load if the modules_dir really exists, i.e. it was set explicitly
            # in the test configuration
            if os.path.exists(getattr(self.conf, 'modules_dir', '')):
                arbdaemon.modules_dir = self.conf.modules_dir
                arbdaemon.load_modules_manager()

                # we request the instances without starting them
                # (for those that are concerned, i.e. "external" modules):
                # we will *start* these instances after we have been daemonized (if requested)
                me = None
                for arb in self.conf.arbiters:
                    me = arb
                    arbdaemon.modules_manager.set_modules(arb.modules)
                    arbdaemon.do_load_modules()
                    arbdaemon.load_modules_configuration_objects(raw_objects)

        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        #import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        #import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        #import pdb;pdb.set_trace()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.compute_hash()
        #print "conf.services has %d elements" % len(self.conf.services)
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.set_initial_state()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Shinken(None, False, False, False, None, None)
        self.scheddaemon = scheddaemon
        self.sched = scheddaemon.sched
        scheddaemon.modules_dir = modules_dir
        scheddaemon.load_modules_manager()
        # Remember to clean the logs we just created before launching tests
        self.clear_logs()
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf, in_test=True)
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.conf.accept_passive_unknown_check_results = False

        self.sched.schedule()
Code example #4
                  help=('Config file (your nagios.cfg). Multiple -c options can be '
                        'used; they are read as if all files were one'))
parser.add_option('-d', '--daemon', action='store_true',
                  dest="is_daemon",
                  help="Run in daemon mode")
parser.add_option('-r', '--replace', action='store_true',
                  dest="do_replace",
                  help="Replace previous running arbiter")
parser.add_option('--debugfile', dest='debug_file',
                  help=("Debug file. Default: not used "
                        "(why debug a bug free program? :) )"))
parser.add_option("-v", "--verify-config",
                  dest="verify_only", action="store_true",
                  help="Verify config file and exit")

opts, args = parser.parse_args()


if not opts.config_files:
    parser.error("Requires at least one config file (option -c/--config)")
if args:
    parser.error("Does not accept any argument. Use option -c/--config")

# Protect for windows multiprocessing that will RELAUNCH all
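# (on Windows, multiprocessing spawns fresh interpreters that re-import the main
# module, so the daemon must only be started under the __main__ guard)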
if __name__ == '__main__':
    daemon = Arbiter(debug=opts.debug_file is not None, **opts.__dict__)
    daemon.main()
# For perf tuning :
#import cProfile
#cProfile.run('''daemon.main()''', '/tmp/arbiter.profile')
Code example #5
    def setup_with_file(self, path):
        time_hacker.set_my_time()
        self.print_header()
        # i am arbiter-like
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()

        # If one arbiter is defined here (before the default one), we are probably in a
        # case where the tester wants to load/test a module, so we simulate an arbiter
        # daemon and the module loading phase. As it has its own modules manager, this
        # should not impact the scheduler modules, especially since we ask for the
        # arbiter type :)
        if len(self.conf.arbiters) == 1:
            arbdaemon = Arbiter([''], [''], False, False, None, None)
            # only load if the modules_dir really exists, i.e. it was set explicitly
            # in the test configuration
            if os.path.exists(getattr(self.conf, 'modules_dir', '')):
                arbdaemon.modules_dir = self.conf.modules_dir
                arbdaemon.load_modules_manager()

                # we request the instances without starting them
                # (for those that are concerned, i.e. "external" modules):
                # we will *start* these instances after we have been daemonized (if requested)
                me = None
                for arb in self.conf.arbiters:
                    me = arb
                    arbdaemon.modules_manager.set_modules(arb.modules)
                    arbdaemon.do_load_modules()
                    arbdaemon.load_modules_configuration_objects(raw_objects)

        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        #import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        #import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        #import pdb;pdb.set_trace()
        self.conf.explode()
        #print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.set_initial_state()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.confs = self.conf.cut_into_parts()
        self.conf.prepare_for_sending()
        self.conf.show_errors()
        self.dispatcher = Dispatcher(self.conf, self.me)

        scheddaemon = Shinken(None, False, False, False, None, None)
        self.scheddaemon = scheddaemon
        self.sched = scheddaemon.sched
        scheddaemon.modules_dir = modules_dir
        scheddaemon.load_modules_manager()
        # Remember to clean the logs we just created before launching tests
        self.clear_logs()
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf, in_test=True)
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.conf.accept_passive_unknown_check_results = False

        self.sched.schedule()
Code example #6
    def __init__(self):
        self.conf = Config()
        buf = self.conf.read_config(['/etc/shinken/shinken.cfg'])
        self.raw_objects = self.conf.read_config_buf(buf)

        self.conf.create_objects_for_type(self.raw_objects, 'arbiter')
        self.conf.create_objects_for_type(self.raw_objects, 'module')
        self.conf.early_arbiter_linking()

        if len(self.conf.arbiters) == 1:
            self.arbdaemon = Arbiter(
                config_files=[''],
                is_daemon=[''],
                do_replace=False,
                verify_only=False,
                debug=None,
                debug_file=None,
                arb_name='arbtest'
            )
            self.arbdaemon.modules_dir = '/var/lib/shinken/modules/'
            self.arbdaemon.load_modules_manager()

            me = None
            for arb in self.conf.arbiters:
                me = arb
                self.arbdaemon.modules_manager.set_modules(arb.modules)
                self.arbdaemon.do_load_modules()
                self.arbdaemon.load_modules_configuration_objects(self.raw_objects)

        self.conf.create_objects(self.raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'


        # Hack push_flavor, that is set by the dispatcher
        self.conf.push_flavor = 0
        self.conf.load_triggers()
        # import pdb;pdb.set_trace()
        self.conf.linkify_templates()
        # import pdb;pdb.set_trace()
        self.conf.apply_inheritance()
        # import pdb;pdb.set_trace()
        self.conf.explode()
        # print "Aconf.services has %d elements" % len(self.conf.services)
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.remove_templates()
        self.conf.compute_hash()
        # print "conf.services has %d elements" % len(self.conf.services)
        self.conf.override_properties()
        self.conf.linkify()
        self.conf.apply_dependencies()
        self.conf.set_initial_state()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        if not self.conf.conf_is_correct:
            print "The conf is not correct, I stop here"
            self.conf.dump()
            return
        self.conf.clean()

        self.arbdaemon.conf = self.conf