def main():
    """Parse command-line arguments and launch the receiver daemon.

    :return: None
    """
    opt_parser = optparse.OptionParser("%prog [options]", version="%prog " + VERSION)
    # Register the supported command-line options, table-driven
    for flags, kwargs in [
            (('-c', '--config'),
             dict(dest="config_file", metavar="INI-CONFIG-FILE", help='Config file')),
            (('-d', '--daemon'),
             dict(action='store_true', dest="is_daemon", help="Run in daemon mode")),
            (('-r', '--replace'),
             dict(action='store_true', dest="do_replace",
                  help="Replace previous running receiver")),
            (('--debugfile',),
             dict(dest='debug_file',
                  help=("Debug file. Default: not used "
                        "(why debug a bug free program? :) )")))]:
        opt_parser.add_option(*flags, **kwargs)

    options, extra_args = opt_parser.parse_args()
    if extra_args:
        opt_parser.error("Does not accept any argument.")

    # Debug mode is enabled as soon as a debug file was requested
    receiver_daemon = Receiver(debug=options.debug_file is not None, **options.__dict__)
    receiver_daemon.main()
def main():
    """Parse the command line and start the Receiver daemon.

    :return: None
    """
    usage = "%prog [options]"
    option_parser = optparse.OptionParser(usage, version="%prog " + VERSION)
    option_parser.add_option('-c', '--config', dest="config_file",
                             metavar="INI-CONFIG-FILE", help='Config file')
    option_parser.add_option('-d', '--daemon', action='store_true', dest="is_daemon",
                             help="Run in daemon mode")
    option_parser.add_option('-r', '--replace', action='store_true', dest="do_replace",
                             help="Replace previous running receiver")
    option_parser.add_option('--debugfile', dest='debug_file',
                             help=("Debug file. Default: not used "
                                   "(why debug a bug free program? :) )"))

    options, leftover = option_parser.parse_args()
    if leftover:
        option_parser.error("Does not accept any argument.")

    # Providing a debug file on the command line switches the daemon to debug mode
    receiver = Receiver(debug=options.debug_file is not None, **options.__dict__)
    receiver.main()
def main():
    """Parse the daemon command-line arguments and run the Receiver daemon.

    :return: None
    """
    parsed = parse_daemon_args()
    # Debug mode is active whenever a debug file was requested on the command line
    receiver = Receiver(debug=parsed.debug_file is not None, **parsed.__dict__)
    receiver.main()
def main():
    """Parse args and run main daemon function.

    Any exception escaping the daemon is reported on stderr (with its
    traceback) and the process exits with status 1.

    :return: None
    """
    try:
        args = parse_daemon_args()
        daemon = Receiver(**args.__dict__)
        daemon.main()
    except Exception as exp:  # pylint: disable=broad-except
        sys.stderr.write("*** Daemon exited because: %s" % str(exp))
        traceback.print_exc()
        # Use sys.exit() rather than the site-module builtin exit(), which is
        # only guaranteed to exist in interactive sessions / when site is loaded
        sys.exit(1)
    def test_module_host_livestate_unauthorized(self):
        """Test the module /host API - host livestate - unauthorized access

        :return:
        """
        # Create an Alignak module
        mod = Module({
            'module_alias': 'web-services',
            'module_types': 'web-services',
            'python_name': 'alignak_module_ws',
            # Alignak backend - left empty: no backend is used in this test
            'alignak_backend': '',
            'username': '',
            'password': '',
            # Do not set a timestamp in the built external commands
            'set_timestamp': '0',
            # Do not give feedback data
            'give_feedback': '0',
            'give_result': '1',
            # Set Arbiter address as empty to not poll the Arbiter else the test will fail!
            'alignak_host': '',
            'alignak_port': 7770,
            # Set module to listen on all interfaces
            'host': '0.0.0.0',
            'port': 8888,
            # Allow host/service creation
            'allow_host_creation': '1',
            'allow_service_creation': '1',
            # Force Alignak backend update by the module (default is not force!)
            'alignak_backend_livestate_update': '0',
            # Disable authorization
            'authorization': '0'
        })

        # Create a receiver daemon
        args = {'env_file': '', 'daemon_name': 'receiver-master'}
        self._receiver_daemon = Receiver(**args)

        # Create the modules manager for the daemon
        self.modulemanager = ModulesManager(self._receiver_daemon)

        # Load and initialize the modules:
        #  - load python module
        #  - get module properties and instances
        self.modulemanager.load_and_init([mod])

        my_module = self.modulemanager.instances[0]

        # Clear logs
        self.clear_logs()

        # Start external modules
        self.modulemanager.start_external_instances()

        # Starting external module logs
        self.assert_log_match("Trying to initialize module: web-services", 0)
        self.assert_log_match("Starting external module web-services", 1)
        self.assert_log_match("Starting external process for module web-services", 2)
        self.assert_log_match("web-services is now started", 3)

        # Check alive
        self.assertIsNotNone(my_module.process)
        self.assertTrue(my_module.process.is_alive())
        # Leave time for the external module to start listening
        time.sleep(1)

        session = requests.Session()
        headers = {'Content-Type': 'application/json'}
        params = {'username': '******', 'password': '******'}
        response = session.post(self.ws_endpoint + '/login', json=params, headers=headers)
        assert response.status_code == 200
        # NOTE(review): the login payload is not used afterwards - presumably
        # because authorization is disabled in the module configuration
        resp = response.json()

        # -----
        # Update an host with an host livestate (heartbeat / host is alive): livestate
        # Because there is no backend configured, only an external command is raised
        # to Alignak for the host
        data = {
            "name": "new_host_0",
            "livestate": {
                # No timestamp in the livestate
                "state": "UP",
                "output": "Output...",
                "long_output": "Long output...",
                "perf_data": "'counter'=1",
            }
        }
        self.assertEqual(my_module.received_commands, 0)
        response = session.patch(self.ws_endpoint + '/host', json=data, headers=headers)
        print(response)
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(result, {
            '_status': 'OK',
            '_result': [
                'new_host_0 is alive :)',
                "PROCESS_HOST_CHECK_RESULT;new_host_0;0;Output...|'counter'=1\nLong output...",
            ]
        })
        # No errors - an external command was raised for the host livestate

        # -----
        # Update an host with an host livestate (heartbeat / host is alive): livestate
        # Because there is no backend configured, only an external command is raised
        # to Alignak for the host
        data = {
            "name": "new_host_0",
            "livestate": {
                # No timestamp in the livestate
                "state": "UP",
                "output": "Output...",
                "long_output": "Long output...",
                "perf_data": "'counter'=1",
            },
            "services": [{
                "name": "test_svc_0",
                "livestate": {
                    "state": "OK",
                    "output": "Output...",
                    "long_output": "Long output...",
                    "perf_data": "'counter'=1",
                }
            }]
        }
        # The previous PATCH must have produced exactly one external command
        self.assertEqual(my_module.received_commands, 1)
        response = session.patch(self.ws_endpoint + '/host', json=data, headers=headers)
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(result, {
            '_status': 'OK',
            '_result': [
                'new_host_0 is alive :)',
                "PROCESS_HOST_CHECK_RESULT;new_host_0;0;Output...|'counter'=1\nLong output...",
                "PROCESS_SERVICE_CHECK_RESULT;new_host_0;test_svc_0;0;Output...|'counter'=1\nLong output..."
            ]
        })
        # No errors - an external command was raised for the host livestate

        self.modulemanager.stop_all()
    def setup_with_file(self, configuration_file):
        """ Load alignak with defined configuration file

        If the configuration loading fails, a SystemExit exception is raised to the caller.

        The conf_is_correct property indicates if the configuration loading succeeded or failed.

        The configuration errors property contains a list of the error message that are normally
        logged as ERROR by the arbiter.

        @verified

        :param configuration_file: path + file name of the main configuration file
        :type configuration_file: str
        :return: None
        """
        self.broks = {}
        self.schedulers = {}
        self.brokers = {}
        self.pollers = {}
        self.receivers = {}
        self.reactionners = {}
        self.arbiter = None
        self.conf_is_correct = False
        self.configuration_warnings = []
        self.configuration_errors = []

        # Add collector for test purpose.
        self.setup_logger()

        # Initialize the Arbiter with no daemon configuration file
        self.arbiter = Arbiter(None, [configuration_file], False, False, False, False,
                               '/tmp/arbiter.log', 'arbiter-master')

        try:
            # The following is copy paste from setup_alignak_logger
            # The only difference is that keep logger at INFO level to gather messages
            # This is needed to assert later on logs we received.
            self.logger.setLevel(logging.INFO)
            # Force the debug level if the daemon is said to start with such level
            if self.arbiter.debug:
                self.logger.setLevel(logging.DEBUG)

            # Log will be broks
            for line in self.arbiter.get_header():
                self.logger.info(line)

            self.arbiter.load_monitoring_config_file()

            # If this assertion does not match, then there is a bug in the arbiter :)
            self.assertTrue(self.arbiter.conf.conf_is_correct)
            self.conf_is_correct = True
            self.configuration_warnings = self.arbiter.conf.configuration_warnings
            self.configuration_errors = self.arbiter.conf.configuration_errors
        except SystemExit:
            # Dump the collected warnings/errors before re-raising to the caller
            self.configuration_warnings = self.arbiter.conf.configuration_warnings
            print("Configuration warnings:")
            for msg in self.configuration_warnings:
                print(" - %s" % msg)
            self.configuration_errors = self.arbiter.conf.configuration_errors
            print("Configuration errors:")
            for msg in self.configuration_errors:
                print(" - %s" % msg)
            raise

        # Identify the arbiter's own daemon link in the loaded configuration
        for arb in self.arbiter.conf.arbiters:
            if arb.get_name() == self.arbiter.arbiter_name:
                self.arbiter.myself = arb
        self.arbiter.dispatcher = Dispatcher(self.arbiter.conf, self.arbiter.myself)
        self.arbiter.dispatcher.prepare_dispatch()

        # Build schedulers dictionary with the schedulers involved in the configuration
        for scheduler in self.arbiter.dispatcher.schedulers:
            sched = Alignak([], False, False, True, '/tmp/scheduler.log')
            sched.load_modules_manager(scheduler.name)
            sched.new_conf = scheduler.conf_package
            if sched.new_conf:
                sched.setup_new_conf()
            self.schedulers[scheduler.scheduler_name] = sched

        # Build pollers dictionary with the pollers involved in the configuration
        for poller in self.arbiter.dispatcher.pollers:
            self.pollers[poller.poller_name] = poller

        # Build receivers dictionary with the receivers involved in the configuration
        for receiver in self.arbiter.dispatcher.receivers:
            self.receivers[receiver.receiver_name] = receiver

        # Build reactionners dictionary with the reactionners involved in the configuration
        for reactionner in self.arbiter.dispatcher.reactionners:
            self.reactionners[reactionner.reactionner_name] = reactionner

        # Build brokers dictionary with the brokers involved in the configuration
        for broker in self.arbiter.dispatcher.brokers:
            self.brokers[broker.broker_name] = broker

        # Initialize the Receiver with no daemon configuration file
        self.receiver = Receiver(None, False, False, False, False)

        # Initialize the Broker with no daemon configuration file
        self.broker = Broker(None, False, False, False, False)

        # External commands manager default mode; default is the applyer (scheduler) mode
        self.ecm_mode = 'applyer'

        # Now we create an external commands manager in dispatcher mode
        self.arbiter.external_commands_manager = ExternalCommandManager(self.arbiter.conf,
                                                                        'dispatcher',
                                                                        self.arbiter,
                                                                        accept_unknown=True)

        # Now we get the external commands manager of our scheduler
        self.eca = None
        if 'scheduler-master' in self.schedulers:
            self._sched = self.schedulers['scheduler-master'].sched
            self.eca = self.schedulers['scheduler-master'].sched.external_commands_manager

        # Now we create an external commands manager in receiver mode
        self.ecr = ExternalCommandManager(self.receiver.cur_conf, 'receiver', self.receiver,
                                          accept_unknown=True)

        # and an external commands manager in dispatcher mode
        self.ecd = ExternalCommandManager(self.arbiter.conf, 'dispatcher', self.arbiter,
                                          accept_unknown=True)
class AlignakTest(unittest.TestCase):
    """Base class for Alignak unit tests.

    Provides helpers to load a monitoring configuration, drive the scheduler
    loop, fake check executions, and assert on collected logs, checks,
    actions, broks and configuration messages.

    NOTE(review): this file uses Python 2 constructs (print statements,
    dict.iteritems/itervalues, string.center) - it is not Python 3 compatible.
    """

    # Shared time manipulation helper for all tests
    time_hacker = TimeHacker()

    # Show full diffs on assertion failures
    maxDiff = None

    if sys.version_info < (2, 7):
        def assertRegex(self, *args, **kwargs):
            # Compatibility shim for Python < 2.7
            return self.assertRegexpMatches(*args, **kwargs)

    def setup_logger(self):
        """ Setup a log collector

        :return:
        """
        self.logger = logging.getLogger("alignak")

        # Add collector for test purpose.
        collector_h = CollectorHandler()
        collector_h.setFormatter(DEFAULT_FORMATTER_NAMED)
        self.logger.addHandler(collector_h)

    def files_update(self, files, replacements):
        """Update files content with the defined replacements

        :param files: list of files to parse and replace
        :param replacements: list of values to replace
        :return:
        """
        for filename in files:
            lines = []
            with open(filename) as infile:
                for line in infile:
                    # Apply every replacement to every line
                    for src, target in replacements.iteritems():
                        line = line.replace(src, target)
                    lines.append(line)
            with open(filename, 'w') as outfile:
                for line in lines:
                    outfile.write(line)

    def setup_with_file(self, configuration_file):
        """ Load alignak with defined configuration file

        If the configuration loading fails, a SystemExit exception is raised to the caller.

        The conf_is_correct property indicates if the configuration loading succeeded or failed.

        The configuration errors property contains a list of the error message that are normally
        logged as ERROR by the arbiter.

        @verified

        :param configuration_file: path + file name of the main configuration file
        :type configuration_file: str
        :return: None
        """
        self.broks = {}
        self.schedulers = {}
        self.brokers = {}
        self.pollers = {}
        self.receivers = {}
        self.reactionners = {}
        self.arbiter = None
        self.conf_is_correct = False
        self.configuration_warnings = []
        self.configuration_errors = []

        # Add collector for test purpose.
        self.setup_logger()

        # Initialize the Arbiter with no daemon configuration file
        self.arbiter = Arbiter(None, [configuration_file], False, False, False, False,
                               '/tmp/arbiter.log', 'arbiter-master')

        try:
            # The following is copy paste from setup_alignak_logger
            # The only difference is that keep logger at INFO level to gather messages
            # This is needed to assert later on logs we received.
            self.logger.setLevel(logging.INFO)
            # Force the debug level if the daemon is said to start with such level
            if self.arbiter.debug:
                self.logger.setLevel(logging.DEBUG)

            # Log will be broks
            for line in self.arbiter.get_header():
                self.logger.info(line)

            self.arbiter.load_monitoring_config_file()

            # If this assertion does not match, then there is a bug in the arbiter :)
            self.assertTrue(self.arbiter.conf.conf_is_correct)
            self.conf_is_correct = True
            self.configuration_warnings = self.arbiter.conf.configuration_warnings
            self.configuration_errors = self.arbiter.conf.configuration_errors
        except SystemExit:
            # Dump the collected warnings/errors before re-raising to the caller
            self.configuration_warnings = self.arbiter.conf.configuration_warnings
            print("Configuration warnings:")
            for msg in self.configuration_warnings:
                print(" - %s" % msg)
            self.configuration_errors = self.arbiter.conf.configuration_errors
            print("Configuration errors:")
            for msg in self.configuration_errors:
                print(" - %s" % msg)
            raise

        # Identify the arbiter's own daemon link in the loaded configuration
        for arb in self.arbiter.conf.arbiters:
            if arb.get_name() == self.arbiter.arbiter_name:
                self.arbiter.myself = arb
        self.arbiter.dispatcher = Dispatcher(self.arbiter.conf, self.arbiter.myself)
        self.arbiter.dispatcher.prepare_dispatch()

        # Build schedulers dictionary with the schedulers involved in the configuration
        for scheduler in self.arbiter.dispatcher.schedulers:
            sched = Alignak([], False, False, True, '/tmp/scheduler.log')
            sched.load_modules_manager(scheduler.name)
            sched.new_conf = scheduler.conf_package
            if sched.new_conf:
                sched.setup_new_conf()
            self.schedulers[scheduler.scheduler_name] = sched

        # Build pollers dictionary with the pollers involved in the configuration
        for poller in self.arbiter.dispatcher.pollers:
            self.pollers[poller.poller_name] = poller

        # Build receivers dictionary with the receivers involved in the configuration
        for receiver in self.arbiter.dispatcher.receivers:
            self.receivers[receiver.receiver_name] = receiver

        # Build reactionners dictionary with the reactionners involved in the configuration
        for reactionner in self.arbiter.dispatcher.reactionners:
            self.reactionners[reactionner.reactionner_name] = reactionner

        # Build brokers dictionary with the brokers involved in the configuration
        for broker in self.arbiter.dispatcher.brokers:
            self.brokers[broker.broker_name] = broker

        # Initialize the Receiver with no daemon configuration file
        self.receiver = Receiver(None, False, False, False, False)

        # Initialize the Broker with no daemon configuration file
        self.broker = Broker(None, False, False, False, False)

        # External commands manager default mode; default is the applyer (scheduler) mode
        self.ecm_mode = 'applyer'

        # Now we create an external commands manager in dispatcher mode
        self.arbiter.external_commands_manager = ExternalCommandManager(self.arbiter.conf,
                                                                        'dispatcher',
                                                                        self.arbiter,
                                                                        accept_unknown=True)

        # Now we get the external commands manager of our scheduler
        self.eca = None
        if 'scheduler-master' in self.schedulers:
            self._sched = self.schedulers['scheduler-master'].sched
            self.eca = self.schedulers['scheduler-master'].sched.external_commands_manager

        # Now we create an external commands manager in receiver mode
        self.ecr = ExternalCommandManager(self.receiver.cur_conf, 'receiver', self.receiver,
                                          accept_unknown=True)

        # and an external commands manager in dispatcher mode
        self.ecd = ExternalCommandManager(self.arbiter.conf, 'dispatcher', self.arbiter,
                                          accept_unknown=True)

    def fake_check(self, ref, exit_status, output="OK"):
        """ Simulate a check execution and result

        :param ref: host/service concerned by the check
        :param exit_status: check exit status code (0, 1, ...).
               If set to None, the check is simply scheduled but not "executed"
        :param output: check output (output + perf data)
        :return:
        """

        now = time.time()
        check = ref.schedule(self.schedulers['scheduler-master'].sched.hosts,
                             self.schedulers['scheduler-master'].sched.services,
                             self.schedulers['scheduler-master'].sched.timeperiods,
                             self.schedulers['scheduler-master'].sched.macromodulations,
                             self.schedulers['scheduler-master'].sched.checkmodulations,
                             self.schedulers['scheduler-master'].sched.checks,
                             force=True, force_time=None)
        # now the check is scheduled and we get it in the action queue
        self.schedulers['scheduler-master'].sched.add(check)  # check is now in sched.checks[]

        # Allows to force check scheduling without setting its status nor output.
        # Useful for manual business rules rescheduling, for instance.
        if exit_status is None:
            return

        # fake execution
        check.check_time = now

        # and lie about when we will launch it because
        # if not, the schedule call for ref
        # will not really reschedule it because there
        # is a valid value in the future
        ref.next_chk = now - 0.5

        # Max plugin output is default to 8192
        check.get_outputs(output, 8192)
        check.exit_status = exit_status
        check.execution_time = 0.001
        check.status = 'waitconsume'

        # Put the check result in the waiting results for the scheduler ...
        self.schedulers['scheduler-master'].sched.waiting_results.put(check)

    def scheduler_loop(self, count, items, mysched=None):
        """ Manage scheduler checks

        @verified

        :param count: number of checks to pass
        :type count: int
        :param items: list of list [[object, exist_status, output]]
        :type items: list
        :param mysched: The scheduler
        :type mysched: None | object
        :return: None
        """
        if mysched is None:
            mysched = self.schedulers['scheduler-master']

        macroresolver = MacroResolver()
        macroresolver.init(mysched.conf)

        for num in range(count):
            for item in items:
                (obj, exit_status, output) = item
                # Run the once-per-tick recurrent works to get a check scheduled
                if len(obj.checks_in_progress) == 0:
                    for i in mysched.sched.recurrent_works:
                        (name, fun, nb_ticks) = mysched.sched.recurrent_works[i]
                        if nb_ticks == 1:
                            fun()
                self.assertGreater(len(obj.checks_in_progress), 0)
                chk = mysched.sched.checks[obj.checks_in_progress[0]]
                chk.set_type_active()
                chk.check_time = time.time()
                chk.wait_time = 0.0001
                chk.last_poll = chk.check_time
                chk.output = output
                chk.exit_status = exit_status
                mysched.sched.waiting_results.put(chk)

            # Consume the results we just pushed
            for i in mysched.sched.recurrent_works:
                (name, fun, nb_ticks) = mysched.sched.recurrent_works[i]
                if nb_ticks == 1:
                    fun()

    def manage_external_command(self, external_command, run=True):
        """Manage an external command.

        Behavior depends on self.ecm_mode ('applyer', 'dispatcher' or 'receiver').

        :return: result of external command resolution
        """
        ext_cmd = ExternalCommand(external_command)
        if self.ecm_mode == 'applyer':
            res = None
            self._scheduler.run_external_command(external_command)
            self.external_command_loop()
        if self.ecm_mode == 'dispatcher':
            res = self.ecd.resolve_command(ext_cmd)
            if res and run:
                self.arbiter.broks = {}
                self.arbiter.add(ext_cmd)
                self.arbiter.push_external_commands_to_schedulers()
                # Our scheduler
                self._scheduler = self.schedulers['scheduler-master'].sched
                # Our broker
                self._broker = self._scheduler.brokers['broker-master']
                # Forward the arbiter broks to our broker
                for brok in self.arbiter.broks:
                    print("Brok: %s : %s" % (brok, self.arbiter.broks[brok]))
                    self._broker['broks'][brok] = self.arbiter.broks[brok]
        if self.ecm_mode == 'receiver':
            res = self.ecr.resolve_command(ext_cmd)
            if res and run:
                self.receiver.broks = {}
                self.receiver.add(ext_cmd)
                self.receiver.push_external_commands_to_schedulers()
                # Our scheduler
                self._scheduler = self.schedulers['scheduler-master'].sched
                # Our broker
                self._broker = self._scheduler.brokers['broker-master']
                # Forward the receiver broks to our broker
                for brok in self.receiver.broks:
                    print("Brok: %s : %s" % (brok, self.receiver.broks[brok]))
                    self._broker.broks[brok] = self.receiver.broks[brok]
        return res

    def external_command_loop(self):
        """Execute the scheduler actions for external commands.

        The scheduler is not an ECM 'dispatcher' but an 'applyer' ... so this function is on the
        external command execution side of the problem.

        @verified
        :return:
        """
        for i in self.schedulers['scheduler-master'].sched.recurrent_works:
            (name, fun, nb_ticks) = self.schedulers['scheduler-master'].sched.recurrent_works[i]
            if nb_ticks == 1:
                fun()
        self.assert_no_log_match("External command Brok could not be sent to any daemon!")

    def worker_loop(self, verbose=True):
        """Simulate a poller/reactionner worker: fetch the pending actions and
        put back fake successful results in the scheduler."""
        self.schedulers['scheduler-master'].sched.delete_zombie_checks()
        self.schedulers['scheduler-master'].sched.delete_zombie_actions()
        checks = self.schedulers['scheduler-master'].sched.get_to_run_checks(True, False,
                                                                             worker_name='tester')
        actions = self.schedulers['scheduler-master'].sched.get_to_run_checks(False, True,
                                                                              worker_name='tester')
        if verbose is True:
            self.show_actions()
        for a in actions:
            # Fake a successful execution for every pending action
            a.status = 'inpoller'
            a.check_time = time.time()
            a.exit_status = 0
            self.schedulers['scheduler-master'].sched.put_results(a)
        if verbose is True:
            self.show_actions()

    def launch_internal_check(self, svc_br):
        """ Launch an internal check for the business rule service provided """
        # Launch an internal check
        now = time.time()
        self._sched.add(svc_br.launch_check(now - 1, self._sched.hosts, self._sched.services,
                                            self._sched.timeperiods,
                                            self._sched.macromodulations,
                                            self._sched.checkmodulations, self._sched.checks))
        c = svc_br.actions[0]
        self.assertEqual(True, c.internal)
        self.assertTrue(c.is_launchable(now))

        # ask the scheduler to launch this check
        # and ask 2 loops: one to launch the check
        # and another to get the result
        self.scheduler_loop(2, [])

        # We should not have the check anymore
        self.assertEqual(0, len(svc_br.actions))

    def show_logs(self, scheduler=False):
        """ Show logs. Get logs collected by the collector handler and print them

        @verified
        :param scheduler:
        :return:
        """
        print "--- logs <<<----------------------------------"
        collector_h = [hand for hand in self.logger.handlers
                       if isinstance(hand, CollectorHandler)][0]
        for log in collector_h.collector:
            safe_print(log)

        print "--- logs >>>----------------------------------"

    def show_actions(self):
        """Print the scheduler actions (notifications / event handlers),
        ordered by creation time."""
        print "--- actions <<<----------------------------------"
        actions = sorted(self.schedulers['scheduler-master'].sched.actions.values(),
                         key=lambda x: x.creation_time)
        for a in actions:
            if a.is_a == 'notification':
                item = self.schedulers['scheduler-master'].sched.find_item_by_id(a.ref)
                if item.my_type == "host":
                    ref = "host: %s" % item.get_name()
                else:
                    hst = self.schedulers['scheduler-master'].sched.find_item_by_id(item.host)
                    ref = "host: %s svc: %s" % (hst.get_name(), item.get_name())
                print "NOTIFICATION %s %s %s %s %s %s" % (a.uuid, ref, a.type,
                                                          time.asctime(
                                                              time.localtime(a.t_to_go)),
                                                          a.status, a.contact_name)
            elif a.is_a == 'eventhandler':
                print "EVENTHANDLER:", a
        print "--- actions >>>----------------------------------"

    def show_checks(self):
        """ Show checks from the scheduler

        :return:
        """
        print "--- checks <<<--------------------------------"
        checks = sorted(self.schedulers['scheduler-master'].sched.checks.values(),
                        key=lambda x: x.creation_time)
        for check in checks:
            print("- %s" % check)
        print "--- checks >>>--------------------------------"

    def show_and_clear_logs(self):
        """ Prints and then deletes the current logs stored in the log collector

        @verified
        :return:
        """
        self.show_logs()
        self.clear_logs()

    def show_and_clear_actions(self):
        """Print and then delete the current scheduler actions."""
        self.show_actions()
        self.clear_actions()

    def count_logs(self):
        """ Count the log lines in the Arbiter broks.
        If 'scheduler' is True, then uses the scheduler's broks list.

        @verified
        :return:
        """
        collector_h = [hand for hand in self.logger.handlers
                       if isinstance(hand, CollectorHandler)][0]
        return len(collector_h.collector)

    def count_actions(self):
        """ Count the actions in the scheduler's actions.

        @verified
        :return:
        """
        return len(self.schedulers['scheduler-master'].sched.actions.values())

    def clear_logs(self):
        """ Remove all the logs stored in the logs collector

        @verified
        :return:
        """
        collector_h = [hand for hand in self.logger.handlers
                       if isinstance(hand, CollectorHandler)][0]
        collector_h.collector = []

    def clear_actions(self):
        """ Clear the actions in the scheduler's actions.

        @verified
        :return:
        """
        self.schedulers['scheduler-master'].sched.actions = {}

    def assert_actions_count(self, number):
        """ Check the number of actions

        @verified

        :param number: number of actions we must have
        :type number: int
        :return: None
        """
        actions = sorted(self.schedulers['scheduler-master'].sched.actions.values(),
                         key=lambda x: x.creation_time)
        self.assertEqual(number, len(self.schedulers['scheduler-master'].sched.actions),
                         "Not found expected number of actions:\nactions_logs=[[[\n%s\n]]]" %
                         ('\n'.join('\t%s = creation: %s, is_a: %s, type: %s, status: %s, '
                                    'planned: %s, command: %s' %
                                    (idx, b.creation_time, b.is_a, b.type,
                                     b.status, b.t_to_go, b.command)
                                    for idx, b in enumerate(actions))))

    def assert_actions_match(self, index, pattern, field):
        """ Check if pattern verified in field(property) name of the action with index in
        action list

        @verified

        :param index: index in the actions list. If index is -1, all the actions in the list
               are searched for a matching pattern
        :type index: int
        :param pattern: pattern to verify is in the action
        :type pattern: str
        :param field: name of the field (property) of the action
        :type field: str
        :return: None
        """
        regex = re.compile(pattern)
        actions = sorted(self.schedulers['scheduler-master'].sched.actions.values(),
                         key=lambda x: x.creation_time)
        if index != -1:
            myaction = actions[index]
            self.assertTrue(regex.search(getattr(myaction, field)),
                            "Not found a matching pattern in actions:\n"
                            "index=%s field=%s pattern=%r\n"
                            "action_line=creation: %s, is_a: %s, type: %s, "
                            "status: %s, planned: %s, command: %s" % (
                                index, field, pattern, myaction.creation_time, myaction.is_a,
                                myaction.type, myaction.status, myaction.t_to_go,
                                myaction.command))
            return

        # index == -1: search all the actions for a matching pattern
        for myaction in actions:
            if regex.search(getattr(myaction, field)):
                return

        self.assertTrue(False,
                        "Not found a matching pattern in actions:\nfield=%s pattern=%r\n" %
                        (field, pattern))

    def assert_log_match(self, pattern, index=None):
        """ Search if the log with the index number has the pattern in the Arbiter logs.

        If index is None, then all the collected logs are searched for the pattern

        Logs numbering starts from 0 (the oldest stored log line)

        This function assert on the search result. As of it, if no log is found with th search
        criteria an assertion is raised and the test stops on error.

        :param pattern: string to search in log
        :type pattern: str
        :param index: index number
        :type index: int
        :return: None
        """
        self.assertIsNotNone(pattern, "Searched pattern can not be None!")

        collector_h = [hand for hand in self.logger.handlers
                       if isinstance(hand, CollectorHandler)][0]

        regex = re.compile(pattern)
        log_num = 0

        found = False
        for log in collector_h.collector:
            if index is None:
                if regex.search(log):
                    found = True
                    break
            elif index == log_num:
                if regex.search(log):
                    found = True
                    break
            log_num += 1

        self.assertTrue(found,
                        "Not found a matching log line in logs:\nindex=%s pattern=%r\n"
                        "logs=[[[\n%s\n]]]" % (
                            index, pattern,
                            '\n'.join('\t%s=%s' % (idx, b.strip())
                                      for idx, b in enumerate(collector_h.collector))))

    def assert_checks_count(self, number):
        """ Check the number of actions

        @verified

        :param number: number of actions we must have
        :type number: int
        :return: None
        """
        checks = sorted(self.schedulers['scheduler-master'].sched.checks.values(),
                        key=lambda x: x.creation_time)
        self.assertEqual(number, len(checks),
                         "Not found expected number of checks:\nchecks_logs=[[[\n%s\n]]]" %
                         ('\n'.join('\t%s = creation: %s, is_a: %s, type: %s, status: %s, '
                                    'planned: %s, ' 'command: %s' %
                                    (idx, b.creation_time, b.is_a, b.type, b.status,
                                     b.t_to_go, b.command)
                                    for idx, b in enumerate(checks))))

    def assert_checks_match(self, index, pattern, field):
        """ Check if pattern verified in field(property) name of the check with index in
        check list

        @verified

        :param index: index number of checks list
        :type index: int
        :param pattern: pattern to verify is in the check
        :type pattern: str
        :param field: name of the field (property) of the check
        :type field: str
        :return: None
        """
        regex = re.compile(pattern)
        checks = sorted(self.schedulers['scheduler-master'].sched.checks.values(),
                        key=lambda x: x.creation_time)
        mycheck = checks[index]
        self.assertTrue(regex.search(getattr(mycheck, field)),
                        "Not found a matching pattern in checks:\nindex=%s field=%s pattern=%r\n"
                        "check_line=creation: %s, is_a: %s, "
                        "type: %s, status: %s, planned: %s, " "command: %s" % (
                            index, field, pattern, mycheck.creation_time, mycheck.is_a,
                            mycheck.type, mycheck.status, mycheck.t_to_go, mycheck.command))

    def _any_check_match(self, pattern, field, assert_not):
        """ Search if any check matches the requested pattern

        @verified
        :param pattern:
        :param field to search with pattern:
        :param assert_not:
        :return:
        """
        regex = re.compile(pattern)
        checks = sorted(self.schedulers['scheduler-master'].sched.checks.values(),
                        key=lambda x: x.creation_time)
        for check in checks:
            if re.search(regex, getattr(check, field)):
                self.assertTrue(not assert_not,
                                "Found check:\nfield=%s pattern=%r\n"
                                "check_line=creation: %s, is_a: %s, type: %s, status: %s, "
                                "planned: %s, command: %s" % (
                                    field, pattern, check.creation_time, check.is_a,
                                    check.type, check.status, check.t_to_go, check.command)
                                )
                return
        self.assertTrue(assert_not,
                        "No matching check found:\n"
                        "pattern = %r\n" "checks = %r" % (pattern, checks))

    def assert_any_check_match(self, pattern, field):
        """ Assert if any check matches the pattern

        @verified
        :param pattern:
        :param field to search with pattern:
        :return:
        """
        self._any_check_match(pattern, field, assert_not=False)

    def assert_no_check_match(self, pattern, field):
        """ Assert if no check matches the pattern

        @verified
        :param pattern:
        :param field to search with pattern:
        :return:
        """
        self._any_check_match(pattern, field, assert_not=True)

    def _any_log_match(self, pattern, assert_not):
        """ Search if any log in the Arbiter logs matches the requested pattern
        If 'scheduler' is True, then uses the scheduler's broks list.

        @verified
        :param pattern:
        :param assert_not:
        :return:
        """
        regex = re.compile(pattern)
        collector_h = [hand for hand in self.logger.handlers
                       if isinstance(hand, CollectorHandler)][0]
        for log in collector_h.collector:
            if re.search(regex, log):
                self.assertTrue(not assert_not,
                                "Found matching log line:\n"
                                "pattern = %r\nbrok log = %r" % (pattern, log))
                return
        self.assertTrue(assert_not,
                        "No matching log line found:\n"
                        "pattern = %r\n" "logs broks = %r" % (pattern, collector_h.collector))

    def assert_any_log_match(self, pattern):
        """ Assert if any log (Arbiter or Scheduler if True) matches the pattern

        @verified
        :param pattern:
        :param scheduler:
        :return:
        """
        self._any_log_match(pattern, assert_not=False)

    def assert_no_log_match(self, pattern):
        """ Assert if no log (Arbiter or Scheduler if True) matches the pattern

        @verified
        :param pattern:
        :param scheduler:
        :return:
        """
        self._any_log_match(pattern, assert_not=True)

    def _any_brok_match(self, pattern, level, assert_not):
        """ Search if any brok message in the Scheduler broks matches the requested pattern and
        requested level

        @verified
        :param pattern:
        :param assert_not:
        :return:
        """
        regex = re.compile(pattern)

        monitoring_logs = []
        for brok in self._sched.brokers['broker-master']['broks'].itervalues():
            if brok.type == 'monitoring_log':
                data = unserialize(brok.data)
                monitoring_logs.append((data['level'], data['message']))
                if re.search(regex, data['message']) and \
                        (level is None or data['level'] == level):
                    self.assertTrue(not assert_not,
                                    "Found matching brok:\n"
                                    "pattern = %r\nbrok message = %r" % (pattern,
                                                                         data['message']))
                    return

        self.assertTrue(assert_not,
                        "No matching brok found:\n"
                        "pattern = %r\n" "brok message = %r" % (pattern, monitoring_logs))

    def assert_any_brok_match(self, pattern, level=None):
        """ Search if any brok message in the Scheduler broks matches the requested pattern and
        requested level

        @verified
        :param pattern:
        :param scheduler:
        :return:
        """
        self._any_brok_match(pattern, level, assert_not=False)

    def assert_no_brok_match(self, pattern, level=None):
        """ Search if no brok message in the Scheduler broks matches the requested pattern and
        requested level

        @verified
        :param pattern:
        :param scheduler:
        :return:
        """
        self._any_brok_match(pattern, level, assert_not=True)

    def get_log_match(self, pattern):
        """Return the list of collected log lines matching the pattern."""
        regex = re.compile(pattern)
        res = []
        collector_h = [hand for hand in self.logger.handlers
                       if isinstance(hand, CollectorHandler)][0]
        for log in collector_h.collector:
            if re.search(regex, log):
                res.append(log)
        return res

    def print_header(self):
        """Print a banner with the current test identifier."""
        print "\n" + "#" * 80 + "\n" + "#" + " " * 78 + "#"
        print "#" + string.center(self.id(), 78) + "#"
        print "#" + " " * 78 + "#\n" + "#" * 80 + "\n"

    def xtest_conf_is_correct(self):
        # Disabled test (x prefix): check the loaded configuration is correct
        self.print_header()
        self.assertTrue(self.conf.conf_is_correct)

    def show_configuration_logs(self):
        """ Prints the configuration logs

        @verified
        :return:
        """
        print("Configuration warnings:")
        for msg in self.configuration_warnings:
            print(" - %s" % msg)
        print("Configuration errors:")
        for msg in self.configuration_errors:
            print(" - %s" % msg)

    def _any_cfg_log_match(self, pattern, assert_not):
        """ Search a pattern in configuration log (warning and error)

        @verified
        :param pattern:
        :return:
        """
        regex = re.compile(pattern)

        cfg_logs = self.configuration_warnings + self.configuration_errors

        for log in cfg_logs:
            if re.search(regex, log):
                self.assertTrue(not assert_not,
                                "Found matching log line:\n"
                                "pattern = %r\nlog = %r" % (pattern, log))
                return

        self.assertTrue(assert_not, "No matching log line found:\n"
                                    "pattern = %r\n" "logs = %r" % (pattern, cfg_logs))

    def assert_any_cfg_log_match(self, pattern):
        """ Assert if any configuration log matches the pattern

        @verified
        :param pattern:
        :return:
        """
        self._any_cfg_log_match(pattern, assert_not=False)

    def assert_no_cfg_log_match(self, pattern):
        """ Assert if no configuration log matches the pattern

        @verified
        :param pattern:
        :return:
        """
        self._any_cfg_log_match(pattern, assert_not=True)
def test_module_host_get(self): """Test the module /host API - host creation and get information :return: """ # Create an Alignak module mod = Module({ 'module_alias': 'web-services', 'module_types': 'web-services', 'python_name': 'alignak_module_ws', # Alignak backend 'alignak_backend': 'http://127.0.0.1:5000', 'username': '******', 'password': '******', # Do not set a timestamp in the built external commands 'set_timestamp': '0', 'give_result': '1', 'give_feedback': '1', # Set Arbiter address as empty to not poll the Arbiter else the test will fail! 'alignak_host': '', 'alignak_port': 7770, # Set module to listen on all interfaces 'host': '0.0.0.0', 'port': 8888, # Allow host/service creation 'allow_host_creation': '1', 'allow_service_creation': '1', # Errors for unknown host/service 'ignore_unknown_host': '0', 'ignore_unknown_service': '0', }) # Create a receiver daemon args = {'env_file': '', 'daemon_name': 'receiver-master'} self._receiver_daemon = Receiver(**args) # Create the modules manager for the daemon self.modulemanager = ModulesManager(self._receiver_daemon) # Load an initialize the modules: # - load python module # - get module properties and instances self.modulemanager.load_and_init([mod]) my_module = self.modulemanager.instances[0] # Clear logs self.clear_logs() # Start external modules self.modulemanager.start_external_instances() # Starting external module logs self.assert_log_match("Trying to initialize module: web-services", 0) self.assert_log_match("Starting external module web-services", 1) self.assert_log_match( "Starting external process for module web-services", 2) self.assert_log_match("web-services is now started", 3) # Check alive self.assertIsNotNone(my_module.process) self.assertTrue(my_module.process.is_alive()) time.sleep(1) # Do not allow GET request on /host - not yet authorized! 
response = requests.get(self.ws_endpoint + '/host') self.assertEqual(response.status_code, 401) session = requests.Session() # Login with username/password (real backend login) headers = {'Content-Type': 'application/json'} params = {'username': '******', 'password': '******'} response = session.post(self.ws_endpoint + '/login', json=params, headers=headers) assert response.status_code == 200 resp = response.json() # ----- # Get a non-existing host - 1st: use parameters in the request response = session.get(self.ws_endpoint + '/host', auth=self.auth, params={'name': 'new_host_2'}) result = response.json() self.assertEqual( result, { '_status': 'ERR', '_result': [], '_issues': ["Requested host 'new_host_2' does not exist"] }) # Get a non-existing host - 2nd: use host name in the URI response = session.get(self.ws_endpoint + '/host/new_host_2', auth=self.auth) result = response.json() self.assertEqual( result, { '_status': 'ERR', '_result': [], '_issues': ["Requested host 'new_host_2' does not exist"] }) # ----- # Request to create an host - no provided data (default) headers = {'Content-Type': 'application/json'} data = { "name": "new_host_0", } self.assertEqual(my_module.received_commands, 0) response = session.patch(self.ws_endpoint + '/host', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual( result, { '_status': 'OK', '_result': [ 'new_host_0 is alive :)', "Requested host 'new_host_0' does not exist.", "Requested host 'new_host_0' created." 
], '_feedback': { 'name': 'new_host_0' } }) # Host created with default check_command and in default user realm # ----- # Get new host to confirm creation - 1st: use parameters in the request response = session.get(self.ws_endpoint + '/host', auth=self.auth, params={'name': 'new_host_0'}) result = response.json() self.assertEqual(result['_status'], 'OK') self.assertIsNot(result['_result'], {}) self.assertEqual(result['_result'][0]['name'], 'new_host_0') # Get new host to confirm creation - 2nd: use host name in the URI response = session.get(self.ws_endpoint + '/host/new_host_0', auth=self.auth) result = response.json() from pprint import pprint pprint(result['_result']) # [{u'2d_coords': u'', # u'3d_coords': u'', # u'_created': u'Thu, 01 Jun 2017 10:58:30 GMT', # u'_etag': u'691c3f4a7cc8996c1d047932421759c020d00857', # u'_id': u'592ff35606fd4b7eec395625', # u'_is_template': False, # u'_links': {u'self': {u'href': u'host/592ff35606fd4b7eec395625', # u'title': u'Host'}}, # u'_overall_state_id': 3, # u'_realm': {u'_all_children': [], # u'_children': [], # u'_created': u'Thu, 01 Jun 2017 10:58:24 GMT', # u'_etag': u'26b3830c017b4fca8553365246f21267aece46a7', # u'_id': u'592ff35006fd4b7eec3955eb', # u'_level': 0, # u'_parent': None, # u'_tree_parents': [], # u'_updated': u'Thu, 01 Jun 2017 10:58:27 GMT', # u'alias': u'', # u'default': True, # u'definition_order': 100, # u'global_critical_threshold': 5, # u'global_warning_threshold': 3, # u'hosts_critical_threshold': 5, # u'hosts_warning_threshold': 3, # u'imported_from': u'unknown', # u'name': u'All', # u'notes': u'', # u'services_critical_threshold': 5, # u'services_warning_threshold': 3}, # u'_sub_realm': True, # u'_template_fields': {}, # u'_templates': [], # u'_templates_with_services': True, # u'_updated': u'Thu, 01 Jun 2017 10:58:30 GMT', # u'action_url': u'', # u'active_checks_enabled': True, # u'address': u'', # u'address6': u'', # u'alias': u'', # u'business_impact': 2, # u'business_impact_modulations': [], # 
u'business_rule_downtime_as_ack': False, # u'business_rule_host_notification_options': [u'd', u'u', u'r', u'f', u's'], # u'business_rule_output_template': u'', # u'business_rule_service_notification_options': [u'w', # u'u', # u'c', # u'r', # u'f', # u's'], # u'business_rule_smart_notifications': False, # u'check_command': {u'_created': u'Thu, 01 Jun 2017 10:58:24 GMT', # u'_etag': u'356d02479ca7dbebe85e22b9b43e95dc9d5d037c', # u'_id': u'592ff35006fd4b7eec3955f1', # u'_realm': u'592ff35006fd4b7eec3955eb', # u'_sub_realm': True, # u'_updated': u'Thu, 01 Jun 2017 10:58:24 GMT', # u'alias': u'Host/service is always UP/OK', # u'command_line': u'_internal_host_up', # u'definition_order': 100, # u'enable_environment_macros': False, # u'imported_from': u'unknown', # u'module_type': u'fork', # u'name': u'_internal_host_up', # u'notes': u'', # u'poller_tag': u'', # u'reactionner_tag': u'', # u'timeout': -1}, # u'check_command_args': u'', # u'check_freshness': False, # u'check_interval': 5, # u'checkmodulations': [], # u'custom_views': [], # u'customs': {}, # u'definition_order': 100, # u'display_name': u'', # u'escalations': [], # u'event_handler': None, # u'event_handler_args': u'', # u'event_handler_enabled': False, # u'failure_prediction_enabled': False, # u'first_notification_delay': 0, # u'flap_detection_enabled': True, # u'flap_detection_options': [u'o', u'd', u'x'], # u'freshness_state': u'x', # u'freshness_threshold': 0, # u'high_flap_threshold': 50, # u'icon_image': u'', # u'icon_image_alt': u'', # u'icon_set': u'', # u'imported_from': u'unknown', # u'initial_state': u'x', # u'labels': [], # u'location': {u'coordinates': [48.858293, 2.294601], u'type': u'Point'}, # u'low_flap_threshold': 25, # u'ls_acknowledged': False, # u'ls_acknowledgement_type': 1, # u'ls_attempt': 0, # u'ls_current_attempt': 0, # u'ls_downtimed': False, # u'ls_execution_time': 0.0, # u'ls_grafana': False, # u'ls_grafana_panelid': 0, # u'ls_impact': False, # u'ls_last_check': 0, # 
u'ls_last_hard_state_changed': 0, # u'ls_last_notification': 0, # u'ls_last_state': u'OK', # u'ls_last_state_changed': 0, # u'ls_last_state_type': u'HARD', # u'ls_last_time_down': 0, # u'ls_last_time_unknown': 0, # u'ls_last_time_unreachable': 0, # u'ls_last_time_up': 0, # u'ls_latency': 0.0, # u'ls_long_output': u'', # u'ls_max_attempts': 0, # u'ls_next_check': 0, # u'ls_output': u'', # u'ls_passive_check': False, # u'ls_perf_data': u'', # u'ls_state': u'UNREACHABLE', # u'ls_state_id': 3, # u'ls_state_type': u'HARD', # u'macromodulations': [], # u'max_check_attempts': 1, # u'name': u'new_host_0', # u'notes': u'', # u'notes_url': u'', # u'notification_interval': 60, # u'notification_options': [u'd', u'x', u'r', u'f', u's'], # u'notifications_enabled': True, # u'obsess_over_host': False, # u'parents': [], # u'passive_checks_enabled': True, # u'poller_tag': u'', # u'process_perf_data': True, # u'reactionner_tag': u'', # u'resultmodulations': [], # u'retry_interval': 0, # u'service_excludes': [], # u'service_includes': [], # u'service_overrides': [], # u'snapshot_criteria': [u'd', u'x'], # u'snapshot_enabled': False, # u'snapshot_interval': 5, # u'stalking_options': [], # u'statusmap_image': u'', # u'tags': [], # u'time_to_orphanage': 300, # u'trending_policies': [], # u'trigger_broker_raise_enabled': False, # u'trigger_name': u'', # u'usergroups': [], # u'users': [], # u'vrml_image': u''}] # self.assertEqual(result['_status'], 'OK') self.assertIsNot(result['_result'], {}) self.assertEqual(result['_result'][0]['name'], 'new_host_0') # ----- # Logout response = session.get(self.ws_endpoint + '/logout') self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'OK') self.assertEqual(result['_result'], 'Logged out') self.modulemanager.stop_all()
def test_module_zzz_event(self): """Test the module /event endpoint :return: """ # Create an Alignak module mod = Module({ 'module_alias': 'web-services', 'module_types': 'web-services', 'python_name': 'alignak_module_ws', # Alignak backend 'alignak_backend': 'http://127.0.0.1:5000', 'username': '******', 'password': '******', # Set Arbiter address as empty to not poll the Arbiter else the test will fail! 'alignak_host': '', 'alignak_port': 7770, 'authorization': '1', }) # Create a receiver daemon args = {'env_file': '', 'daemon_name': 'receiver-master'} self._receiver_daemon = Receiver(**args) # Create the modules manager for the daemon self.modulemanager = ModulesManager(self._receiver_daemon) # Load an initialize the modules: # - load python module # - get module properties and instances self.modulemanager.load_and_init([mod]) my_module = self.modulemanager.instances[0] # Clear logs self.clear_logs() # Start external modules self.modulemanager.start_external_instances() # Starting external module logs self.assert_log_match("Trying to initialize module: web-services", 0) self.assert_log_match("Starting external module web-services", 1) self.assert_log_match("Starting external process for module web-services", 2) self.assert_log_match("web-services is now started", 3) # Check alive self.assertIsNotNone(my_module.process) self.assertTrue(my_module.process.is_alive()) time.sleep(1) # --- # Prepare the backend content... 
self.endpoint = 'http://127.0.0.1:5000' headers = {'Content-Type': 'application/json'} params = {'username': '******', 'password': '******'} # get token response = requests.post(self.endpoint + '/login', json=params, headers=headers) resp = response.json() self.token = resp['token'] self.auth = requests.auth.HTTPBasicAuth(self.token, '') # Get default realm response = requests.get(self.endpoint + '/realm', auth=self.auth) resp = response.json() self.realm_all = resp['_items'][0]['_id'] # --- # Do not allow GET request on /event - not yet authorized response = requests.get(self.ws_endpoint + '/event') self.assertEqual(response.status_code, 401) session = requests.Session() # Login with username/password (real backend login) headers = {'Content-Type': 'application/json'} params = {'username': '******', 'password': '******'} response = session.post(self.ws_endpoint + '/login', json=params, headers=headers) assert response.status_code == 200 resp = response.json() # Do not allow GET request on /event response = session.get(self.ws_endpoint + '/event') self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'ERR') self.assertEqual(result['_issues'], ['You must only POST on this endpoint.']) self.assertEqual(my_module.received_commands, 0) # You must have parameters when POSTing on /event headers = {'Content-Type': 'application/json'} data = {} response = session.post(self.ws_endpoint + '/event', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'ERR') self.assertEqual(result['_issues'], ['You must POST parameters on this endpoint.']) self.assertEqual(my_module.received_commands, 0) # Notify an host event - missing host or service headers = {'Content-Type': 'application/json'} data = { "fake": "" } self.assertEqual(my_module.received_commands, 0) response = session.post(self.ws_endpoint + '/event', json=data, headers=headers) 
self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result, {'_status': 'ERR', '_issues': ['Missing host and/or service parameter.']}) # Notify an host event - missing comment headers = {'Content-Type': 'application/json'} data = { "host": "test_host", } response = session.post(self.ws_endpoint + '/event', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result, {'_status': 'ERR', '_issues': ['Missing comment. If you do not have any comment, ' 'do not comment ;)']}) # Notify an host event - default author headers = {'Content-Type': 'application/json'} data = { "host": "test_host", "comment": "My comment" } response = session.post(self.ws_endpoint + '/event', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result, {'_status': 'OK', '_result': ['ADD_HOST_COMMENT;test_host;1;' 'Alignak WS;My comment']}) # Notify an host event - default author and timestamp headers = {'Content-Type': 'application/json'} data = { "timestamp": 1234567890, "host": "test_host", "author": "Me", "comment": "My comment" } response = session.post(self.ws_endpoint + '/event', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result, {'_status': 'OK', '_result': ['[1234567890] ADD_HOST_COMMENT;test_host;1;' 'Me;My comment']}) # Notify a service event - default author headers = {'Content-Type': 'application/json'} data = { "host": "test_host", "service": "test_service", "comment": "My comment" } response = session.post(self.ws_endpoint + '/event', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result, {'_status': 'OK', '_result': ['ADD_SVC_COMMENT;test_host;test_service;1;' 'Alignak WS;My comment']}) # Notify a service event - default author and timestamp headers = {'Content-Type': 'application/json'} data 
= { "timestamp": 1234567890, "host": "test_host", "service": "test_service", "author": "Me", "comment": "My comment" } response = session.post(self.ws_endpoint + '/event', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result, {'_status': 'OK', '_result': ['[1234567890] ADD_SVC_COMMENT;test_host;test_service;' '1;Me;My comment']}) # Get history to confirm that backend is ready # --- response = session.get(self.endpoint + '/history', auth=self.auth, params={"sort": "-_id", "max_results": 25, "page": 1}) resp = response.json() print(("Response: %s" % resp)) for item in resp['_items']: assert item['type'] in ['webui.comment'] # Got 4 notified events, so we get 4 comments in the backend self.assertEqual(len(resp['_items']), 4) # --- # Logout response = session.get(self.ws_endpoint + '/logout') self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'OK') self.assertEqual(result['_result'], 'Logged out') self.modulemanager.stop_all()
def test_module_zzz_command_unauthorized(self): """ Test the WS /command endpoint - unauthorized access mode :return: """ # Create an Alignak module mod = Module({ 'module_alias': 'web-services', 'module_types': 'web-services', 'python_name': 'alignak_module_ws', # Alignak backend - not configured 'alignak_backend': '', 'username': '', 'password': '', # Set Arbiter address as empty to not poll the Arbiter else the test will fail! 'alignak_host': '', 'alignak_port': 7770, # Disable authorization 'authorization': '0' }) # Create a receiver daemon args = {'env_file': '', 'daemon_name': 'receiver-master'} self._receiver_daemon = Receiver(**args) # Create the modules manager for the daemon self.modulemanager = ModulesManager(self._receiver_daemon) # Load an initialize the modules: # - load python module # - get module properties and instances self.modulemanager.load_and_init([mod]) my_module = self.modulemanager.instances[0] # Clear logs self.clear_logs() # Start external modules self.modulemanager.start_external_instances() # Starting external module logs self.assert_log_match("Trying to initialize module: web-services", 0) self.assert_log_match("Starting external module web-services", 1) self.assert_log_match( "Starting external process for module web-services", 2) self.assert_log_match("web-services is now started", 3) # Check alive self.assertIsNotNone(my_module.process) self.assertTrue(my_module.process.is_alive()) time.sleep(1) session = requests.Session() # You must have parameters when POSTing on /command headers = {'Content-Type': 'application/json'} data = {} response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'ERR') self.assertEqual(result['_error'], 'You must POST parameters on this endpoint.') self.assertEqual(my_module.received_commands, 0) # You must have a command parameter when POSTing on /command headers = 
{'Content-Type': 'application/json'} data = { # "command": "Command", "element": "test_host", "parameters": "abc;1" } self.assertEqual(my_module.received_commands, 0) response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'ERR') # Result error message self.assertEqual(result['_error'], 'Missing command parameter') # Request to execute an external command headers = {'Content-Type': 'application/json'} data = { "command": "Command", "element": "test_host", "parameters": "abc;1" } self.assertEqual(my_module.received_commands, 0) response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(my_module.received_commands, 1) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'OK') # Result is uppercase command, parameters are ordered self.assertEqual(result['_command'], 'COMMAND;test_host;abc;1') # Request to execute an external command with timestamp - bad value headers = {'Content-Type': 'application/json'} data = { "command": "Command", "timestamp": "text", "element": "test_host", "parameters": "abc;1" } response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result, { '_status': 'ERR', '_error': 'Timestamp must be an integer value' }) self.assertEqual(my_module.received_commands, 1) # Request to execute an external command with timestamp headers = {'Content-Type': 'application/json'} data = { "command": "command_command", "timestamp": "1234", "element": "test_host;test_service", "parameters": "1;abc;2" } response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(my_module.received_commands, 2) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 
'OK') # Result is uppercase command, parameters are ordered self.assertEqual( result['_command'], '[1234] COMMAND_COMMAND;test_host;test_service;1;abc;2') # Request to execute an external command headers = {'Content-Type': 'application/json'} data = { "command": "command_command", "element": "test_host;test_service", "parameters": "1;abc;2" } response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(my_module.received_commands, 3) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'OK') # Result is uppercase command, parameters are ordered self.assertEqual(result['_command'], 'COMMAND_COMMAND;test_host;test_service;1;abc;2') # Request to execute an external command headers = {'Content-Type': 'application/json'} data = { "command": "command_command", "element": "test_host/test_service", # Accept / as an host/service separator "parameters": "1;abc;2" } response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(my_module.received_commands, 4) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'OK') # Result is uppercase command, parameters are ordered self.assertEqual(result['_command'], 'COMMAND_COMMAND;test_host;test_service;1;abc;2') # Request to execute an external command (Alignak modern syntax) headers = {'Content-Type': 'application/json'} data = { "command": "command_command", "host": "test_host", "service": "test_service", "parameters": "1;abc;2" } response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(my_module.received_commands, 5) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'OK') # Result is uppercase command, parameters are ordered self.assertEqual(result['_command'], 'COMMAND_COMMAND;test_host;test_service;1;abc;2') # Request to execute an external command 
(Alignak modern syntax) headers = {'Content-Type': 'application/json'} data = { "command": "command_command", "host": "test_host", "service": "test_service", "user": "******", "parameters": "1;abc;2" } response = session.post(self.ws_endpoint + '/command', json=data, headers=headers) self.assertEqual(my_module.received_commands, 6) self.assertEqual(response.status_code, 200) result = response.json() self.assertEqual(result['_status'], 'OK') # Result is uppercase command, parameters are ordered self.assertEqual( result['_command'], 'COMMAND_COMMAND;test_host;test_service;test_user;1;abc;2') self.modulemanager.stop_all()
def setUp(self): """Create resources in backend :return: None """ super(TestModuleWsHostgroup, self).setUp() # # Obliged to call to get a self.logger... # self.setup_with_file('cfg/cfg_default.cfg') # self.assertTrue(self.conf_is_correct) # # # ----- # # Provide parameters - logger configuration file (exists) # # ----- # # Clear logs # self.clear_logs() # Create an Alignak module mod = Module({ 'module_alias': 'web-services', 'module_types': 'web-services', 'python_name': 'alignak_module_ws', # Alignak backend 'alignak_backend': 'http://127.0.0.1:5000', 'username': '******', 'password': '******', # Do not set a timestamp in the built external commands 'set_timestamp': '0', # Set Arbiter address as empty to not poll the Arbiter else the test will fail! 'alignak_host': '', 'alignak_port': 7770, # Allow host/service creation 'allow_host_creation': '1', 'allow_service_creation': '1' }) # Create a receiver daemon args = {'env_file': '', 'daemon_name': 'receiver-master'} self._receiver_daemon = Receiver(**args) # Create the modules manager for a daemon type self.modulemanager = ModulesManager(self._receiver_daemon) # Load an initialize the modules: # - load python module # - get module properties and instances self.modulemanager.load_and_init([mod]) self.my_module = self.modulemanager.instances[0] # Clear logs self.clear_logs() # Start external modules self.modulemanager.start_external_instances() # Starting external module logs self.assert_log_match("Trying to initialize module: web-services", 0) self.assert_log_match("Starting external module web-services", 1) self.assert_log_match( "Starting external process for module web-services", 2) self.assert_log_match("web-services is now started", 3) # Check alive self.assertIsNotNone(self.my_module.process) self.assertTrue(self.my_module.process.is_alive()) time.sleep(1)
    def _get_history(self, username, password):
        """Exercise the module WS ``/alignak_logs`` endpoint.

        Starts a web-services module, feeds the Alignak backend with two
        check results and three history events, then queries
        ``/alignak_logs`` through the WS with the provided credentials and
        checks authorization, search filtering and pagination behaviour.

        :param username: login used for the WS session
        :param password: password used for the WS session
        :return: None
        """
        # Create an Alignak module
        mod = Module({
            'module_alias': 'web-services',
            'module_types': 'web-services',
            'python_name': 'alignak_module_ws',
            # Set Arbiter address as empty to not poll the Arbiter else the test will fail!
            'alignak_host': '',
            'alignak_port': 7770,
            # Alignak backend URL
            'alignak_backend': 'http://127.0.0.1:5000',
            'username': '******',
            'password': '******',
            # Set module to listen on all interfaces
            'host': '0.0.0.0',
            'port': 8888,
            # Activate CherryPy file logs
            'log_access': '/tmp/alignak-module-ws-access.log',
            'log_error': '/tmp/alignak-module-ws-error.log',
            'log_level': 'DEBUG'
        })

        # Create a receiver daemon
        args = {'env_file': '', 'daemon_name': 'receiver-master'}
        self._receiver_daemon = Receiver(**args)

        # Create the modules manager for the daemon
        self.modulemanager = ModulesManager(self._receiver_daemon)

        # Load and initialize the modules:
        #  - load python module
        #  - get module properties and instances
        self.modulemanager.load_and_init([mod])

        my_module = self.modulemanager.instances[0]

        # Clear logs
        self.clear_logs()

        # Start external modules
        self.modulemanager.start_external_instances()

        # Starting external module logs
        self.assert_log_match("Trying to initialize module: web-services", 0)
        self.assert_log_match("Starting external module web-services", 1)
        self.assert_log_match(
            "Starting external process for module web-services", 2)
        self.assert_log_match("web-services is now started", 3)

        time.sleep(1)

        # Check alive
        self.assertIsNotNone(my_module.process)
        self.show_logs()
        print("Instances: %s" % self.modulemanager.instances)
        print("My module: %s" % my_module.__dict__)
        # This test is raising an error whereas the module is really living!
        # self.assertTrue(my_module.process.is_alive())

        time.sleep(1)

        # ---
        # Prepare the backend content...
        self.endpoint = 'http://127.0.0.1:5000'

        headers = {'Content-Type': 'application/json'}
        params = {'username': '******', 'password': '******'}
        # Get a token (direct backend login, not through the WS)
        response = requests.post(self.endpoint + '/login', json=params, headers=headers)
        resp = response.json()
        self.token = resp['token']
        self.auth = requests.auth.HTTPBasicAuth(self.token, '')

        # Get default realm
        response = requests.get(self.endpoint + '/realm', auth=self.auth)
        resp = response.json()
        self.realm_all = resp['_items'][0]['_id']
        # ---

        # -------------------------------------------
        # Add a check result for a host
        # NOTE(review): self.rh / self.rs are presumably the backend host /
        # service resources created by the base class set-up — confirm there.
        data = {
            "last_check": 1496332753,
            "host": self.rh[0]['_id'],
            "service": None,
            'acknowledged': False,
            'state_id': 0,
            'state': 'UP',
            'state_type': 'HARD',
            'last_state_id': 0,
            'last_state': 'UP',
            'last_state_type': 'HARD',
            'state_changed': False,
            'latency': 0,
            'execution_time': 0.12,
            'output': 'Check output',
            'long_output': 'Check long_output',
            'perf_data': 'perf_data',
            "_realm": self.realm_all
        }
        response = requests.post(self.endpoint + '/logcheckresult', json=data,
                                 headers=headers, auth=self.auth)
        resp = response.json()
        self.assertEqual(resp['_status'], 'OK')

        # -------------------------------------------
        # Add a check result for a service
        data = {
            "last_check": 1496332754,
            "host": self.rh[0]['_id'],
            "service": self.rs[0]['_id'],
            'acknowledged': False,
            'state_id': 0,
            'state': 'UP',
            'state_type': 'HARD',
            'last_state_id': 0,
            'last_state': 'UP',
            'last_state_type': 'HARD',
            'state_changed': False,
            'latency': 0,
            'execution_time': 0.12,
            'output': 'Check output',
            'long_output': 'Check long_output',
            'perf_data': 'perf_data',
            "_realm": self.realm_all
        }
        response = requests.post(self.endpoint + '/logcheckresult', json=data,
                                 headers=headers, auth=self.auth)
        resp = response.json()
        self.assertEqual(resp['_status'], 'OK')

        # Add a history event
        # (the sleeps keep the three events in distinct creation order)
        data = {
            "host_name": "chazay",
            "service_name": "Processus",
            "user_name": "Alignak",
            "type": "check.result",
            "message": "OK[HARD] (False,False): All is ok",
            "_realm": self.realm_all,
            "_sub_realm": True
        }
        time.sleep(1)
        requests.post(self.endpoint + '/history', json=data, headers=headers, auth=self.auth)

        # Add a history event
        time.sleep(1)
        data = {
            "host_name": "denice",
            "service_name": "Zombies",
            "user_name": "Alignak",
            "type": "check.result",
            "message": "OK[HARD] (False,False): All is ok",
            "_realm": self.realm_all,
            "_sub_realm": True
        }
        requests.post(self.endpoint + '/history', json=data, headers=headers, auth=self.auth)

        # Add a history event
        time.sleep(1)
        data = {
            "host_name": "denice",
            "user_name": "Me",
            "type": "monitoring.alert",
            "message": "HOST ALERT ....",
            "_realm": self.realm_all,
            "_sub_realm": True
        }
        requests.post(self.endpoint + '/history', json=data, headers=headers, auth=self.auth)
        # ---

        # ---
        # Get history to confirm that backend is ready
        # ---
        response = requests.get(self.endpoint + '/history', auth=self.auth, params={
            "sort": "-_id",
            "max_results": 25,
            "page": 1,
            'embedded': json.dumps({"logcheckresult": 1})
        })
        resp = response.json()
        pprint(resp['_items'])
        # 2 log check results + 3 history events
        self.assertEqual(len(resp['_items']), 5)

        # Backend real history
        # The commented fields are the ones existing in the backend but filtered by the WS
        backend_real_history = [
            {
                '_created': 'Thu, 01 Jun 2017 15:59:16 GMT',
                # u'_etag': u'9f07c7285b37bb3d336a96ede3d3fd2a774c4c4c',
                '_id': '593039d406fd4b3bf0e27d9f',
                # u'_links': {u'self': {u'href': u'history/593039d406fd4b3bf0e27d9f',
                #                       u'title': u'History'}},
                # u'_realm': u'593039cc06fd4b3bf0e27d88',
                # u'_sub_realm': True,
                # u'_updated': u'Thu, 01 Jun 2017 15:59:16 GMT',
                'host_name': 'denice',
                'message': 'HOST ALERT ....',
                'type': 'monitoring.alert',
                'user_name': 'Me'
            },
            {
                '_created': 'Thu, 01 Jun 2017 15:59:15 GMT',
                # u'_etag': u'24cd486a1a28859a0177fbe15d1ead61f78f7b2c',
                '_id': '593039d306fd4b3bf0e27d9e',
                # u'_links': {u'self': {u'href': u'history/593039d306fd4b3bf0e27d9e',
                #                       u'title': u'History'}},
                # u'_realm': u'593039cc06fd4b3bf0e27d88',
                # u'_sub_realm': True,
                # u'_updated': u'Thu, 01 Jun 2017 15:59:15 GMT',
                'host_name': 'denice',
                'message': 'OK[HARD] (False,False): All is ok',
                'service_name': 'Zombies',
                'type': 'check.result',
                'user_name': 'Alignak'
            },
            {
                '_created': 'Thu, 01 Jun 2017 15:59:14 GMT',
                # u'_etag': u'4c4ee43a4fac0b91dcfddb011619007dedb1cd95',
                '_id': '593039d206fd4b3bf0e27d9d',
                # u'_links': {u'self': {u'href': u'history/593039d206fd4b3bf0e27d9d',
                #                       u'title': u'History'}},
                # u'_realm': u'593039cc06fd4b3bf0e27d88',
                # u'_sub_realm': True,
                # u'_updated': u'Thu, 01 Jun 2017 15:59:14 GMT',
                'host_name': 'chazay',
                'message': 'OK[HARD] (False,False): All is ok',
                'service_name': 'Processus',
                'type': 'check.result',
                'user_name': 'Alignak'
            },
            {
                '_created': 'Thu, 01 Jun 2017 15:59:13 GMT',
                # u'_etag': u'76dd35f575244848dd41f67ad3109cf6f1f9a33c',
                '_id': '593039d106fd4b3bf0e27d9c',
                # u'_links': {u'self': {u'href': u'history/593039d106fd4b3bf0e27d9c',
                #                       u'title': u'History'}},
                # u'_realm': u'593039cc06fd4b3bf0e27d88',
                # u'_sub_realm': True,
                # u'_updated': u'Thu, 01 Jun 2017 15:59:13 GMT',
                # u'host': u'593039cc06fd4b3bf0e27d90',
                'host_name': 'srv001',
                'logcheckresult': {
                    '_created': 'Thu, 01 Jun 2017 15:59:13 GMT',
                    # u'_etag': u'10a3935b1158fe4c8f62962a14b1050fef32df4b',
                    # u'_id': u'593039d106fd4b3bf0e27d9b',
                    # u'_realm': u'593039cc06fd4b3bf0e27d88',
                    # u'_sub_realm': True,
                    # u'_updated': u'Thu, 01 Jun 2017 15:59:13 GMT',
                    'acknowledged': False,
                    'acknowledgement_type': 1,
                    'downtimed': False,
                    'execution_time': 0.12,
                    # u'host': u'593039cc06fd4b3bf0e27d90',
                    # u'host_name': u'srv001',
                    'last_check': 1496332753,
                    'last_state': 'UP',
                    'last_state_changed': 0,
                    'last_state_id': 0,
                    'last_state_type': 'HARD',
                    'latency': 0.0,
                    'long_output': 'Check long_output',
                    'output': 'Check output',
                    'passive_check': False,
                    'perf_data': 'perf_data',
                    # u'service': u'593039cf06fd4b3bf0e27d98',
                    # u'service_name': u'ping',
                    'state': 'UP',
                    'state_changed': False,
                    'state_id': 0,
                    'state_type': 'HARD'
                },
                'message': 'UP[HARD] (False/False): Check output',
                # u'service': u'593039cf06fd4b3bf0e27d98',
                'service_name': 'ping',
                'type': 'check.result',
                # u'user': None,
                'user_name': 'Alignak'
            },
            {
                '_created': 'Thu, 01 Jun 2017 15:59:13 GMT',
                # u'_etag': u'c3cd29587ad328325dc48af677b3a36157361a84',
                '_id': '593039d106fd4b3bf0e27d9a',
                # u'_links': {u'self': {u'href': u'history/593039d106fd4b3bf0e27d9a',
                #                       u'title': u'History'}},
                # u'_realm': u'593039cc06fd4b3bf0e27d88',
                # u'_sub_realm': True,
                # u'_updated': u'Thu, 01 Jun 2017 15:59:13 GMT',
                # u'host': u'593039cc06fd4b3bf0e27d90',
                'host_name': 'srv001',
                'logcheckresult': {
                    '_created': 'Thu, 01 Jun 2017 15:59:13 GMT',
                    # u'_etag': u'0ea4c16f1e651a02772aa2bfa83070b47e7f6531',
                    # u'_id': u'593039d106fd4b3bf0e27d99',
                    # u'_realm': u'593039cc06fd4b3bf0e27d88',
                    # u'_sub_realm': True,
                    # u'_updated': u'Thu, 01 Jun 2017 15:59:13 GMT',
                    'acknowledged': False,
                    'acknowledgement_type': 1,
                    'downtimed': False,
                    'execution_time': 0.12,
                    # u'host': u'593039cc06fd4b3bf0e27d90',
                    # u'host_name': u'srv001',
                    'last_check': 1496332754,
                    'last_state': 'UP',
                    'last_state_changed': 0,
                    'last_state_id': 0,
                    'last_state_type': 'HARD',
                    'latency': 0.0,
                    'long_output': 'Check long_output',
                    'output': 'Check output',
                    'passive_check': False,
                    'perf_data': 'perf_data',
                    # u'service': None,
                    # u'service_name': u'',
                    'state': 'UP',
                    'state_changed': False,
                    'state_id': 0,
                    'state_type': 'HARD'
                },
                'message': 'UP[HARD] (False/False): Check output',
                # u'service': None,
                'service_name': '',
                'type': 'check.result',
                # u'user': None,
                'user_name': 'Alignak'
            }
        ]
        # ---

        # ---
        # # Directly call the module function
        # search = {
        #     'page': 1,
        #     'max_results': 25
        # }
        # result = my_module.getBackendHistory(search)
        # print(result)
        # print("Page: %d, got: %d items" % (search["page"], len(result['items'])))
        # for item in result['items']:
        #     print(item)
        # assert len(result['items']) == 5
        # ---

        # Do not allow GET request on /alignak_logs - not yet authorized!
        response = requests.get(self.ws_endpoint + '/alignak_logs')
        self.assertEqual(response.status_code, 401)

        session = requests.Session()

        # Login with username/password (real backend login)
        headers = {'Content-Type': 'application/json'}
        params = {'username': username, 'password': password}
        response = session.post(self.ws_endpoint + '/login', json=params, headers=headers)
        assert response.status_code == 200
        resp = response.json()

        # ---
        # Get the alignak default history
        response = session.get(self.ws_endpoint + '/alignak_logs')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        # Remove fields that will obviously be different!
        for item in result['items']:
            del (item['_id'])
            del (item['_created'])
            # if 'logcheckresult' in item:
            #     del (item['logcheckresult']['_created'])
        for item in backend_real_history:
            del (item['_id'])
            del (item['_created'])
            if 'logcheckresult' in item:
                del (item['logcheckresult']['_created'])
        self.assertEqual(len(result['items']), 5)
        # Too complex comparison!!!
        # self.assertEqual(backend_real_history, result['items'])
        # assert cmp(backend_real_history, result['items']) == 0
        # ---

        # ---
        # Get the alignak default history, filter to get only check.result
        response = session.get(self.ws_endpoint + '/alignak_logs?search=type:check.result')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 4)
        # ---

        # ---
        # Get the alignak default history, filter to get only for a user
        response = session.get(self.ws_endpoint + '/alignak_logs?search=user_name:Alignak')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 4)

        response = session.get(self.ws_endpoint + '/alignak_logs?search=user_name:Me')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 1)
        # ---

        # ---
        # Get the alignak default history, filter to get only for a host
        response = session.get(self.ws_endpoint + '/alignak_logs?search=host_name:chazay')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(len(result['items']), 1)

        # Implicit host_name
        response = session.get(self.ws_endpoint + '/alignak_logs?search=chazay')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(len(result['items']), 1)

        # Unknown search field
        response = session.get(self.ws_endpoint + '/alignak_logs?search=name:chazay')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        # All history items because name is not a known search field! So we get all items...
        self.assertEqual(len(result['items']), 5)

        # Some other hosts...
        response = session.get(self.ws_endpoint + '/alignak_logs?search=host_name:denice')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(len(result['items']), 2)

        response = session.get(self.ws_endpoint + '/alignak_logs?search=host_name:srv001')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(len(result['items']), 2)

        # Several hosts...
        response = session.get(
            self.ws_endpoint + '/alignak_logs?search=host_name:denice host_name:srv001')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(len(result['items']), 4)  # 2 for each host

        # Not a host...
        # TODO: looks that this criteria is not correctly implemented :(
        # response = session.get(self.ws_endpoint + '/alignak_logs?search=host_name:!denice')
        # self.assertEqual(response.status_code, 200)
        # result = response.json()
        # self.assertEqual(len(result['items']), 3)
        # ---

        # ---
        # Get the alignak default history, NOT for a host
        # todo: temporarily skipped
        # response = requests.get(self.ws_endpoint + '/alignak_logs?search=host_name:!Chazay')
        # self.assertEqual(response.status_code, 200)
        # result = response.json()
        # for item in result['items']:
        #     print(item)
        # self.assertEqual(len(result['items']), 2)
        # ---

        # ---
        # Get the alignak default history, only for a service
        response = session.get(self.ws_endpoint + '/alignak_logs?search=service_name:Processus')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 1)
        # ---

        # ---
        # Get the alignak default history, for a host and a service
        # todo: multi search query to be improved!
        # response = session.get(
        #     self.ws_endpoint + '/alignak_logs?search="host_name:chazay service_name=Processus"')
        # self.assertEqual(response.status_code, 200)
        # result = response.json()
        # for item in result['items']:
        #     print(item)
        # self.assertEqual(len(result['items']), 3)
        # ---

        # ---
        # Get the alignak default history, unknown event type
        response = session.get(self.ws_endpoint + '/alignak_logs?search=type:XXX')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 0)
        # ---

        # ---
        # Get the alignak default history, page count
        response = session.get(self.ws_endpoint + '/alignak_logs?start=0&count=1')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 1)

        response = session.get(self.ws_endpoint + '/alignak_logs?start=1&count=1')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 1)

        response = session.get(self.ws_endpoint + '/alignak_logs?start=2&count=1')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 1)

        response = session.get(self.ws_endpoint + '/alignak_logs?start=3&count=1')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 1)

        response = session.get(self.ws_endpoint + '/alignak_logs?start=4&count=1')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 1)

        # Over the limits !
        response = session.get(self.ws_endpoint + '/alignak_logs?start=5&count=1')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 0)

        response = session.get(self.ws_endpoint + '/alignak_logs?start=50&count=50')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        for item in result['items']:
            print(item)
        self.assertEqual(len(result['items']), 0)
        # ---

        # ---
        # Get the alignak history, page count greater than the number of items
        response = session.get(self.ws_endpoint + '/alignak_logs?start=1&count=25')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        pprint(result)
        self.assertEqual(len(result['items']), 5)  # Got 5 items
        self.assertEqual(result['_meta']['max_results'], 25)
        self.assertEqual(result['_meta']['page'], 1)
        self.assertEqual(result['_meta']['total'], 5)

        response = session.get(self.ws_endpoint + '/alignak_logs?start=0&count=50')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        pprint(result)
        self.assertEqual(len(result['items']), 5)  # Got 5 items
        self.assertEqual(result['_meta']['max_results'], 50)
        self.assertEqual(result['_meta']['page'], 1)
        self.assertEqual(result['_meta']['total'], 5)
        # ---

        # Logout
        response = session.get(self.ws_endpoint + '/logout')
        self.assertEqual(response.status_code, 200)
        result = response.json()
        self.assertEqual(result['_status'], 'OK')
        self.assertEqual(result['_result'], 'Logged out')