def _perform_tests(self):
    """Run all AC tests for all relevant sites and group the results.

    One worker process is spawned per site (the tests can involve slow
    remote calls, so they run in parallel).  Each worker reports its
    results as a ``repr()`` string through a shared queue; site failures
    are converted into a CRIT connectivity result instead of aborting
    the whole run.

    Returns a nested dict:
    ``category -> test_id -> {"test": {"title", "help"},
    "site_results": {site_id: ACResult}}``.
    """
    test_sites = self._analyze_sites()

    self._logger.debug("Executing tests for %d sites" % len(test_sites))
    results_by_site = {}

    # Results are fetched simultaneously from the remote sites
    result_queue = multiprocessing.JoinableQueue(
    )  # type: multiprocessing.Queue[Tuple[SiteId, str]]

    processes = []
    # Sentinel so the generic error handler below always has a site_id,
    # even if a failure happens before anything was read from the queue.
    site_id = SiteId("unknown_site")
    for site_id in test_sites:
        process = multiprocessing.Process(target=self._perform_tests_for_site,
                                          args=(site_id, result_queue))
        process.start()
        processes.append((site_id, process))

    # Collect results until the queue is drained AND all workers have
    # exited.  Checking emptiness before is_alive() avoids losing a
    # result that a worker enqueued right before terminating: the
    # previous "while any(p.is_alive())" condition could leave unread
    # items behind when all workers died between two polls.
    while True:
        try:
            site_id, results_data = result_queue.get_nowait()
            result_queue.task_done()
            result = ast.literal_eval(results_data)

            if result["state"] == 1:
                raise MKGeneralException(result["response"])

            if result["state"] == 0:
                test_results = []
                for result_data in result["response"]:
                    test_results.append(ACResult.from_repr(result_data))

                # Add general connectivity result
                ok_result = ACResultOK(_("No connectivity problems"))
                ok_result.from_test(ACTestConnectivity())
                ok_result.site_id = site_id
                test_results.append(ok_result)

                results_by_site[site_id] = test_results
            else:
                raise NotImplementedError()
        except six.moves.queue.Empty:
            if not any(p.is_alive() for _unused_site_id, p in processes):
                break  # queue is empty and no worker can add more results
            time.sleep(0.5)  # wait some time to prevent CPU hogs
        except Exception as e:
            # A failed or unparsable site becomes a CRIT connectivity
            # result so the remaining sites are still reported.
            crit_result = ACResultCRIT("%s" % e)
            crit_result.from_test(ACTestConnectivity())
            crit_result.site_id = site_id
            results_by_site[site_id] = [crit_result]
            logger.exception("error analyzing configuration for site %s", site_id)

    self._logger.debug("Got test results")

    # Group results by category in first instance and then by test
    results_by_category = {}  # type: Dict[str, Dict[str, Dict[str, Any]]]
    for site_id, results in results_by_site.items():
        for result in results:
            category_results = results_by_category.setdefault(result.category, {})
            test_results_by_site = category_results.setdefault(
                result.test_id, {
                    "site_results": {},
                    "test": {
                        "title": result.title,
                        "help": result.help,
                    }
                })
            test_results_by_site["site_results"][result.site_id] = result

    return results_by_category
def _perform_tests(self):
    """Run all AC tests for all relevant sites and group the results.

    One worker process is spawned per site (the tests can involve slow
    remote calls, so they run in parallel).  Each worker reports its
    results as a ``repr()`` string through a shared queue; a failing
    site is logged and shown to the user, and the remaining sites are
    still reported.

    Returns a nested dict:
    ``category -> test_id -> {"test": {"title", "help"},
    "site_results": {site_id: ACResult}}``.
    """
    test_sites = self._analyze_sites()

    self._logger.debug("Executing tests for %d sites" % len(test_sites))
    results_by_site = {}

    # Results are fetched simultaneously from the remote sites
    result_queue = multiprocessing.JoinableQueue()

    processes = []
    for site_id in test_sites:
        process = multiprocessing.Process(target=self._perform_tests_for_site,
                                          args=(site_id, result_queue))
        process.start()
        processes.append((site_id, process))

    # Collect results until the queue is drained AND all workers have
    # exited.  Checking emptiness before is_alive() avoids losing a
    # result that a worker enqueued right before terminating: the
    # previous "while any(p.is_alive())" condition could leave unread
    # items behind when all workers died between two polls.
    while True:
        try:
            site_id, results_data = result_queue.get_nowait()
            result_queue.task_done()
            result = ast.literal_eval(results_data)

            if result["state"] == 1:
                raise MKGeneralException(result["response"])
            elif result["state"] == 0:
                test_results = []
                for result_data in result["response"]:
                    test_results.append(ACResult.from_repr(result_data))
                results_by_site[site_id] = test_results
            else:
                raise NotImplementedError()
        except Queue.Empty:
            if not any(p.is_alive() for _unused_site_id, p in processes):
                break  # queue is empty and no worker can add more results
            time.sleep(0.5)  # wait some time to prevent CPU hogs
        except Exception as e:
            logger.exception("error analyzing configuration for site %s", site_id)
            html.show_error("%s: %s" % (site_id, e))

    self._logger.debug("Got test results")

    # Group results by category in first instance and then by test
    results_by_category = {}
    for site_id, results in results_by_site.items():
        for result in results:
            category_results = results_by_category.setdefault(result.category, {})
            test_results_by_site = category_results.setdefault(
                result.test_id, {
                    "site_results": {},
                    "test": {
                        "title": result.title,
                        "help": result.help,
                    }
                })
            test_results_by_site["site_results"][result.site_id] = result

    return results_by_category