def do_execute(self, diagnostics_parameters: DiagnosticsParameters,
               job_interface: BackgroundProcessInterface) -> None:
    job_interface.send_progress_update(_("Diagnostics dump started..."))
    # Debug output: raw and serialized diagnostics parameters.
    job_interface.send_progress_update(repr(diagnostics_parameters))
    job_interface.send_progress_update(repr(serialize_wato_parameters(diagnostics_parameters)))

    site = diagnostics_parameters["site"]
    timeout = html.request.request_timeout - 2
    result = check_mk_automation(site,
                                 "create-diagnostics-dump",
                                 args=serialize_wato_parameters(diagnostics_parameters),
                                 timeout=timeout,
                                 non_blocking_http=True)
    job_interface.send_progress_update(result["output"])

    if result["tarfile_created"]:
        tarfile_path = result["tarfile_path"]
        # Pass only the file name (not the full path) to the download page.
        download_url = html.makeuri_contextless(
            [("site", site), ("tarfile_name", Path(tarfile_path).name)],
            "download_diagnostics_dump.py")
        button = html.render_icon_button(download_url, _("Download"), "diagnostics_dump_file")

        job_interface.send_progress_update(_("Dump file: %s") % tarfile_path)
        job_interface.send_result_message(_("%s Dump file created successfully") % button)
    else:
        job_interface.send_result_message(_("Creating dump file failed"))

def do_sync(self, job_interface: background_job.BackgroundProcessInterface,
            add_to_changelog: bool, enforce_sync: bool, load_users_func: Callable,
            save_users_func: Callable) -> None:
    job_interface.send_progress_update(_("Synchronization started..."))
    if self._execute_sync_action(job_interface, add_to_changelog, enforce_sync,
                                 load_users_func, save_users_func):
        job_interface.send_result_message(_("The user synchronization completed successfully."))
    else:
        job_interface.send_exception(_("The user synchronization failed."))

def _update_index_background(
    change_action_name: str,
    job_interface: BackgroundProcessInterface,
) -> None:
    job_interface.send_progress_update(_("Updating of search index started"))
    if not IndexBuilder.index_is_built():
        job_interface.send_progress_update(_("Search index not found, re-building from scratch"))
        _build_index_background(job_interface)
        return
    IndexBuilder(match_item_generator_registry).build_changed_sub_indices(change_action_name)
    job_interface.send_result_message(_("Search index successfully updated"))

def _build_index_background(
    job_interface: BackgroundProcessInterface,
    n_attempts_redis_connection: int = 1,
    sleep_time: int = 5,
) -> None:
    n_attempts = 0
    job_interface.send_progress_update(_("Building of search index started"))
    while True:
        try:
            n_attempts += 1
            IndexBuilder(match_item_generator_registry).build_full_index()
            break
        except redis.ConnectionError:
            job_interface.send_progress_update(
                _("Connection attempt %d / %d to Redis failed") % (
                    n_attempts,
                    n_attempts_redis_connection,
                ))
            if n_attempts == n_attempts_redis_connection:
                job_interface.send_result_message(
                    _("Maximum number of allowed connection attempts reached, terminating"))
                raise
            job_interface.send_progress_update(_("Will wait for %d seconds and retry") % sleep_time)
            sleep(sleep_time)
    job_interface.send_result_message(_("Search index successfully built"))

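# A minimal, self-contained sketch of the bounded-retry pattern used by
# _build_index_background above. `flaky_call`, `max_attempts` and `sleep_time`
# are illustrative names, not part of the original code; only the standard
# library is assumed.
from time import sleep
from typing import Callable

def _retry_bounded(flaky_call: Callable[[], None], max_attempts: int, sleep_time: float) -> None:
    for attempt in range(1, max_attempts + 1):
        try:
            flaky_call()
            return
        except ConnectionError:
            # Re-raise once the attempt budget is exhausted, otherwise back off.
            if attempt == max_attempts:
                raise
            sleep(sleep_time)
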
def _execute_sync_action(self, job_interface: background_job.BackgroundProcessInterface,
                         add_to_changelog: bool, enforce_sync: bool, load_users_func: Callable,
                         save_users_func: Callable) -> bool:
    for connection_id, connection in active_connections():
        try:
            if not enforce_sync and not connection.sync_is_needed():
                continue

            job_interface.send_progress_update(
                _("[%s] Starting sync for connection") % connection_id)
            # Pass through the injected callbacks instead of silently ignoring
            # them in favor of the module-level load_users/save_users.
            connection.do_sync(add_to_changelog=add_to_changelog,
                               only_username=False,
                               load_users_func=load_users_func,
                               save_users_func=save_users_func)
            job_interface.send_progress_update(
                _("[%s] Finished sync for connection") % connection_id)
        except Exception as e:
            job_interface.send_exception(_("[%s] Exception: %s") % (connection_id, e))
            logger.error('Exception (%s, userdb_job): %s', connection_id, traceback.format_exc())

    job_interface.send_progress_update(_("Finalizing synchronization"))
    general_userdb_job()
    return True

def do_execute(
    self,
    diagnostics_parameters: DiagnosticsParameters,
    job_interface: BackgroundProcessInterface,
) -> None:
    job_interface.send_progress_update(_("Diagnostics dump started..."))

    site = diagnostics_parameters["site"]
    timeout = request.request_timeout - 2
    result = create_diagnostics_dump(
        site,
        serialize_wato_parameters(diagnostics_parameters),
        timeout,
    )
    job_interface.send_progress_update(result.output)

    if result.tarfile_created:
        tarfile_path = result.tarfile_path
        download_url = makeuri_contextless(
            request,
            [("site", site), ("tarfile_name", Path(tarfile_path).name)],
            filename="download_diagnostics_dump.py",
        )
        button = html.render_icon_button(download_url, _("Download"), "diagnostics_dump_file")

        job_interface.send_progress_update(_("Dump file: %s") % tarfile_path)
        job_interface.send_result_message(_("%s Retrieve created dump file") % button)
    else:
        job_interface.send_result_message(_("Creating dump file failed"))

def _update_and_store_index_background(
    change_action_name: str,
    job_interface: BackgroundProcessInterface,
) -> None:
    job_interface.send_progress_update(_("Updating of search index started"))

    index_builder = IndexBuilder(match_item_generator_registry)
    index_store = get_index_store()
    try:
        current_index = index_store.load_index(launch_rebuild_if_missing=False)
    except IndexNotFoundException:
        job_interface.send_progress_update(
            _("Search index file not found, re-building from scratch"))
        _build_and_store_index_background(job_interface)
        return

    current_index.update(index_builder.build_changed_sub_indices(change_action_name))
    index_store.store_index(current_index)
    job_interface.send_result_message(_("Search index successfully updated"))

def _build_and_store_index_background(job_interface: BackgroundProcessInterface) -> None:
    job_interface.send_progress_update(_("Building of search index started"))
    build_and_store_index()
    job_interface.send_result_message(_("Search index successfully built"))

def do_sync(self, job_interface: background_job.BackgroundProcessInterface) -> None:
    job_interface.send_progress_update(_("Synchronization started..."))
    self._execute_sync()
    job_interface.send_result_message(_("The synchronization finished."))

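# Hypothetical sketch of the _execute_sync helper called above; its real body
# is not shown in this snippet. Presumably it mirrors the per-connection loop
# of _execute_sync_action further up, so this is an illustrative guess, not
# the actual implementation.
def _execute_sync(self) -> None:
    for connection_id, connection in active_connections():
        try:
            if not connection.sync_is_needed():
                continue
            connection.do_sync(add_to_changelog=False,
                               only_username=False,
                               load_users_func=load_users,
                               save_users_func=save_users)
        except Exception:
            logger.exception("Sync failed for connection %s", connection_id)
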
def do_execute(
    self,
    diagnostics_parameters: DiagnosticsParameters,
    job_interface: BackgroundProcessInterface,
) -> None:
    job_interface.send_progress_update(_("Diagnostics dump started..."))

    chunks = serialize_wato_parameters(diagnostics_parameters)

    # TODO: Currently, selecting multiple sites is not possible.
    # sites = diagnostics_parameters["sites"][1]
    site = diagnostics_parameters["site"]
    timeout = request.request_timeout - 2

    results = []
    for chunk in chunks:
        chunk_result = create_diagnostics_dump(
            site,
            chunk,
            timeout,
        )
        results.append(chunk_result)

    # for site in sites:
    #     for chunk in chunks:
    #         chunk_result = create_diagnostics_dump(
    #             site,
    #             chunk,
    #             timeout,
    #         )
    #         results.append(chunk_result)

    if len(results) > 1:
        result = _merge_results(results)
    elif len(results) == 1:
        result = results[0]
    else:
        job_interface.send_result_message(_("Got no result to create dump file"))
        return

    job_interface.send_progress_update(result.output)

    if result.tarfile_created:
        tarfile_path = result.tarfile_path
        download_url = makeuri_contextless(
            request,
            [("site", site), ("tarfile_name", Path(tarfile_path).name)],
            filename="download_diagnostics_dump.py",
        )
        button = html.render_icon_button(download_url, _("Download"), "diagnostics_dump_file")

        job_interface.send_progress_update(_("Dump file: %s") % tarfile_path)
        job_interface.send_result_message(_("%s Retrieve created dump file") % button)
    else:
        job_interface.send_result_message(_("Creating dump file failed"))

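# Hypothetical sketch of the _merge_results helper used above; the real
# implementation is not shown in this snippet. It assumes each chunk result
# exposes the `output`, `tarfile_created` and `tarfile_path` attributes
# consumed by do_execute, and that the last created tarfile wins.
# `DiagnosticsDumpResult` is an assumed stand-in for the actual result type.
from dataclasses import dataclass

@dataclass
class DiagnosticsDumpResult:
    output: str
    tarfile_created: bool
    tarfile_path: str

def _merge_results(results: list[DiagnosticsDumpResult]) -> DiagnosticsDumpResult:
    created = [r for r in results if r.tarfile_created]
    return DiagnosticsDumpResult(
        output="\n".join(r.output for r in results),
        tarfile_created=bool(created),
        tarfile_path=created[-1].tarfile_path if created else "",
    )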