Esempio n. 1
0
def _build_index_background(
    job_interface: BackgroundProcessInterface,
    n_attempts_redis_connection: int = 1,
    sleep_time: int = 5,
) -> None:
    """Build the full search index, retrying when Redis is unreachable.

    Retries up to n_attempts_redis_connection times on redis.ConnectionError,
    sleeping sleep_time seconds between attempts; re-raises once the limit is
    reached. Progress and the final outcome are reported via job_interface.
    """
    attempt = 0
    job_interface.send_progress_update(_("Building of search index started"))
    while True:
        attempt += 1
        try:
            IndexBuilder(match_item_generator_registry).build_full_index()
            break
        except redis.ConnectionError:
            job_interface.send_progress_update(
                _("Connection attempt %d / %d to Redis failed")
                % (attempt, n_attempts_redis_connection))
            if attempt == n_attempts_redis_connection:
                job_interface.send_result_message(
                    _("Maximum number of allowed connection attempts reached, terminating"
                      ))
                raise
            job_interface.send_progress_update(
                _("Will wait for %d seconds and retry") % sleep_time)
            sleep(sleep_time)
    job_interface.send_result_message(_("Search index successfully built"))
Esempio n. 2
0
    def do_execute(
        self,
        diagnostics_parameters: DiagnosticsParameters,
        job_interface: BackgroundProcessInterface,
    ) -> None:
        """Create a diagnostics dump for one site and offer it for download."""
        job_interface.send_progress_update(_("Diagnostics dump started..."))

        site = diagnostics_parameters["site"]
        # Leave a small headroom so the dump finishes before the HTTP request
        # itself times out.
        result = create_diagnostics_dump(
            site,
            serialize_wato_parameters(diagnostics_parameters),
            request.request_timeout - 2,
        )

        job_interface.send_progress_update(result.output)

        if not result.tarfile_created:
            job_interface.send_result_message(_("Creating dump file failed"))
            return

        tarfile_path = result.tarfile_path
        download_url = makeuri_contextless(
            request,
            [("site", site), ("tarfile_name", str(Path(tarfile_path).name))],
            filename="download_diagnostics_dump.py",
        )
        button = html.render_icon_button(download_url, _("Download"), "diagnostics_dump_file")

        job_interface.send_progress_update(_("Dump file: %s") % tarfile_path)
        job_interface.send_result_message(_("%s Retrieve created dump file") % button)
Esempio n. 3
0
    def do_execute(self, diagnostics_parameters: DiagnosticsParameters,
                   job_interface: BackgroundProcessInterface) -> None:
        """Create a diagnostics dump via remote automation and link the tarball."""
        job_interface.send_progress_update(_("Diagnostics dump started..."))

        site = diagnostics_parameters["site"]
        # Keep some headroom below the HTTP request timeout.
        result = check_mk_automation(
            site,
            "create-diagnostics-dump",
            args=serialize_wato_parameters(diagnostics_parameters),
            timeout=html.request.request_timeout - 2,
            non_blocking_http=True,
        )

        job_interface.send_progress_update(result["output"])

        if not result["tarfile_created"]:
            job_interface.send_result_message(_("Creating dump file failed"))
            return

        tarfile_path = result["tarfile_path"]
        download_url = makeuri_contextless(
            global_request,
            [("site", site), ("tarfile_name", str(Path(tarfile_path).name))],
            filename="download_diagnostics_dump.py",
        )
        button = html.render_icon_button(download_url, _("Download"), "diagnostics_dump_file")

        job_interface.send_progress_update(_("Dump file: %s") % tarfile_path)
        job_interface.send_result_message(_("%s Retrieve created dump file") % button)
Esempio n. 4
0
    def execute_automation(self, job_interface: BackgroundProcessInterface,
                           request: CheckmkAutomationRequest) -> None:
        """Run a local automation call and persist its result for get-status."""
        self._logger.info("Starting automation: %s", request.command)
        self._logger.debug(request)

        automation_result = check_mk_local_automation(
            request.command,
            request.args,
            request.indata,
            request.stdin_data,
            request.timeout,
        )

        # The get-status request later reads this file to pick up the result.
        store.save_object_to_file(
            os.path.join(job_interface.get_work_dir(), "result.mk"),
            automation_result,
        )

        job_interface.send_result_message(_("Finished."))
Esempio n. 5
0
    def _execute_sync_action(
            self, job_interface: background_job.BackgroundProcessInterface,
            add_to_changelog: bool, enforce_sync: bool,
            load_users_func: Callable, save_users_func: Callable) -> bool:
        """Synchronize all active user connections.

        Args:
            job_interface: Channel for progress/exception reporting.
            add_to_changelog: Forwarded to each connection's do_sync().
            enforce_sync: When False, connections whose sync_is_needed()
                returns False are skipped.
            load_users_func: Callable forwarded to the connections for
                reading the user database.
            save_users_func: Callable forwarded to the connections for
                persisting the user database.

        Returns:
            Always True; failures of individual connections are reported
            via job_interface/logger but do not abort the overall run.
        """
        for connection_id, connection in active_connections():
            try:
                if not enforce_sync and not connection.sync_is_needed():
                    continue

                job_interface.send_progress_update(
                    _("[%s] Starting sync for connection") % connection_id)
                # Bug fix: forward the injected callables instead of the
                # module-level load_users/save_users, which silently ignored
                # whatever the caller passed in.
                connection.do_sync(add_to_changelog=add_to_changelog,
                                   only_username=False,
                                   load_users_func=load_users_func,
                                   save_users_func=save_users_func)
                job_interface.send_progress_update(
                    _("[%s] Finished sync for connection") % connection_id)
            except Exception as e:
                # Keep going with the remaining connections; report the error.
                job_interface.send_exception(
                    _("[%s] Exception: %s") % (connection_id, e))
                logger.error('Exception (%s, userdb_job): %s', connection_id,
                             traceback.format_exc())

        job_interface.send_progress_update(_("Finalizing synchronization"))
        general_userdb_job()
        return True
Esempio n. 6
0
 def execute_automation(
     self,
     job_interface: BackgroundProcessInterface,
     api_request: CheckmkAutomationRequest,
 ) -> None:
     """Run the requested automation and store its serialized result."""
     self._logger.info("Starting automation: %s", api_request.command)
     self._logger.debug(api_request)

     cmdline, serialized = check_mk_local_automation_serialized(
         command=api_request.command,
         args=api_request.args,
         indata=api_request.indata,
         stdin_data=api_request.stdin_data,
         timeout=api_request.timeout,
     )

     # The get-status request later reads this file to pick up the result.
     self._store_result(
         path=Path(job_interface.get_work_dir()) / "result.mk",
         serialized_result=serialized,
         automation_cmd=api_request.command,
         cmdline_cmd=cmdline,
     )
     job_interface.send_result_message(_("Finished."))
Esempio n. 7
0
 def do_sync(self, job_interface: background_job.BackgroundProcessInterface,
             add_to_changelog: bool, enforce_sync: bool, load_users_func: Callable,
             save_users_func: Callable) -> None:
     """Run the user synchronization and report the outcome to the job."""
     job_interface.send_progress_update(_("Synchronization started..."))
     succeeded = self._execute_sync_action(job_interface, add_to_changelog,
                                           enforce_sync, load_users_func,
                                           save_users_func)
     if not succeeded:
         job_interface.send_exception(_("The user synchronization failed."))
         return
     job_interface.send_result_message(_("The user synchronization completed successfully."))
Esempio n. 8
0
def _update_index_background(
    change_action_name: str,
    job_interface: BackgroundProcessInterface,
) -> None:
    """Refresh the sub-indices affected by a change, rebuilding if absent."""
    job_interface.send_progress_update(_("Updating of search index started"))

    if IndexBuilder.index_is_built():
        IndexBuilder(match_item_generator_registry).build_changed_sub_indices(change_action_name)
        job_interface.send_result_message(_("Search index successfully updated"))
        return

    # Without an existing index there is nothing to update incrementally.
    job_interface.send_progress_update(_("Search index not found, re-building from scratch"))
    _build_index_background(job_interface)
Esempio n. 9
0
def _update_and_store_index_background(
    change_action_name: str,
    job_interface: BackgroundProcessInterface,
) -> None:
    """Load the stored index, merge in the changed sub-indices, persist it.

    Falls back to a full rebuild when no index file exists yet.
    """
    job_interface.send_progress_update(_("Updating of search index started"))

    index_builder = IndexBuilder(match_item_generator_registry)
    index_store = get_index_store()

    try:
        stored_index = index_store.load_index(launch_rebuild_if_missing=False)
    except IndexNotFoundException:
        # No index on disk -> a full rebuild is the only option.
        job_interface.send_progress_update(
            _("Search index file not found, re-building from scratch"))
        _build_and_store_index_background(job_interface)
        return

    stored_index.update(
        index_builder.build_changed_sub_indices(change_action_name))
    index_store.store_index(stored_index)

    job_interface.send_result_message(_("Search index successfully updated"))
Esempio n. 10
0
def _build_and_store_index_background(job_interface: BackgroundProcessInterface) -> None:
    """Build the complete search index, persist it, and report progress."""
    job_interface.send_progress_update(_("Building of search index started"))
    build_and_store_index()
    job_interface.send_result_message(_("Search index successfully built"))
Esempio n. 11
0
 def do_sync(self, job_interface: background_job.BackgroundProcessInterface) -> None:
     """Run the synchronization, reporting start and completion to the job."""
     job_interface.send_progress_update(_("Synchronization started..."))
     # Any exception raised here propagates to the background-job machinery.
     self._execute_sync()
     job_interface.send_result_message(_("The synchronization finished."))
Esempio n. 12
0
    def do_execute(
        self,
        diagnostics_parameters: DiagnosticsParameters,
        job_interface: BackgroundProcessInterface,
    ) -> None:
        """Create a diagnostics dump (one call per parameter chunk) and
        offer the merged tarball for download."""
        job_interface.send_progress_update(_("Diagnostics dump started..."))

        chunks = serialize_wato_parameters(diagnostics_parameters)

        # TODO: Selecting multiple sites is not possible yet. Once it is,
        # the dump has to be created per site as well as per chunk
        # (diagnostics_parameters["sites"][1]).
        site = diagnostics_parameters["site"]

        # Leave headroom below the HTTP request timeout.
        timeout = request.request_timeout - 2
        results = [
            create_diagnostics_dump(site, chunk, timeout) for chunk in chunks
        ]

        if not results:
            job_interface.send_result_message(
                _("Got no result to create dump file"))
            return

        result = results[0] if len(results) == 1 else _merge_results(results)

        job_interface.send_progress_update(result.output)

        if not result.tarfile_created:
            job_interface.send_result_message(_("Creating dump file failed"))
            return

        tarfile_path = result.tarfile_path
        download_url = makeuri_contextless(
            request,
            [("site", site),
             ("tarfile_name", str(Path(tarfile_path).name))],
            filename="download_diagnostics_dump.py",
        )
        button = html.render_icon_button(download_url, _("Download"),
                                         "diagnostics_dump_file")

        job_interface.send_progress_update(
            _("Dump file: %s") % tarfile_path)
        job_interface.send_result_message(
            _("%s Retrieve created dump file") % button)