def _create_snapshots(self):
    with cmk.gui.watolib.utils.exclusive_lock():
        if not self._changes:
            raise MKUserError(None, _("Currently there are no changes to activate."))

        if self._get_last_change_id() != self._activate_until:
            raise MKUserError(
                None,
                _("Another change has been made in the meantime. Please review it "
                  "to ensure you also want to activate it now and start the "
                  "activation again."))

        # Create (legacy) WATO config snapshot
        start = time.time()
        logger.debug("Snapshot creation started")
        # TODO: Remove/Refactor once new changes mechanism has been implemented
        # This single function is responsible for the slow activate changes (python tar packaging..)
        snapshot_name = cmk.gui.watolib.snapshots.create_snapshot(self._comment)
        log_audit(None, "snapshot-created", _("Created snapshot %s") % snapshot_name)

        work_dir = os.path.join(self.activation_tmp_base_dir, self._activation_id)
        if cmk.is_managed_edition():
            import cmk.gui.cme.managed_snapshots as managed_snapshots
            managed_snapshots.CMESnapshotManager(
                work_dir, self._get_site_configurations()).generate_snapshots()
        else:
            self._generate_snapshots(work_dir)

        logger.debug("Snapshot creation took %.4f" % (time.time() - start))
def create_snapshot(comment):
    logger.debug("Start creating backup snapshot")
    start = time.time()
    store.mkdir(snapshot_dir)

    snapshot_name = "wato-snapshot-%s.tar" % time.strftime(
        "%Y-%m-%d-%H-%M-%S", time.localtime(time.time())
    )

    data: Dict[str, Any] = {}
    data["comment"] = _("Activated changes by %s.") % user.id
    if comment:
        data["comment"] += _("Comment: %s") % comment

    # with SuperUserContext the user.id is None; later this value will be encoded for tar
    data["created_by"] = "" if user.id is None else user.id
    data["type"] = "automatic"
    data["snapshot_name"] = snapshot_name

    _do_create_snapshot(data)
    _do_snapshot_maintenance()

    log_audit("snapshot-created", _("Created snapshot %s") % snapshot_name)
    logger.debug("Backup snapshot creation took %.4f", time.time() - start)
def _git_command(args: List[str]) -> None:
    command = ["git"] + args
    logger.debug(
        "GIT: Execute in %s: %s",
        cmk.utils.paths.default_config_dir,
        subprocess.list2cmdline(command),
    )
    try:
        p = subprocess.Popen(
            command,
            cwd=cmk.utils.paths.default_config_dir,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            encoding="utf-8",
        )
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise MKGeneralException(
                _("Error executing GIT command <tt>%s</tt>:<br><br>%s") %
                (subprocess.list2cmdline(command), e))
        raise

    status = p.wait()
    if status != 0:
        out = "" if p.stdout is None else p.stdout.read()
        raise MKGeneralException(
            _("Error executing GIT command <tt>%s</tt>:<br><br>%s") %
            (subprocess.list2cmdline(command), out.replace("\n", "<br>\n")))
def _execute_site_sync(site_id: SiteId, site_spec: SiteConfiguration,
                       site_request: SiteRequest) -> SiteResult:
    """Executes the sync with a site. Is executed in a dedicated subprocess (one per site)"""
    try:
        logger.debug(_("[%s] Starting sync for site"), site_id)

        # timeout=100: Use a value smaller than the default apache request timeout
        result = DiscoveredHostLabelSyncResponse(
            **do_remote_automation(site_spec, "discovered-host-label-sync", [
                ("request", repr(site_request.serialize())),
            ], timeout=100))

        logger.debug(_("[%s] Finished sync for site"), site_id)

        return SiteResult(
            site_id=site_id,
            success=True,
            error="",
            updated_host_labels=result.updated_host_labels,
        )
    except Exception as e:
        logger.error("Exception (%s, discovered_host_label_sync)", site_id, exc_info=True)
        return SiteResult(
            site_id=site_id,
            success=False,
            error=str(e),
            updated_host_labels=[],
        )
def _git_command(args: List[str]) -> None:
    command = ["git"] + args
    logger.debug(
        "GIT: Execute in %s: %s",
        cmk.utils.paths.default_config_dir,
        subprocess.list2cmdline(command),
    )
    try:
        completed_process = subprocess.run(
            command,
            cwd=cmk.utils.paths.default_config_dir,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            encoding="utf-8",
            check=False,
        )
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise MKGeneralException(
                _("Error executing GIT command <tt>%s</tt>:<br><br>%s") %
                (subprocess.list2cmdline(command), e))
        raise

    if completed_process.returncode:
        raise MKGeneralException(
            _("Error executing GIT command <tt>%s</tt>:<br><br>%s") %
            (subprocess.list2cmdline(command),
             completed_process.stdout.replace("\n", "<br>\n")))
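A minimal usage sketch of this helper, using only argument lists that appear in the do_git_commit snippet further below; it assumes the surrounding module context and a git binary on the PATH:

# Usage sketch (assumes the module-level helpers and an existing repository
# in cmk.utils.paths.default_config_dir; raises MKGeneralException on failure).
_git_command(["init"])
_git_command(["config", "user.email", "check_mk"])
_git_command(["config", "user.name", "check_mk"])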
def page(self) -> CBORPageResult:
    assert user.id is not None
    if not is_two_factor_login_enabled(user.id):
        raise MKGeneralException(_("Two-factor authentication not enabled"))

    data: dict[str, object] = cbor.decode(request.get_data())
    credential_id = data["credentialId"]
    client_data = ClientData(data["clientDataJSON"])
    auth_data = AuthenticatorData(data["authenticatorData"])
    signature = data["signature"]
    logger.debug("ClientData: %r", client_data)
    logger.debug("AuthenticatorData: %r", auth_data)

    make_fido2_server().authenticate_complete(
        session.session_info.webauthn_action_state,
        [
            AttestedCredentialData.unpack_from(v["credential_data"])[0]
            for v in load_two_factor_credentials(user.id)["webauthn_credentials"].values()
        ],
        credential_id,
        client_data,
        auth_data,
        signature,
    )

    session.session_info.webauthn_action_state = None
    set_two_factor_completed()
    return {"status": "OK"}
def _ensure_connected(user: Optional[LoggedInUser], force_authuser: Optional[UserId]) -> None:
    """Build up a connection to livestatus to either a single site or multiple sites."""
    if "live" in g:
        return

    if user is None:
        user = global_user

    if force_authuser is None:
        request_force_authuser = request.get_str_input("force_authuser")
        force_authuser = UserId(request_force_authuser) if request_force_authuser else None

    logger.debug(
        "Initializing livestatus connections as user %s (forced auth user: %s)",
        user.id,
        force_authuser,
    )

    g.site_status = {}
    _connect_multiple_sites(user)
    _set_livestatus_auth(user, force_authuser)
    logger.debug("Site states: %r", g.site_status)
def disconnect() -> None:
    """Actively closes all Livestatus connections."""
    logger.debug("Disconnecting site connections")
    if "live" in g:
        g.live.disconnect()
    g.pop('live', None)
    g.pop('site_status', None)
def disconnect() -> None:
    """Actively closes all Livestatus connections."""
    if not g:
        return

    logger.debug("Disconnecting site connections")
    if "live" in g:
        g.live.disconnect()
    g.pop("live", None)
    g.pop("site_status", None)
def execute_activation_cleanup_background_job():
    # type: () -> None
    """This function is called by the GUI cron job once a minute.

    Errors are logged to var/log/web.log."""
    job = ActivationCleanupBackgroundJob()
    if job.is_active():
        logger.debug("Another activation cleanup job is already running: Skipping this time")
        return

    job.set_function(job.do_execute)
    job.start()
def create(tar_filename, components):
    # type: (str, List[ComponentSpec]) -> None
    tar = tarfile.open(tar_filename, "w:gz")

    start = time.time()
    for component in components:
        if len(component) == 4:
            # Mypy does not understand the different lengths
            what, name, path, excludes = component  # type: ignore[misc]
        else:
            # Mypy does not understand the different lengths
            what, name, path = component  # type: ignore[misc]
            excludes = []

        excludes = excludes[:]
        # exclude all temporary files
        excludes.append(".*new*")

        abspath = os.path.abspath(path)
        if os.path.exists(path):
            if what == "dir":
                basedir = abspath
                filename = "."
            else:
                basedir = os.path.dirname(abspath)
                filename = os.path.basename(abspath)

            subtar_buffer = io.BytesIO()
            with tarfile.TarFile(fileobj=subtar_buffer, mode="w") as subtar_obj:

                def exclude_filter(x, excludes=excludes):
                    return filter_subtar_files(x, excludes)

                subtar_obj.add(os.path.join(basedir, filename),
                               arcname=filename,
                               filter=exclude_filter)

            subtar_size = len(subtar_buffer.getvalue())
            subtar_buffer.seek(0)

            info = tarfile.TarInfo("%s.tar" % name)
            info.mtime = int(time.time())
            info.uid = 0
            info.gid = 0
            info.size = subtar_size
            info.mode = 0o644
            info.type = tarfile.REGTYPE

            tar.addfile(info, subtar_buffer)

    logger.debug("Packaging %s took %.3fsec", os.path.basename(tar_filename), time.time() - start)
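For illustration, a minimal call sketch: components follow the (what, name, path[, excludes]) tuple shapes unpacked above; the target file and paths are hypothetical placeholders, not Checkmk defaults:

# Hypothetical example call; each component becomes a "<name>.tar" sub-archive
# inside the gzip-compressed outer tar.
create(
    "/tmp/wato-snapshot-example.tar.gz",
    [
        ("dir", "check_mk_configdir", "/omd/sites/mysite/etc/check_mk/conf.d/wato"),
        ("file", "sitespecific", "/omd/sites/mysite/etc/check_mk/conf.d/sitespecific.mk", []),
    ],
)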
def page_run_cron():
    # type: () -> None

    # Prevent cron jobs from being run too often, also we need
    # locking in order to prevent overlapping runs
    if os.path.exists(lock_file):
        last_run = os.stat(lock_file).st_mtime
        if time.time() - last_run < 59:
            raise MKGeneralException("Cron called too early. Skipping.")
    open(lock_file, "w")  # touches the file
    store.aquire_lock(lock_file)

    # The cron page is accessed unauthenticated. After leaving the page_run_cron area
    # into the job functions we always want to have a user context initialized to keep
    # the code free from special cases (if no user logged in, then...).
    # The jobs need to be run in privileged mode in general. Some jobs, like the network
    # scan, switch the user context to a specific other user during execution.
    config.set_super_user()

    logger.debug("Starting cron jobs")

    for cron_job in multisite_cronjobs:
        try:
            job_name = cron_job.__name__

            logger.debug("Starting [%s]", job_name)
            cron_job()
            logger.debug("Finished [%s]", job_name)
        except Exception:
            html.write("An exception occurred. Take a look at the web.log.\n")
            logger.exception("Exception in cron job [%s]", job_name)

    logger.debug("Finished all cron jobs")
    html.write("OK\n")
def page_run_cron() -> None:
    lock_file = _lock_file()

    # Prevent cron jobs from being run too often, also we need
    # locking in order to prevent overlapping runs
    if lock_file.exists():
        last_run = lock_file.stat().st_mtime
        if time.time() - last_run < 59:
            raise MKGeneralException("Cron called too early. Skipping.")
    with lock_file.open("wb"):
        pass  # touches the file

    # The cron page is accessed unauthenticated. After leaving the page_run_cron area
    # into the job functions we always want to have a user context initialized to keep
    # the code free from special cases (if no user logged in, then...).
    # The jobs need to be run in privileged mode in general. Some jobs, like the network
    # scan, switch the user context to a specific other user during execution.
    with store.locked(lock_file), SuperUserContext():
        logger.debug("Starting cron jobs")

        for cron_job in multisite_cronjobs:
            try:
                job_name = cron_job.__name__

                logger.debug("Starting [%s]", job_name)
                cron_job()
                logger.debug("Finished [%s]", job_name)
            except Exception:
                response.set_data("An exception occurred. Take a look at the web.log.\n")
                logger.exception("Exception in cron job [%s]", job_name)

        logger.debug("Finished all cron jobs")
        response.set_data("OK\n")
def create(tar_filename, components):
    tar = tarfile.open(tar_filename, "w:gz")

    start = time.time()
    for component in components:
        if len(component) == 4:
            what, name, path, excludes = component
        else:
            what, name, path = component
            excludes = []

        excludes = excludes[:]
        # exclude all temporary files
        excludes.append(".*new*")

        abspath = os.path.abspath(path)
        if os.path.exists(path):
            if what == "dir":
                basedir = abspath
                filename = "."
            else:
                basedir = os.path.dirname(abspath)
                filename = os.path.basename(abspath)

            subtar_buffer = cStringIO.StringIO()
            with tarfile.TarFile(fileobj=subtar_buffer, mode="w") as subtar_obj:

                def exclude_filter(x, excludes=excludes):
                    return filter_subtar_files(x, excludes)

                subtar_obj.add(os.path.join(basedir, filename),
                               arcname=filename,
                               filter=exclude_filter)

            subtar_size = len(subtar_buffer.getvalue())
            subtar_buffer.seek(0)

            info = tarfile.TarInfo("%s.tar" % name)
            info.mtime = time.time()
            info.uid = 0
            info.gid = 0
            info.size = subtar_size
            info.mode = 0644
            info.type = tarfile.REGTYPE

            tar.addfile(info, subtar_buffer)

    logger.debug("Packaging %s took %.3fsec" %
                 (os.path.basename(tar_filename), time.time() - start))
def execute_host_label_sync_job() -> Optional[DiscoveredHostLabelSyncJob]:
    """This function is called by the GUI cron job once a minute.

    Errors are logged to var/log/web.log."""
    if not has_wato_slave_sites():
        return None

    job = DiscoveredHostLabelSyncJob()
    job.set_function(job.do_sync)

    try:
        job.start()
    except background_job.BackgroundJobAlreadyRunning:
        logger.debug("Another synchronization job is already running: Skipping this sync")

    return job
def page(self) -> CBORPageResult:
    assert user.id is not None
    if not is_two_factor_login_enabled(user.id):
        raise MKGeneralException(_("Two-factor authentication not enabled"))

    auth_data, state = make_fido2_server().authenticate_begin(
        [
            AttestedCredentialData.unpack_from(v["credential_data"])[0]
            for v in load_two_factor_credentials(user.id)["webauthn_credentials"].values()
        ],
        user_verification="discouraged",
    )

    session.session_info.webauthn_action_state = state
    logger.debug("Authentication data: %r", auth_data)
    return auth_data
def execute_userdb_job() -> None:
    """This function is called by the GUI cron job once a minute.

    Errors are logged to var/log/web.log."""
    if not userdb_sync_job_enabled():
        return

    job = UserSyncBackgroundJob()
    if job.is_active():
        logger.debug("Another synchronization job is already running: Skipping this sync")
        return

    job.set_function(job.do_sync,
                     add_to_changelog=False,
                     enforce_sync=False,
                     load_users_func=load_users,
                     save_users_func=save_users)
    job.start()
def do_git_commit():
    author = "%s <%s>" % (config.user.id, config.user.email)
    git_dir = cmk.utils.paths.default_config_dir + "/.git"
    if not os.path.exists(git_dir):
        logger.debug("GIT: Initializing")
        _git_command(["init"])

        # Set git repo global user/mail. seems to be needed to prevent warning message
        # on at least ubuntu 15.04: "Please tell me who you are. Run git config ..."
        # The individual commits by users override the author on their own
        _git_command(["config", "user.email", "check_mk"])
        _git_command(["config", "user.name", "check_mk"])

        _write_gitignore_files()
        _git_add_files()
        _git_command([
            "commit", "--untracked-files=no", "--author", author, "-m",
            _("Initialized GIT for Check_MK")
        ])

    if _git_has_pending_changes():
        logger.debug("GIT: Found pending changes - Update gitignore file")
        _write_gitignore_files()

        # Writing the gitignore files might have reverted the change. So better re-check.
        if _git_has_pending_changes():
            logger.debug("GIT: Still has pending changes")
            _git_add_files()

            message = ", ".join(_git_messages())
            if not message:
                message = _("Unknown configuration change")

            _git_command(["commit", "--author", author, "-m", message])
def page(self) -> CBORPageResult:
    assert user.id is not None
    user.need_permission("general.manage_2fa")

    raw_data = request.get_data()
    logger.debug("Raw request: %r", raw_data)
    data: dict[str, object] = cbor.decode(raw_data)
    client_data = ClientData(data["clientDataJSON"])
    att_obj = AttestationObject(data["attestationObject"])
    logger.debug("Client data: %r", client_data)
    logger.debug("Attestation object: %r", att_obj)

    auth_data = make_fido2_server().register_complete(
        session.session_info.webauthn_action_state, client_data, att_obj
    )

    ident = auth_data.credential_data.credential_id.hex()
    credentials = load_two_factor_credentials(user.id, lock=True)

    if ident in credentials["webauthn_credentials"]:
        raise MKGeneralException(_("Your WebAuthn credential is already in use"))

    credentials["webauthn_credentials"][ident] = WebAuthnCredential(
        {
            "credential_id": ident,
            "registered_at": int(time.time()),
            "alias": "",
            "credential_data": bytes(auth_data.credential_data),
        }
    )
    save_two_factor_credentials(user.id, credentials)

    flash(_("Registration successful"))
    return {"status": "OK"}
def _call_load_plugins_hooks(main_modules: List[ModuleType]) -> None:
    """Call the load_plugins() function in all main modules

    Have a look at our Wiki: /books/concepts/page/how-cmkgui-is-organised
    Each main module has the option to declare a `load_plugins` hook function to
    realize its own logic that should be executed when initializing the main module.

    In previous versions this was executed with loaded configuration and localized
    during request processing, which resulted in several problems. Now this is
    executed during application initialization (at import time).

    1. During import of the application (e.g. web/app/index.wsgi) `init_modules`
       cares for the import of all main modules
    2. Then this function calls the function `load_plugins` hook of all main modules.
    3. The main module is doing its initialization logic.
    """
    logger.debug("Executing load_plugin hooks")

    for module in main_modules:
        name = module.__name__

        if name == "cmk.gui.main_modules":
            continue  # Do not call ourselves

        if not hasattr(module, "load_plugins"):
            continue  # has no load_plugins hook, nothing to do

        logger.debug("Executing load_plugins hook for %s", name)

        # hasattr above ensures the function is available. Mypy does not understand this.
        module.load_plugins()  # type: ignore[attr-defined]

    logger.debug("Finished executing load_plugin hooks")
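As the docstring describes, a main module only needs to expose a module-level load_plugins() callable taking no arguments; a minimal sketch of such a hook in a hypothetical main module (module name and body are illustrative, not actual Checkmk code):

# Hypothetical main module, e.g. cmk/gui/my_feature.py (illustrative only).
# _call_load_plugins_hooks() discovers this via hasattr(module, "load_plugins")
# and calls it without arguments during application initialization.
def load_plugins() -> None:
    # One-time initialization of this main module, e.g. registering its
    # plugin objects in the module's own registries.
    pass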
def page(self) -> CBORPageResult:
    assert user.id is not None
    user.need_permission("general.manage_2fa")

    registration_data, state = make_fido2_server().register_begin(
        {
            "id": user.id.encode("utf-8"),
            "name": user.id,
            "displayName": user.alias,
            "icon": "",
        },
        [
            AttestedCredentialData.unpack_from(v["credential_data"])[0]
            for v in load_two_factor_credentials(user.id)["webauthn_credentials"].values()
        ],
        user_verification="discouraged",
        authenticator_attachment="cross-platform",
    )

    session.session_info.webauthn_action_state = state
    logger.debug("Registration data: %r", registration_data)
    return registration_data
def _git_command(args): command = ["git"] + [six.ensure_str(a) for a in args] logger.debug("GIT: Execute in %s: %s", cmk.utils.paths.default_config_dir, subprocess.list2cmdline(command)) try: p = subprocess.Popen(command, cwd=cmk.utils.paths.default_config_dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except OSError as e: if e.errno == errno.ENOENT: raise MKGeneralException( _("Error executing GIT command <tt>%s</tt>:<br><br>%s") % (subprocess.list2cmdline(command), e)) raise status = p.wait() if status != 0: out = u"" if p.stdout is None else six.ensure_text(p.stdout.read()) raise MKGeneralException( _("Error executing GIT command <tt>%s</tt>:<br><br>%s") % (subprocess.list2cmdline(command), out.replace("\n", "<br>\n")))
def create_snapshot(comment):
    logger.debug("Start creating backup snapshot")
    start = time.time()
    store.mkdir(snapshot_dir)

    snapshot_name = "wato-snapshot-%s.tar" % time.strftime("%Y-%m-%d-%H-%M-%S",
                                                           time.localtime(time.time()))

    data = {}  # type: Dict[str, Any]
    data["comment"] = _("Activated changes by %s.") % config.user.id

    if comment:
        data["comment"] += _("Comment: %s") % comment

    data["created_by"] = config.user.id
    data["type"] = "automatic"
    data["snapshot_name"] = snapshot_name

    _do_create_snapshot(data)
    _do_snapshot_maintenance()

    log_audit(None, "snapshot-created", _("Created snapshot %s") % snapshot_name)
    logger.debug("Backup snapshot creation took %.4f", time.time() - start)
def _git_command(args): command = ["git"] + [a.encode("utf-8") for a in args] logger.debug( "GIT: Execute in %s: %s" % (cmk.utils.paths.default_config_dir, subprocess.list2cmdline(command))) try: p = subprocess.Popen(command, cwd=cmk.utils.paths.default_config_dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except OSError as e: if e.errno == 2: raise MKGeneralException( _("Error executing GIT command <tt>%s</tt>:<br><br>%s") % (subprocess.list2cmdline(command), e)) else: raise status = p.wait() if status != 0: raise MKGeneralException( _("Error executing GIT command <tt>%s</tt>:<br><br>%s") % (subprocess.list2cmdline(command), p.stdout.read().replace( "\n", "<br>\n")))
def _import_main_module_plugins(main_modules: List[ModuleType]) -> None:
    logger.debug("Importing main module plugins")

    for module in main_modules:
        main_module_name = module.__name__.split(".")[-1]

        for plugin_package_name in _plugin_package_names(main_module_name):
            if not _is_plugin_namespace(plugin_package_name):
                logger.debug("  Skip loading plugins from %s", plugin_package_name)
                continue

            logger.debug("  Importing plugins from %s", plugin_package_name)
            for plugin_name, exc in load_plugins_with_exceptions(plugin_package_name):
                logger.error("  Error in %s plugin '%s'\n", main_module_name, plugin_name,
                             exc_info=exc)
                utils.add_failed_plugin(main_module_name, plugin_name, exc)

    logger.debug("Main module plugins imported")
def _create_sample_config():
    """Create a very basic sample configuration

    But only if none of the files that we will create already exists. That is
    e.g. the case after an update from an older version where no sample config
    had been created.
    """
    if not _need_to_create_sample_config():
        return

    logger.debug("Start creating the sample config")
    for generator in sample_config_generator_registry.get_generators():
        try:
            logger.debug("Starting [%s]", generator.ident())
            generator.generate()
            logger.debug("Finished [%s]", generator.ident())
        except Exception:
            logger.exception("Exception in sample config generator [%s]", generator.ident())

    logger.debug("Finished creating the sample config")
def page(self) -> CBORPageResult:
    assert user.id is not None
    user.need_permission("general.manage_2fa")

    raw_data = request.get_data()
    logger.debug("Raw request: %r", raw_data)
    data: dict[str, object] = cbor.decode(raw_data)
    client_data = ClientData(data["clientDataJSON"])
    att_obj = AttestationObject(data["attestationObject"])
    logger.debug("Client data: %r", client_data)
    logger.debug("Attestation object: %r", att_obj)

    try:
        auth_data = make_fido2_server().register_complete(
            session.session_info.webauthn_action_state, client_data, att_obj)
    except ValueError as e:
        if "Invalid origin in ClientData" in str(e):
            raise MKGeneralException(
                "The origin %r is not valid. You need to access the UI via HTTPS "
                "and you need to use a valid host or domain name. See werk #13325 for "
                "further information" % client_data.get("origin")) from e
        raise

    ident = auth_data.credential_data.credential_id.hex()
    credentials = load_two_factor_credentials(user.id, lock=True)

    if ident in credentials["webauthn_credentials"]:
        raise MKGeneralException(_("Your WebAuthn credential is already in use"))

    credentials["webauthn_credentials"][ident] = WebAuthnCredential({
        "credential_id": ident,
        "registered_at": int(time.time()),
        "alias": "",
        "credential_data": bytes(auth_data.credential_data),
    })
    save_two_factor_credentials(user.id, credentials)

    flash(_("Registration successful"))
    return {"status": "OK"}
def render_graph_pdf(instance,
                     graph_artwork,
                     graph_data_range,
                     graph_render_options,
                     pos_left=None,
                     pos_top=None,
                     total_width=None,
                     total_height=None):
    pdf_document = instance["document"]

    logger.debug(" Render graph %r", graph_artwork["definition"]["specification"])

    if pos_left is None:  # floating element
        pdf_document.margin(2.5)

    # Styling for PDF graphs. Note: We could make some of these
    # configurable
    font_size = graph_render_options["font_size"]
    mm_per_ex = mm_per_ex_by_render_options(graph_render_options)
    v_label_margin = 1.0  # mm
    t_label_margin = _graph_time_label_margin()
    left_border = _graph_vertical_axis_width(graph_render_options)
    left_margin = _graph_left_margin(graph_render_options)
    top_margin = _graph_top_margin(graph_render_options)
    right_margin = _graph_right_margin(graph_render_options)
    bottom_margin = _graph_bottom_margin(graph_render_options)
    axis_color = parse_color(graph_render_options["foreground_color"])
    zero_rule_color = parse_color(graph_render_options["foreground_color"])
    canvas_color = parse_color(graph_render_options["canvas_color"])
    background_color = parse_color(graph_render_options["background_color"])
    foreground_color = parse_color(graph_render_options["foreground_color"])
    axis_over_width = _graph_axis_over_width(graph_render_options)
    color_gradient = graph_render_options["color_gradient"] / 100.0
    curve_line_width = 0.1  # mm
    rule_line_width = 0.1  # mm
    label_line_width = 0.04  # mm
    v_line_color = tuple(
        map(parse_color, [graph_render_options["foreground_color"], "#a0a0a0", "#a0a0a0"]))
    v_line_dash = [None, [0.2, 0.4], None]
    t_line_color = tuple(
        map(parse_color, [graph_render_options["foreground_color"], "#a0a0a0", "#666666"]))
    t_line_dash = [None, [0.2, 0.2], None]
    legend_box_line_width = 0.1

    pdf_document.save_state()
    pdf_document.set_font_size(font_size)

    legend_box_size = mm_per_ex

    title_height = graph_title_height(graph_render_options)
    legend_height = graph_legend_height(graph_artwork, graph_render_options)

    if pos_left is not None:  # Absolute placement of graph
        height = total_height - title_height - legend_height
        width = total_width

    else:  # Place graph in page flow
        width_ex, height_ex = graph_render_options["size"]
        width = width_ex * mm_per_ex
        height = height_ex * mm_per_ex

    left, top, width, total_height = pdf_document.add_canvas(
        width,
        height + title_height + legend_height,
        border_width=graph_render_options["border_width"],
        left_mm=pos_left)

    # From here width, height, total_height, left and top are in "mm".

    right = left + width - right_margin
    total_bottom = top - total_height
    bottom = top - height - title_height

    # Fill canvas with background color
    pdf_document.render_rect(left, total_bottom, width, total_height, fill_color=background_color)

    # Regular title (above graph area)
    if graph_render_options["show_title"] is True:
        pdf_document.render_aligned_text(left,
                                         top - title_height,
                                         width,
                                         title_height,
                                         graph_artwork["title"],
                                         align="center",
                                         bold=True,
                                         color=foreground_color)

    # The following code is inspired by htdocs/js/graphs.js:render_graph(). Whenever
    # you change something there, the change should also be reflected here!
    bottom_border = _graph_bottom_border(graph_render_options)

    # Prepare position and translation of origin
    t_range_from = graph_artwork["time_axis"]["range"][0]
    t_range_to = graph_artwork["time_axis"]["range"][1]
    t_range = t_range_to - t_range_from
    t_mm = width - left_border - left_margin - right_margin
    t_mm_per_second = 1.0 * t_mm / t_range

    v_range_from = graph_artwork["vertical_axis"]["range"][0]
    v_range_to = graph_artwork["vertical_axis"]["range"][1]
    v_range = v_range_to - v_range_from
    v_mm = height - top_margin - bottom_border - bottom_margin
    v_mm_per_unit = 1.0 * v_mm / v_range

    t_orig = left + left_border + left_margin
    v_orig = bottom + bottom_border + bottom_margin
    v_axis_orig = v_range_from

    # paint graph background
    pdf_document.render_rect(t_orig, v_orig, t_mm, v_mm, fill_color=canvas_color)

    # Now transform the whole coordinate system to our real t and v coordinates
    # so if we paint something at (0, 0) it will correctly represent a
    # value of 0 and a time point of time_start.
    trans_t = lambda t: (t - t_range_from) * t_mm_per_second + t_orig
    trans_v = lambda v: v_orig + ((v - v_axis_orig) * v_mm_per_unit)
    trans = lambda t, v: (trans_t(t), trans_v(v))

    # Paint curves
    pdf_document.save_state()
    pdf_document.add_clip_rect(t_orig, v_orig, t_mm, v_mm)
    step = graph_artwork["step"] / 2.0
    for curve in graph_artwork["curves"]:
        if curve.get("dont_paint"):
            continue

        t = graph_artwork["start_time"]
        points = curve["points"]
        color = parse_color(curve["color"])

        if curve["type"] == "area":
            prev_lower = None
            prev_upper = None

            gradient = (t_orig, v_orig, t_orig, v_orig + v_mm,
                        (darken_color(color, color_gradient), color,
                         lighten_color(color, color_gradient)), (0.0, 0.5, 1.0))

            for lower, upper in points:
                if lower is not None and upper is not None and \
                        prev_lower is not None and prev_upper is not None:
                    pdf_document.begin_path()
                    pdf_document.move_to(trans_t(t - step) - 0.01, trans_v(prev_lower))
                    pdf_document.line_to(trans_t(t - step) - 0.01, trans_v(prev_upper))
                    pdf_document.line_to(trans_t(t), trans_v(upper))
                    pdf_document.line_to(trans_t(t), trans_v(lower))
                    pdf_document.line_to(trans_t(t - step) - 0.01, trans_v(prev_lower))
                    pdf_document.close_path()
                    pdf_document.fill_path(color, gradient=gradient)

                prev_lower = lower
                prev_upper = upper
                t += step

        else:  # "line"
            last_value = None
            pdf_document.begin_path()
            for value in points:
                if value is not None:
                    p = trans(t, value)
                    if last_value is not None:
                        pdf_document.line_to(p[0], p[1])
                    else:
                        pdf_document.move_to(p[0], p[1])
                    last_value = value
                t += step

            pdf_document.stroke_path(color=color, width=curve_line_width)

    pdf_document.restore_state()  # Remove clipping

    # Now we use these four dimensions for drawing into the canvas using render_...
    # functions from pdf. Note: top > bottom.

    # Clear areas where values have been painted out of range. This is
    # At top and bottom
    pdf_document.render_rect(t_orig, v_orig + v_mm, t_mm, top_margin, fill_color=background_color)
    pdf_document.render_rect(t_orig, bottom, t_mm, v_orig - bottom, fill_color=background_color)

    # Paint axes and a strong line at 0, if that is in the range
    pdf_document.render_line(t_orig,
                             v_orig - axis_over_width,
                             t_orig,
                             v_orig + v_mm,
                             color=axis_color)
    pdf_document.render_line(t_orig - axis_over_width, v_orig, right, v_orig, color=axis_color)
    if v_range_from <= 0 <= v_range_to:
        pdf_document.render_line(t_orig, trans_v(0), right, trans_v(0), color=zero_rule_color)

    # Show the inline title
    if graph_render_options["show_title"] == "inline":
        title_top = top - (mm_per_ex_by_render_options(graph_render_options) * 2)
        pdf_document.render_aligned_text(left,
                                         title_top,
                                         width,
                                         mm_per_ex_by_render_options(graph_render_options) * 2,
                                         graph_artwork["title"],
                                         align="center",
                                         bold=True,
                                         color=foreground_color)

    if graph_render_options["show_graph_time"]:
        title_top = top - (mm_per_ex_by_render_options(graph_render_options) * 2)
        pdf_document.render_aligned_text(0,
                                         title_top,
                                         width,
                                         mm_per_ex_by_render_options(graph_render_options) * 2,
                                         graph_artwork["time_axis"]["title"],
                                         align="right",
                                         bold=True,
                                         color=foreground_color)

    # Paint the vertical axis
    if graph_render_options["show_vertical_axis"]:
        # Render optional vertical axis label
        vertical_axis_label = graph_artwork["vertical_axis"]["axis_label"]
        if vertical_axis_label:
            pdf_document.render_aligned_text(left + left_margin,
                                             top - title_height,
                                             left_border,
                                             title_height,
                                             vertical_axis_label,
                                             align="center",
                                             valign="middle",
                                             color=foreground_color)

    for position, label, line_width in graph_artwork["vertical_axis"]["labels"]:
        if line_width > 0:
            pdf_document.render_line(t_orig,
                                     trans_v(position),
                                     right,
                                     trans_v(position),
                                     width=label_line_width,
                                     color=v_line_color[line_width],
                                     dashes=v_line_dash[line_width])

        if graph_render_options["show_vertical_axis"] and label:
            pdf_document.render_aligned_text(t_orig - v_label_margin - left_border,
                                             trans_v(position),
                                             left_border,
                                             mm_per_ex,
                                             label,
                                             align="right",
                                             valign="middle",
                                             color=foreground_color)

    # Paint time axis
    for position, label, line_width in graph_artwork["time_axis"]["labels"]:
        t_pos_mm = trans_t(position)
        if line_width > 0 and t_pos_mm > t_orig:
            pdf_document.render_line(t_pos_mm,
                                     v_orig,
                                     t_pos_mm,
                                     trans_v(v_range_to),
                                     width=label_line_width,
                                     color=t_line_color[line_width],
                                     dashes=t_line_dash[line_width])

        if graph_render_options["show_time_axis"] and label:
            pdf_document.render_aligned_text(t_pos_mm,
                                             v_orig - t_label_margin - mm_per_ex,
                                             0,
                                             mm_per_ex,
                                             label,
                                             align="center",
                                             color=foreground_color)

    # Paint horizontal rules like warn and crit
    rules = graph_artwork["horizontal_rules"]
    for position, label, color_from_rule, title in rules:
        if v_range_from <= position <= v_range_to:
            pdf_document.render_line(t_orig,
                                     trans_v(position),
                                     right,
                                     trans_v(position),
                                     width=rule_line_width,
                                     color=parse_color(color_from_rule))

    # Paint legend
    if graph_render_options["show_legend"]:
        legend_lineskip = get_graph_legend_lineskip(graph_render_options)
        legend_top_margin = _graph_legend_top_margin()
        legend_top = bottom - legend_top_margin + bottom_margin
        legend_column_width = (width - left_margin - left_border - right_margin) / 7.0

        def paint_legend_line(color, texts):
            l = t_orig
            if color:
                pdf_document.render_rect(l,
                                         legend_top + mm_per_ex * 0.2,
                                         legend_box_size,
                                         legend_box_size,
                                         fill_color=color,
                                         line_width=legend_box_line_width)
            for nr, text in enumerate(texts):
                if text:
                    pdf_document.render_aligned_text(
                        l + (color and nr == 0 and legend_box_size + 0.8 or 0),
                        legend_top,
                        legend_column_width,
                        legend_lineskip,
                        text,
                        align=nr == 0 and "left" or "right",
                        color=foreground_color)
                if nr == 0:
                    l += legend_column_width * 3
                else:
                    l += legend_column_width

        scalars = [
            ("min", _("Minimum")),
            ("max", _("Maximum")),
            ("average", _("Average")),
            ("last", _("Last")),
        ]

        scalars_legend_line: List[Optional[str]] = [None]

        paint_legend_line(None, scalars_legend_line + [x[1] for x in scalars])
        pdf_document.render_line(t_orig, legend_top, t_orig + t_mm, legend_top)

        for curve in graph_artwork["curves"]:
            legend_top -= legend_lineskip
            texts = [curve["title"]]
            for scalar, title in scalars:
                texts.append(curve["scalars"][scalar][1])
            paint_legend_line(parse_color(curve["color"]), texts)

        if graph_artwork["horizontal_rules"]:
            pdf_document.render_line(t_orig, legend_top, t_orig + t_mm, legend_top)
            for value, readable, color_from_artwork, title in graph_artwork["horizontal_rules"]:
                legend_top -= legend_lineskip
                paint_legend_line(parse_color(color_from_artwork),
                                  [title] + [None] * 3 + [readable])

    if graph_artwork["definition"].get("is_forecast"):
        pin = trans_t(graph_artwork['requested_end_time'])
        pdf_document.render_line(pin, v_orig, pin, trans_v(v_range_to), color=(0.0, 1.0, 0.0))

    pdf_document.restore_state()

    if left is None:  # floating element
        pdf_document.margin(2.5)

    logger.debug(" Finished rendering graph")
def make_fido2_server() -> Fido2Server:
    rp_id = request.host
    logger.debug("Using %r as relying party ID", rp_id)
    return Fido2Server(PublicKeyCredentialRpEntity(rp_id, "Checkmk"))
def render_graph_pdf(
    instance,
    graph_artwork: GraphArtwork,
    graph_data_range: GraphDataRange,
    graph_render_options: GraphRenderOptions,
    pos_left: Optional[SizeMM] = None,
    pos_top: Optional[SizeMM] = None,
    total_width: Optional[SizeMM] = None,
    total_height: Optional[SizeMM] = None,
) -> None:
    pdf_document = instance["document"]

    logger.debug(" Render graph %r", graph_artwork["definition"]["specification"])

    if pos_left is None:  # floating element
        pdf_document.margin(2.5)

    # Styling for PDF graphs. Note: We could make some of these
    # configurable
    font_size = graph_render_options["font_size"]
    mm_per_ex = mm_per_ex_by_render_options(graph_render_options)
    v_label_margin = 1.0  # mm
    t_label_margin = _graph_time_label_margin()
    left_border = _graph_vertical_axis_width(graph_render_options)
    left_margin = _graph_left_margin(graph_render_options)
    top_margin = _graph_top_margin(graph_render_options)
    right_margin = _graph_right_margin(graph_render_options)
    bottom_margin = _graph_bottom_margin(graph_render_options)
    axis_color = parse_color(graph_render_options["foreground_color"])
    zero_rule_color = parse_color(graph_render_options["foreground_color"])
    canvas_color = parse_color(graph_render_options["canvas_color"])
    background_color = parse_color(graph_render_options["background_color"])
    foreground_color = parse_color(graph_render_options["foreground_color"])
    axis_over_width = _graph_axis_over_width(graph_render_options)
    color_gradient = graph_render_options["color_gradient"] / 100.0
    curve_line_width = 0.1  # mm
    rule_line_width = 0.1  # mm
    label_line_width = 0.04  # mm
    v_line_color = tuple(
        map(parse_color, [graph_render_options["foreground_color"], "#a0a0a0", "#a0a0a0"])
    )
    v_line_dash = [None, [0.2, 0.4], None]
    t_line_color = tuple(
        map(parse_color, [graph_render_options["foreground_color"], "#a0a0a0", "#666666"])
    )
    t_line_dash = [None, [0.2, 0.2], None]
    legend_box_line_width = 0.1

    pdf_document.save_state()
    pdf_document.set_font_size(font_size)

    legend_box_size = mm_per_ex

    title_height = graph_title_height(graph_render_options)
    legend_height = graph_legend_height(graph_artwork, graph_render_options)

    if pos_left is not None:  # Absolute placement of graph
        assert pos_top is not None
        assert total_width is not None
        assert total_height is not None
        height = total_height - title_height - legend_height
        width = total_width

    else:  # Place graph in page flow
        width_ex, height_ex = graph_render_options["size"]
        width = width_ex * mm_per_ex
        height = height_ex * mm_per_ex

    left, top, width, total_height = pdf_document.add_canvas(
        width,
        height + title_height + legend_height,
        border_width=graph_render_options["border_width"],
        left_mm=pos_left,
    )

    # From here width, height, total_height, left and top are in "mm".

    right = left + width - right_margin
    total_bottom = top - total_height
    bottom = top - height - title_height

    # Fill canvas with background color
    pdf_document.render_rect(left, total_bottom, width, total_height, fill_color=background_color)

    # Regular title (above graph area)
    if graph_render_options["show_title"] is True:
        title_left_margin = left + right_margin
        if vertical_axis_label := graph_artwork.get("vertical_axis", {}).get("axis_label"):
            title_left_margin = left + left_border + left_margin

        pdf_document.render_aligned_text(
            title_left_margin,
            top - title_height,
            width,
            title_height,
            graph_artwork["title"],
            align="left",
            bold=True,
            color=foreground_color,
        )