def page(self):
    """Return the state of the requested BI aggregations plus the layout
    each one should be rendered with, as a JSON-serializable dict."""
    filter_names = json.loads(html.request.var("aggregations", "[]"))

    # A layout explicitly requested via URL wins - but only if such a
    # template layout actually exists
    forced_layout_id = html.request.var("layout_id")
    if forced_layout_id not in BILayoutManagement.get_all_bi_template_layouts():
        forced_layout_id = None

    state_data = bi.api_get_aggregation_state(filter_names=filter_names)
    aggregation_info = {"aggregations": {}}
    aggregation_layouts = BILayoutManagement.get_all_bi_aggregation_layouts()
    for row in state_data["rows"]:
        aggr_name = row["tree"]["aggr_name"]
        if filter_names and aggr_name not in filter_names:
            continue
        # Convert the raw aggregation tree state into the node hierarchy
        # format consumed by the visualization frontend
        visual_mapper = NodeVisualizationBIDataMapper()
        aggr_treestate = row["tree"]["aggr_treestate"]
        hierarchy = visual_mapper.consume(aggr_treestate)

        data = {}
        data["hierarchy"] = hierarchy
        data["groups"] = row["groups"]
        data["data_timestamp"] = int(time.time())

        aggr_settings = row["tree"]["aggr_tree"]["node_visualization"]
        layout = {"config": {}}
        if forced_layout_id:
            # Globally enforced template layout overrides any per-aggregation setting
            layout["enforced_id"] = aggr_name
            layout["origin_type"] = "globally_enforced"
            layout["origin_info"] = _("Globally enforced")
            layout["use_layout"] = BILayoutManagement.load_bi_template_layout(
                forced_layout_id)
        else:
            if aggr_name in aggregation_layouts:
                # A layout was explicitly saved for this aggregation
                layout["origin_type"] = "explicit"
                layout["origin_info"] = _("Explicit set")
                layout["explicit_id"] = aggr_name
                layout["config"] = aggregation_layouts[aggr_name]
                layout["config"]["ignore_rule_styles"] = True
            else:
                layout.update(self._get_template_based_layout_settings(aggr_settings))

        # Fill in defaults for settings the chosen layout did not specify
        if "ignore_rule_styles" not in layout["config"]:
            layout["config"]["ignore_rule_styles"] = aggr_settings.get(
                "ignore_rule_styles", False)
        if "line_config" not in layout["config"]:
            layout["config"]["line_config"] = self._get_line_style_config(aggr_settings)

        data["layout"] = layout
        aggregation_info["aggregations"][row["tree"]["aggr_name"]] = data

    html.set_output_format("json")
    return aggregation_info
def ajax_openclose() -> None:
    """Persist the open/closed/removed state of a single sidebar snapin."""
    html.set_output_format("json")
    if not config.user.may("general.configure_sidebar"):
        return None

    requested_id = html.request.var("name")
    if requested_id is None:
        return None

    requested_state = html.request.var("state")
    allowed_states = [
        SnapinVisibility.OPEN.value, SnapinVisibility.CLOSED.value, "off"
    ]
    if requested_state not in allowed_states:
        raise MKUserError("state", "Invalid state: %s" % requested_state)

    sidebar_config = UserSidebarConfig(config.user, config.sidebar)
    try:
        target_snapin = sidebar_config.get_snapin(requested_id)
    except KeyError:
        # Unknown snapin: nothing to update
        return None

    if requested_state == "off":
        sidebar_config.remove_snapin(target_snapin)
    else:
        target_snapin.visible = SnapinVisibility(requested_state)
    sidebar_config.save()
def move_snapin() -> None:
    """Reorder a sidebar snapin, placing it before the given sibling (or last)."""
    html.set_output_format("json")
    if not config.user.may("general.configure_sidebar"):
        return None

    moved_id = html.request.var("name")
    if moved_id is None:
        return None

    sidebar_config = UserSidebarConfig(config.user, config.sidebar)
    try:
        moved_snapin = sidebar_config.get_snapin(moved_id)
    except KeyError:
        return None

    # The target position is given as the snapin that should follow the
    # moved one; a missing/unknown "before" means "move to the end"
    anchor: Optional[UserSidebarSnapin] = None
    anchor_id = html.request.var("before")
    if anchor_id:
        try:
            anchor = sidebar_config.get_snapin(anchor_id)
        except KeyError:
            pass

    sidebar_config.move_snapin_before(moved_snapin, anchor)
    sidebar_config.save()
def _ajax_tag_tree_enter(self):
    """Remember the path the user navigated to inside the current tag tree."""
    html.set_output_format("json")
    self._load()
    raw_path = html.request.var("path")
    if raw_path:
        new_cwd = raw_path.split("|")
    else:
        new_cwd = []
    self._cwds[self._current_tree_id] = new_cwd
    self._save_user_settings()
    html.write("OK")
def page(self):
    """Serve the stored agent output of a host as a downloadable text file."""
    file_name = self.file_name(self._request)
    file_content = self._get_agent_output_file()
    html.set_output_format("text")
    # Quote the filename: an unquoted value containing spaces or other
    # special characters yields an invalid Content-Disposition header
    # (RFC 6266). The quoted form is what the audit log export already uses.
    html.response.headers["Content-Disposition"] = "Attachment; filename=\"%s\"" % file_name
    html.write(file_content)
def _export_audit_log(self):
    """Send the WATO audit log to the client as a CSV download.

    Returns False to signal the caller that no further page rendering
    should happen.
    """
    html.set_output_format("csv")
    if self._options["display"] == "daily":
        filename = "wato-auditlog-%s_%s.csv" % (render.date(
            time.time()), render.time_of_day(time.time()))
    else:
        filename = "wato-auditlog-%s_%s_days.csv" % (render.date(
            time.time()), self._options["display"][1])

    # NOTE(review): the original code additionally did html.write(filename)
    # here, which injected the filename into the response body before the
    # CSV data and corrupted the download. The newer variant of this export
    # does not write it either, so the stray write was removed.
    html.response.headers[
        "Content-Disposition"] = "attachment; filename=\"%s\"" % filename

    titles = (
        _('Date'),
        _('Time'),
        _('Linkinfo'),
        _('User'),
        _('Action'),
        _('Text'),
    )
    html.write(','.join(titles) + '\n')
    for t, linkinfo, user, action, text in self._parse_audit_log():
        if linkinfo == '-':
            linkinfo = ''
        if self._filter_entry(user, action, text):  # TODO: Already filtered?!
            continue
        html.write_text(','.join((render.date(int(t)), render.time_of_day(int(t)),
                                  linkinfo, user, action, '"' + text + '"')) + '\n')
    return False
def page(self) -> AjaxPageResult:
    """Compute the requested BI aggregation results and return their node
    hierarchies together with the layout configuration the frontend should
    apply, as a JSON-serializable dict."""
    aggregations_var = html.request.get_str_input_mandatory("aggregations", "[]")
    filter_names = json.loads(aggregations_var)

    # A layout explicitly requested via URL wins - but only if such a
    # template layout actually exists
    forced_layout_id = html.request.var("layout_id")
    if forced_layout_id not in BILayoutManagement.get_all_bi_template_layouts():
        forced_layout_id = None

    # Only filter by aggregation names; all other filter dimensions stay open
    bi_aggregation_filter = BIAggregationFilter([], [], [], filter_names, [], [])
    results = bi.get_cached_bi_manager().computer.compute_result_for_filter(
        bi_aggregation_filter)

    aggregation_info: Dict[str, Any] = {"aggregations": {}}
    aggregation_layouts = BILayoutManagement.get_all_bi_aggregation_layouts()

    for bi_compiled_aggregation, node_result_bundles in results:
        for node_result_bundle in node_result_bundles:
            branch = node_result_bundle.instance
            aggr_name = branch.properties.title
            visual_mapper = NodeVisualizationBIDataMapper(
                is_single_host_aggregation=len(branch.get_required_hosts()) == 1)
            hierarchy = visual_mapper.consume(node_result_bundle)

            data: Dict[str, Any] = {}
            data["type"] = "bi"
            data["hierarchy"] = hierarchy
            data["groups"] = bi_compiled_aggregation.groups.names
            data["data_timestamp"] = int(time.time())

            aggr_settings = bi_compiled_aggregation.aggregation_visualization
            layout: Dict[str, Any] = {"config": {}}
            if forced_layout_id:
                # Globally enforced template layout overrides everything
                layout["enforced_id"] = aggr_name
                layout["origin_type"] = "globally_enforced"
                layout["origin_info"] = _("Globally enforced")
                layout["use_layout"] = BILayoutManagement.load_bi_template_layout(
                    forced_layout_id)
            else:
                if aggr_name in aggregation_layouts:
                    # A layout explicitly saved for this aggregation
                    layout["origin_type"] = "explicit"
                    layout["origin_info"] = _("Explicit set")
                    layout["explicit_id"] = aggr_name
                    layout["config"] = aggregation_layouts[aggr_name]
                    layout["config"]["ignore_rule_styles"] = True
                else:
                    layout.update(self._get_template_based_layout_settings(aggr_settings))

            # Fill in defaults for settings the chosen layout did not specify
            if "ignore_rule_styles" not in layout["config"]:
                layout["config"]["ignore_rule_styles"] = aggr_settings.get(
                    "ignore_rule_styles", False)
            if "line_config" not in layout["config"]:
                layout["config"]["line_config"] = self._get_line_style_config(aggr_settings)

            data["layout"] = layout
            aggregation_info["aggregations"][aggr_name] = data

    html.set_output_format("json")
    return aggregation_info
def _ajax_speedometer(self):
    """Deliver the current vs. scheduled service check rate as JSON for the
    speedometer sidebar snapin. On any error a zeroed payload with an
    explanatory title is returned instead of failing the request."""
    html.set_output_format("json")
    try:
        # Try to get values from the last call in order to compute a
        # drifting speedometer-needle and to reuse the scheduled
        # check rate.
        # TODO: Do we need a get_float_input_mandatory?
        last_perc = float(
            html.request.get_str_input_mandatory("last_perc"))
        scheduled_rate = float(
            html.request.get_str_input_mandatory("scheduled_rate"))
        last_program_start = html.request.get_integer_input_mandatory(
            "program_start")

        # Get the current rates and the program start time. If there
        # are more than one site, we simply add the start times.
        data = sites.live().query_summed_stats(
            "GET status\n"
            "Columns: service_checks_rate program_start")
        current_rate = data[0]
        program_start = data[1]

        # Recompute the scheduled_rate only if it is not known (first call)
        # or if one of the sites has been restarted. The computed value cannot
        # change during the monitoring since it just reflects the configuration.
        # That way we save CPU resources since the computation of the
        # scheduled checks rate needs to loop over all hosts and services.
        if last_program_start != program_start:
            # These days, we configure the correct check interval for Check_MK checks.
            # We do this correctly for active and for passive ones. So we can simply
            # use the check_interval of all services. Hosts checks are ignored.
            #
            # Manually added services without check_interval could be a problem, but
            # we have no control there.
            scheduled_rate = sites.live().query_summed_stats(
                "GET services\n"
                "Stats: suminv check_interval\n")[0] / 60.0

        percentage = 100.0 * current_rate / scheduled_rate
        title = _("Scheduled service check rate: %.1f/s, current rate: %.1f/s, that is "
                  "%.0f%% of the scheduled rate") % \
                (scheduled_rate, current_rate, percentage)

    except Exception as e:
        # Best effort: report zeroed values with the error in the title
        scheduled_rate = 0.0
        program_start = 0
        percentage = 0
        last_perc = 0.0
        title = _("No performance data: %s") % e

    html.write(
        json.dumps({
            "scheduled_rate": scheduled_rate,
            "program_start": program_start,
            "percentage": percentage,
            "last_perc": last_perc,
            "title": title,
        }))
def page(self) -> None:
    """Deliver a diagnostics dump tarball of the given site for download."""
    site = html.request.get_ascii_input_mandatory("site")
    tarfile_name = html.request.get_ascii_input_mandatory("tarfile_name")
    # Reject path-like or otherwise invalid file names before touching disk
    Filename().validate_value(tarfile_name, "tarfile_name")

    dump_content = self._get_diagnostics_dump_file(site, tarfile_name)

    html.set_output_format("x-tgz")
    disposition = "Attachment; filename=%s" % tarfile_name
    html.response.headers["Content-Disposition"] = disposition
    html.write_binary(dump_content)
def ajax_message_read():
    """Delete the given GUI notification message and acknowledge with OK/ERROR."""
    html.set_output_format("json")
    message_id = html.request.var('id')
    try:
        notify.delete_gui_message(message_id)
        html.write("OK")
    except Exception:
        if config.debug:
            raise
        html.write("ERROR")
def page(self):
    """Return the state of the requested BI aggregations plus the layout
    information (explicit, template-based or default) the frontend should
    use for each of them, as a JSON-serializable dict."""
    filter_names = json.loads(html.request.var("aggregations", "[]"))

    # A layout explicitly requested via URL wins - but only if such a
    # template layout actually exists
    forced_layout_id = html.request.var("layout_id")
    if forced_layout_id not in BILayoutManagement.get_all_bi_template_layouts():
        forced_layout_id = None

    state_data = bi.api_get_aggregation_state(filter_names=filter_names)
    aggregation_info = {"aggregations": {}}
    aggregation_layouts = BILayoutManagement.get_all_bi_aggregation_layouts()
    for row in state_data["rows"]:
        aggr_name = row["tree"]["aggr_name"]
        if filter_names and aggr_name not in filter_names:
            continue
        # Convert the raw aggregation tree state into the node hierarchy
        # format consumed by the visualization frontend
        visual_mapper = NodeVisualizationBIDataMapper()
        aggr_treestate = row["tree"]["aggr_treestate"]
        hierarchy = visual_mapper.consume(aggr_treestate)

        data = {}
        data["hierarchy"] = hierarchy
        data["groups"] = row["groups"]
        data["data_timestamp"] = int(time.time())

        if not forced_layout_id:
            if aggr_name in aggregation_layouts:
                # A layout was explicitly saved for this aggregation
                data["use_layout_id"] = aggr_name
                data["use_layout"] = aggregation_layouts[aggr_name]
                data["layout_origin"] = _("Explicit set")
            else:
                # Fall back to the template configured in the aggregation
                # rule, if it still exists - otherwise to the default layout
                template_layout_id = row["tree"]["aggr_tree"][
                    "use_layout_id"]
                if template_layout_id and template_layout_id in BILayoutManagement.get_all_bi_template_layouts():
                    data["template_layout_id"] = template_layout_id
                    data["use_layout"] = BILayoutManagement.load_bi_template_layout(
                        template_layout_id)
                    data["layout_origin"] = _("Template: %s" % template_layout_id)
                else:
                    data["use_default_layout"] = config.default_bi_layout
                    data["layout_origin"] = _(
                        "Default layout: %s" % config.default_bi_layout.title())

        aggregation_info["aggregations"][row["tree"]["aggr_name"]] = data

    if forced_layout_id:
        # Globally enforced layout is delivered once at top level
        aggregation_info["use_layout"] = BILayoutManagement.load_bi_template_layout(
            forced_layout_id)

    html.set_output_format("json")
    return aggregation_info
def ajax_snapin():
    """Renders and returns the contents of the requested sidebar snapin(s) in JSON format"""
    html.set_output_format("json")
    # Update online state of the user (if enabled)
    if config.user.id is None:
        raise Exception("no user ID")
    userdb.update_user_access_time(config.user.id)

    user_config = UserSidebarConfig(config.user, config.sidebar)

    # Either a single snapin ("name") or a comma separated list ("names")
    snapin_id = html.request.var("name")
    snapin_ids = [snapin_id
                 ] if snapin_id else html.request.get_str_input_mandatory(
                     "names", "").split(",")

    snapin_code: List[str] = []
    for snapin_id in snapin_ids:
        try:
            snapin_instance = user_config.get_snapin(snapin_id).snapin_type()
        except KeyError:
            continue  # Skip not existing snapins

        if not config.user.may(snapin_instance.permission_name()):
            continue

        # When restart snapins are about to be refreshed, only render
        # them, when the core has been restarted after their initial
        # rendering
        if not snapin_instance.refresh_regularly(
        ) and snapin_instance.refresh_on_restart():
            since = html.request.get_float_input_mandatory('since', 0)
            # Use the latest program start across all sites as restart marker
            newest = since
            for site in sites.states().values():
                prog_start = site.get("program_start", 0)
                if prog_start > newest:
                    newest = prog_start
            if newest <= since:  # no restart
                # Empty string tells the frontend to keep the current content
                snapin_code.append(u'')
                continue

        # Capture the snapin's HTML output instead of sending it directly
        with html.plugged():
            try:
                snapin_instance.show()
            except Exception as e:
                # Render the exception into the snapin area but keep serving
                # the remaining snapins
                write_snapin_exception(e)
                e_message = _("Exception during snapin refresh (snapin \'%s\')"
                             ) % snapin_instance.type_name()
                logger.error("%s %s: %s", html.request.requested_url, e_message,
                             traceback.format_exc())
            finally:
                snapin_code.append(html.drain())

    html.write(json.dumps(snapin_code))
def _ajax_tag_tree(self):
    """Switch the currently displayed virtual host tree and acknowledge."""
    html.set_output_format("json")
    self._load()
    requested_tree = html.request.var("tree_id")
    if requested_tree not in self._trees:
        raise MKUserError("conf", _("This virtual host tree does not exist."))
    self._current_tree_id = requested_tree
    self._save_user_settings()
    html.write("OK")
def page(self) -> None:
    """Deliver a diagnostics dump tarball for download (permission guarded)."""
    if not config.user.may("wato.diagnostics"):
        raise MKAuthException(
            _("Sorry, you lack the permission for downloading diagnostics dumps."))

    site = html.request.get_ascii_input_mandatory("site")
    tarfile_name = html.request.get_ascii_input_mandatory("tarfile_name")
    dump_content = self._get_diagnostics_dump_file(site, tarfile_name)

    html.set_output_format("x-tgz")
    disposition = "Attachment; filename=%s" % tarfile_name
    html.response.headers["Content-Disposition"] = disposition
    html.write_binary(dump_content)
def _render_exception(e, title=""):
    """Show an exception either as plain text or as a regular HTML error page."""
    heading = "%s: " % title if title else title
    if _plain_error():
        # Plain-text mode: dump the message without any page framing
        html.set_output_format("text")
        html.write("%s%s\n" % (heading, e))
    elif not _fail_silently():
        html.header(heading)
        html.show_error(e)
        html.footer()
def ajax_graph():
    """Render a graph for the JSON context given in the request and return it."""
    html.set_output_format("json")
    try:
        raw_context = html.request.get_str_input_mandatory("context")
        graph_context = json.loads(raw_context)
        html.write(json.dumps(render_ajax_graph(graph_context)))
    except Exception as e:
        logger.error("Ajax call ajax_graph.py failed: %s\n%s", e,
                     traceback.format_exc())
        if config.debug:
            raise
        html.write("ERROR: %s" % e)
def _render_exception(e: Exception, title: str) -> Response:
    """Show an exception as plain text or as a regular HTML error page and
    return the response object."""
    if plain_error():
        # Plain-text mode: dump the message without any page framing
        html.set_output_format("text")
        prefix = "%s: " % title if title else title
        html.write("%s%s\n" % (prefix, e))
    elif not fail_silently():
        html.header(title, Breadcrumb())
        html.show_error(str(e))
        html.footer()
    return html.response
def page_api():
    """Entry point of the legacy web API: dispatch the requested API action
    and write the result dict in the requested output format (json/python).

    All errors are converted into a response dict with result_code 1 so the
    client always receives a well-formed payload."""
    try:
        pretty_print = False
        if not html.request.has_var("output_format"):
            html.set_output_format("json")
        if html.output_format not in _FORMATTERS:
            # Fall back to a guaranteed-available formatter so the error
            # below can still be serialized
            html.set_output_format("python")
            raise MKUserError(
                None, "Only %s are supported as output formats" %
                " and ".join('"%s"' % f for f in _FORMATTERS))

        # TODO: Add some kind of helper for boolean-valued variables?
        pretty_print_var = html.request.get_str_input_mandatory(
            "pretty_print", "no").lower()
        if pretty_print_var not in ("yes", "no"):
            raise MKUserError(None, 'pretty_print must be "yes" or "no"')
        pretty_print = pretty_print_var == "yes"

        api_call = _get_api_call()
        _check_permissions(api_call)
        watolib.init_wato_datastructures(
        )  # Initialize host and site attributes
        request_object = _get_request(api_call)
        _check_formats(api_call, request_object)
        _check_request_keys(api_call, request_object)
        response = _execute_action(api_call, request_object)

    except MKAuthException as e:
        response = {
            "result_code": 1,
            "result": _("Authorization Error. Insufficent permissions for '%s'") % e
        }
    except MKException as e:
        response = {
            "result_code": 1,
            "result": _("Check_MK exception: %s") % e
        }
    except Exception:
        if config.debug:
            raise
        logger.exception("error handling web API call")
        response = {
            "result_code": 1,
            "result": _("Unhandled exception: %s") % traceback.format_exc(),
        }

    # Index 1 selects the pretty-printing variant of the formatter pair
    html.write(
        _FORMATTERS[html.output_format][1 if pretty_print else 0](response))
def ajax_set_snapin_site():
    """Persist the per-snapin site filter selection of the current user."""
    html.set_output_format("json")

    ident = html.request.var("ident")
    if ident not in snapin_registry:
        raise MKUserError(None, _("Invalid ident"))

    site = html.request.var("site")
    valid_sites = dict([("", _("All sites"))] + config.site_choices())
    if site not in valid_sites:
        raise MKUserError(None, _("Invalid site"))

    # Load with lock, update the single entry and write back
    stored_sites = config.user.load_file("sidebar_sites", {}, lock=True)
    stored_sites[ident] = site
    config.user.save_file("sidebar_sites", stored_sites)
def handle_page(self):
    """The page handler, called by the page registry"""
    html.set_output_format("json")
    try:
        response = {"result_code": 0, "result": self.page()}
    except MKException as e:
        # Expected application errors: report them to the client
        response = {"result_code": 1, "result": "%s" % e}
    except Exception as e:
        if config.debug:
            raise
        logger.exception("error calling AJAX page handler")
        response = {"result_code": 1, "result": "%s" % e}
    html.write(json.dumps(response))
def _export_audit_log(self, audit: List[AuditLogStore.Entry]) -> ActionResult:
    """Send the given audit log entries to the client as a CSV download."""
    html.set_output_format("csv")
    if self._options["display"] == "daily":
        filename = "wato-auditlog-%s_%s.csv" % (render.date(
            time.time()), render.time_of_day(time.time()))
    else:
        filename = "wato-auditlog-%s_%s_days.csv" % (render.date(
            time.time()), self._options["display"][1])
    html.response.headers[
        "Content-Disposition"] = "attachment; filename=\"%s\"" % filename

    titles = [
        _('Date'),
        _('Time'),
        _('Object type'),
        _('Object'),
        _('User'),
        _('Action'),
        _('Summary'),
    ]
    if self._show_details:
        titles.append(_('Details'))

    html.write(','.join(titles) + '\n')
    for entry in audit:
        columns = [
            render.date(int(entry.time)),
            render.time_of_day(int(entry.time)),
            entry.object_ref.object_type.name if entry.object_ref else "",
            entry.object_ref.ident if entry.object_ref else "",
            entry.user_id,
            entry.action,
            # Replace double quotes to keep the hand-rolled CSV quoting valid
            '"' + escaping.strip_tags(entry.text).replace('"', "'") + '"',
        ]
        if self._show_details:
            columns.append(
                '"' + escaping.strip_tags(entry.diff_text).replace('"', "'") + '"')
        html.write(','.join(columns) + '\n')

    return FinalizeRequest(code=200)
def page(self):
    """Answer the automation login handshake: return version information and
    the login secret (creating one on demand) as a Python repr payload."""
    if not config.user.may("wato.automation"):
        raise MKAuthException(_("This account has no permission for automation."))

    html.set_output_format("python")

    if not html.request.has_var("_version"):
        # Be compatible to calls from sites using versions before 1.5.0p10.
        # Deprecate with 1.7 by throwing an exception in this situation.
        response = _get_login_secret(create_on_demand=True)
    else:
        response = {
            "version": cmk_version.__version__,
            "edition_short": cmk_version.edition_short(),
            "login_secret": _get_login_secret(create_on_demand=True),
        }
    # The peer site parses this with ast.literal_eval-style evaluation,
    # hence repr() instead of JSON  -- presumably; verify against caller
    html.write_html(repr(response))
def ajax_render_graph_content():
    """Render graph content HTML for the request parameters and return it as JSON."""
    html.set_output_format("json")
    try:
        graph_request = html.get_request()
        rendered = render_graph_content_html(graph_request["graph_recipe"],
                                             graph_request["graph_data_range"],
                                             graph_request["graph_render_options"])
        response = {"result_code": 0, "result": rendered}
    except Exception:
        logger.exception("could not render graph")
        response = {
            "result_code": 1,
            "result": _("Unhandled exception: %s") % traceback.format_exc(),
        }
    html.write(json.dumps(response))
def _show_crash_dump_message(crash: 'GUICrashReport', plain_text: bool, fail_silently: bool,
                             show_crash_link: Optional[bool]) -> None:
    """Create a crash dump from a GUI exception and display a message to the user"""
    if show_crash_link is None:
        show_crash_link = config.user.may("general.see_crash_reports")

    title = _("Internal error")
    message = u"%s: %s<br>\n<br>\n" % (title, crash.crash_info["exc_value"])
    # Do not reveal crash context information to unauthenticated users or not permitted
    # users to prevent disclosure of internal information
    if not show_crash_link:
        message += _(
            "An internal error occurred while processing your request. "
            "You can report this issue to your Checkmk administrator. "
            "Detailed information can be found on the crash report page "
            "or in <tt>var/log/web.log</tt>.")
    else:
        # Permitted users get a direct link to the detailed crash report
        crash_url = makeuri(
            request,
            [
                ("site", config.omd_site()),
                ("crash_id", crash.ident_to_text()),
            ],
            filename="crash.py",
        )
        message += _(
            "An internal error occured while processing your request. "
            "You can report this issue to the Checkmk team to help "
            "fixing this issue. Please open the <a href=\"%s\">crash report page</a> "
            "and use the form for reporting the problem.") % crash_url

    if plain_text:
        # Plain-text mode: strip all HTML markup from the message
        html.set_output_format("text")
        html.write("%s\n" % escaping.strip_tags(message))
        return

    if fail_silently:
        return

    html.header(title, Breadcrumb())
    html.show_error(message)
    html.footer()
def _ajax_switch_masterstate(self) -> None:
    """Toggle one of the core's master control switches (notifications,
    check execution, ...) on the given site and re-render the snapin."""
    html.set_output_format("text")
    if not config.user.may("sidesnap.master_control"):
        return

    if not html.check_transaction():
        return

    site = html.request.get_ascii_input_mandatory("site")
    column = html.request.get_ascii_input_mandatory("switch")
    state = html.request.get_integer_input_mandatory("state")
    # Map (switch name, desired state) to the livestatus external command
    commands = {
        ("enable_notifications", 1): "ENABLE_NOTIFICATIONS",
        ("enable_notifications", 0): "DISABLE_NOTIFICATIONS",
        ("execute_service_checks", 1): "START_EXECUTING_SVC_CHECKS",
        ("execute_service_checks", 0): "STOP_EXECUTING_SVC_CHECKS",
        ("execute_host_checks", 1): "START_EXECUTING_HOST_CHECKS",
        ("execute_host_checks", 0): "STOP_EXECUTING_HOST_CHECKS",
        ("enable_flap_detection", 1): "ENABLE_FLAP_DETECTION",
        ("enable_flap_detection", 0): "DISABLE_FLAP_DETECTION",
        ("process_performance_data", 1): "ENABLE_PERFORMANCE_DATA",
        ("process_performance_data", 0): "DISABLE_PERFORMANCE_DATA",
        ("enable_event_handlers", 1): "ENABLE_EVENT_HANDLERS",
        ("enable_event_handlers", 0): "DISABLE_EVENT_HANDLERS",
    }

    command = commands.get((column, state))
    if not command:
        html.write(
            _("Command %s/%d not found") % (escaping.escape_attribute(column), state))
        return

    # Send the command and wait (up to 10s) until the core reflects the
    # new state before rendering the snapin again
    sites.live().command("[%d] %s" % (int(time.time()), command), site)
    sites.live().set_only_sites([site])
    sites.live().query(
        "GET status\nWaitTrigger: program\nWaitTimeout: 10000\nWaitCondition: %s = %d\nColumns: %s\n"
        % (column, state, column))
    sites.live().set_only_sites()

    self.show()
def _ajax_switch_site(self):
    """Enable/disable sites in the user's personal site configuration."""
    html.set_output_format("json")
    # _site_switch=sitename1:on,sitename2:off,...
    if not config.user.may("sidesnap.sitestatus"):
        return

    if not html.check_transaction():
        return

    switch_var = html.request.var("_site_switch")
    if not switch_var:
        return

    for entry in switch_var.split(","):
        site_name, onoff = entry.split(":")
        if site_name not in config.sitenames():
            continue
        site_settings = config.user.siteconf.get(site_name, {})
        site_settings["disabled"] = onoff != "on"
        config.user.siteconf[site_name] = site_settings
    config.user.save_site_config()
def handle_page(self) -> None: """The page handler, called by the page registry""" # FIXME: cyclical link between crash_reporting.py and pages.py from cmk.gui.crash_reporting import handle_exception_as_gui_crash_report html.set_output_format("json") try: action_response = self.page() response = { "result_code": 0, "result": action_response, "severity": "success" } except MKMissingDataError as e: response = { "result_code": 1, "result": "%s" % e, "severity": "success" } except MKException as e: response = { "result_code": 1, "result": "%s" % e, "severity": "error" } except Exception as e: if config.debug: raise logger.exception("error calling AJAX page handler") handle_exception_as_gui_crash_report( plain_error=True, show_crash_link=getattr(g, "may_see_crash_reports", False), ) response = { "result_code": 1, "result": "%s" % e, "severity": "error" } html.write(json.dumps(response))
def page(self):
    """Compute the network topology meshes for the requested configuration
    and return them (hierarchy + links per mesh) as a JSON-serializable dict.

    Request variables (inside "topology_config"):
      hostnames: a list of mandatory hostnames
      mesh_depth: number of hops from growth root
      growth_forbidden: block further traversal at the given nodes
    """
    topo_config = json.loads(html.request.var("topology_config"))
    topology = self._get_topology_instance(topo_config)
    meshes = topology.compute()

    def get_topology_info(hostname, mesh):
        # One frontend node record per host in the mesh
        return {
            "hostname": hostname,
            "icon": topology.get_host_icon_image(hostname),
            "node_type": "topology",
            "has_no_parents": topology.is_root_node(hostname),
            "growth_root": topology.is_growth_root(hostname),
            "growth_possible": topology.may_grow(hostname, mesh),
            "growth_forbidden": topology.growth_forbidden(hostname),
            "name": hostname,
            "state": 0,
        }

    # NOTE(review): the original additionally initialized
    # topology_info = {"topology_meshes": {}} first, which was dead code -
    # it was unconditionally replaced by the dict below.
    topology_info = {
        "topology_chunks": {},
    }

    topology_info["headline"] = topology.title()

    for mesh in meshes:
        if not mesh:
            continue

        # Pick the first (sorted) growth root contained in the mesh as root host
        growth_roots = sorted(
            mesh.intersection(set(topo_config["growth_root_nodes"])))
        mesh_root = growth_roots[0]
        mesh_info = get_topology_info(mesh_root, mesh)

        # Order the hosts deterministically, root first
        mesh.remove(mesh_root)
        mesh = sorted(list(mesh))
        mesh.insert(0, mesh_root)

        if mesh:
            mesh_info["children"] = []
            mesh_info["children"].extend(
                [get_topology_info(x, mesh) for x in mesh[1:]])

        mesh_links = set()
        # Incoming connections
        for idx, hostname in enumerate(mesh):
            for child in topology.get_host_incoming(hostname):
                if child in mesh:
                    mesh_links.add((mesh.index(child), idx))
        # Outgoing connections
        for idx, hostname in enumerate(mesh):
            for parent in topology.get_host_outgoing(hostname):
                if parent in mesh:
                    mesh_links.add((idx, mesh.index(parent)))

        topology_info["topology_chunks"][mesh_root] = {
            "layout": {
                "config": {
                    "line_config": {
                        "style": "straight"
                    }
                }
            },
            "hierarchy": mesh_info,
            "links": list(mesh_links)
        }

    html.set_output_format("json")
    return topology_info
def ajax_fold():
    """Persist whether the user has folded the sidebar away."""
    html.set_output_format("json")
    sidebar_config = UserSidebarConfig(config.user, config.sidebar)
    sidebar_config.folded = html.request.var("fold") == "yes"
    sidebar_config.save()
def send(cls, pdf_source, sendas):
    """Deliver a rendered PDF inline to the browser under the given name."""
    html.set_output_format("pdf")
    disposition = "inline; filename=" + ensure_str(sendas)
    html.response.headers["Content-Disposition"] = disposition
    html.write_binary(pdf_source)