def _get_time_range_of(self, what: str) -> Union[None, int, float]:
    """Read the time bound named *what* from the request variables.

    The companion variable "<ident>_<what>_range" selects how the value is
    interpreted: "abs" (a YYYY-MM-DD date), "unix" (a raw timestamp) or a
    number of seconds per unit for a relative range. Returns a unix
    timestamp, or None when the range is unset or the input is invalid.
    """
    prefix = self.ident + "_" + what
    range_kind = request.var(prefix + "_range")

    if range_kind is None:
        return None

    if range_kind == "abs":
        try:
            return time.mktime(
                time.strptime(request.get_str_input_mandatory(prefix), "%Y-%m-%d"))
        except Exception:
            # Invalid date: record a user error instead of failing the page.
            user_errors.add(
                MKUserError(
                    prefix,
                    _("Please enter the date in the format YYYY-MM-DD.")))
            return None

    if range_kind == "unix":
        return request.get_integer_input_mandatory(prefix)

    # Relative range: the range name itself is the unit length in seconds.
    try:
        unit_count = request.get_integer_input_mandatory(prefix)
        return int(time.time()) - unit_count * int(range_kind)
    except Exception:
        # Bad count: clear the variable so the form shows an empty field.
        request.set_var(prefix, "")
        return None
def test_request_processing(request_context):
    # Smoke test for request variable handling: set two variables, then read
    # them back through the typed accessors and the filtered request dump.
    # NOTE(review): an annotated variant of this test also exists in this
    # file; if both live in the same module, the later one shadows this one.
    global_request.set_var("varname", "1a")
    global_request.set_var("varname2", "1")
    global_request.get_unicode_input("varname", deflt="lol")
    global_request.get_integer_input_mandatory("varname2")
    global_request.get_request(exclude_vars=["varname2"])
def test_request_processing(request_context: RequestContextFixture) -> None:
    # Smoke test for request variable handling (type-annotated variant using
    # get_str_input instead of the older get_unicode_input accessor).
    global_request.set_var("varname", "1a")
    global_request.set_var("varname2", "1")
    global_request.get_str_input("varname", deflt="lol")
    global_request.get_integer_input_mandatory("varname2")
    global_request.get_request(exclude_vars=["varname2"])
def action(self) -> ActionResult:
    """Handle delete ("_delete") and reorder ("_move"/"_index") actions on the
    list of LDAP connections, then redirect back to this mode."""
    if not transactions.check_transaction():
        return redirect(self.mode_url())

    # Lock the config so concurrent edits cannot interleave with ours.
    connections = load_connection_config(lock=True)
    if request.has_var("_delete"):
        index = request.get_integer_input_mandatory("_delete")
        connection = connections[index]
        self._add_change(
            "delete-ldap-connection",
            _("Deleted LDAP connection %s") % (connection["id"]))
        del connections[index]
        save_connection_config(connections)

    elif request.has_var("_move"):
        from_pos = request.get_integer_input_mandatory("_move")
        to_pos = request.get_integer_input_mandatory("_index")
        connection = connections[from_pos]
        self._add_change(
            "move-ldap-connection",
            _("Changed position of LDAP connection %s to %d") %
            (connection["id"], to_pos),
        )
        # Remove first so the target index refers to the shrunken list.
        del connections[from_pos]  # make to_pos now match!
        connections[to_pos:to_pos] = [connection]
        save_connection_config(connections)

    return redirect(self.mode_url())
def action(self) -> ActionResult:
    """Create the requested number of random hosts and return to the folder view."""
    if not transactions.check_transaction():
        return redirect(mode_url("folder", folder=watolib.Folder.current().path()))

    # The three sizing parameters all arrive as mandatory integer variables.
    count, folders, levels = (
        request.get_integer_input_mandatory(varname)
        for varname in ("count", "folders", "levels"))

    created = self._create_random_hosts(watolib.Folder.current(), count, folders, levels)
    flash(_("Added %d random hosts.") % created)
    return redirect(mode_url("folder", folder=watolib.Folder.current().path()))
def _move_tag_group(self) -> ActionResult:  # pylint: disable=useless-return
    """Reorder the tag groups according to the "_move"/"_index" request variables."""
    source_index = request.get_integer_input_mandatory("_move")
    target_index = request.get_integer_input_mandatory("_index")

    groups = self._tag_config.tag_groups
    groups.insert(target_index, groups.pop(source_index))

    # Reject the new order if it breaks the tag configuration.
    try:
        self._tag_config.validate_config()
    except MKGeneralException as e:
        raise MKUserError(None, "%s" % e)

    self._tag_config_file.save(self._tag_config.get_dict_format())
    self._load_effective_config()
    watolib.add_change("edit-tags", _("Changed order of tag groups"))
    return None
def handle_acknowledgement():
    """Process werk acknowledgement requests.

    "_werk_ack" acknowledges a single incompatible werk; "_ack_all"
    acknowledges every unacknowledged incompatible werk at once.
    """
    if not transactions.check_transaction():
        return

    if request.var("_werk_ack"):
        werk_id = request.get_integer_input_mandatory("_werk_ack")
        if werk_id not in g_werks:
            raise MKUserError("werk", _("This werk does not exist."))

        werk = g_werks[werk_id]
        # Only werks still flagged as unacknowledged-incompatible need action.
        if werk["compatible"] == "incomp_unack":
            acknowledge_werk(werk)
            message = _("Werk %s - %s has been acknowledged.") % (
                render_werk_id(werk, with_link=True),
                render_werk_title(werk),
            )
            html.show_message(message)
            load_werks()  # reload ack states after modification
            render_unacknowleged_werks()

    elif request.var("_ack_all"):
        num_acked = len(unacknowledged_incompatible_werks())
        acknowledge_all_werks()
        flash(_("%d incompatible Werks have been acknowledged.") % num_acked)
        load_werks()  # reload ack states after modification
        html.reload_whole_page()
def action(self) -> ActionResult:
    """Dispatch ack/unack/disable/enable requests for a single analyze-config test."""
    if not transactions.check_transaction():
        return None

    test_id = request.var("_test_id")
    site_id = request.var("_site_id")
    status_id = request.get_integer_input_mandatory("_status_id", 0)
    if not test_id:
        raise MKUserError("_ack_test_id", _("Needed variable missing"))

    do = request.var("_do")
    if do in ["ack", "unack"]:
        # The (un)acknowledge operations additionally need a valid site.
        if not site_id:
            raise MKUserError("_ack_site_id", _("Needed variable missing"))
        if site_id not in activation_sites():
            raise MKUserError("_ack_site_id", _("Invalid site given"))
        if do == "ack":
            self._acknowledge_test(test_id, site_id, status_id)
        else:
            self._unacknowledge_test(test_id, site_id, status_id)
    elif do == "disable":
        self._disable_test(test_id)
    elif do == "enable":
        self._enable_test(test_id)
    else:
        raise NotImplementedError()

    return None
def _ajax_speedometer(self):
    """AJAX endpoint: emit JSON with the current vs. scheduled service check
    rate for the speedometer snapin. On any failure a zeroed payload with an
    error title is returned instead of an HTTP error."""
    response.set_content_type("application/json")
    try:
        # Try to get values from last call in order to compute a
        # drifting speedometer-needle and to reuse the scheduled
        # check rate.
        # TODO: Do we need a get_float_input_mandatory?
        last_perc = float(request.get_str_input_mandatory("last_perc"))
        scheduled_rate = float(
            request.get_str_input_mandatory("scheduled_rate"))
        last_program_start = request.get_integer_input_mandatory(
            "program_start")

        # Get the current rates and the program start time. If there
        # are more than one site, we simply add the start times.
        data = sites.live().query_summed_stats(
            "GET status\n"
            "Columns: service_checks_rate program_start")
        current_rate = data[0]
        program_start = data[1]

        # Recompute the scheduled_rate only if it is not known (first call)
        # or if one of the sites has been restarted. The computed value cannot
        # change during the monitoring since it just reflects the configuration.
        # That way we save CPU resources since the computation of the
        # scheduled checks rate needs to loop over all hosts and services.
        if last_program_start != program_start:
            # These days, we configure the correct check interval for Checkmk checks.
            # We do this correctly for active and for passive ones. So we can simply
            # use the check_interval of all services. Hosts checks are ignored.
            #
            # Manually added services without check_interval could be a problem, but
            # we have no control there.
            scheduled_rate = (sites.live().query_summed_stats(
                "GET services\n"
                "Stats: suminv check_interval\n")[0] / 60.0)

        percentage = 100.0 * current_rate / scheduled_rate
        title = _(
            "Scheduled service check rate: %.1f/s, current rate: %.1f/s, that is "
            "%.0f%% of the scheduled rate") % (scheduled_rate, current_rate,
                                               percentage)
    except Exception as e:
        # Fall back to a neutral payload; the title carries the error text.
        scheduled_rate = 0.0
        program_start = 0
        percentage = 0
        last_perc = 0.0
        title = _("No performance data: %s") % e

    response.set_data(
        json.dumps({
            "scheduled_rate": scheduled_rate,
            "program_start": program_start,
            "percentage": percentage,
            "last_perc": last_perc,
            "title": title,
        }))
def __init__(self):
    # Seed every audit log option with its valuespec default before the
    # base-class initialization runs.
    self._options = dict(
        (key, vs.default_value()) for key, vs in self._audit_log_options())
    super().__init__()
    self._store = AuditLogStore(AuditLogStore.make_path())
    # Details are shown by default; "show_details=0" in the request hides them.
    self._show_details = request.get_integer_input_mandatory(
        "show_details", 1) == 1
def _from_vars(self):
    """Populate the parent scan state from the current request variables."""
    self._start = bool(request.var("_start"))
    # 'all' not set -> only scan checked hosts in current folder, no recursion
    # otherwise: all host in this folder, maybe recursively
    self._all = bool(request.var("all"))
    self._complete_folder = self._all

    # Ignored during initial form display
    self._settings = ParentScanSettings(
        where=request.get_ascii_input_mandatory("where", "subfolder"),
        alias=request.get_unicode_input_mandatory("alias", "").strip(),
        recurse=html.get_checkbox("recurse") or False,
        select=request.get_ascii_input_mandatory("select", "noexplicit"),
        timeout=request.get_integer_input_mandatory("timeout", 8),
        probes=request.get_integer_input_mandatory("probes", 2),
        max_ttl=request.get_integer_input_mandatory("max_ttl", 10),
        force_explicit=html.get_checkbox("force_explicit") or False,
        ping_probes=request.get_integer_input_mandatory("ping_probes", 5),
    )

    self._job = ParentScanBackgroundJob()
def page_werk():
    """Render the detail page for a single werk (change-log entry).

    The werk id comes from the mandatory "werk" request variable; an unknown
    id raises MKUserError.
    """
    load_werks()
    werk_id = request.get_integer_input_mandatory("werk")
    if werk_id not in g_werks:
        raise MKUserError("werk", _("This werk does not exist."))
    werk = g_werks[werk_id]

    title = ("%s %s - %s") % (_("Werk"), render_werk_id(
        werk, with_link=False), werk["title"])

    # Breadcrumb: help menu -> change log overview -> this werk.
    breadcrumb = make_main_menu_breadcrumb(mega_menu_registry["help_links"])
    breadcrumb.append(
        BreadcrumbItem(
            title=_("Change log (Werks)"),
            url="change_log.py",
        ))
    breadcrumb.append(make_current_page_breadcrumb_item(title))
    html.header(title, breadcrumb, _page_menu_werk(breadcrumb, werk))

    html.open_table(class_=["data", "headerleft", "werks"])

    def werk_table_row(caption, content, css=None):
        # Emit one label/value row of the werk detail table.
        html.open_tr()
        html.th(caption)
        html.td(content, class_=css)
        html.close_tr()

    translator = cmk.utils.werks.WerkTranslator()
    werk_table_row(_("ID"), render_werk_id(werk, with_link=False))
    werk_table_row(_("Title"), html.render_b(render_werk_title(werk)))
    werk_table_row(_("Component"), translator.component_of(werk))
    werk_table_row(_("Date"), render_werk_date(werk))
    werk_table_row(_("Checkmk Version"), werk["version"])
    werk_table_row(_("Level"),
                   translator.level_of(werk),
                   css="werklevel werklevel%d" % werk["level"])
    werk_table_row(_("Class"),
                   translator.class_of(werk),
                   css="werkclass werkclass%s" % werk["class"])
    werk_table_row(
        _("Compatibility"),
        translator.compatibility_of(werk),
        css="werkcomp werkcomp%s" % werk["compatible"],
    )
    werk_table_row(_("Description"), render_werk_description(werk), css="nowiki")

    html.close_table()
    html.footer()
def ajax_graph_hover():
    """AJAX endpoint: return the hover popup data for a graph at a point in time.

    Reads the JSON-encoded "context" and the integer "hover_time" from the
    request. Errors are logged; in debug mode they are re-raised, otherwise
    an error string is returned as the response body.
    """
    response.set_content_type("application/json")
    try:
        raw_context = request.get_str_input_mandatory("context")
        graph_context = json.loads(raw_context)
        hover_time = request.get_integer_input_mandatory("hover_time")
        hover_data = render_ajax_graph_hover(graph_context, hover_time)
        response.set_data(json.dumps(hover_data))
    except Exception as e:
        logger.error("Ajax call ajax_graph_hover.py failed: %s\n%s", e,
                     traceback.format_exc())
        if config.debug:
            raise
        response.set_data("ERROR: %s" % e)
def _ajax_switch_masterstate(self) -> None:
    """AJAX endpoint: toggle one global master-control flag on a single site
    via a livestatus command, wait for the change to take effect, then
    re-render the snapin."""
    response.set_content_type("text/plain")
    if not user.may("sidesnap.master_control"):
        return
    if not transactions.check_transaction():
        return

    site = request.get_ascii_input_mandatory("site")
    column = request.get_ascii_input_mandatory("switch")
    state = request.get_integer_input_mandatory("state")
    # Map (status column, desired state) to the external command to send.
    commands = {
        ("enable_notifications", 1): "ENABLE_NOTIFICATIONS",
        ("enable_notifications", 0): "DISABLE_NOTIFICATIONS",
        ("execute_service_checks", 1): "START_EXECUTING_SVC_CHECKS",
        ("execute_service_checks", 0): "STOP_EXECUTING_SVC_CHECKS",
        ("execute_host_checks", 1): "START_EXECUTING_HOST_CHECKS",
        ("execute_host_checks", 0): "STOP_EXECUTING_HOST_CHECKS",
        ("enable_flap_detection", 1): "ENABLE_FLAP_DETECTION",
        ("enable_flap_detection", 0): "DISABLE_FLAP_DETECTION",
        ("process_performance_data", 1): "ENABLE_PERFORMANCE_DATA",
        ("process_performance_data", 0): "DISABLE_PERFORMANCE_DATA",
        ("enable_event_handlers", 1): "ENABLE_EVENT_HANDLERS",
        ("enable_event_handlers", 0): "DISABLE_EVENT_HANDLERS",
    }

    command = commands.get((column, state))
    if not command:
        html.write_text(_("Command %s/%d not found") % (column, state))
        return

    sites.live().command("[%d] %s" % (int(time.time()), command), site)
    # Restrict the follow-up query to the affected site and block until the
    # flag actually flips (or the 10s WaitTimeout expires).
    sites.live().set_only_sites([site])
    sites.live().query(
        "GET status\nWaitTrigger: program\nWaitTimeout: 10000\nWaitCondition: %s = %d\nColumns: %s\n"
        % (column, state, column)
    )
    sites.live().set_only_sites()  # reset the site restriction

    self.show()
def tristate_value(self):
    """Return the tristate selection from the request, defaulting to self.deflt."""
    selected = request.get_integer_input_mandatory(self.varname, self.deflt)
    return selected
def _from_vars(self):
    """Read the search expression and the "show only modified" toggle."""
    self._search = get_search_expression()
    only_modified = request.get_integer_input_mandatory("_show_only_modified", 0)
    self._show_only_modified = only_modified == 1
def page(self) -> cmk.gui.pages.PageResult:
    """Render the major-version page when "major" is set (non-zero),
    otherwise the patch page."""
    wants_major = request.get_integer_input_mandatory("major", 0)
    if wants_major:
        self._major_page()
    else:
        self._patch_page()
def render_tree_json(row):
    """Serialize a BI aggregation tree (row["aggr_treestate"]) into nested
    JSON-ready dicts, expanding nodes up to the requested expansion level.

    Returns a ("", tree_dict) tuple. Tree nodes are tuples where index 0 is
    the actual state, 1 the assumed state, 2 the node info dict and (for
    non-leaves) 3 the child list; a 3-tuple is a leaf.
    """
    expansion_level = request.get_integer_input_mandatory("expansion_level", 999)

    # Discard the stored per-user expansion state when the requested
    # expansion level changed since it was saved.
    treestate = user.get_tree_states("bi")
    if expansion_level != user.bi_expansion_level:
        treestate = {}
        user.set_tree_states("bi", treestate)
        user.save_tree_states()

    def render_node_json(tree, show_host):
        # Convert a single tree node (without its children) to a dict.
        is_leaf = len(tree) == 3
        if is_leaf:
            service = tree[2].get("service")
            if not service:
                title = _("Host status")
            else:
                title = service
        else:
            title = tree[2]["title"]

        json_node = {
            "title": title,
            # 2 -> This element is currently in a scheduled downtime
            # 1 -> One of the subelements is in a scheduled downtime
            "in_downtime": tree[0]["in_downtime"],
            "acknowledged": tree[0]["acknowledged"],
            "in_service_period": tree[0]["in_service_period"],
        }

        if is_leaf:
            site, hostname = tree[2]["host"]
            json_node["site"] = site
            json_node["hostname"] = hostname

        # Check if we have an assumed state: comparing assumed state (tree[1]) with state (tree[0])
        if tree[1] and tree[0] != tree[1]:
            json_node["assumed"] = True
            effective_state = tree[1]
        else:
            json_node["assumed"] = False
            effective_state = tree[0]

        json_node["state"] = effective_state["state"]
        json_node["output"] = compute_output_message(effective_state, tree[2])
        return json_node

    def render_subtree_json(node, path, show_host):
        # Recursively convert a node and (if within the expansion level and
        # not hidden) its children.
        json_node = render_node_json(node, show_host)

        is_leaf = len(node) == 3
        is_next_level_open = len(path) <= expansion_level

        if not is_leaf and is_next_level_open:
            json_node["nodes"] = []
            for child_node in node[3]:
                if not child_node[2].get("hidden"):
                    new_path = path + [child_node[2]["title"]]
                    json_node["nodes"].append(render_subtree_json(child_node, new_path, show_host))
        return json_node

    root_node = row["aggr_treestate"]
    affected_hosts = row["aggr_hosts"]

    # Show the host name on leaves only when more than one host is involved.
    return "", render_subtree_json(root_node, [root_node[2]["title"]], len(affected_hosts) > 1)