def _validate_group(self, tag_group: TagGroup) -> None:
    """Validate a tag group: ID, title and the consistency of its tags.

    Raises:
        MKGeneralException: on any validation failure.
    """
    if not tag_group.id:
        raise MKGeneralException(_("Please specify an ID for your tag group."))
    _validate_tag_id(tag_group.id)

    # "site" is used internally and must not be shadowed by a user tag group.
    if tag_group.id == "site":
        raise MKGeneralException(
            _('The tag group "%s" is reserved for internal use.') % tag_group.id)

    # Tag groups were made builtin with ~1.4. Previously users could modify
    # these groups. These users now have the modified tag groups in their
    # user configuration and should be able to cleanup this using the GUI
    # for the moment.
    # With 1.7 we use cmk-update-config to enforce the user to cleanup this.
    # Then we can re-enable this consistency check.
    # builtin_config = BuiltinTagConfig()
    # if builtin_config.tag_group_exists(tag_group.id):
    #     raise MKGeneralException(
    #         _("You can not override the builtin tag group \"%s\".") % tag_group.id)

    if not tag_group.title:
        raise MKGeneralException(
            _('Please specify a title for your tag group "%s".') % tag_group.id)

    have_none_tag = False
    for nr, tag in enumerate(tag_group.tags):
        if tag.id or tag.title:
            # Normalize an empty tag ID to None: this is the "no tag set" choice.
            if not tag.id:
                tag.id = None

                if len(tag_group.tags) == 1:
                    raise MKGeneralException(
                        _("Can not use an empty tag ID with a single choice."))

                if have_none_tag:
                    raise MKGeneralException(_("Only one tag may be empty."))
                have_none_tag = True

            # Make sure tag ID is unique within this group
            for (n, x) in enumerate(tag_group.tags):
                if n != nr and x.id == tag.id:
                    raise MKGeneralException(
                        _('Tags IDs must be unique. You\'ve used "%s" twice.') % tag.id)

    if len(tag_group.tags) == 0:
        raise MKGeneralException(_("Please specify at least one tag."))

    # BUG FIX: tags[0] is a tag object and can never be None itself; the
    # intent is to reject a single-choice group whose only tag has no ID
    # (normalized to None above). Compare the tag's ID, not the tag.
    if len(tag_group.tags) == 1 and tag_group.tags[0].id is None:
        raise MKGeneralException(
            _("Tag groups with only one choice must have a tag ID."))
def default_rule_pack(rules):
    # type: (List[Dict[str, Any]]) -> Dict[str, Any]
    """Return the default rule pack wrapping the given rules."""
    return dict(
        id="default",
        title=_("Default rule pack"),
        rules=rules,
        disabled=False,
    )
def update_tag_group(self, tag_group: TagGroup) -> None:
    """Replace the existing tag group with the same ID by the given one.

    Raises:
        MKGeneralException: if no group with this ID exists, or the new
            group does not pass validation.
    """
    # Validate BEFORE mutating: the previous implementation replaced the
    # group first, so a failing validation left self.tag_groups already
    # modified with the invalid group.
    self._validate_group(tag_group)
    for idx, group in enumerate(self.tag_groups):
        if group.id == tag_group.id:
            self.tag_groups[idx] = tag_group
            break
    else:
        raise MKGeneralException(_('Unknown tag group "%s"') % tag_group.id)
def update_tag_group(self, tag_group):
    """Replace the existing tag group with the same ID by the given one.

    Raises:
        MKGeneralException: if no group with this ID exists, or the new
            group does not pass validation.
    """
    # Validate BEFORE mutating: the previous implementation replaced the
    # group first, so a failing validation left self.tag_groups already
    # modified with the invalid group.
    self._validate_group(tag_group)
    for idx, group in enumerate(self.tag_groups):
        if group.id == tag_group.id:
            self.tag_groups[idx] = tag_group
            break
    else:
        raise MKGeneralException(_("Unknown tag group \"%s\"") % tag_group.id)
def _manpage_browse_entries(cat, entries):
    """Interactive dialog loop showing the man pages of one catalog category."""
    checks = sorted((e["title"], e["name"]) for e in entries)
    choices = [(str(pos), title) for pos, (title, _name) in enumerate(checks, start=1)]
    while True:
        result = _dialog_menu(
            _("Man Page Browser"),
            _manpage_display_header(cat),
            choices,
            "0",
            _("Show Manpage"),
            _("Back"),
        )
        if not result[0]:
            break
        # Menu entries are numbered starting at 1
        selected_name = checks[int(result[1]) - 1][1]
        print_man_page(selected_name)
def interface_oper_states() -> Dict[Union[str, int], str]:
    """Return the mapping of interface operational status codes to names."""
    # State 8 ("degraded") is artificial, not an official ifOperStatus value.
    state_names = [
        _("up"),
        _("down"),
        _("testing"),
        _("unknown"),
        _("dormant"),
        _("not present"),
        _("lower layer down"),
        _("degraded"),
    ]
    return {number: name for number, name in enumerate(state_names, start=1)}
def ensure_utf8(logger: Optional[Logger] = None) -> None:
    """Export a UTF-8 capable LANG so mail(x) can send non-ASCII notifications.

    Make sure that mail(x) is using UTF-8. Otherwise we cannot send notifications
    with non-ASCII characters. Unfortunately we do not know whether C.UTF-8 is
    available. If e.g. mail detects a non-ASCII character in the mail body and
    the specified encoding is not available, it will silently not send the mail!
    Our resolution in future: use /usr/sbin/sendmail directly.
    Our resolution in the present: look with locale -a for an existing UTF encoding
    and use that.

    Raises:
        MKGeneralException: if no logger is given and 'locale -a' fails or
            no UTF-8 locale is installed. With a logger, problems are only
            logged and the function returns.
    """
    proc: subprocess.Popen = subprocess.Popen(  # pylint:disable=consider-using-with
        ["locale", "-a"],
        close_fds=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    std_out: bytes = proc.communicate()[0]
    exit_code: int = proc.returncode
    error_msg: str = _("Command 'locale -a' could not be executed. Exit code of command was")
    not_found_msg: str = _(
        "No UTF-8 encoding found in your locale -a! "
        "Please install appropriate locales."
    )
    if exit_code != 0:
        if not logger:
            raise MKGeneralException("%s: %r. %s" % (error_msg, exit_code, not_found_msg))
        logger.info("%s: %r" % (error_msg, exit_code))
        logger.info(not_found_msg)
        return

    locales_list: List[str] = std_out.decode("utf-8", "ignore").split("\n")
    for encoding in locales_list:
        el: str = encoding.lower()
        if "utf8" in el or "utf-8" in el or "utf.8" in el:
            encoding = encoding.strip()
            # BUG FIX: os.putenv() does not update os.environ, leaving the
            # interpreter's view of the environment inconsistent. Assigning
            # to os.environ calls putenv() internally and keeps both in sync.
            os.environ["LANG"] = encoding
            if logger:
                logger.debug("Setting locale for mail to %s.", encoding)
            break
    else:
        if not logger:
            raise MKGeneralException(not_found_msg)
        logger.info(not_found_msg)
    return
def _save_bytes_to_file(self, *, data: bytes, mode: int) -> None:
    """Atomically write *data* to self.path via a temporary file + rename.

    The temporary file is created in the same directory (so the rename stays
    on one filesystem), chmod'ed to *mode* before the data is written, and
    renamed over the destination. On failure the temporary file is removed.
    The lock on self.path is always released, even on success.

    Raises:
        MKGeneralException: on any write error (except MKTerminate/MKTimeout,
            which are passed through unchanged).
    """
    tmp_path = None
    try:
        with tempfile.NamedTemporaryFile(
            "wb",
            dir=str(self.path.parent),
            prefix=".%s.new" % self.path.name,
            delete=False,
        ) as tmp:
            tmp_path = Path(tmp.name)
            # Restrict permissions before any data is written
            tmp_path.chmod(mode)
            tmp.write(data)

            # The goal of the fsync would be to ensure that there is a consistent file after a
            # crash. Without the fsync it may happen that the file renamed below is just an empty
            # file. That may lead into unexpected situations during loading.
            #
            # Don't do a fsync here because this may run into IO performance issues. Even when
            # we can specify the fsync on a fd, the disk cache may be flushed completely because
            # the disk does not know anything about fds, only about blocks.
            #
            # For Checkmk 1.4 we can not introduce a good solution for this, because the changes
            # would affect too many parts of Checkmk with possible new issues. For the moment we
            # stick with the IO behaviour of previous Checkmk versions.
            #
            # In the future we'll find a solution to deal better with OS crash recovery situations.
            # for example like this:
            #
            # TODO(lm): The consistency of the file can be ensured using copies of the
            # original file which are made before replacing it with the new one. After first
            # successful loading of the just written file the possibly existing copies of this
            # file are deleted.
            # We can achieve this by calling os.link() before the os.rename() below. Then we need
            # to define in which situations we want to check out the backup copies and in which
            # cases we can safely delete them.
            #tmp.flush()
            #os.fsync(tmp.fileno())

        # Atomic replace of the destination (file is closed at this point)
        tmp_path.rename(self.path)

    except (MKTerminate, MKTimeout):
        raise
    except Exception as e:
        if tmp_path:
            # Best effort cleanup of the partially written temp file
            tmp_path.unlink(missing_ok=True)

        # TODO: How to handle debug mode or logging?
        raise MKGeneralException(
            _("Cannot write configuration file \"%s\": %s") % (self.path, e))
    finally:
        release_lock(self.path)
def _verify_aggregation_title_uniqueness(
        self, compiled_aggregations: Dict[str, BICompiledAggregation]) -> None:
    """Ensure no two compiled aggregation branches share the same title."""
    seen_titles: Dict[str, str] = {}
    for aggr_id, bi_aggregation in compiled_aggregations.items():
        for bi_branch in bi_aggregation.branches:
            title = bi_branch.properties.title
            other_aggr_id = seen_titles.get(title)
            if other_aggr_id is not None:
                raise MKGeneralException(
                    _("The aggregation titles are not unique. \"%s\" is created "
                      "by aggregation <b>%s</b> and <b>%s</b>") %
                    (title, aggr_id, other_aggr_id))
            seen_titles[title] = aggr_id
def _create_fallback_man_page(name, path, error_message):
    """Build a minimal man page dict for a page that could not be parsed."""
    with path.open(encoding="utf-8") as fp:
        raw_content = fp.read()
    return {
        "name": name,
        "path": str(path),
        "description": raw_content.strip(),
        "title": _("%s: Cannot parse man page: %s") % (name, error_message),
        "agents": "",
        "license": "unknown",
        "distribution": "unknown",
        "catalog": ["generic"],
    }
def _manpage_browser_folder(cat, subtrees):
    """Interactive dialog loop for one level of the man page catalog tree."""
    entries = []
    for subtree in subtrees:
        label = catalog_titles.get(subtree, subtree)
        num_entries = _manpage_num_entries(cat + (subtree,))
        if num_entries:
            label += " (%d)" % num_entries
        entries.append((label, subtree))
    entries.sort()

    choices = [(str(pos), label) for pos, (label, _subtree) in enumerate(entries, start=1)]

    while True:
        result = _dialog_menu(
            _("Man Page Browser"),
            _manpage_display_header(cat),
            choices,
            "0",
            _("Enter"),
            _("Back") if cat else _("Quit"),
        )
        if not result[0]:
            break
        # Menu entries are numbered starting at 1
        subcat = entries[int(result[1]) - 1][1]
        print_man_page_browser(cat + (subcat,))
def load_raw_files(werks_dir):
    """Load all compiled werks below werks_dir (default: the compiled werks dir).

    Returns a dict mapping werk ID (int) to the loaded werk data.

    Raises:
        MKGeneralException: if a werk file cannot be parsed or loaded.
    """
    if werks_dir is None:
        werks_dir = _compiled_werks_dir()
    werks = {}
    for file_name in werks_dir.glob("[0-9]*"):
        try:
            # BUG FIX: int() conversion is now inside the try block. The glob
            # pattern only anchors the first character, so a file with a
            # non-numeric suffix previously raised a bare ValueError outside
            # the error handling below.
            werk_id = int(file_name.name)
            werk = _load_werk(file_name)
            werk["id"] = werk_id
            werks[werk_id] = werk
        except Exception as e:
            raise MKGeneralException(
                _("Failed to load werk \"%s\": %s") % (file_name.name, e))
    return werks
def weekdays_by_name() -> List[Tuple[str, str]]:
    """Return (weekday ID, human readable day name) tuples, Monday first."""
    day_ids = ("monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday")
    day_names = (_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"),
                 _("Friday"), _("Saturday"), _("Sunday"))
    return list(zip(day_ids, day_names))
def weekdays() -> Dict[int, str]:
    """Map weekday numbers (0 = Monday) to human readable day names."""
    day_names = (_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"),
                 _("Friday"), _("Saturday"), _("Sunday"))
    return dict(enumerate(day_names))
def _manpage_browse_entries(cat: Iterable[str], entries: Iterable[ManPage]) -> None:
    """Interactive dialog loop rendering the man pages of one catalog category."""
    checks: list[tuple[str, str]] = sorted((entry.title, entry.name) for entry in entries)
    choices = [(str(pos), title) for pos, (title, _name) in enumerate(checks, start=1)]
    while True:
        result = _dialog_menu(
            _("Man Page Browser"),
            _manpage_display_header(cat),
            choices,
            "0",
            _("Show Manpage"),
            _("Back"),
        )
        if not result[0]:
            break
        # Menu entries are numbered starting at 1
        ConsoleManPageRenderer(checks[int(result[1]) - 1][1]).paint()
def load_mk_file(path, default=None, lock=False):
    """Load a .mk config file by exec()uting it into the *default* dict.

    NOTE: Python 2 era code (uses execfile). The caller MUST pass *default*,
    a dict pre-filled with all expected keys and their default values; the
    config file's assignments are merged into it and the same dict is
    returned. A missing file is not an error (the defaults are returned).

    Raises:
        MKGeneralException: if *default* is missing or the file cannot be read.
    """
    if default is None:
        raise MKGeneralException(
            _("You need to provide a config dictionary to merge with the "
              "read configuration. The dictionary should have all expected "
              "keys and their default values set."))

    if lock:
        # Held lock is intentionally NOT released here; lifetime is managed
        # by the caller / transaction handling elsewhere.
        aquire_lock(path)

    try:
        try:
            execfile(path, globals(), default)
        except IOError as e:
            if e.errno != errno.ENOENT:  # No such file or directory
                raise
        return default
    except MKTimeout:
        raise
    except Exception as e:
        # TODO: How to handle debug mode or logging?
        raise MKGeneralException(_("Cannot read configuration file \"%s\": %s") % (path, e))
def fallback(cls, path: Path, name: str, msg: str, content: str) -> "ManPage":
    """Build a placeholder man page for a file that could not be parsed.

    The parse error *msg* is put into the title; the raw file *content*
    becomes the description.
    """
    return cls(
        name=name,
        path=str(path),
        description=content,
        title=_("%s: Cannot parse man page: %s") % (name, msg),
        agents=[],
        license="unknown",
        distribution="unknown",
        catalog=["generic"],
        item=None,
        discovery=None,
        cluster=None,
    )
def lock_checkmk_configuration() -> Iterator[None]:
    """Generator guarding the global Checkmk configuration lock.

    Acquires the configuration lock file, yields, and always releases the
    lock afterwards. A lock timeout is translated into MKConfigLockTimeout.
    """
    lock_path = configuration_lockfile()
    try:
        aquire_lock(lock_path)
    except MKTimeout as e:
        raise MKConfigLockTimeout(
            _("Couldn't lock the Checkmk configuration. Another "
              "process is running that holds this lock. In order for you to be "
              "able to perform the desired action, you have to wait until the "
              "other process has finished. Please try again later.")) from e

    try:
        yield
    finally:
        release_lock(lock_path)
def regex(pattern: str, flags: int = 0) -> Pattern[str]:
    """Compile a regex, or look it up in the cache of compiled regexes.

    Compiling is a CPU consuming process, therefore compiled patterns
    are cached by (pattern, flags).
    """
    cache_key = (pattern, flags)
    cached = g_compiled_regexes.get(cache_key)
    if cached is not None:
        return cached

    try:
        compiled = re.compile(pattern, flags=flags)
    except Exception as e:
        raise MKGeneralException(_("Invalid regular expression '%s': %s") % (pattern, e))

    g_compiled_regexes[cache_key] = compiled
    return compiled
def _traverse_rule(self, bi_rule: BIRule, parents=None) -> None:
    """Recursively walk rule calls and detect cycles in the rule tree.

    Raises:
        MKGeneralException: if a rule calls itself, directly or indirectly.
    """
    parents = parents or []
    if bi_rule.id in parents:
        # Include the repeated ID so the reported chain shows the cycle
        parents.append(bi_rule.id)
        raise MKGeneralException(
            _("There is a cycle in your rules. This rule calls itself - "
              "either directly or indirectly: %s") % "->".join(parents))

    parents.append(bi_rule.id)
    for node in bi_rule.nodes:
        if not isinstance(node.action, BICallARuleAction):
            continue
        # Pass a copy so sibling branches do not see each other's path
        self._traverse_rule(self.get_rule_mandatory(node.action.rule_id), list(parents))
def _load_bytes_from_file(self) -> bytes:
    """Read the file's raw content, treating a missing file as empty.

    Since locking (currently) creates an empty file, there is no semantic
    difference between an empty and a non-existing file, so both yield b"".

    Raises:
        MKGeneralException: on any other read error (unless debug mode is
            enabled, in which case the original exception propagates).
    """
    try:
        return self.path.read_bytes()
    except FileNotFoundError:
        return b""
    except (MKTerminate, MKTimeout):
        raise
    except Exception as e:
        if cmk.utils.debug.enabled():
            raise
        raise MKGeneralException(_('Cannot read file "%s": %s') % (self.path, e))
def validate(self):
    """Validate all auxiliary tags and ensure their IDs are unique.

    Raises:
        MKGeneralException: on an invalid tag or a duplicate tag ID.
    """
    seen: Set[str] = set()
    for aux_tag in self._tags:
        aux_tag.validate()

        # Tag groups were made builtin with ~1.4. Previously users could modify
        # these groups. These users now have the modified tag groups in their
        # user configuration and should be able to cleanup this using the GUI
        # for the moment.
        # With 1.7 we use cmk-update-config to enforce the user to cleanup this.
        # Then we can re-enable this consistency check.
        #builtin_config = BuiltinTagConfig()
        #if builtin_config.aux_tag_list.exists(aux_tag.id):
        #    raise MKGeneralException(
        #        _("You can not override the builtin auxiliary tag \"%s\".") % aux_tag.id)

        if aux_tag.id in seen:
            # Typo fix in the user-facing message: "auxilary" -> "auxiliary"
            raise MKGeneralException(
                _("Duplicate tag ID \"%s\" in auxiliary tags") % aux_tag.id)
        seen.add(aux_tag.id)
def get_title_from_man_page(path: Path) -> str:
    """Extract the value of the first "title:" header line of a man page.

    Raises:
        MKGeneralException: if the file contains no title header.
    """
    with path.open(encoding="utf-8") as fp:
        for line in fp:
            if not line.startswith("title:"):
                continue
            _key, value = line.split(":", 1)
            return value.strip()
    raise MKGeneralException(_("Invalid man page: Failed to get the title"))
def _builtin_aux_tags(self):
    # Returns the specs of the builtin auxiliary tags: the address family
    # markers and the data source markers referenced by the builtin tag
    # groups. Pure data, consumed by the tag configuration loader.
    return [
        {
            'id': 'ip-v4',
            'topic': _('Address'),
            'title': _('IPv4'),
            # NOTE(review): "Bar" looks like placeholder help text — confirm
            # the intended wording with the original authors.
            'help': _("Bar"),
        },
        {
            'id': 'ip-v6',
            'topic': _('Address'),
            'title': _('IPv6'),
        },
        {
            'id': 'snmp',
            'topic': _('Data sources'),
            'title': _('Monitor via SNMP'),
        },
        {
            'id': 'tcp',
            'topic': _('Data sources'),
            'title': _('Monitor via Checkmk Agent'),
        },
        {
            'id': 'ping',
            'topic': _('Data sources'),
            'title': _('Only ping this device'),
        },
    ]
def _builtin_tag_groups(self):
    """Return the specs of the builtin tag groups.

    Covers the agent/data source selection, piggyback handling, SNMP and the
    IP address family. Pure data, consumed by the tag configuration loader.
    """
    return [
        {
            'id': 'agent',
            'title': _('Checkmk agent'),
            'topic': _('Data sources'),
            'tags': [
                {
                    'id': 'cmk-agent',
                    'title': _('Normal Checkmk agent, or special agent if configured'),
                    'aux_tags': ['tcp'],
                },
                {
                    'id': 'all-agents',
                    'title': _('Normal Checkmk agent, all configured special agents'),
                    'aux_tags': ['tcp'],
                },
                {
                    'id': 'special-agents',
                    'title': _('No Checkmk agent, all configured special agents'),
                    'aux_tags': ['tcp'],
                },
                {
                    'id': 'no-agent',
                    'title': _('No agent'),
                    'aux_tags': [],
                },
            ],
        },
        {
            'id': 'piggyback',
            'title': _("Piggyback"),
            'topic': _('Data sources'),
            'help': _("By default every host has the piggyback data source "
                      "<b>Use piggyback data from other hosts if present</b>. "
                      "In this case the <tt>Check_MK</tt> service of this host processes the piggyback data "
                      "but does not warn if no piggyback data is available. The related discovered services "
                      "would become stale. "
                      "If a host has configured <b>Always use and expect piggyback data</b> for the piggyback "
                      "data source then this host expects piggyback data and the <tt>Check_MK</tt> service of "
                      "this host warns if no piggyback data is available. "
                      "In the last case, ie. <b>Never use piggyback data</b>, the <tt>Check_MK</tt> service "
                      "does not process piggyback data at all and ignores it if available."),
            'tags': [
                {
                    "id": "auto-piggyback",
                    "title": _("Use piggyback data from other hosts if present"),
                    "aux_tags": []
                },
                {
                    "id": "piggyback",
                    "title": _("Always use and expect piggyback data"),
                    "aux_tags": [],
                },
                {
                    "id": "no-piggyback",
                    "title": _("Never use piggyback data"),
                    "aux_tags": [],
                },
            ],
        },
        {
            'id': 'snmp_ds',
            'title': _('SNMP'),
            'topic': _('Data sources'),
            'tags': [{
                'id': 'no-snmp',
                'title': _('No SNMP'),
                'aux_tags': [],
            }, {
                'id': 'snmp-v2',
                'title': _('SNMP v2 or v3'),
                'aux_tags': ['snmp'],
            }, {
                'id': 'snmp-v1',
                'title': _('SNMP v1'),
                'aux_tags': ['snmp'],
            }],
        },
        {
            'id': 'address_family',
            'title': _('IP address family'),
            # CONSISTENCY FIX: this was the only topic not wrapped in _();
            # the aux tags use _('Address'), so the untranslated literal
            # would split the "Address" topic grouping in localized GUIs.
            'topic': _('Address'),
            'tags': [
                {
                    'id': 'ip-v4-only',
                    'title': _('IPv4 only'),
                    'aux_tags': ['ip-v4'],
                },
                {
                    'id': 'ip-v6-only',
                    'title': _('IPv6 only'),
                    'aux_tags': ['ip-v6'],
                },
                {
                    'id': 'ip-v4v6',
                    'title': _('IPv4/IPv6 dual-stack'),
                    'aux_tags': ['ip-v4', 'ip-v6'],
                },
                {
                    'id': 'no-ip',
                    'title': _('No IP'),
                    'aux_tags': [],
                },
            ],
        },
    ]
def short_host_state_name(state_num, deflt=u""):
    # type: (int, Text) -> Text
    """Translate a numeric host state (0/1/2) into its short name.

    Unknown state numbers yield *deflt*.
    """
    return {0: _("UP"), 1: _("DOWN"), 2: _("UNREACH")}.get(state_num, deflt)
def get_aux_tags_by_topic(self):
    """Group all auxiliary tags by their topic.

    Returns a list of (topic, aux_tags) tuples sorted by topic name; tags
    without a topic are grouped under the "Tags" topic.
    """
    # ANNOTATION FIX: the lists hold the aux tag objects themselves, not
    # strings — the previous List[str] annotation was wrong.
    by_topic: Dict[str, List] = {}
    for aux_tag in self.aux_tag_list.get_tags():
        topic = aux_tag.topic or _('Tags')
        by_topic.setdefault(topic, []).append(aux_tag)
    return sorted(by_topic.items(), key=lambda x: x[0])
def _validate_tag_id(tag_id): if not re.match("^[-a-z0-9A-Z_]*$", tag_id): raise MKGeneralException( _("Invalid tag ID. Only the characters a-z, A-Z, 0-9, _ and - are allowed." ))
def get_tag_groups_by_topic(self):
    """Group all tag groups by their topic.

    Returns a list of (topic, tag_groups) tuples sorted by topic name; groups
    without a topic are grouped under the "Tags" topic.
    """
    # ANNOTATION FIX: the lists hold the tag group objects themselves, not
    # strings — the previous List[str] annotation was wrong.
    by_topic: Dict[str, List] = {}
    for tag_group in self.tag_groups:
        topic = tag_group.topic or _('Tags')
        by_topic.setdefault(topic, []).append(tag_group)
    return sorted(by_topic.items(), key=lambda x: x[0])
def _append(self, aux_tag):
    """Append an auxiliary tag, rejecting duplicate IDs.

    Raises:
        MKGeneralException: if a tag with the same ID already exists.
    """
    if self.exists(aux_tag.id):
        # BUG FIX: format the tag's ID into the message. The code previously
        # formatted the whole tag object (% aux_tag), which renders as the
        # object's repr instead of the ID the message promises.
        raise MKGeneralException(
            _("The tag ID \"%s\" does already exist in the list of auxiliary tags."
             ) % aux_tag.id)
    self._tags.append(aux_tag)