def _tls_valuespec(cls):
    """Valuespec for choosing plain-text vs. TLS-encrypted Livestatus transport.

    Returns a CascadingDropdown with two choices: unencrypted plain text
    (stored as an empty dict) or TLS with an optional server-certificate
    verification toggle.
    """
    return CascadingDropdown(
        title=_("Encryption"),
        choices=[
            (
                "plain_text",
                _("Plain text (Unencrypted)"),
                # Empty dict is the stored value for the plain-text choice.
                FixedValue({}, totext=_("Use plain text, unencrypted transport")),
            ),
            (
                "encrypted",
                _("Encrypt data using TLS"),
                Dictionary(
                    elements=[
                        (
                            "verify",
                            Checkbox(
                                title=_("Verify server certificate"),
                                label=_(
                                    "Verify the Livestatus server certificate using the local site CA"
                                ),
                                default_value=True,
                                help=_(
                                    "Either verify the server certificate using the site local CA or accept "
                                    "any certificate offered by the server. It is highly recommended to "
                                    "leave this enabled."
                                ),
                            ),
                        ),
                    ],
                    optional_keys=False,
                ),
            ),
        ],
        help=_(
            "When connecting to Check_MK versions older than 1.6 you can only use plain text "
            "transport. Starting with Check_MK 1.6 it is possible to use encrypted Livestatus "
            "communication. Sites created with 1.6 will automatically use encrypted communication "
            "by default. Sites created with previous versions need to be configured manually to "
            "enable the encryption. Have a look at <a href=\"werk.py?werk=7017\">werk #7017</a> "
            "for further information."
        ),
    )
def _vs_activation(title: str, has_foreign_changes: bool) -> Optional[Dictionary]:
    """Build the valuespec for the "activate changes" form.

    Returns None when neither a comment field nor the foreign-changes
    checkbox applies, so the caller can omit the form entirely.
    """
    elements: List[DictionaryEntry] = []
    if config.wato_activate_changes_comment_mode != "disabled":
        # "enforce" makes the comment mandatory; any other mode keeps it optional.
        is_optional = config.wato_activate_changes_comment_mode != "enforce"
        elements.append((
            "comment",
            TextAreaUnicode(
                title=_("Comment (optional)") if is_optional else _("Comment"),
                cols=40,
                try_max_width=True,
                rows=1,
                help=_("You can provide an optional comment for the current activation. "
                       "This can be useful to document the reason why the changes you "
                       "activate have been made."),
                allow_empty=is_optional,
            )))
    # Only users with the matching permission may activate other users' changes.
    if has_foreign_changes and config.user.may("wato.activateforeign"):
        elements.append(("foreign",
                         Checkbox(
                             title=_("Activate foreign changes"),
                             label=_("Activate changes of other users"),
                         )))
    if not elements:
        return None
    return Dictionary(
        title=title,
        elements=elements,
        optional_keys=[],
        render="form_part",
    )
def _parameter_valuespec_hw_fans():
    """Parameter valuespec for hardware fan-speed checks (rpm levels)."""

    def _to_dict(spec):
        # Legacy rule values were a bare tuple of lower levels; wrap them.
        return spec if isinstance(spec, dict) else {"lower": spec}

    lower_levels = Tuple(
        help=_("Lower levels for the fan speed of a hardware device"),
        title=_("Lower levels"),
        elements=[
            Integer(title=_("warning if below"), unit="rpm"),
            Integer(title=_("critical if below"), unit="rpm"),
        ],
    )
    upper_levels = Tuple(
        help=_("Upper levels for the fan speed of a hardware device"),
        title=_("Upper levels"),
        elements=[
            Integer(title=_("warning at"), unit="rpm"),
            Integer(title=_("critical at"), unit="rpm"),
        ],
    )
    perf_toggle = Checkbox(title=_("Performance data"), label=_("Enable performance data"))

    return Transform(
        valuespec=Dictionary(
            elements=[
                ("lower", lower_levels),
                ("upper", upper_levels),
                ("output_metrics", perf_toggle),
            ],
            optional_keys=["upper", "output_metrics"],
        ),
        forth=_to_dict,
    )
def _valuespec_special_agents_activemq():
    """Valuespec for the Apache ActiveMQ special-agent rule (with legacy transform)."""
    return Transform(
        valuespec=Dictionary(
            elements=[
                (
                    "servername",
                    TextInput(
                        title=_("Server Name"),
                        allow_empty=False,
                    ),
                ),
                ("port", Integer(title=_("Port Number"), default_value=8161)),
                (
                    "protocol",
                    DropdownChoice(
                        title=_("Protocol"),
                        choices=[
                            ("http", "HTTP"),
                            ("https", "HTTPS"),
                        ],
                    ),
                ),
                ("use_piggyback", Checkbox(title=_("Use Piggyback"), label=_("Enable"))),
                (
                    "basicauth",
                    Tuple(
                        title=_("BasicAuth settings (optional)"),
                        elements=[TextInput(title=_("Username")), Password(title=_("Password"))],
                    ),
                ),
            ],
            optional_keys=["basicauth"],
        ),
        title=_("Apache ActiveMQ queues"),
        # Migrates parameter sets created by older versions of this rule.
        forth=_special_agents_activemq_transform_activemq,
    )
def _parameter_valuespec_netapp_luns():
    """Parameter valuespec for NetApp LUN used-space levels and trend options."""
    return Dictionary(
        title=_("Configure levels for used space"),
        elements=[
            ("ignore_levels",
             FixedValue(
                 title=_("Ignore used space (this option disables any other options)"),
                 help=_("Some luns, e.g. jfs formatted, tend to report incorrect used space values"),
                 totext=_("Ignore used space"),
                 value=True,
             )),
            ("levels",
             Alternative(
                 title=_("Levels for LUN"),
                 show_alternative_title=True,
                 default_value=(80.0, 90.0),
                 # Distinguishes percentage-based from absolute level tuples.
                 match=match_dual_level_type,
                 elements=[
                     get_free_used_dynamic_valuespec("used", "LUN"),
                     Transform(
                         get_free_used_dynamic_valuespec("free", "LUN", default_value=(20.0, 10.0)),
                         forth=transform_filesystem_free,
                         back=transform_filesystem_free,
                     )
                 ],
             )),
            ("trend_range",
             Optional(Integer(title=_("Time Range for lun filesystem trend computation"),
                              default_value=24,
                              minvalue=1,
                              unit=_("hours")),
                      title=_("Trend computation"),
                      label=_("Enable trend computation"))),
            ("trend_mb",
             Tuple(
                 title=_("Levels on trends in MB per time range"),
                 elements=[
                     Integer(title=_("Warning at"), unit=_("MB / range"), default_value=100),
                     Integer(title=_("Critical at"), unit=_("MB / range"), default_value=200)
                 ],
             )),
            ("trend_perc",
             Tuple(
                 title=_("Levels for the percentual growth per time range"),
                 elements=[
                     Percentage(
                         title=_("Warning at"),
                         unit=_("% / range"),
                         default_value=5,
                     ),
                     Percentage(
                         title=_("Critical at"),
                         unit=_("% / range"),
                         default_value=10,
                     ),
                 ],
             )),
            ("trend_timeleft",
             Tuple(
                 title=_("Levels on the time left until the lun filesystem gets full"),
                 elements=[
                     Integer(
                         title=_("Warning if below"),
                         unit=_("hours"),
                         default_value=12,
                     ),
                     Integer(
                         title=_("Critical if below"),
                         unit=_("hours"),
                         default_value=6,
                     ),
                 ],
             )),
            ("trend_showtimeleft",
             Checkbox(
                 title=_("Display time left in check output"),
                 label=_("Enable"),
                 help=_("Normally, the time left until the lun filesystem is full is only displayed when "
                        "the configured levels have been breached. If you set this option "
                        "the check always reports this information"))),
            ("trend_perfdata",
             Checkbox(
                 title=_("Trend performance data"),
                 label=_("Enable generation of performance data from trends"))),
            ("read_only",
             Checkbox(
                 title=_("LUN is read-only"),
                 # NOTE(review): this help text appears self-contradictory (warns both
                 # when the LUN is and is not read-only) — verify the intended
                 # direction against the check plugin.
                 help=_("Display a warning if a LUN is not read-only. Without "
                        "this setting a warning will be displayed if a LUN is "
                        "read-only."),
                 label=_("Enable"))),
        ],
    )
def dashlet_vs_general_settings(dashlet_type: Type[Dashlet], single_infos: SingleInfos):
    """General-settings Dictionary shared by all dashboard element (dashlet) types.

    ``dashlet_type`` supplies the fixed type name/title; ``single_infos``
    selects which single-context filters are offered.
    """
    return Dictionary(
        title=_("General Settings"),
        render="form",
        optional_keys=["title", "title_url"],
        elements=[
            (
                "type",
                # Read-only: the element type cannot be changed after creation.
                FixedValue(
                    value=dashlet_type.type_name(),
                    totext=dashlet_type.title(),
                    title=_("Element type"),
                ),
            ),
            visuals.single_infos_spec(single_infos),
            (
                "background",
                Checkbox(
                    title=_("Colored background"),
                    label=_("Render background"),
                    help=_("Render gray background color behind the elements content."),
                    default_value=True,
                ),
            ),
            (
                "show_title",
                DropdownChoice(
                    title=_("Show title header"),
                    help=_("Render the titlebar including title and link above the element."),
                    choices=[
                        (False, _("Don't show any header")),
                        (True, _("Show header with highlighted background")),
                        ("transparent", _("Show title without any background")),
                    ],
                    default_value=True,
                ),
            ),
            (
                "title",
                TextInput(
                    title=_("Custom title") + "<sup>*</sup>",
                    placeholder=_(
                        "This option is macro-capable, please check the inline help for more "
                        "information."),
                    help=" ".join((
                        _("Most elements have a hard coded static title and some are aware of their "
                          "content and set the title dynamically, like the view snapin, which "
                          "displays the title of the view. If you like to use any other title, set it "
                          "here."),
                        _title_help_text_for_macros(dashlet_type),
                    )),
                    size=75,
                ),
            ),
            (
                "title_url",
                TextInput(
                    title=_("Link of Title"),
                    help=_("The URL of the target page the link of the element should link to."),
                    size=50,
                ),
            ),
        ],
    )
def _network_scan_elements(self):
    """Assemble the dictionary elements for the folder network-scan configuration."""
    elements = [
        ("ip_ranges",
         ListOf(
             self._vs_ip_range(),
             title=_("IP ranges to scan"),
             add_label=_("Add new IP range"),
             text_if_empty=_("No IP range configured"),
         )),
        ("exclude_ranges",
         ListOf(
             self._vs_ip_range(),
             title=_("IP ranges to exclude"),
             add_label=_("Add new IP range"),
             text_if_empty=_("No exclude range configured"),
         )),
        (
            "scan_interval",
            Age(
                title=_("Scan interval"),
                display=["days", "hours"],
                default_value=60 * 60 * 24,
                minvalue=3600,  # 1 hour
            )),
        ("time_allowed",
         Transform(
             ListOf(
                 TimeofdayRange(allow_empty=False, ),
                 title=_("Time allowed"),
                 help=_("Limit the execution of the scan to this time range."),
                 allow_empty=False,
                 style=ListOf.Style.FLOATING,
                 movable=False,
                 default_value=[((0, 0), (24, 0))],
             ),
             # Old configs stored a single tuple instead of a list of ranges.
             forth=lambda x: [x] if isinstance(x, tuple) else x,
             back=sorted,
         )),
        ("set_ipaddress",
         Checkbox(
             title=_("Set IPv4 address"),
             help=_("Whether or not to configure the found IP address as the IPv4 "
                    "address of the found hosts."),
             default_value=True,
         )),
    ]
    # Tag criticality is only offered when the corresponding tag group exists.
    elements += self._optional_tag_criticality_element()
    elements += [
        ("max_parallel_pings",
         Integer(
             title=_("Parallel pings to send"),
             help=_("Set the maximum number of concurrent pings sent to target IP "
                    "addresses."),
             minvalue=1,
             maxvalue=200,
             default_value=100,
         )),
        ("run_as",
         DropdownChoice(
             title=_("Run as"),
             help=_("Execute the network scan in the Check_MK user context of the "
                    "choosen user. This user needs the permission to add new hosts "
                    "to this folder."),
             choices=self._get_all_user_ids,
             # Lazy default: resolved at render time for the current user.
             default_value=lambda: config.user.id,
         )),
        ("translate_names", HostnameTranslation(title=_("Translate Hostnames"), )),
    ]
    return elements
def _valuespec_active_checks_traceroute():
    """Valuespec for the active traceroute check (routing surveillance)."""
    return Transform(
        valuespec=Dictionary(
            title=_("Check current routing"),
            help=_("This active check uses <tt>traceroute</tt> in order to determine the current "
                   "routing from the monitoring host to the target host. You can specify any number "
                   "of missing or expected routes in order to detect e.g. an (unintended) failover "
                   "to a secondary route."),
            elements=[
                (
                    "dns",
                    Checkbox(
                        title=_("Name resolution"),
                        label=_("Use DNS to convert IP addresses into hostnames"),
                        help=_("If you use this option, then <tt>traceroute</tt> is <b>not</b> being "
                               "called with the option <tt>-n</tt>. That means that all IP addresses "
                               "are tried to be converted into names. This usually adds additional "
                               "execution time. Also DNS resolution might fail for some addresses."),
                    ),
                ),
                ip_address_family_element(),
                (
                    "routers",
                    ListOf(
                        valuespec=Tuple(elements=[
                            TextInput(
                                title=_("Router (FQDN, IP-Address)"),
                                allow_empty=False,
                            ),
                            DropdownChoice(
                                title=_("How"),
                                choices=[
                                    ("W", _("WARN - if this router is not being used")),
                                    ("C", _("CRIT - if this router is not being used")),
                                    ("w", _("WARN - if this router is being used")),
                                    ("c", _("CRIT - if this router is being used")),
                                ],
                            ),
                        ]),
                        title=_("Router that must or must not be used"),
                        add_label=_("Add Condition"),
                    ),
                ),
                (
                    "method",
                    DropdownChoice(
                        title=_("Method of probing"),
                        choices=[
                            (None, _("UDP (default behaviour of traceroute)")),
                            ("icmp", _("ICMP Echo Request")),
                            ("tcp", _("TCP SYN")),
                        ],
                    ),
                ),
            ],
            optional_keys=False,
        ),
        # Legacy rules lack the address-family element; added on load.
        forth=transform_add_address_family,
    )
def get_vs_flexible_notifications(): # Make sure, that list is not trivially false def validate_only_services(value, varprefix): for s in value: if s and s[0] != '!': return raise MKUserError(varprefix + "_0", _("The list of services will never match")) return CascadingDropdown( title=_("Notification Method"), choices=[ ("email", _("Plain Text Email (using configured templates)")), ( "flexible", _("Flexible Custom Notifications"), ListOf( Foldable( Dictionary( optional_keys=[ "service_blacklist", "only_hosts", "only_services", "escalation", "match_sl" ], columns=1, elements=[ ( "plugin", DropdownChoice( title=_("Notification Plugin"), choices=notification_script_choices, default_value="mail", ), ), ("parameters", ListOfStrings( title=_("Plugin Arguments"), help= _("You can specify arguments to the notification plugin here. " "Please refer to the documentation about the plugin for what " "parameters are allowed or required here." ), )), ("disabled", Checkbox( title=_("Disabled"), label=_( "Currently disable this notification" ), default_value=False, )), ("timeperiod", cmk.gui.watolib.timeperiods. TimeperiodSelection( title=_("Timeperiod"), help= _("Do only notifiy alerts within this time period" ), )), ( "escalation", Tuple( title= _("Restrict to n<sup>th</sup> to m<sup>th</sup> notification (escalation)" ), orientation="float", elements=[ Integer( label=_("from"), help= _("Let through notifications counting from this number" ), default_value=1, minvalue=1, maxvalue=999999, ), Integer( label=_("to"), help= _("Let through notifications counting upto this number" ), default_value=999999, minvalue=1, maxvalue=999999, ), ], ), ), ( "match_sl", Tuple( title=_("Match service level"), help= _("Host or Service must be in the following service level to get notification" ), orientation="horizontal", show_titles=False, elements=[ DropdownChoice( label=_("from:"), choices=cmk.gui.mkeventd. 
service_levels, prefix_values=True), DropdownChoice( label=_(" to:"), choices=cmk.gui.mkeventd. service_levels, prefix_values=True), ], ), ), ("host_events", ListChoice( title=_("Host Events"), choices=[ ('d', _("Host goes down")), ('u', _("Host gets unreachble")), ('r', _("Host goes up again")), ('f', _("Start or end of flapping state")), ('s', _("Start or end of a scheduled downtime " )), ('x', _("Acknowledgement of host problem") ), ], default_value=[ 'd', 'u', 'r', 'f', 's', 'x' ], )), ("service_events", ListChoice( title=_("Service Events"), choices=[ ('w', _("Service goes into warning state") ), ('u', _("Service goes into unknown state") ), ('c', _("Service goes into critical state") ), ('r', _("Service recovers to OK")), ('f', _("Start or end of flapping state")), ('s', _("Start or end of a scheduled downtime" )), ('x', _("Acknowledgement of service problem" )), ], default_value=[ 'w', 'c', 'u', 'r', 'f', 's', 'x' ], )), ( "only_hosts", ListOfStrings( title=_( "Limit to the following hosts"), help= _("Configure the hosts for this notification. Without prefix, only exact, case sensitive matches, " "<tt>!</tt> for negation and <tt>~</tt> for regex matches." ), orientation="horizontal", # TODO: Clean this up to use an alternative between TextAscii() and RegExp(). Also handle the negation in a different way valuespec=TextAscii(size=20, ), ), ), ( "only_services", ListOfStrings( title=_( "Limit to the following services"), help= _("Configure regular expressions that match the beginning of the service names here. Prefix an " "entry with <tt>!</tt> in order to <i>exclude</i> that service." ), orientation="horizontal", # TODO: Clean this up to use an alternative between TextAscii() and RegExp(). 
Also handle the negation in a different way valuespec=TextAscii(size=20, ), validate=validate_only_services, ), ), ( "service_blacklist", ListOfStrings( title=_( "Blacklist the following services" ), help= _("Configure regular expressions that match the beginning of the service names here." ), orientation="horizontal", valuespec=RegExp( size=20, mode=RegExp.prefix, ), validate=validate_only_services, ), ), ]), title_function=lambda v: _("Notify by: ") + notification_script_title(v["plugin"]), ), title=_("Flexible Custom Notifications"), add_label=_("Add notification"), ), ), ])
def _common_email_parameters(protocol, port_defaults):
    """Shared connection/auth Dictionary for the email active checks.

    ``protocol`` (e.g. "IMAP") is used verbatim in titles; ``port_defaults``
    is only interpolated into the TCP-port help label.
    """
    return Dictionary(
        title=protocol,
        optional_keys=["server"],
        elements=[
            (
                "server",
                HostAddress(
                    title=f"{protocol} Server",
                    allow_empty=False,
                    help=_("You can specify a hostname or IP address different from the IP address "
                           "of the host this check will be assigned to."),
                ),
            ),
            (
                "connection",
                Dictionary(
                    required_keys=[],
                    title=_("Connection settings"),
                    elements=[
                        (
                            "disable_tls",
                            Checkbox(
                                title=_("Disable TLS/SSL"),
                                label=_("Force unencrypted communication"),
                            ),
                        ),
                        (
                            "disable_cert_validation",
                            Checkbox(
                                title=_("Disable certificate validation"),
                                label=_("Ignore unsuccessful validation (in case of TLS/SSL)"),
                            ),
                        ),
                        (
                            "tcp_port",
                            Integer(
                                title=_("TCP Port"),
                                label=_("(default is %r for %s/TLS)") % (port_defaults, protocol),
                            ),
                        ),
                    ],
                ),
            ),
            (
                "auth",
                Tuple(
                    title=_("Authentication"),
                    elements=[
                        TextInput(title=_("Username"), allow_empty=False, size=24),
                        IndividualOrStoredPassword(title=_("Password"), allow_empty=False, size=12),
                    ],
                ),
            ),
        ],
    )
def _valuespec_brocade_fcport_inventory():
    """Valuespec controlling service discovery of Brocade FC ports."""
    return Dictionary(
        title=_("Brocade port discovery"),
        elements=[
            (
                "use_portname",
                Checkbox(
                    title=_("Use port name as service name"),
                    label=_("use port name"),
                    default_value=True,
                    help=_("This option lets Check_MK use the port name as item instead of the "
                           "port number. If no description is available then the port number is "
                           "used anyway."),
                ),
            ),
            (
                "show_isl",
                Checkbox(
                    title=_('add "ISL" to service description for interswitch links'),
                    label=_("add ISL"),
                    default_value=True,
                    help=_('This option lets Check_MK add the string "ISL" to the service '
                           "description for interswitch links."),
                ),
            ),
            (
                "admstates",
                ListChoice(
                    title=_("Administrative port states to discover"),
                    help=_("When doing service discovery on brocade switches only ports with the given administrative "
                           "states will be added to the monitoring system."),
                    choices=_brocade_fcport_adm_choices,
                    columns=1,
                    toggle_all=True,
                    default_value=["1", "3", "4"],
                ),
            ),
            (
                "phystates",
                ListChoice(
                    title=_("Physical port states to discover"),
                    help=_("When doing service discovery on brocade switches only ports with the given physical "
                           "states will be added to the monitoring system."),
                    choices=_brocade_fcport_phy_choices,
                    columns=1,
                    toggle_all=True,
                    default_value=[3, 4, 5, 6, 7, 8, 9, 10],
                ),
            ),
            (
                "opstates",
                ListChoice(
                    title=_("Operational port states to discover"),
                    help=_("When doing service discovery on brocade switches only ports with the given operational "
                           "states will be added to the monitoring system."),
                    choices=_brocade_fcport_op_choices,
                    columns=1,
                    toggle_all=True,
                    default_value=[1, 2, 3, 4],
                ),
            ),
        ],
        help=_("This rule can be used to control the service discovery for brocade ports. "
               "You can configure the port states for inventory "
               "and the use of the description as service name."),
    )
def _valuespec_special_agents_vsphere(): return Transform( valuespec=Dictionary( title=_("VMWare ESX via vSphere"), help=_( "This rule allows monitoring of VMWare ESX via the vSphere API. " "You can configure your connection settings here.", ), elements=[ ( "user", TextInput( title=_("vSphere User name"), allow_empty=False, ), ), ( "secret", IndividualOrStoredPassword( title=_("vSphere secret"), allow_empty=False, ), ), ( "direct", DropdownChoice( title=_("Type of query"), choices=[ (True, _("Queried host is a host system")), (False, _("Queried host is the vCenter")), ], ), ), ( "tcp_port", Integer( title=_("TCP Port number"), help=_("Port number for HTTPS connection to vSphere"), default_value=443, minvalue=1, maxvalue=65535, ), ), ( "ssl", Alternative( title=_("SSL certificate checking"), elements=[ FixedValue(value=False, title=_("Deactivated"), totext=""), FixedValue(value=True, title=_("Use hostname"), totext=""), TextInput( title=_("Use other hostname"), help= _("The IP of the other hostname needs to be the same IP as the host address" ), ), ], default_value=True, ), ), ( "timeout", Integer( title=_("Connect Timeout"), help= _("The network timeout in seconds when communicating with vSphere or " "to the Check_MK Agent. The default is 60 seconds. Please note that this " "is not a total timeout but is applied to each individual network transation." 
), default_value=60, minvalue=1, unit=_("seconds"), ), ), ( "infos", Transform( valuespec=ListChoice( choices=[ ("hostsystem", _("Host Systems")), ("virtualmachine", _("Virtual Machines")), ("datastore", _("Datastores")), ("counters", _("Performance Counters")), ("licenses", _("License Usage")), ], default_value=[ "hostsystem", "virtualmachine", "datastore", "counters" ], allow_empty=False, ), forth=lambda v: [x.replace("storage", "datastore") for x in v], title=_("Retrieve information about..."), ), ), ( "skip_placeholder_vms", Checkbox( title=_("Placeholder VMs"), label=_("Do not monitor placeholder VMs"), default_value=True, true_label=_("ignore"), false_label=_("monitor"), help= _("Placeholder VMs are created by the Site Recovery Manager(SRM) and act as backup " "virtual machines in case the default vm is unable to start. This option tells the " "vsphere agent to exclude placeholder vms in its output." ), ), ), ( "host_pwr_display", DropdownChoice( title=_("Display ESX Host power state on"), choices=[ (None, _("The queried ESX system (vCenter / Host)")), ("esxhost", _("The ESX Host")), ("vm", _("The Virtual Machine")), ], default_value=None, ), ), ( "vm_pwr_display", DropdownChoice( title=_( "Display VM power state <i>additionally</i> on"), help= _("The power state can be displayed additionally either " "on the ESX host or the VM. This will result in services " "for <i>both</i> the queried system and the ESX host / VM. " "By disabling the unwanted services it is then possible " "to configure where the services are displayed."), choices=[ (None, _("The queried ESX system (vCenter / Host)")), ("esxhost", _("The ESX Host")), ("vm", _("The Virtual Machine")), ], default_value=None, ), ), ( "snapshots_on_host", Checkbox( title=_("VM snapshot summary"), label=_("Display snapshot summary on ESX hosts"), default_value=False, help= _("By default the snapshot summary service is displayed on the vCenter. 
" "Users who run an ESX host on its own or do not include their vCenter in the " "monitoring can choose to display the snapshot summary on the ESX host itself." ), ), ), ( "vm_piggyname", DropdownChoice( title=_("Piggyback name of virtual machines"), choices=[ ("alias", _("Use the name specified in the ESX system")), ( "hostname", _("Use the VMs hostname if set, otherwise fall back to ESX name" ), ), ], default_value="alias", ), ), ( "spaces", DropdownChoice( title=_("Spaces in hostnames"), choices=[ ("cut", _("Cut everything after first space")), ("underscore", _("Replace with underscores")), ], default_value="underscore", ), ), ], optional_keys=[ "tcp_port", "timeout", "vm_pwr_display", "host_pwr_display", "vm_piggyname", ], ignored_keys=["use_pysphere"], ), forth=_transform_agent_vsphere, )
def _valuespec_special_agents_ibmsvc():
    """Valuespec for the IBM SVC / V7000 special-agent rule (SSH based)."""
    return Dictionary(
        title=_("IBM SVC / V7000 storage systems"),
        help=_("This rule set selects the <tt>ibmsvc</tt> agent instead of the normal Check_MK Agent "
               "and allows monitoring of IBM SVC / V7000 storage systems by calling "
               "ls* commands there over SSH. "
               "Make sure you have SSH key authentication enabled for your monitoring user. "
               "That means: The user your monitoring is running under on the monitoring "
               "system must be able to ssh to the storage system as the user you gave below "
               "without password."),
        elements=[
            (
                "user",
                TextInput(
                    title=_("IBM SVC / V7000 user name"),
                    allow_empty=True,
                    help=_("User name on the storage system. Read only permissions are sufficient."),
                ),
            ),
            (
                "accept-any-hostkey",
                Checkbox(
                    title=_("Accept any SSH Host Key"),
                    label=_("Accept any SSH Host Key"),
                    default_value=False,
                    help=_("Accepts any SSH Host Key presented by the storage device. "
                           "Please note: This might be a security issue because man-in-the-middle "
                           "attacks are not recognized! Better solution would be to add the "
                           "SSH Host Key of the monitored storage devices to the .ssh/known_hosts "
                           "file for the user your monitoring is running under (on OMD: the site user)"),
                ),
            ),
            (
                "infos",
                Transform(
                    valuespec=ListChoice(
                        choices=[
                            ("lshost", _("Hosts Connected")),
                            ("lslicense", _("Licensing Status")),
                            ("lsmdisk", _("MDisks")),
                            ("lsmdiskgrp", _("MDisksGrps")),
                            ("lsnode", _("IO Groups")),
                            ("lsnodestats", _("Node Stats")),
                            ("lssystem", _("System Info")),
                            ("lssystemstats", _("System Stats")),
                            ("lseventlog", _("Event Log")),
                            ("lsportfc", _("FC Ports")),
                            ("lsportsas", _("SAS Ports")),
                            ("lsenclosure", _("Enclosures")),
                            ("lsenclosurestats", _("Enclosure Stats")),
                            ("lsarray", _("RAID Arrays")),
                            ("disks", _("Physical Disks")),
                        ],
                        default_value=[
                            "lshost",
                            "lslicense",
                            "lsmdisk",
                            "lsmdiskgrp",
                            "lsnode",
                            "lsnodestats",
                            "lssystem",
                            "lssystemstats",
                            "lsportfc",
                            "lsenclosure",
                            "lsenclosurestats",
                            "lsarray",
                            "disks",
                        ],
                        allow_empty=False,
                    ),
                    title=_("Retrieve information about..."),
                ),
            ),
        ],
        optional_keys=[],
    )
def _special_agents_ipmi_sensors_vs_freeipmi() -> Dictionary:
    """Valuespec for the freeipmi variant of the IPMI sensors special agent.

    Extends the common IPMI elements with freeipmi-specific driver, cache
    and output options.

    Fix: the "Quiet cache" help text was a plain string, not wrapped in the
    _() translation marker like every other user-facing string here.
    """
    return Dictionary(
        elements=[
            *_special_agents_ipmi_sensors_vs_ipmi_common_elements(),
            (
                "privilege_lvl",
                DropdownChoice(
                    title=_("Privilege Level"),
                    choices=[
                        ("user", "USER"),
                        ("operator", "OPERATOR"),
                        ("admin", "ADMIN"),
                    ],
                    default_value="operator",
                ),
            ),
            (
                "ipmi_driver",
                TextInput(
                    title=_("IPMI driver"),
                ),
            ),
            (
                "driver_type",
                TextInput(
                    title=_("IPMI driver type"),
                    help=_("Driver type to use instead of doing an auto selection"),
                ),
            ),
            (
                "BMC_key",
                TextInput(
                    title=_("BMC key"),
                    help=_(
                        "K_g BMC key to use when authenticating with the remote host for IPMI 2.0"
                    ),
                ),
            ),
            (
                "quiet_cache",
                Checkbox(
                    title=_("Quiet cache"),
                    label=_("Enable"),
                    # Fix: help was missing the _() translation wrapper.
                    help=_("Do not output information about cache creation/deletion"),
                ),
            ),
            (
                "sdr_cache_recreate",
                Checkbox(
                    title=_("SDR cache recreate"),
                    label=_("Enable"),
                    help=_("Automatically recreate the sensor data repository (SDR) cache"),
                ),
            ),
            (
                "interpret_oem_data",
                Checkbox(
                    title=_("OEM data interpretation"),
                    label=_("Enable"),
                    help=_("Attempt to interpret OEM data"),
                ),
            ),
            (
                "output_sensor_state",
                Checkbox(
                    title=_("Sensor state"),
                    label=_("Enable"),
                    help=_("Output sensor state"),
                ),
            ),
            (
                "output_sensor_thresholds",
                Checkbox(
                    title=_("Sensor threshold"),
                    label=_("Enable"),
                    help=_("Output sensor thresholds"),
                ),
            ),
            (
                "ignore_not_available_sensors",
                Checkbox(
                    title=_("Suppress not available sensors"),
                    label=_("Enable"),
                    help=_("Ignore not-available (i.e. N/A) sensors in output"),
                ),
            ),
        ],
        optional_keys=[
            "ipmi_driver",
            "driver_type",
            "quiet_cache",
            "sdr_cache_recreate",
            "interpret_oem_data",
            "output_sensor_state",
            "output_sensor_thresholds",
            "ignore_not_available_sensors",
            "BMC_key",
        ],
    )
def parameter_valuespec(self):
    """Parameter valuespec for the Oracle Data-Guard statistics check.

    Fix: corrected the user-facing typo "addional" -> "additional" in the
    Active Data-Guard Option help text.
    """
    return Dictionary(
        help=_("The Data-Guard statistics are available in Oracle Enterprise Edition with enabled Data-Guard. "
               "The <tt>init.ora</tt> parameter <tt>dg_broker_start</tt> must be <tt>TRUE</tt> for this check. "
               "The apply and transport lag can be configured with this rule."),
        elements=[
            ("active_dataguard_option",
             MonitoringState(
                 title=_("State in case of Active Data-Guard Option is active: "),
                 help=_("The Active Data-Guard Option needs an additional License from Oracle."),
                 default_value=1,
             )),
            ("primary_broker_state",
             Checkbox(
                 title=_("Check State of Broker on Primary: "),
                 default_value=False,
                 help=_("Data-Guards with dg_broker_start=false needs Ignore Brokerstate to monitor "
                        "the Switchoverstate on Primary."),
             )),
            ("apply_lag",
             Tuple(
                 title=_("Apply Lag Maximum Time"),
                 help=_("The maximum limit for the apply lag in <tt>v$dataguard_stats</tt>."),
                 elements=[
                     Age(title=_("Warning at"), ),
                     Age(title=_("Critical at"), )
                 ],
             )),
            ("apply_lag_min",
             Tuple(
                 title=_("Apply Lag Minimum Time"),
                 help=_("The minimum limit for the apply lag in <tt>v$dataguard_stats</tt>. "
                        "This is only useful if also <i>Apply Lag Maximum Time</i> has been configured."),
                 elements=[
                     Age(title=_("Warning at"), ),
                     Age(title=_("Critical at"), )
                 ],
             )),
            ("transport_lag",
             Tuple(
                 title=_("Transport Lag"),
                 help=_("The limit for the transport lag in <tt>v$dataguard_stats</tt>"),
                 elements=[
                     Age(title=_("Warning at"), ),
                     Age(title=_("Critical at"), )
                 ],
             )),
        ],
    )
def vs_bulk_discovery(render_form=False, include_subfolders=True):
    """Valuespec for the bulk service-discovery dialog.

    ``render_form`` switches the Dictionary rendering mode; the subfolder
    checkbox is only offered when ``include_subfolders`` is True.
    """
    selection_elements: List[ValueSpec] = []
    if include_subfolders:
        selection_elements.append(
            Checkbox(label=_("Include all subfolders"), default_value=True))
    selection_elements += [
        Checkbox(
            label=_("Only include hosts that failed on previous discovery"),
            default_value=False),
        Checkbox(label=_("Only include hosts with a failed discovery check"),
                 default_value=False),
        Checkbox(label=_("Exclude hosts where the agent is unreachable"),
                 default_value=False),
    ]
    return Dictionary(
        title=_("Bulk discovery"),
        render="form" if render_form else "normal",
        elements=[
            (
                "mode",
                DropdownChoice(
                    title=_("Mode"),
                    default_value="new",
                    choices=[
                        ("new", _("Add unmonitored services and new host labels")),
                        ("remove", _("Remove vanished services")),
                        (
                            "fixall",
                            _("Add unmonitored services and new host labels, remove vanished services"),
                        ),
                        ("refresh", _("Refresh all services (tabula rasa), add new host labels")),
                        ("only-host-labels", _("Only discover new host labels")),
                    ],
                ),
            ),
            ("selection", Tuple(title=_("Selection"), elements=selection_elements)),
            (
                "performance",
                Tuple(
                    title=_("Performance options"),
                    elements=[
                        Checkbox(label=_("Do a full service scan"), default_value=True),
                        Integer(label=_("Number of hosts to handle at once"), default_value=10),
                    ],
                ),
            ),
            (
                "error_handling",
                Checkbox(
                    title=_("Error handling"),
                    label=_("Ignore errors in single check plugins"),
                    default_value=True,
                ),
            ),
        ],
        optional_keys=[],
    )
def valuespec(self):
    """Checkbox valuespec for the "discovery failed" flag (off by default)."""
    help_text = self._help_text()
    return Checkbox(
        title=_("Discovery failed"),
        default_value=False,
        help=help_text,
    )
def _parameter_valuespec_filestats():
    """Parameter valuespec for the mk_filestats file-group checks.

    Fixes: the "Show files" label was missing its _() translation wrapper,
    and the help texts contained the typos "expresion" and "mk_filstats".
    """
    return Dictionary(
        elements=file_size_age_elements + [
            (
                "mincount",
                Tuple(
                    title=_("Minimal file count"),
                    elements=[
                        Integer(title=_("Warning below")),
                        Integer(title=_("Critical below")),
                    ],
                ),
            ),
            (
                "maxcount",
                Tuple(
                    title=_("Maximal file count"),
                    elements=[
                        Integer(title=_("Warning at or above")),
                        Integer(title=_("Critical at or above")),
                    ],
                ),
            ),
            (
                "show_all_files",
                Checkbox(
                    title=_("Show files in service details"),
                    # Fix: label string was not marked for translation.
                    label=_("Show files"),
                    help=_("Display all files that have reached a WARN or a CRIT status in the "
                           "service details. Note: displaying the files leads to a performance loss "
                           "for large numbers of files within the file group. Please enable this feature "
                           "only if it is needed."),
                ),
            ),
            (
                "additional_rules",
                ListOf(
                    valuespec=Tuple(elements=[
                        TextInput(
                            title=_("Display name"),
                            help=_("Specify a user-friendly name that will be displayed in the service "
                                   "details, along with the pattern to match."),
                        ),
                        RegExp(
                            title=_("Filename/- expression"),
                            mode="case_sensitive",
                            size=70,
                        ),
                        Dictionary(elements=file_size_age_elements),
                    ], ),
                    title=_("Additional rules for outliers"),
                    # Fix: typo "expresion" -> "expression".
                    help=_("This feature is to apply different rules to files that are "
                           "inconsistent with the files expected in this file group. "
                           "This means that the rules set for the file group are overwritten. "
                           "You can specify a filename or a regular expression, and additional "
                           "rules that are applied to the matching files. In case of multiple "
                           "matching rules, the first matching rule is applied. "
                           "Note: this feature is intended for outliers, and is therefore not "
                           "suitable to configure subgroups. "),
                ),
            ),
        ],
        # Fix: typo "mk_filstats" -> "mk_filestats" (the plugin's actual name,
        # as used at the end of this same sentence).
        help=_("Here you can impose various levels on the results reported by the"
               " mk_filestats plugin. Note that some levels only apply to a matching"
               " output format (e.g. max/min count levels are not applied if only the"
               " smallest, largest, oldest and newest file is reported). In order to"
               " receive the required data, you must configure the plugin mk_filestats."),
    )
def liveproxyd_connection_params_elements(cls):
    """Dictionary elements for tuning a liveproxyd site connection.

    Defaults are taken from ConfigDomainLiveproxy so the GUI mirrors the
    daemon's built-in settings.
    """
    defaults = ConfigDomainLiveproxy.connection_params_defaults()
    return [
        (
            "channels",
            Integer(
                title=_("Number of channels to keep open"),
                minvalue=2,
                maxvalue=50,
                default_value=defaults["channels"],
            ),
        ),
        (
            "heartbeat",
            Tuple(
                title=_("Regular heartbeat"),
                orientation="float",
                elements=[
                    Integer(
                        label=_("One heartbeat every"),
                        unit=_("sec"),
                        minvalue=1,
                        default_value=defaults["heartbeat"][0],
                    ),
                    Float(
                        label=_("with a timeout of"),
                        unit=_("sec"),
                        minvalue=0.1,
                        default_value=defaults["heartbeat"][1],
                        display_format="%.1f",
                    ),
                ],
            ),
        ),
        (
            "channel_timeout",
            Float(
                title=_("Timeout waiting for a free channel"),
                minvalue=0.1,
                default_value=defaults["channel_timeout"],
                unit=_("sec"),
            ),
        ),
        (
            "query_timeout",
            Float(
                title=_("Total query timeout"),
                minvalue=0.1,
                unit=_("sec"),
                default_value=defaults["query_timeout"],
            ),
        ),
        (
            "connect_retry",
            Float(
                title=_("Cooling period after failed connect/heartbeat"),
                minvalue=0.1,
                unit=_("sec"),
                default_value=defaults["connect_retry"],
            ),
        ),
        (
            "cache",
            Checkbox(
                title=_("Enable Caching"),
                label=_("Cache several non-status queries"),
                help=_("This option will enable the caching of several queries that "
                       "need no current data. This reduces the number of Livestatus "
                       "queries to sites and cuts down the response time of remote "
                       "sites with large latencies."),
                default_value=defaults["cache"],
            ),
        ),
    ]
def _parameter_valuespec_logwatch_ec():
    """Rule valuespec for forwarding logwatch messages to the Event Console.

    The rule is either "no forwarding" (a fixed empty string) or a dictionary
    of forwarding settings: transport method (local EC, unix socket, spool
    directory or remote syslog via TCP/UDP), syslog facility, logfile
    restriction and several monitoring/reclassification toggles.
    """
    spool_spec = Dictionary(
        title=_("Spool messages that could not be sent"),
        help=_("Messages that can not be forwarded, e.g. when the target Event Console is "
               "not running, can temporarily be stored locally. Forwarding is tried again "
               "on next execution. When messages are spooled, the check will go into WARNING "
               "state. In case messages are dropped by the rules below, the check will shortly "
               "go into CRITICAL state for this execution."),
        elements=[
            ("max_age",
             Age(
                 title=_("Maximum spool duration"),
                 help=_("Messages that are spooled longer than this time will be thrown away."),
                 # 1 week should be fine (if size is not exceeded)
                 default_value=60 * 60 * 24 * 7,
             )),
            ("max_size",
             Filesize(
                 title=_("Maximum spool size"),
                 help=_("When the total size of spooled messages exceeds this number, the oldest "
                        "messages of the currently spooled messages is thrown away until the left "
                        "messages have the half of the maximum size."),
                 default_value=500000,  # do not save more than 500k of message
             )),
        ],
        optional_keys=[],
    )

    def _address_port_elements():
        # Address/port fields shared by the TCP and UDP remote targets.
        return [
            ("address", TextInput(title=_("Address"), allow_empty=False)),
            ("port", NetworkPort(title=_("Port"), default_value=514)),
        ]

    remote_syslog_spec = CascadingDropdown(
        title=_("Remote: Send events to remote syslog host"),
        choices=[
            ("tcp", _("Send via TCP"),
             Dictionary(
                 elements=_address_port_elements() + [("spool", spool_spec)],
                 optional_keys=["spool"],
             )),
            ("udp", _("Send via UDP"),
             Dictionary(
                 elements=_address_port_elements(),
                 optional_keys=[],
             )),
        ],
    )

    method_spec = Transform(
        # TODO: Clean this up to some CascadingDropdown()
        Alternative(
            title=_("Forwarding Method"),
            elements=[
                FixedValue(
                    "",
                    title=_("Local: Send events to local Event Console in same OMD site"),
                    totext=_("Directly forward to Event Console"),
                ),
                TextInput(
                    title=_("Local: Send events to local Event Console into unix socket"),
                    allow_empty=False,
                ),
                FixedValue(
                    "spool:",
                    title=_("Local: Spooling - Send events to local event console in same OMD site"),
                    totext=_("Spool to Event Console"),
                ),
                Transform(
                    TextInput(allow_empty=False),
                    title=_("Local: Spooling - Send events to local Event Console into given spool directory"),
                    forth=lambda x: x[6:],  # remove prefix
                    back=lambda x: "spool:" + x,  # add prefix
                ),
                remote_syslog_spec,
            ],
            # Map a stored value onto the matching alternative above.
            match=lambda x: 4 if isinstance(x, tuple) else (0 if not x else (
                2 if x == "spool:" else (3 if x.startswith("spool:") else 1))),
        ),
        # migrate old (tcp, address, port) tuple to new dict
        forth=lambda v: (v[0], {
            "address": v[1],
            "port": v[2]
        }) if (isinstance(v, tuple) and not isinstance(v[1], dict)) else v,
    )

    forward_spec = Dictionary(
        title=_("Forward Messages to Event Console"),
        elements=[
            ("method", method_spec),
            ("facility",
             DropdownChoice(
                 title=_("Syslog facility for forwarded messages"),
                 help=_("When forwarding messages and no facility can be extracted from the "
                        "message this facility is used."),
                 choices=mkeventd.syslog_facilities,
                 default_value=17,  # local1
             )),
            ("restrict_logfiles",
             ListOfStrings(
                 title=_("Restrict Logfiles (Prefix matching regular expressions)"),
                 help=_('Put the item names of the logfiles here. For example "System$" '
                        'to select the service "LOG System". You can use regular expressions '
                        "which must match the beginning of the logfile name."),
             )),
            ("monitor_logfilelist",
             Checkbox(
                 title=_("Monitoring of forwarded logfiles"),
                 label=_("Warn if list of forwarded logfiles changes"),
                 help=_("If this option is enabled, the check monitors the list of forwarded "
                        "logfiles and will warn you if at any time a logfile is missing or exceeding "
                        "when compared to the initial list that was snapshotted during service detection. "
                        "Reinventorize this check in order to make it OK again."),
             )),
            ("expected_logfiles",
             ListOf(
                 TextInput(),
                 title=_("List of expected logfiles"),
                 help=_("When the monitoring of forwarded logfiles is enabled, the check verifies that "
                        "all of the logfiles listed here are reported by the monitored system."),
             )),
            ("logwatch_reclassify",
             Checkbox(
                 title=_("Reclassify messages before forwarding them to the EC"),
                 label=_("Apply logwatch patterns"),
                 help=_("If this option is enabled, the logwatch lines are first reclassified by the logwatch "
                        "patterns before they are sent to the event console. If you reclassify specific lines to "
                        "IGNORE they are not forwarded to the event console. This takes the burden from the "
                        "event console to process the message itself through all of its rulesets. The reclassifcation "
                        "of each line takes into account from which logfile the message originates. So you can create "
                        "logwatch reclassification rules specifically designed for a logfile <i>access.log</i>, "
                        "which do not apply to other logfiles."),
             )),
            ("separate_checks",
             Checkbox(
                 title=_("Create a separate check for each logfile"),
                 label=_("Separate check"),
                 help=_("If this option is enabled, there will be one separate check for each logfile found during "
                        "the service discovery. This option also changes the behaviour for unknown logfiles. "
                        "The default logwatch check forwards all logfiles to the event console, even logfiles "
                        "which were not known during the service discovery. Creating one check per logfile changes "
                        "this behaviour so that any data from unknown logfiles is discarded."),
             )),
        ],
        optional_keys=[
            "restrict_logfiles",
            "expected_logfiles",
            "logwatch_reclassify",
            "separate_checks",
        ],
        ignored_keys=["service_level"],
    )

    return Alternative(
        title=_("Forwarding"),
        help=_("Instead of using the regular logwatch check all lines received by logwatch can "
               "be forwarded to a Check_MK event console daemon to be processed. The target event "
               "console can be configured for each host in a separate rule."),
        elements=[
            FixedValue(
                "",
                totext=_("Messages are handled by logwatch."),
                title=_("No forwarding"),
            ),
            forward_spec,
        ],
        default_value="",
    )
def _parameter_valuespec_disk_io():
    """Rule valuespec for disk IO monitoring.

    Covers read/write throughput and operation-rate levels, IO latency and
    queue-length levels, optional averaging, and toggles for collecting
    latency/queue-length performance data.
    """
    return Dictionary(elements=[
        ("read",
         Levels(title=_("Read throughput"),
                unit=_("MB/s"),
                default_value=None,
                default_levels=(50.0, 100.0))),
        ("write",
         Levels(title=_("Write throughput"),
                unit=_("MB/s"),
                default_value=None,
                default_levels=(50.0, 100.0))),
        ("average",
         Integer(title=_("Average"),
                 help=_("When averaging is set, a floating average value "
                        "of the disk throughput is computed and the levels for read "
                        "and write will be applied to the average instead of the current "
                        "value."),
                 default_value=5,
                 minvalue=1,
                 unit=_("minutes"))),
        ("latency",
         Tuple(
             title=_("IO Latency"),
             elements=[
                 Float(title=_("warning at"), unit=_("ms"), default_value=80.0),
                 Float(title=_("critical at"), unit=_("ms"), default_value=160.0),
             ],
         )),
        (
            "latency_perfdata",
            Checkbox(title=_("Performance Data for Latency"),
                     label=_("Collect performance data for disk latency"),
                     help=_("Note: enabling performance data for the latency might "
                            "cause incompatibilities with existing historical data "
                            "if you are running PNP4Nagios in SINGLE mode.")),
        ),
        ("read_ql",
         Tuple(
             title=_("Read Queue-Length"),
             elements=[
                 Float(title=_("warning at"), default_value=80.0),
                 Float(title=_("critical at"), default_value=90.0),
             ],
         )),
        ("write_ql",
         Tuple(
             title=_("Write Queue-Length"),
             elements=[
                 Float(title=_("warning at"), default_value=80.0),
                 Float(title=_("critical at"), default_value=90.0),
             ],
         )),
        (
            "ql_perfdata",
            # Fixed copy-paste error: label and help previously spoke about
            # "disk latency" although this option controls queue-length
            # performance data (see the title).
            Checkbox(title=_("Performance Data for Queue Length"),
                     label=_("Collect performance data for the queue length"),
                     help=_("Note: enabling performance data for the queue length might "
                            "cause incompatibilities with existing historical data "
                            "if you are running PNP4Nagios in SINGLE mode.")),
        ),
        ("read_ios",
         Levels(
             title=_("Read operations"),
             unit=_("1/s"),
             default_levels=(400.0, 600.0),
         )),
        ("write_ios",
         Levels(
             title=_("Write operations"),
             unit=_("1/s"),
             default_levels=(300.0, 400.0),
         )),
    ],)
def _parameter_valuespec_netapp_volumes():
    """Rule valuespec for NetApp volume monitoring.

    Covers usage levels (with magic-factor adaptation for large volumes),
    per-protocol performance data selection, inode levels and the usual
    filesystem trend computation options.
    """
    volume_levels = Alternative(
        title=_("Levels for volume"),
        show_alternative_title=True,
        default_value=(80.0, 90.0),
        match=match_dual_level_type,
        elements=[
            get_free_used_dynamic_valuespec("used", "volume"),
            Transform(
                get_free_used_dynamic_valuespec("free", "volume", default_value=(20.0, 10.0)),
                forth=transform_filesystem_free,
                back=transform_filesystem_free,
            ),
        ],
    )

    inode_levels = Alternative(
        title=_("Levels for Inodes"),
        help=_("The number of remaining inodes on the filesystem. "
               "Please note that this setting has no effect on some filesystem checks."),
        elements=[
            Tuple(
                title=_("Percentage free"),
                elements=[
                    Percentage(title=_("Warning if less than")),
                    Percentage(title=_("Critical if less than")),
                ],
            ),
            Tuple(
                title=_("Absolute free"),
                elements=[
                    Integer(title=_("Warning if less than"),
                            size=10,
                            unit=_("inodes"),
                            minvalue=0,
                            default_value=10000),
                    Integer(title=_("Critical if less than"),
                            size=10,
                            unit=_("inodes"),
                            minvalue=0,
                            default_value=5000),
                ],
            ),
        ],
        default_value=(10.0, 5.0),
    )

    return Dictionary(elements=[
        ("levels", volume_levels),
        ("perfdata",
         ListChoice(
             title=_("Performance data for protocols"),
             help=_("Specify for which protocol performance data should get recorded."),
             choices=[
                 ("", _("Summarized data of all protocols")),
                 ("nfs", _("NFS")),
                 ("cifs", _("CIFS")),
                 ("san", _("SAN")),
                 ("fcp", _("FCP")),
                 ("iscsi", _("iSCSI")),
             ],
         )),
        ("magic",
         Float(title=_("Magic factor (automatic level adaptation for large volumes)"),
               default_value=0.8,
               minvalue=0.1,
               maxvalue=1.0)),
        ("magic_normsize",
         Integer(title=_("Reference size for magic factor"),
                 default_value=20,
                 minvalue=1,
                 unit=_("GB"))),
        ("levels_low",
         Tuple(
             title=_("Minimum levels if using magic factor"),
             help=_("The volume levels will never fall below these values, when using "
                    "the magic factor and the volume is very small."),
             elements=[
                 Percentage(title=_("Warning if above"),
                            unit=_("% usage"),
                            allow_int=True,
                            default_value=50),
                 Percentage(title=_("Critical if above"),
                            unit=_("% usage"),
                            allow_int=True,
                            default_value=60),
             ],
         )),
        ("inodes_levels", inode_levels),
        ("show_inodes",
         DropdownChoice(
             title=_("Display inode usage in check output..."),
             choices=[
                 ("onproblem", _("Only in case of a problem")),
                 ("onlow", _("Only in case of a problem or if inodes are below 50%")),
                 ("always", _("Always")),
             ],
             default_value="onlow",
         )),
        ("trend_range",
         Optional(Integer(title=_("Time Range for filesystem trend computation"),
                          default_value=24,
                          minvalue=1,
                          unit=_("hours")),
                  title=_("Trend computation"),
                  label=_("Enable trend computation"))),
        ("trend_mb",
         Tuple(
             title=_("Levels on trends in MB per time range"),
             elements=[
                 Integer(title=_("Warning at"), unit=_("MB / range"), default_value=100),
                 Integer(title=_("Critical at"), unit=_("MB / range"), default_value=200),
             ],
         )),
        ("trend_perc",
         Tuple(
             title=_("Levels for the percentual growth per time range"),
             elements=[
                 Percentage(title=_("Warning at"), unit=_("% / range"), default_value=5),
                 Percentage(title=_("Critical at"), unit=_("% / range"), default_value=10),
             ],
         )),
        ("trend_timeleft",
         Tuple(
             title=_("Levels on the time left until the filesystem gets full"),
             elements=[
                 Integer(title=_("Warning if below"), unit=_("hours"), default_value=12),
                 Integer(title=_("Critical if below"), unit=_("hours"), default_value=6),
             ],
         )),
        ("trend_showtimeleft",
         Checkbox(title=_("Display time left in check output"),
                  label=_("Enable"),
                  help=_("Normally, the time left until the disk is full is only displayed when "
                         "the configured levels have been breached. If you set this option "
                         "the check always reports this information"))),
        ("trend_perfdata",
         Checkbox(title=_("Trend performance data"),
                  label=_("Enable generation of performance data from trends"))),
    ],)
def _parameter_valuespec_oracle_dataguard_stats():
    """Rule valuespec for the Oracle Data-Guard statistics check.

    Lets the user configure states for the Active Data-Guard option and the
    MRP process, whether the broker state is checked on the primary, and
    levels on the apply/transport lag from <tt>v$dataguard_stats</tt>.
    """
    return Dictionary(
        help=_(
            "The Data-Guard statistics are available in Oracle Enterprise Edition with enabled Data-Guard. "
            "The <tt>init.ora</tt> parameter <tt>dg_broker_start</tt> must be <tt>TRUE</tt> for this check. "
            "The apply and transport lag can be configured with this rule."),
        elements=[
            (
                "active_dataguard_option",
                MonitoringState(
                    title=_("State in case of Active Data-Guard Option is active: "),
                    # Fixed typo "addional" -> "additional".
                    help=_("The Active Data-Guard Option needs an additional License from Oracle."),
                    default_value=1,
                ),
            ),
            (
                "mrp_option",
                Tuple(
                    title=_("State in case Managed Recovery Process (MRP) is started or stopped"),
                    # Fixed typo "usally" -> "usually".
                    help=_("The MRP is usually started on each physical "
                           "standby node. But in some setups this may vary and the process should "
                           "only be started on specific or random nodes. Here you may define which "
                           "state a specific node or service should have in case the MRP is started "
                           "or stopped."),
                    elements=[
                        MonitoringState(title=_("State in case MRP is started"), default_value=0),
                        MonitoringState(title=_("State in case MRP is stopped"), default_value=2),
                    ],
                ),
            ),
            (
                "primary_broker_state",
                Checkbox(
                    title=_("Check State of Broker on Primary: "),
                    default_value=False,
                    help=_("Data-Guards with dg_broker_start=false needs Ignore Brokerstate to monitor "
                           "the Switchoverstate on Primary."),
                ),
            ),
            (
                "apply_lag",
                Tuple(
                    title=_("Apply Lag Maximum Time"),
                    help=_("The maximum limit for the apply lag in <tt>v$dataguard_stats</tt>."),
                    elements=[
                        Age(title=_("Warning at")),
                        Age(title=_("Critical at")),
                    ],
                ),
            ),
            (
                "apply_lag_min",
                Tuple(
                    title=_("Apply Lag Minimum Time"),
                    help=_("The minimum limit for the apply lag in <tt>v$dataguard_stats</tt>. "
                           "This is only useful if also <i>Apply Lag Maximum Time</i> has been configured."),
                    elements=[
                        Age(title=_("Warning at")),
                        Age(title=_("Critical at")),
                    ],
                ),
            ),
            (
                "transport_lag",
                Tuple(
                    title=_("Transport Lag"),
                    help=_("The limit for the transport lag in <tt>v$dataguard_stats</tt>"),
                    elements=[
                        Age(title=_("Warning at")),
                        Age(title=_("Critical at")),
                    ],
                ),
            ),
        ],
    )
def _valuespec_active_checks_bi_aggr():
    """Rule valuespec for the active check querying a BI aggregation's state.

    Wrapped in a Transform so parameter sets written by older versions are
    migrated on load via _active_checks_bi_aggr_transform_from_disk.
    """
    return Transform(
        valuespec=Dictionary(
            title=_("Check State of BI Aggregation"),
            help=_(
                "Connect to the local or a remote monitoring host, which uses Check_MK BI to aggregate "
                "several states to a single BI aggregation, which you want to show up as a single "
                "service."),
            elements=[
                (
                    "base_url",
                    TextInput(
                        title=_("Base URL (OMD Site)"),
                        help=_(
                            "The base URL to the monitoring instance. For example <tt>http://mycheckmk01/mysite</tt>. "
                            "You can use macros like <tt>$HOSTADDRESS$</tt> and <tt>$HOSTNAME$</tt> within this URL to "
                            "make them be replaced by the hosts values."),
                        size=60,
                        allow_empty=False,
                    ),
                ),
                (
                    "aggregation_name",
                    TextInput(
                        title=_("Aggregation Name"),
                        help=_(
                            "The name of the aggregation to fetch. It will be added to the service description. You can "
                            "use macros like <tt>$HOSTADDRESS$</tt> and <tt>$HOSTNAME$</tt> within this parameter to "
                            "make them be replaced by the hosts values. The aggregation name is the title in the "
                            "top-level-rule of your BI pack."),
                        allow_empty=False,
                    ),
                ),
                (
                    "credentials",
                    CascadingDropdown(
                        choices=[
                            ("automation", _("Use the credentials of the 'automation' user")),
                            (
                                "configured",
                                _("Use the following credentials"),
                                Tuple(
                                    elements=[
                                        TextInput(
                                            title=_("Automation Username"),
                                            allow_empty=True,
                                            help=_(
                                                "The name of the automation account to use for fetching the BI aggregation via HTTP. Note: You may "
                                                "also set credentials of a standard user account, though it is disadvised. "
                                                "Using the credentials of a standard user also requires a valid authentication method set in the "
                                                "optional parameters."),
                                        ),
                                        IndividualOrStoredPassword(
                                            title=_("Automation Secret"),
                                            help=_("Valid automation secret for the automation user"),
                                            allow_empty=False,
                                        ),
                                    ]
                                ),
                            ),
                        ],
                        # Fixed grammar: "Here you can configured" -> "configure",
                        # "user need" -> "user needs".
                        help=_(
                            "Here you can configure the credentials to be used. Keep in mind that the <tt>automation</tt> user needs "
                            "to exist if you choose this option"),
                        title=_("Login credentials"),
                        default_value="automation",
                    ),
                ),
                (
                    "optional",
                    Dictionary(
                        title=_("Optional parameters"),
                        elements=[
                            (
                                "auth_mode",
                                DropdownChoice(
                                    title=_("Authentication Mode"),
                                    default_value="header",
                                    choices=[
                                        ("header", _("Authorization Header")),
                                        ("basic", _("HTTP Basic")),
                                        ("digest", _("HTTP Digest")),
                                        ("kerberos", _("Kerberos")),
                                    ],
                                    deprecated_choices=("cookie",),
                                    invalid_choice_error=_(
                                        "The specified choice is no longer available. "
                                        "Please use another, like 'header' instead."),
                                ),
                            ),
                            (
                                "timeout",
                                Integer(
                                    title=_("Seconds before connection times out"),
                                    unit=_("sec"),
                                    default_value=60,
                                ),
                            ),
                            (
                                "in_downtime",
                                DropdownChoice(
                                    title=_("State, if BI aggregate is in scheduled downtime"),
                                    choices=[
                                        (None, _("Use normal state, ignore downtime")),
                                        ("ok", _("Force to be OK")),
                                        ("warn", _("Force to be WARN, if aggregate is not OK")),
                                    ],
                                ),
                            ),
                            (
                                "acknowledged",
                                DropdownChoice(
                                    title=_("State, if BI aggregate is acknowledged"),
                                    choices=[
                                        (None, _("Use normal state, ignore acknowledgement")),
                                        ("ok", _("Force to be OK")),
                                        ("warn", _("Force to be WARN, if aggregate is not OK")),
                                    ],
                                ),
                            ),
                            (
                                "track_downtimes",
                                Checkbox(
                                    title=_("Track downtimes"),
                                    label=_("Automatically track downtimes of aggregation"),
                                    help=_(
                                        "If this is active, the check will automatically go into downtime "
                                        "whenever the aggregation does. This downtime is also cleaned up "
                                        "automatically when the aggregation leaves downtime. "
                                        "Downtimes you set manually for this check are unaffected."),
                                ),
                            ),
                        ],
                    ),
                ),
            ],
            optional_keys=False,
        ),
        forth=_active_checks_bi_aggr_transform_from_disk,
    )
def vs_parameters(cls):
    """Return the valuespec elements for configuring this dashlet.

    One 'rows' list (each row has a title and a filter query against hosts,
    services or events) plus three display toggles.
    """
    query_choices = [
        ("hosts", _("Hosts"), visuals.VisualFilterList(info_list=["host"])),
        ("services", _("Services"), visuals.VisualFilterList(info_list=["host", "service"])),
        ("events", _("Events"), visuals.VisualFilterList(info_list=["host", "event"])),
    ]

    row_spec = Dictionary(
        elements=[
            ("title", TextInput(title=_("Title"), allow_empty=False)),
            ("query",
             CascadingDropdown(
                 orientation="horizontal",
                 title=_("Query"),
                 label=_("Table") + ": ",
                 choices=query_choices,
             )),
        ],
        optional_keys=[],
    )

    return [
        ("rows",
         ListOf(
             valuespec=row_spec,
             title=_("Rows"),
             add_label=_("Add new row"),
             del_label=_("Delete this row"),
             allow_empty=False,
         )),
        ("show_stale",
         Checkbox(title=_("Show stale hosts and services"), default_value=True)),
        ("show_failed_notifications",
         Checkbox(title=_("Show failed notifications"), default_value=True)),
        ("show_sites_not_connected",
         Checkbox(title=_("Display a message if sites are not connected"),
                  default_value=True)),
    ]
def dashlet_vs_general_settings(dashlet_type: Type[Dashlet], single_infos: List[str]):
    """Build the 'General Settings' form for a dashlet of the given type.

    The element type is shown as a fixed value; custom title and title URL
    are optional keys.
    """
    type_element = (
        "type",
        FixedValue(
            dashlet_type.type_name(),
            totext=dashlet_type.title(),
            title=_("Element type"),
        ),
    )

    background_element = (
        "background",
        Checkbox(
            title=_("Colored background"),
            label=_("Render background"),
            help=_("Render gray background color behind the elements content."),
            default_value=True,
        ),
    )

    show_title_element = (
        "show_title",
        DropdownChoice(
            title=_("Show title header"),
            help=_("Render the titlebar including title and link above the element."),
            choices=[
                (False, _("Don't show any header")),
                (True, _("Show header with highlighted background")),
                ("transparent", _("Show title without any background")),
            ],
            default_value=True,
        ),
    )

    title_element = (
        "title",
        TextUnicode(
            title=_("Custom title") + "<sup>*</sup>",
            placeholder=_("This option is macro-capable, please check the inline help for more "
                          "information."),
            help=" ".join((
                _("Most elements have a hard coded static title and some are aware of their "
                  "content and set the title dynamically, like the view snapin, which "
                  "displays the title of the view. If you like to use any other title, set it "
                  "here."),
                _title_help_text_for_macros(dashlet_type),
            )),
            size=75,
        ),
    )

    title_url_element = (
        "title_url",
        TextUnicode(
            title=_("Link of Title"),
            help=_("The URL of the target page the link of the element should link to."),
            size=50,
        ),
    )

    return Dictionary(
        title=_("General Settings"),
        render="form",
        optional_keys=["title", "title_url"],
        elements=[
            type_element,
            visuals.single_infos_spec(single_infos),
            background_element,
            show_title_element,
            title_element,
            title_url_element,
        ],
    )
def vs_graph_render_option_elements(default_values=None, exclude=None):
    """Return the dictionary elements for the graph render options.

    default_values: optional dict of caller-specific defaults; any key not
        given falls back to artwork.get_default_graph_render_options().
    exclude: optional iterable of element keys to drop from the result.
    """
    # Allow custom default values to be specified by the caller. This is, for example,
    # needed by the dashlets which should add the host/service by default.
    if default_values is None:
        default_values = artwork.get_default_graph_render_options()
    else:
        default_values = default_values.copy()
        for k, v in artwork.get_default_graph_render_options().items():
            default_values.setdefault(k, v)

    elements = [
        ("font_size", Fontsize(default_value=default_values["font_size"])),
        ("show_title",
         DropdownChoice(
             title=_("Title"),
             choices=[
                 (False, _("Don't show graph title")),
                 (True, _("Show graph title")),
                 ("inline", _("Show graph title on graph area")),
             ],
             default_value=default_values["show_title"],
         )),
        ("title_format",
         Transform(
             vs_title_infos(),
             forth=transform_graph_render_options_title_format,
         )),
        ("show_graph_time",
         Checkbox(
             title=_("Show graph time range"),
             label=_("Show the graph time range on top of the graph"),
             default_value=default_values["show_graph_time"],
         )),
        ("show_margin",
         Checkbox(
             title=_("Show margin round the graph"),
             label=_("Show a margin round the graph"),
             default_value=default_values["show_margin"],
         )),
        ("show_legend",
         Checkbox(
             title=_("Show legend"),
             label=_("Show the graph legend"),
             default_value=default_values["show_legend"],
         )),
        ("show_vertical_axis",
         Checkbox(
             title=_("Show vertical axis"),
             label=_("Show the graph vertical axis"),
             default_value=default_values["show_vertical_axis"],
         )),
        ("vertical_axis_width",
         CascadingDropdown(
             title=_("Vertical axis width"),
             orientation="horizontal",
             choices=[
                 ("fixed", _("Use fixed width (relative to font size)")),
                 ("explicit", _("Use absolute width:"),
                  Float(title="", default_value=40.0, unit=_("pt"))),
             ],
         )),
        ("show_time_axis",
         Checkbox(
             title=_("Show time axis"),
             label=_("Show the graph time axis"),
             default_value=default_values["show_time_axis"],
         )),
        ("show_controls",
         Checkbox(
             title=_("Show controls"),
             label=_("Show the graph controls"),
             default_value=default_values["show_controls"],
         )),
        ("show_pin",
         Checkbox(
             title=_("Show pin"),
             label=_("Show the pin"),
             default_value=default_values["show_pin"],
         )),
        ("show_time_range_previews",
         Checkbox(
             title=_("Show time range previews"),
             # Fixed i18n inconsistency: this was the only label not marked
             # for translation with _().
             label=_("Show previews"),
             default_value=default_values["show_time_range_previews"],
         )),
        ("foreground_color",
         GraphColor(
             title=_("Foreground color"),
             default_value=default_values["foreground_color"],
         )),
        ("background_color",
         GraphColor(
             title=_("Background color"),
             default_value=default_values["background_color"],
         )),
        ("canvas_color",
         GraphColor(
             title=_("Canvas color"),
             default_value=default_values["canvas_color"],
         )),
    ]

    if exclude:
        elements = [x for x in elements if x[0] not in exclude]

    return elements
def _vs_mandatory_elements(self) -> list[DictionaryEntry]:
    """Compose the mandatory dictionary elements for this edit mode.

    The unique ID is editable only while creating a new object; existing
    objects show it as a fixed value. The site selector and the disable
    checkbox are appended only when the mode type supports them.
    """
    if self._new:
        ident_attr: List = [
            (
                "ident",
                ID(
                    title=_("Unique ID"),
                    help=_(
                        "The ID must be a unique text. It will be used as an internal key "
                        "when objects refer to this object."),
                    default_value=self._default_id,
                    allow_empty=False,
                    size=80,
                ),
            ),
        ]
    else:
        ident_attr = [
            (
                "ident",
                FixedValue(
                    value=self._ident,
                    title=_("Unique ID"),
                ),
            ),
        ]

    site_attr = ([("site", self._mode_type.site_valuespec())]
                 if self._mode_type.is_site_specific() else [])

    disable_attr = []
    if self._mode_type.can_be_disabled():
        disable_attr.append((
            "disabled",
            Checkbox(
                title=_("Activation"),
                help=_("Disabled %s are kept in the configuration but are not active.")
                % self._mode_type.name_singular(),
                label=_("do not activate this %s") % self._mode_type.name_singular(),
            ),
        ))

    return (
        ident_attr
        + [
            (
                "title",
                TextInput(
                    title=_("Title"),
                    help=_("The title of the %s. It will be used as display name.")
                    % (self._mode_type.name_singular()),
                    allow_empty=False,
                    size=80,
                ),
            ),
            ("comment", RuleComment()),
            ("docu_url", DocumentationURL()),
        ]
        + disable_attr
        + site_attr)
Integer( title=_("Warning if below"), unit=_("hours"), default_value=12, ), Integer( title=_("Critical if below"), unit=_("hours"), default_value=6, ), ])), ("trend_showtimeleft", Checkbox( title=_("Display time left in check output"), label=_("Enable"), help= _("Normally, the time left until the disk is full is only displayed when " "the configured levels have been breached. If you set this option " "the check always reports this information"))), ("trend_perfdata", Checkbox(title=_("Trend performance data"), label=_("Enable generation of performance data from trends"))), ] filesystem_elements: List[_Tuple[str, ValueSpec]] = fs_levels_elements \ + fs_levels_elements_hack \ + fs_reserved_elements \ + fs_inodes_elements \ + fs_magic_elements \ + size_trend_elements
Percentage(title=_("High utilization at "), default_value=100.0), Age(title=_("Warning after "), default_value=5 * 60), Age(title=_("Critical after "), default_value=15 * 60), ], help=_("A single thread fully utilizing a single core (potentially due to a bug) " "may go unnoticed when only monitoring the total utilization of the CPU. " "With this configuration, check_mk will alert if a single core is " "exceeding a utilization threshold over an extended period of time." "This is currently only supported on linux and windows agents " "as well as devices monitored through the host-resource mib"))), ("core_util_graph", Checkbox( title=_("Graphs for individual cores"), label=_("Enable performance graph for utilization of individual cores"), help=_("This adds another graph to the performance CPU utilization " "details page, showing utilization of individual cores. " "Please note that this graph may be impractical on " "device with very many cores. " "This is currently only supported on linux and windows agents " "as well as devices monitored through the host-resource mib"))), ("iowait", Tuple( title=_("Levels on disk wait (IO wait)"), elements=[ Percentage(title=_("Warning at a disk wait of"), default_value=30.0), Percentage(title=_("Critical at a disk wait of"), default_value=50.0) ], help=_( "The disk wait is the total percentage of time all CPUs have nothing else to do but waiting " "for data coming from or going to disk. If you have a significant disk wait " "the the bottleneck of your server is IO. Please note that depending on the "