def _parameter_valuespec_k8s_pods_memory():
    """Valuespec for upper levels on Kubernetes pod memory usage."""

    def _size_levels(title):
        # Absolute warn/crit pair for one memory metric.
        return Tuple(
            title=title,
            elements=[
                Filesize(title=_("Warning at")),
                Filesize(title=_("Critical at")),
            ],
        )

    return Dictionary(elements=[
        ("rss", _size_levels(_("Resident memory usage"))),
        ("swap", _size_levels(_("Swap memory usage"))),
        ("usage_bytes", _size_levels(_("Total memory usage"))),
    ])
def _parameter_valuespec_storage_throughput() -> Dictionary:
    """Valuespec for upper levels on storage read/write/total throughput.

    Returns a Dictionary of three optional warn/crit pairs, one per
    throughput direction, each expressed as absolute bytes per second.
    """

    def _throughput_levels(title):
        # Absolute bytes-per-second warn/crit pair.
        return Tuple(
            title=title,
            elements=[
                Filesize(title=_("Warning at")),
                Filesize(title=_("Critical at")),
            ],
        )

    return Dictionary(elements=[
        ("read", _throughput_levels(_("Read throughput per second"))),
        ("write", _throughput_levels(_("Write throughput per second"))),
        ("total", _throughput_levels(_("Total throughput per second"))),
    ])
def _parameter_valuespec_graylog_cluster_traffic():
    """Valuespec for absolute levels on Graylog cluster traffic."""

    def _traffic_levels(title):
        # Absolute warn/crit pair for one traffic direction.
        return Tuple(
            title=title,
            elements=[
                Filesize(title=_("Warning at")),
                Filesize(title=_("Critical at")),
            ],
        )

    return Dictionary(elements=[
        ("input", _traffic_levels(_("Absolute levels for input traffic"))),
        ("output", _traffic_levels(_("Absolute levels for output traffic"))),
        ("decoded", _traffic_levels(_("Absolute levels for decoded traffic"))),
    ])
def _parameter_valuespec_memory_multiitem():
    """Valuespec for memory levels of one distinct memory area of a host."""
    percentage_levels = Tuple(
        title=_("Specify levels in percentage of total RAM"),
        elements=[
            Percentage(title=_("Warning at a memory usage of"),
                       default_value=80.0,
                       maxvalue=None),
            Percentage(title=_("Critical at a memory usage of"),
                       default_value=90.0,
                       maxvalue=None),
        ],
    )
    absolute_levels = Tuple(
        title=_("Specify levels in absolute usage values"),
        elements=[
            Filesize(title=_("Warning at")),
            Filesize(title=_("Critical at")),
        ],
    )
    return Dictionary(
        help=_(
            "The memory levels for one specific module of this host. This is relevant for hosts that have "
            "several distinct memory areas, e.g. pluggable cards"),
        elements=[
            ("levels",
             Alternative(
                 title=_("Memory levels"),
                 elements=[percentage_levels, absolute_levels],
             )),
        ],
        optional_keys=[],
    )
def _parameter_valuespec_docker_node_disk_usage():
    """Valuespec for size and count levels on Docker disk usage categories."""

    def _byte_levels(title):
        # Warn/crit pair in absolute bytes.
        return Tuple(title=title,
                     elements=[
                         Filesize(title=_("Warning at")),
                         Filesize(title=_("Critical at")),
                     ])

    def _count_levels(title):
        # Warn/crit pair as plain counts.
        return Tuple(title=title,
                     elements=[
                         Integer(title=_("Warning at")),
                         Integer(title=_("Critical at")),
                     ])

    return Dictionary(
        help=_(
            "Allows to define levels for the counts and size of Docker Containers, Images, Local Volumes, and the Build Cache."
        ),
        elements=[
            ("size", _byte_levels(_("Size"))),
            ("reclaimable", _byte_levels(_("Reclaimable"))),
            ("count", _count_levels(_("Total count"))),
            ("active", _count_levels(_("Active"))),
        ],
    )
def parameter_valuespec(self):
    """Valuespec for upper levels on read/write/total throughput per second."""

    def _throughput_levels(title):
        # Absolute bytes-per-second warn/crit pair.
        return Tuple(
            title=title,
            elements=[
                Filesize(title=_("Warning at")),
                Filesize(title=_("Critical at")),
            ],
        )

    return Dictionary(elements=[
        ("read", _throughput_levels(_("Read throughput per second"))),
        ("write", _throughput_levels(_("Write throughput per second"))),
        ("total", _throughput_levels(_("Total throughput per second"))),
    ])
def _parameter_valuespec_filestats():
    """Valuespec for age/size levels on a single file (mk_filestats plugin)."""

    def _levels(title, vs, warn_title, crit_title):
        # Generic warn/crit pair built from the given valuespec class.
        return Tuple(title=title,
                     elements=[vs(title=warn_title),
                               vs(title=crit_title)])

    return Dictionary(
        elements=[
            ("min_age",
             _levels(_("Minimal age of a file"), Age,
                     _("Warning if younger than"), _("Critical if younger than"))),
            ("max_age",
             _levels(_("Maximal age of a file"), Age,
                     _("Warning if older than"), _("Critical if older than"))),
            ("min_size",
             _levels(_("Minimal size of a file"), Filesize,
                     _("Warning if below"), _("Critical if below"))),
            ("max_size",
             _levels(_("Maximal size of a file"), Filesize,
                     _("Warning if above"), _("Critical if above"))),
        ],
        help=_("Here you can impose various levels the results reported by the"
               " mk_filstats plugin. Note that those levels only concern about a single file."),
    )
def _parameter_valuespec_plesk_backups():
    """Valuespec for monitoring backups configured for Plesk domains."""
    backup_age = Tuple(
        title=_("Maximum age of backups"),
        help=_("The maximum age of the last backup."),
        elements=[
            Age(title=_("Warning at")),
            Age(title=_("Critical at")),
        ],
    )
    total_size = Tuple(
        title=_("Maximum size of all files on backup space"),
        help=_("The maximum size of all files on the backup space. "
               "This might be set to the allowed quotas on the configured "
               "FTP server to be notified if the space limit is reached."),
        elements=[
            Filesize(title=_("Warning at")),
            Filesize(title=_("Critical at")),
        ],
    )
    return Dictionary(
        help=_("This check monitors backups configured for domains in plesk."),
        elements=[
            ("no_backup_configured_state",
             MonitoringState(title=_("State when no backup is configured"),
                             default_value=1)),
            ("no_backup_found_state",
             MonitoringState(title=_("State when no backup can be found"),
                             default_value=1)),
            ("backup_age", backup_age),
            ("total_size", total_size),
        ],
        optional_keys=["backup_age", "total_size"],
    )
def _parameter_valuespec_couchbase_vbuckets():
    """Valuespec for levels on Couchbase vBucket metrics.

    All titles are now wrapped in the translation marker ``_()`` — the
    original passed four bare string literals, so those titles were
    invisible to the i18n machinery, unlike every other title in this file.
    """
    return Dictionary(
        title=_("Couchbase vBuckets"),
        elements=[
            ("item_memory",
             Tuple(
                 title=_("Item memory size"),
                 elements=[
                     Filesize(title=_("Warning at")),
                     Filesize(title=_("Critical at")),
                 ],
             )),
            ("resident_items_ratio",
             Tuple(
                 # Lower levels: alert when the ratio drops to/below these.
                 title=_("Active vBuckets: Lower levels for resident items ratio"),
                 elements=[
                     Percentage(title=_("Warning at or below"), unit="%"),
                     Percentage(title=_("Critical at or below"), unit="%"),
                 ],
             )),
            ("vb_pending_num",
             Tuple(
                 title=_("Active vBuckets: Levels for number of pending vBuckets"),
                 elements=[
                     Integer(title=_("Warning at")),
                     Integer(title=_("Critical at")),
                 ],
             )),
            ("vb_replica_num",
             Tuple(
                 title=_("Replica vBuckets: Levels for total number of replica vBuckets"),
                 elements=[
                     Integer(title=_("Warning at")),
                     Integer(title=_("Critical at")),
                 ],
             )),
        ],
    )
def _parameter_valuespec_sap_hana_memory():
    """Valuespec for SAP HANA memory usage levels."""
    perc_used = Tuple(elements=[
        Percentage(title=_("Warning at a memory usage of"),
                   default_value=80.0,
                   maxvalue=None),
        Percentage(title=_("Critical at a memory usage of"),
                   default_value=90.0,
                   maxvalue=None),
    ])
    abs_free = Tuple(elements=[
        Filesize(title=_("Warning below")),
        Filesize(title=_("Critical below")),
    ])
    return Dictionary(
        elements=[
            ("levels",
             CascadingDropdown(
                 title=_("Levels for memory usage"),
                 choices=[
                     ("perc_used", _("Percentual levels for used memory"), perc_used),
                     ("abs_free", _("Absolute levels for free memory"), abs_free),
                     ("ignore", _("Do not impose levels")),
                 ],
             )),
        ],
        optional_keys=[],
    )
def parameter_valuespec(self):
    """Valuespec for memory levels of simple devices without a full OS."""
    return Transform(
        Dictionary(
            help=_("Memory levels for simple devices not running more complex OSs"),
            elements=[
                ("levels",
                 CascadingDropdown(
                     title=_("Levels for memory usage"),
                     choices=[
                         ("perc_used", _("Percentual levels for used memory"),
                          Tuple(elements=[
                              Percentage(title=_("Warning at a memory usage of"),
                                         default_value=80.0,
                                         maxvalue=None),
                              Percentage(title=_("Critical at a memory usage of"),
                                         default_value=90.0,
                                         maxvalue=None),
                          ])),
                         ("abs_free", _("Absolute levels for free memory"),
                          Tuple(elements=[
                              Filesize(title=_("Warning below")),
                              Filesize(title=_("Critical below")),
                          ])),
                         ("ignore", _("Do not impose levels")),
                     ],
                 )),
            ],
            optional_keys=[],
        ),
        # Convert default levels from discovered checks: a bare warn/crit
        # tuple becomes the dict form.  A conditional expression replaces
        # the original's fragile `... and ... or ...` idiom.
        forth=lambda v: v if isinstance(v, dict) else {"levels": ("perc_used", v)},
    )
def _parameter_valuespec_graylog_jvm():
    """Valuespec for absolute levels on Graylog JVM heap usage."""

    def _heap_levels(title):
        # Absolute warn/crit pair in bytes.
        return Tuple(title=title,
                     elements=[
                         Filesize(title=_("Warning at")),
                         Filesize(title=_("Critical at")),
                     ])

    return Dictionary(elements=[
        ("used", _heap_levels(_("Absolute levels for used heap space"))),
        ("committed", _heap_levels(_("Absolute levels for committed heap space"))),
    ])
def _parameter_valuespec_netapp_fcportio():
    """Valuespec for NetApp FC port I/O: lower read levels, upper write levels."""
    # "read" uses lower ("if below") thresholds, "write" upper ("at") ones.
    read_levels = Tuple(
        title=_("Read"),
        elements=[
            Filesize(title=_("Warning if below")),
            Filesize(title=_("Critical if below")),
        ],
    )
    write_levels = Tuple(
        title=_("Write"),
        elements=[
            Filesize(title=_("Warning at")),
            Filesize(title=_("Critical at")),
        ],
    )
    return Dictionary(elements=[
        ("read", read_levels),
        ("write", write_levels),
    ])
def levels_absolute_or_dynamic(name, value):
    """Alternative of percentage, absolute (MB), or size-dependent dynamic levels."""

    def _perc_levels():
        # Percentage warn/crit pair; used at top level and inside dynamic levels.
        return Tuple(title=_("Percentage %s space") % value,
                     elements=[
                         Percentage(title=_("Warning at"), unit=_("% used")),
                         Percentage(title=_("Critical at"), unit=_("% used")),
                     ])

    dynamic_entry = Tuple(
        orientation="horizontal",
        elements=[
            Filesize(title=_(" larger than")),
            Alternative(
                title=_("Levels for the %s %s size") % (name, value),
                elements=[
                    _perc_levels(),
                    Tuple(title=_("Absolute free space"),
                          elements=[
                              Integer(title=_("Warning at"), unit=_("MB")),
                              Integer(title=_("Critical at"), unit=_("MB")),
                          ]),
                ]),
        ],
    )

    return Alternative(
        title=_("Levels of %s %s") % (name, value),
        default_value=(80.0, 90.0),
        elements=[
            _perc_levels(),
            Tuple(title=_("Absolute %s space") % value,
                  elements=[
                      Integer(title=_("Warning at"), unit=_("MB"), default_value=500),
                      Integer(title=_("Critical at"), unit=_("MB"), default_value=1000),
                  ]),
            ListOf(dynamic_entry, title=_('Dynamic levels')),
        ])
def _parameter_valuespec_mongodb_mem():
    """Valuespec for MongoDB resident/mapped/virtual memory levels."""
    gib = 1024**3

    def _mem_levels(title, help_text, warn_default, crit_default):
        # Absolute warn/crit pair with per-metric defaults.
        return Tuple(
            title=title,
            help=help_text,
            elements=[
                Filesize(title=_("Warning at"), default_value=warn_default),
                Filesize(title=_("Critical at"), default_value=crit_default),
            ],
        )

    return Dictionary(
        title=_("MongoDB Memory"),
        elements=[
            ("resident_levels",
             _mem_levels(
                 _("Resident memory usage"),
                 _("The value of resident is roughly equivalent to the amount of RAM, "
                   "currently used by the database process. In normal use this value tends to grow. "
                   "In dedicated database servers this number tends to approach the total amount of system memory."
                  ),
                 1 * gib, 2 * gib)),
            ("mapped_levels",
             _mem_levels(
                 _("Mapped memory usage"),
                 _("The value of mapped shows the amount of mapped memory by the database. "
                   "Because MongoDB uses memory-mapped files, this value is likely to be to be "
                   "roughly equivalent to the total size of your database or databases."),
                 1 * gib, 2 * gib)),
            ("virtual_levels",
             _mem_levels(
                 _("Virtual memory usage"),
                 _("Virtual displays the quantity of virtual memory used by the mongod process. "),
                 2 * gib, 4 * gib)),
        ],
    )
def get_free_used_dynamic_valuespec(what, name, default_value=(80.0, 90.0)):
    """Build an Alternative of percentage / absolute / dynamic space levels.

    ``what`` selects the wording: "used" compares above the thresholds,
    anything else ("free") compares below them.
    """
    if what == "used":
        title = _("used space")
        course = _("above")
    else:
        title = _("free space")
        course = _("below")

    vs_subgroup = [
        Tuple(title=_("Percentage %s") % title,
              elements=[
                  Percentage(title=_("Warning if %s") % course, unit="%", minvalue=0.0),
                  Percentage(title=_("Critical if %s") % course, unit="%", minvalue=0.0),
              ]),
        Tuple(title=_("Absolute %s") % title,
              elements=[
                  Integer(title=_("Warning if %s") % course, unit=_("MB"), minvalue=0),
                  Integer(title=_("Critical if %s") % course, unit=_("MB"), minvalue=0),
              ]),
    ]

    def validate_dynamic_levels(value, varprefix):
        # Every "larger than" threshold in the dynamic list must be >= 0.
        if any(entry[0] < 0 for entry in value):
            raise MKUserError(varprefix, _("You need to specify levels "
                                           "of at least 0 bytes."))

    dynamic_levels = ListOf(
        Tuple(orientation="horizontal",
              elements=[
                  Filesize(title=_("%s larger than") % name.title()),
                  Alternative(elements=vs_subgroup),
              ]),
        title=_('Dynamic levels'),
        allow_empty=False,
        validate=validate_dynamic_levels,
    )

    return Alternative(
        title=_("Levels for %s %s") % (name, title),
        style="dropdown",
        show_alternative_title=True,
        default_value=default_value,
        elements=vs_subgroup + [dynamic_levels],
    )
def _parameter_valuespec_mssql_file_sizes():
    """Valuespec for levels on MSSQL data and log file sizes."""

    def _abs_levels(title):
        # Absolute upper warn/crit pair in bytes.
        return Tuple(title=title,
                     elements=[
                         Filesize(title=_("Warning at")),
                         Filesize(title=_("Critical at")),
                     ])

    log_files_used = Alternative(
        title=_("Used log files: Absolute or relative upper levels"),
        elements=[
            _abs_levels(_("Upper absolute levels")),
            Tuple(title=_("Upper percentage levels"),
                  elements=[
                      Percentage(title=_("Warning at")),
                      Percentage(title=_("Critical at")),
                  ]),
        ],
    )

    return Dictionary(
        title=_("File Size Levels"),
        elements=[
            ("data_files", _abs_levels(_("Total data file size: Absolute upper levels"))),
            ("log_files", _abs_levels(_("Total log file size: Absolute upper levels"))),
            ("log_files_used", log_files_used),
        ],
    )
def get_fileinfo_groups_param_elements():
    """Dictionary elements for fileinfo group level parameters."""

    def _lower(title, vs):
        # Lower warn/crit pair ("below" wording).
        return Tuple(title=title,
                     elements=[vs(title=_("Warning below")),
                               vs(title=_("Critical below"))])

    def _upper(title, vs):
        # Upper warn/crit pair ("at or above" wording).
        return Tuple(title=title,
                     elements=[vs(title=_("Warning at or above")),
                               vs(title=_("Critical at or above"))])

    conjunction_level = CascadingDropdown(
        orientation="horizontal",
        choices=[
            ("count", _("File count at"), Integer()),
            ("count_lower", _("File count below"), Integer()),
            ("size", _("File size at"), Filesize()),
            ("size_lower", _("File size below"), Filesize()),
            ("size_largest", _("Largest file size at"), Filesize()),
            ("size_largest_lower", _("Largest file size below"), Filesize()),
            ("size_smallest", _("Smallest file size at"), Filesize()),
            ("size_smallest_lower", _("Smallest file size below"), Filesize()),
            ("age_oldest", _("Oldest file age at"), Age()),
            ("age_oldest_lower", _("Oldest file age below"), Age()),
            ("age_newest", _("Newest file age at"), Age()),
            ("age_newest_lower", _("Newest file age below"), Age()),
        ],
    )

    return [
        ("minage_oldest", _lower(_("Minimal age of oldest file"), Age)),
        ("maxage_oldest", _upper(_("Maximal age of oldest file"), Age)),
        ("minage_newest", _lower(_("Minimal age of newest file"), Age)),
        ("maxage_newest", _upper(_("Maximal age of newest file"), Age)),
        ("minsize_smallest", _lower(_("Minimal size of smallest file"), Filesize)),
        ("maxsize_smallest", _upper(_("Maximal size of smallest file"), Filesize)),
        ("minsize_largest", _lower(_("Minimal size of largest file"), Filesize)),
        ("maxsize_largest", _upper(_("Maximal size of largest file"), Filesize)),
        ("minsize", _lower(_("Minimal size"), Filesize)),
        ("maxsize", _upper(_("Maximal size"), Filesize)),
        ("mincount", _lower(_("Minimal file count"), Integer)),
        ("maxcount", _upper(_("Maximal file count"), Integer)),
        ("timeofday",
         ListOfTimeRanges(
             title=_("Only check during the following times of the day"),
             help=_("Outside these ranges the check will always be OK"),
         )),
        ("conjunctions",
         ListOf(
             Tuple(elements=[
                 MonitoringState(title=_("Monitoring state"), default_value=2),
                 ListOf(conjunction_level, magic="@#@#"),
             ]),
             title=_("Level conjunctions"),
             help=_(
                 "In order to check dependent file group statistics you can configure "
                 "conjunctions of single levels now. A conjunction consists of a monitoring state "
                 "and any number of upper or lower levels. If all of the configured levels within "
                 "a conjunction are reached then the related state is reported."),
         )),
    ]
def db_levels_common():
    """Common tablespace level elements: usage levels plus magic-factor tuning."""

    def _perc_free_levels():
        # Lower levels on the percentage of free space; shared by the
        # top-level choice and the dynamic per-size levels.
        return Tuple(
            title=_("Percentage free space"),
            elements=[
                Percentage(
                    title=_("Warning if below"),
                    # xgettext: no-python-format
                    unit=_("% free"),
                ),
                Percentage(
                    title=_("Critical if below"),
                    # xgettext: no-python-format
                    unit=_("% free"),
                ),
            ],
        )

    dynamic_levels = ListOf(
        Tuple(
            orientation="horizontal",
            elements=[
                Filesize(title=_("Tablespace larger than")),
                Alternative(
                    title=_("Levels for the Tablespace size"),
                    elements=[
                        _perc_free_levels(),
                        Tuple(
                            title=_("Absolute free space"),
                            elements=[
                                Integer(title=_("Warning if below"), unit=_("MB")),
                                Integer(title=_("Critical if below"), unit=_("MB")),
                            ],
                        ),
                    ],
                ),
            ],
        ),
        title=_("Dynamic levels"),
    )

    return [
        ("levels",
         Alternative(
             title=_("Levels for the Tablespace usage"),
             default_value=(10.0, 5.0),
             elements=[
                 _perc_free_levels(),
                 Tuple(
                     title=_("Absolute free space"),
                     elements=[
                         Integer(title=_("Warning if below"), unit=_("MB"), default_value=1000),
                         Integer(title=_("Critical if below"), unit=_("MB"), default_value=500),
                     ],
                 ),
                 dynamic_levels,
             ],
         )),
        ("magic",
         Float(
             title=_("Magic factor (automatic level adaptation for large tablespaces)"),
             help=_("This is only be used in case of percentual levels"),
             minvalue=0.1,
             maxvalue=1.0,
             default_value=0.9,
         )),
        ("magic_normsize",
         Integer(
             title=_("Reference size for magic factor"),
             minvalue=1,
             default_value=1000,
             unit=_("MB"),
         )),
        ("magic_maxlevels",
         Tuple(
             title=_("Maximum levels if using magic factor"),
             help=_("The tablespace levels will never be raise above these values, when using "
                    "the magic factor and the tablespace is very small."),
             elements=[
                 Percentage(
                     title=_("Maximum warning level"),
                     # xgettext: no-python-format
                     unit=_("% free"),
                     allow_int=True,
                     default_value=60.0,
                 ),
                 Percentage(
                     title=_("Maximum critical level"),
                     # xgettext: no-python-format
                     unit=_("% free"),
                     allow_int=True,
                     default_value=50.0,
                 ),
             ],
         )),
    ]
def _parameter_valuespec_logwatch_ec():
    """Valuespec controlling forwarding of logwatch messages to the Event Console.

    Top-level Alternative: either no forwarding (empty-string FixedValue,
    also the default) or a Dictionary describing how/where to forward.
    The "method" entry encodes its choice in a single value: "" (local),
    a socket path string, "spool:", "spool:<dir>", or a (proto, params)
    tuple for remote syslog — see the Alternative's `match` lambda.
    """
    return Alternative(
        title=_("Forwarding"),
        help=_("Instead of using the regular logwatch check all lines received by logwatch can "
               "be forwarded to a Check_MK event console daemon to be processed. The target event "
               "console can be configured for each host in a separate rule."),
        elements=[
            FixedValue(
                "",
                totext=_("Messages are handled by logwatch."),
                title=_("No forwarding"),
            ),
            Dictionary(
                title=_("Forward Messages to Event Console"),
                elements=[
                    (
                        "method",
                        Transform(
                            # TODO: Clean this up to some CascadingDropdown()
                            Alternative(
                                title=_("Forwarding Method"),
                                elements=[
                                    FixedValue(
                                        "",
                                        title=_("Local: Send events to local Event Console in same OMD site"),
                                        totext=_("Directly forward to Event Console"),
                                    ),
                                    TextInput(
                                        title=_("Local: Send events to local Event Console into unix socket"),
                                        allow_empty=False,
                                    ),
                                    FixedValue(
                                        "spool:",
                                        title=_("Local: Spooling - Send events to local event console in same OMD site"),
                                        totext=_("Spool to Event Console"),
                                    ),
                                    Transform(
                                        TextInput(allow_empty=False,),
                                        title=_("Local: Spooling - Send events to local Event Console into given spool directory"),
                                        # Stored value carries a "spool:" prefix; the widget edits only the path part.
                                        forth=lambda x: x[6:],  # remove prefix
                                        back=lambda x: "spool:" + x,  # add prefix
                                    ),
                                    CascadingDropdown(
                                        title=_("Remote: Send events to remote syslog host"),
                                        choices=[
                                            (
                                                "tcp",
                                                _("Send via TCP"),
                                                Dictionary(
                                                    elements=[
                                                        (
                                                            "address",
                                                            TextInput(
                                                                title=_("Address"),
                                                                allow_empty=False,
                                                            ),
                                                        ),
                                                        (
                                                            "port",
                                                            NetworkPort(
                                                                title=_("Port"),
                                                                default_value=514,
                                                            ),
                                                        ),
                                                        (
                                                            "spool",
                                                            Dictionary(
                                                                title=_("Spool messages that could not be sent"),
                                                                help=_("Messages that can not be forwarded, e.g. when the target Event Console is "
                                                                       "not running, can temporarily be stored locally. Forwarding is tried again "
                                                                       "on next execution. When messages are spooled, the check will go into WARNING "
                                                                       "state. In case messages are dropped by the rules below, the check will shortly "
                                                                       "go into CRITICAL state for this execution."),
                                                                elements=[
                                                                    (
                                                                        "max_age",
                                                                        Age(
                                                                            title=_("Maximum spool duration"),
                                                                            help=_("Messages that are spooled longer than this time will be thrown away."),
                                                                            default_value=60 * 60 * 24 * 7,  # 1 week should be fine (if size is not exceeded)
                                                                        ),
                                                                    ),
                                                                    (
                                                                        "max_size",
                                                                        Filesize(
                                                                            title=_("Maximum spool size"),
                                                                            help=_("When the total size of spooled messages exceeds this number, the oldest "
                                                                                   "messages of the currently spooled messages is thrown away until the left "
                                                                                   "messages have the half of the maximum size."),
                                                                            default_value=500000,  # do not save more than 500k of message
                                                                        ),
                                                                    ),
                                                                ],
                                                                optional_keys=[],
                                                            ),
                                                        ),
                                                    ],
                                                    optional_keys=["spool"],
                                                ),
                                            ),
                                            (
                                                "udp",
                                                _("Send via UDP"),
                                                Dictionary(
                                                    elements=[
                                                        (
                                                            "address",
                                                            TextInput(
                                                                title=_("Address"),
                                                                allow_empty=False,
                                                            ),
                                                        ),
                                                        (
                                                            "port",
                                                            NetworkPort(
                                                                title=_("Port"),
                                                                default_value=514,
                                                            ),
                                                        ),
                                                    ],
                                                    optional_keys=[],
                                                ),
                                            ),
                                        ],
                                    ),
                                ],
                                # Map a stored value to the Alternative element index above:
                                # tuple -> remote syslog (4), "" -> local EC (0),
                                # "spool:" -> spool to EC (2), "spool:<dir>" -> spool dir (3),
                                # anything else -> unix socket path (1).
                                match=lambda x: 4 if isinstance(x, tuple) else (0 if not x else (
                                    2 if x == "spool:" else (3 if x.startswith("spool:") else 1))),
                            ),
                            # migrate old (tcp, address, port) tuple to new dict
                            forth=lambda v: (v[0], {
                                "address": v[1],
                                "port": v[2]
                            }) if (isinstance(v, tuple) and not isinstance(v[1], dict)) else v,
                        ),
                    ),
                    (
                        "facility",
                        DropdownChoice(
                            title=_("Syslog facility for forwarded messages"),
                            help=_("When forwarding messages and no facility can be extracted from the "
                                   "message this facility is used."),
                            choices=mkeventd.syslog_facilities,
                            default_value=17,  # local1
                        ),
                    ),
                    (
                        "restrict_logfiles",
                        ListOfStrings(
                            title=_("Restrict Logfiles (Prefix matching regular expressions)"),
                            help=_('Put the item names of the logfiles here. For example "System$" '
                                   'to select the service "LOG System". You can use regular expressions '
                                   "which must match the beginning of the logfile name."),
                        ),
                    ),
                    (
                        "monitor_logfilelist",
                        Checkbox(
                            title=_("Monitoring of forwarded logfiles"),
                            label=_("Warn if list of forwarded logfiles changes"),
                            help=_("If this option is enabled, the check monitors the list of forwarded "
                                   "logfiles and will warn you if at any time a logfile is missing or exceeding "
                                   "when compared to the initial list that was snapshotted during service detection. "
                                   "Reinventorize this check in order to make it OK again."),
                        ),
                    ),
                    (
                        "expected_logfiles",
                        ListOf(
                            TextInput(),
                            title=_("List of expected logfiles"),
                            help=_("When the monitoring of forwarded logfiles is enabled, the check verifies that "
                                   "all of the logfiles listed here are reported by the monitored system."),
                        ),
                    ),
                    (
                        "logwatch_reclassify",
                        Checkbox(
                            title=_("Reclassify messages before forwarding them to the EC"),
                            label=_("Apply logwatch patterns"),
                            help=_("If this option is enabled, the logwatch lines are first reclassified by the logwatch "
                                   "patterns before they are sent to the event console. If you reclassify specific lines to "
                                   "IGNORE they are not forwarded to the event console. This takes the burden from the "
                                   "event console to process the message itself through all of its rulesets. The reclassifcation "
                                   "of each line takes into account from which logfile the message originates. So you can create "
                                   "logwatch reclassification rules specifically designed for a logfile <i>access.log</i>, "
                                   "which do not apply to other logfiles."),
                        ),
                    ),
                    (
                        "separate_checks",
                        Checkbox(
                            title=_("Create a separate check for each logfile"),
                            label=_("Separate check"),
                            help=_("If this option is enabled, there will be one separate check for each logfile found during "
                                   "the service discovery. This option also changes the behaviour for unknown logfiles. "
                                   "The default logwatch check forwards all logfiles to the event console, even logfiles "
                                   "which were not known during the service discovery. Creating one check per logfile changes "
                                   "this behaviour so that any data from unknown logfiles is discarded."),
                        ),
                    ),
                ],
                optional_keys=[
                    "restrict_logfiles",
                    "expected_logfiles",
                    "logwatch_reclassify",
                    "separate_checks",
                ],
                ignored_keys=["service_level"],
            ),
        ],
        default_value="",
    )
def process_level_elements():
    """Dictionary elements for process-monitoring rule parameters.

    Fixes several user-visible help-text defects of the original:
    missing spaces at string-concatenation seams ("inventory.Saving",
    "nature ofinventory"), the doubled words "to to" / "the the", and
    "this options".
    """
    cpu_rescale_max_choices: DropdownChoices = [
        (True, _("100% is all cores at full load")),
        (False, _("N * 100% as each core contributes with 100% at full load")),
    ]
    return [
        ("cpu_rescale_max",
         DropdownChoice(
             title=_("CPU rescale maximum load"),
             help=_("CPU utilization is delivered by the Operating "
                    "System as a per CPU core basis. Thus each core contributes "
                    "with a 100% at full utilization, producing a maximum load "
                    "of N*100% (N=number of cores). For simplicity this maximum "
                    "can be rescaled down, making 100% the maximum and thinking "
                    "in terms of total CPU utilization."),
             default_value=True,
             choices=cpu_rescale_max_choices,
             invalid_choice_title=_("Unspecified.") + " " +
             _("Starting from version 1.6.0 this value must be configured. "
               "Read Werk #6646 for further information."),
             invalid_choice_error=_("CPU rescale maximum load is Unspecified.") + " " +
             _("Starting from version 1.6.0 this value must be configured. "
               "Read Werk #6646 for further information."),
         )),
        ('levels',
         Tuple(
             title=_('Levels for process count'),
             help=_("Please note that if you specify and also if you modify levels "
                    "here, the change is activated only during an inventory. "
                    "Saving this rule is not enough. This is due to the nature of "
                    "inventory rules."),
             elements=[
                 Integer(
                     title=_("Critical below"),
                     unit=_("processes"),
                     default_value=1,
                 ),
                 Integer(
                     title=_("Warning below"),
                     unit=_("processes"),
                     default_value=1,
                 ),
                 Integer(
                     title=_("Warning above"),
                     unit=_("processes"),
                     default_value=99999,
                 ),
                 Integer(
                     title=_("Critical above"),
                     unit=_("processes"),
                     default_value=99999,
                 ),
             ],
         )),
        ("cpulevels",
         Tuple(
             title=_("Levels on total CPU utilization"),
             help=_("By activating this option you can set levels on the total "
                    "CPU utilization of all included processes."),
             elements=[
                 # maxvalue > 100 because without rescaling each core adds 100%.
                 Percentage(title=_("Warning at"), default_value=90, maxvalue=10000),
                 Percentage(title=_("Critical at"), default_value=98, maxvalue=10000),
             ],
         )),
        ("cpu_average",
         Integer(
             title=_("CPU Averaging"),
             help=_("By activating averaging, Check_MK will compute the average of "
                    "the total CPU utilization over a given interval. If you have defined "
                    "alerting levels then these will automatically be applied on the "
                    "averaged value. This helps to mask out short peaks. "),
             unit=_("minutes"),
             minvalue=1,
             default_value=15,
         )),
        ("single_cpulevels",
         Tuple(
             title=_("Levels on CPU utilization of a single process"),
             help=_("Here you can define levels on the CPU utilization of single "
                    "processes. For performance reasons CPU Averaging will not be "
                    "applied to the levels of single processes."),
             elements=[
                 Percentage(title=_("Warning at"), default_value=90, maxvalue=10000),
                 Percentage(title=_("Critical at"), default_value=98, maxvalue=10000),
             ],
         )),
        ("min_age",
         Tuple(
             title=_("Minimum allowed age"),
             help=_("Set lower levels on the age of the process (not the consumed CPU time, "
                    "but the real time)."),
             elements=[
                 Age(title=_("Warning at"), default_value=3600),
                 Age(title=_("Critical at"), default_value=1800),
             ],
         )),
        ("max_age",
         Tuple(
             title=_("Maximum allowed age"),
             help=_("Set upper levels on the age of the process (not the consumed CPU time, "
                    "but the real time)."),
             elements=[
                 Age(title=_("Warning at"), default_value=3600),
                 Age(title=_("Critical at"), default_value=7200),
             ],
         )),
        ("virtual_levels",
         Tuple(
             title=_("Virtual memory usage"),
             elements=[
                 Filesize(title=_("Warning at"), default_value=1000 * 1024 * 1024 * 1024),
                 Filesize(title=_("Critical at"), default_value=2000 * 1024 * 1024 * 1024),
             ],
         )),
        ("resident_levels",
         Tuple(
             title=_("Physical memory usage"),
             elements=[
                 Filesize(title=_("Warning at"), default_value=100 * 1024 * 1024),
                 Filesize(title=_("Critical at"), default_value=200 * 1024 * 1024),
             ],
         )),
        ("resident_levels_perc",
         Tuple(
             title=_("Physical memory usage, in percentage of total RAM"),
             elements=[
                 Percentage(title=_("Warning at"), default_value=25.0),
                 Percentage(title=_("Critical at"), default_value=50.0),
             ],
         )),
        ("handle_count",
         Tuple(
             title=_('Handle Count (Windows only)'),
             help=_("The number of object handles in the processes object table. This includes "
                    "open handles to threads, files and other resources like registry keys."),
             elements=[
                 Integer(
                     title=_("Warning above"),
                     unit=_("handles"),
                 ),
                 Integer(
                     title=_("Critical above"),
                     unit=_("handles"),
                 ),
             ],
         )),
        ('process_info',
         DropdownChoice(
             title=_("Enable per-process details in long-output"),
             label=_("Enable per-process details"),
             help=_("If active, the long output of this service will contain a list of all the "
                    "matching processes and their details (i.e. PID, CPU usage, memory usage). "
                    "Please note that HTML output will only work if \"Escape HTML codes in "
                    "plugin output\" is disabled in global settings. This might expose you to "
                    "Cross-Site-Scripting (everyone with write-access to checks could get "
                    "scripts executed on the monitoring site in the context of the user of the "
                    "monitoring site) so please do this if you understand the consequences."),
             choices=[
                 (None, _("Disable")),
                 ("text", _("Text output")),
                 ("html", _("HTML output")),
             ],
             default_value=None,
         )),
        ('process_info_arguments',
         Integer(
             title=_("Include process arguments in long-output"),
             label=_("Include per-process arguments (security risk!)"),
             help=_("If non-zero, the list of all the matching processes and their details"
                    " in the long-output will include up to the first N arguments of each"
                    " processes. Please note this may include sensitive data like credentials,"
                    " and is strongly discouraged."),
             default_value=0,
         )),
        ('icon',
         UserIconOrAction(
             title=_("Add custom icon or action"),
             help=_("You can assign icons or actions to the found services in the status GUI."),
         )),
    ]
Transform( Integer( title=_("Time Range for trend computation"), default_value=TREND_RANGE_DEFAULT, minvalue=1, unit=_("hours"), ), forth=_transform_trend_range_not_none, ), ), ( "trend_bytes", Tuple( title=_("Levels on trends per time range"), elements=[ Filesize(title=_("Warning at"), default_value=100 * 1024**2), Filesize(title=_("Critical at"), default_value=200 * 1024**2), ], ), ), ( "trend_perc", Tuple( title=_("Levels for the percentual growth per time range"), elements=[ Percentage( title=_("Warning at"), unit=_("% / range"), default_value=5, ), Percentage(
def _parameter_valuespec_rabbitmq_queues():
    """Rule parameters for RabbitMQ queues: message-count, publish-rate and memory levels."""

    def _levels(ident, title, warn_spec, crit_spec):
        # Every option is a (warn, crit) pair wrapped in a Tuple under its own key.
        return (ident, Tuple(title=title, elements=[warn_spec, crit_spec]))

    return Dictionary(elements=[
        _levels(
            "msg_upper",
            _("Upper level for total number of messages"),
            Integer(title=_("Warning at"), unit="messages"),
            Integer(title=_("Critical at"), unit="messages"),
        ),
        _levels(
            "msg_lower",
            _("Lower level for total number of messages"),
            Integer(title=_("Warning below"), unit="messages"),
            Integer(title=_("Critical below"), unit="messages"),
        ),
        _levels(
            "msg_ready_upper",
            _("Upper level for total number of ready messages"),
            Integer(title=_("Warning at"), unit="messages"),
            Integer(title=_("Critical at"), unit="messages"),
        ),
        _levels(
            "msg_ready_lower",
            _("Lower level for total number of ready messages"),
            Integer(title=_("Warning below"), unit="messages"),
            Integer(title=_("Critical below"), unit="messages"),
        ),
        _levels(
            "msg_unack_upper",
            _("Upper level for total number of unacknowledged messages"),
            Integer(title=_("Warning at"), unit="messages"),
            Integer(title=_("Critical at"), unit="messages"),
        ),
        _levels(
            "msg_unack_lower",
            _("Lower level for total number of unacknowledged messages"),
            Integer(title=_("Warning below"), unit="messages"),
            Integer(title=_("Critical below"), unit="messages"),
        ),
        _levels(
            "msg_publish_upper",
            _("Upper level for total number of published messages"),
            Integer(title=_("Warning at"), unit="messages"),
            Integer(title=_("Critical at"), unit="messages"),
        ),
        _levels(
            "msg_publish_lower",
            _("Lower level for total number of published messages"),
            Integer(title=_("Warning below"), unit="messages"),
            Integer(title=_("Critical below"), unit="messages"),
        ),
        _levels(
            "msg_publish_rate_upper",
            _("Upper level for published message rate"),
            Float(title=_("Warning at"), unit="1/s"),
            Float(title=_("Critical at"), unit="1/s"),
        ),
        _levels(
            "msg_publish_rate_lower",
            _("Lower level for published message rate"),
            Float(title=_("Warning below"), unit="1/s"),
            Float(title=_("Critical below"), unit="1/s"),
        ),
        _levels(
            "abs_memory",
            _("Absolute levels for used memory"),
            Filesize(title=_("Warning at")),
            Filesize(title=_("Critical at")),
        ),
    ])
( "maxage_newest", Tuple( title=_("Maximal age of newest file"), elements=[ Age(title=_("Warning at or above")), Age(title=_("Critical at or above")), ], ), ), ( "minsize_smallest", Tuple( title=_("Minimal size of smallest file"), elements=[ Filesize(title=_("Warning below")), Filesize(title=_("Critical below")), ], ), ), ( "maxsize_smallest", Tuple( title=_("Maximal size of smallest file"), elements=[ Filesize(title=_("Warning at or above")), Filesize(title=_("Critical at or above")), ], ), ), (
def _parameter_valuespec_mssql_tablespaces():
    """Upper/lower space levels for the MSSQL tablespace usage metrics."""

    def _upper_alt(title):
        # Upper thresholds may be given either as absolute sizes or as percentages.
        return Alternative(
            title=title,
            elements=[
                Tuple(
                    title=_("Absolute levels"),
                    elements=[
                        Filesize(title=_("Warning at")),
                        Filesize(title=_("Critical at")),
                    ],
                ),
                Tuple(
                    title=_("Percentage levels"),
                    elements=[
                        Percentage(title=_("Warning at")),
                        Percentage(title=_("Critical at")),
                    ],
                ),
            ],
        )

    return Dictionary(elements=[
        ("size",
         Tuple(
             title=_("Upper levels for size"),
             elements=[
                 Filesize(title=_("Warning at")),
                 Filesize(title=_("Critical at")),
             ],
         )),
        ("reserved", _upper_alt(_("Upper levels for reserved space"))),
        ("data", _upper_alt(_("Upper levels for data"))),
        ("indexes", _upper_alt(_("Upper levels for indexes"))),
        ("unused", _upper_alt(_("Upper levels for unused space"))),
        # "unallocated" is the only lower-bound check, so it is spelled out.
        ("unallocated",
         Alternative(
             title=_("Lower levels for unallocated space"),
             elements=[
                 Tuple(
                     title=_("Absolute levels"),
                     elements=[
                         Filesize(title=_("Warning below")),
                         Filesize(title=_("Critical below")),
                     ],
                 ),
                 Tuple(
                     title=_("Percentage levels"),
                     elements=[
                         Percentage(title=_("Warning below")),
                         Percentage(title=_("Critical below")),
                     ],
                 ),
             ],
         )),
    ])
def _parameter_valuespec_memory_pagefile_win():
    """Levels for Windows RAM usage and commit charge, plus optional averaging."""

    def _usage_alternative(title, perc_title, abs_title):
        # Shared shape of the "memory" and "pagefile" choices: percentage of
        # capacity, absolute free space, or predictive levels.
        return Alternative(
            title=title,
            elements=[
                Tuple(
                    title=perc_title,
                    elements=[
                        Percentage(title=_("Warning at")),
                        Percentage(title=_("Critical at")),
                    ],
                ),
                Transform(
                    Tuple(
                        title=abs_title,
                        elements=[
                            Filesize(title=_("Warning if less than")),
                            Filesize(title=_("Critical if less than")),
                        ],
                    ),
                    # Note: Filesize values lesser 1MB will not work
                    # -> need hide option in filesize valuespec
                    back=lambda x: (x[0] // 1024 // 1024, x[1] // 1024 // 1024),
                    forth=lambda x: (x[0] * 1024 * 1024, x[1] * 1024 * 1024),
                ),
                PredictiveLevels(unit=_("GB"), default_difference=(0.5, 1.0)),
            ],
            default_value=(80.0, 90.0),
        )

    return Dictionary(elements=[
        ("memory",
         _usage_alternative(
             _("Memory Levels"),
             _("Memory usage in percent"),
             _("Absolute free memory"),
         )),
        ("pagefile",
         _usage_alternative(
             _("Commit charge Levels"),
             _("Commit charge in percent (relative to commit limit)"),
             _("Absolute commitable memory"),
         )),
        ("average",
         Integer(
             title=_("Averaging"),
             help=_("If this parameter is set, all measured values will be averaged "
                    "over the specified time interval before levels are being applied. Per "
                    "default, averaging is turned off. "),
             unit=_("minutes"),
             minvalue=1,
             default_value=60,
         )),
    ])
def _parameter_valuespec_memory_simple():
    """Memory levels for simple devices: RAM usage, swap usage, swap-error state.

    The Dictionary is wrapped in a Transform so legacy discovered parameters
    (a bare ``(warn, crit)`` percentage tuple) are migrated to the dict format
    on read.
    """
    return Transform(
        valuespec=Dictionary(
            help=_("Memory levels for simple devices not running more complex OSs"),
            elements=[
                (
                    "levels",
                    CascadingDropdown(
                        title=_("Levels for RAM usage"),
                        choices=[
                            (
                                "perc_used",
                                _("Percentual levels for used RAM"),
                                Tuple(elements=[
                                    Percentage(
                                        title=_("Warning at a RAM usage of"),
                                        default_value=80.0,
                                        maxvalue=None,
                                    ),
                                    Percentage(
                                        title=_("Critical at a RAM usage of"),
                                        default_value=90.0,
                                        maxvalue=None,
                                    ),
                                ],),
                            ),
                            (
                                "abs_free",
                                _("Absolute levels for free RAM"),
                                Tuple(elements=[
                                    Filesize(title=_("Warning below")),
                                    Filesize(title=_("Critical below")),
                                ],),
                            ),
                        ],
                    ),
                ),
                (
                    "levels_swap",
                    CascadingDropdown(
                        title=_("Levels for swap usage"),
                        choices=[
                            (
                                "perc_used",
                                _("Percentual levels for used swap"),
                                Tuple(elements=[
                                    Percentage(
                                        title=_("Warning at a swap usage of"),
                                        maxvalue=None),
                                    Percentage(
                                        title=_("Critical at a swap usage of"),
                                        maxvalue=None),
                                ],),
                            ),
                            (
                                "abs_free",
                                _("Absolute levels for free swap"),
                                Tuple(elements=[
                                    Filesize(title=_("Warning below")),
                                    Filesize(title=_("Critical below")),
                                ],),
                            ),
                        ],
                    ),
                ),
                (
                    "swap_errors",
                    MonitoringState(
                        title=_("Monitoring state in case of swap errors"),
                        default_value=0,
                    ),
                ),
            ],
            optional_keys=True,
        ),
        # Convert default levels from discovered checks: a plain (warn, crit)
        # tuple becomes {"levels": ("perc_used", (warn, crit))}.  The former
        # "not isinstance(v, dict) and {...} or v" and/or-ternary is replaced
        # by the equivalent (and clearer) conditional expression; behavior is
        # identical because the dict literal is always truthy.
        forth=lambda v: v if isinstance(v, dict) else {"levels": ("perc_used", v)},
    )
def parameter_valuespec(self):
    """Valuespec for mk_filestats file-group levels (ages, sizes, counts).

    Fixes several typos in the user-visible help text ("mk_filstats",
    "putput", "oldes", missing "on"); all keys and titles are unchanged.
    """
    return Dictionary(
        elements=[
            ("minage_oldest",
             Tuple(
                 title=_("Minimal age of oldest file"),
                 elements=[
                     Age(title=_("Warning if younger than")),
                     Age(title=_("Critical if younger than")),
                 ],
             )),
            ("maxage_oldest",
             Tuple(
                 title=_("Maximal age of oldest file"),
                 elements=[
                     Age(title=_("Warning if older than")),
                     Age(title=_("Critical if older than")),
                 ],
             )),
            ("minage_newest",
             Tuple(
                 title=_("Minimal age of newest file"),
                 elements=[
                     Age(title=_("Warning if younger than")),
                     Age(title=_("Critical if younger than")),
                 ],
             )),
            ("maxage_newest",
             Tuple(
                 title=_("Maximal age of newest file"),
                 elements=[
                     Age(title=_("Warning if older than")),
                     Age(title=_("Critical if older than")),
                 ],
             )),
            ("minsize_smallest",
             Tuple(
                 title=_("Minimal size of smallest file"),
                 elements=[
                     Filesize(title=_("Warning if below")),
                     Filesize(title=_("Critical if below")),
                 ],
             )),
            ("maxsize_smallest",
             Tuple(
                 title=_("Maximal size of smallest file"),
                 elements=[
                     Filesize(title=_("Warning if above")),
                     Filesize(title=_("Critical if above")),
                 ],
             )),
            ("minsize_largest",
             Tuple(
                 title=_("Minimal size of largest file"),
                 elements=[
                     Filesize(title=_("Warning if below")),
                     Filesize(title=_("Critical if below")),
                 ],
             )),
            ("maxsize_largest",
             Tuple(
                 title=_("Maximal size of largest file"),
                 elements=[
                     Filesize(title=_("Warning if above")),
                     Filesize(title=_("Critical if above")),
                 ],
             )),
            ("mincount",
             Tuple(
                 title=_("Minimal file count"),
                 elements=[
                     Integer(title=_("Warning if below")),
                     Integer(title=_("Critical if below")),
                 ],
             )),
            ("maxcount",
             Tuple(
                 title=_("Maximal file count"),
                 elements=[
                     Integer(title=_("Warning if above")),
                     Integer(title=_("Critical if above")),
                 ],
             )),
        ],
        help=_("Here you can impose various levels on the results reported by the"
               " mk_filestats plugin. Note that some levels only apply to a matching"
               " output format (e.g. max/min count levels are not applied if only the"
               " smallest, largest, oldest and newest file is reported). In order to"
               " receive the required data, you must configure the plugin mk_filestats."),
    )
def valuespec(self):
    """Configuration form for the site's automatic disk space cleanup.

    Fixes user-visible help-text typos ("execut", "it's", "configure age")
    and the "events ... is stored" agreement error; all keys, defaults and
    titles are unchanged.
    """
    return Dictionary(
        title=_("Automatic disk space cleanup"),
        help=_(
            "You can configure your monitoring site to free disk space based on the ages "
            "of files or free space of the volume the site is placed on.<br>"
            "The monitoring site is executing the program <tt>diskspace</tt> 5 past "
            "every full hour as cron job. Details about the execution are logged to the file "
            "<tt>var/log/diskspace.log</tt>. You can always execute this program manually "
            "(add the <tt>-v</tt> option to see details about the actions taken)."),
        elements=[
            (
                "max_file_age",
                Age(
                    minvalue=1,  # 1 sec
                    default_value=31536000,  # 1 year
                    title=_("Delete files older than"),
                    help=_(
                        "The historic events (state changes, downtimes etc.) of your hosts and services "
                        "are stored in the monitoring "
                        "history as plain text log files. One history log file contains the monitoring "
                        "history of a given time period of all hosts and services. The files which are "
                        "older than the configured time will be removed on the next execution of the "
                        "disk space cleanup.<br>"
                        "The historic metrics are stored in files for each host and service "
                        "individually. When a host or service is removed from the monitoring, its "
                        "metric files remain untouched on your disk until the files last update "
                        "(modification time) is longer ago than the configured age."))),
            (
                "min_free_bytes",
                Tuple(
                    elements=[
                        Filesize(
                            title=_("Cleanup when disk space is below"),
                            minvalue=1,  # min 1 byte
                            default_value=0,
                        ),
                        Age(
                            title=_("Never remove files newer than"),
                            minvalue=1,  # minimum 1 sec
                            default_value=2592000,  # 1 month
                            help=_(
                                "With this option you can prevent cleanup of files which have been updated "
                                "within this time range."),
                        ),
                    ],
                    title=_("Delete additional files when disk space is below"),
                    help=_(
                        "When the disk space cleanup by file age was not able to gain enough "
                        "free disk space, then the cleanup mechanism starts cleaning up additional "
                        "files. The files are deleted by age, the oldest first, until the files are "
                        "newer than the configured minimum file age."))),
            (
                "cleanup_abandoned_host_files",
                Age(
                    title=_("Cleanup abandoned host files older than"),
                    minvalue=3600,  # 1 hour
                    default_value=2592000,  # 1 month
                    help=_(
                        "During monitoring there are several dedicated files created for each host. "
                        "There are, for example, the discovered services, performance data and "
                        "different temporary files created. During deletion of a host, these files "
                        "are normally deleted. But there are cases, where the files are left on "
                        "the disk until manual deletion, for example if you move a host from one "
                        "site to another or deleting a host manually from the configuration.<br>"
                        "The performance data (RRDs) and HW/SW inventory archive are never deleted "
                        "during host deletion. They are only deleted automatically when you enable "
                        "this option and after the configured period."))),
        ],
        default_keys=["cleanup_abandoned_host_files"],
        empty_text=_("Disk space cleanup is disabled"),
    )
def _parameter_valuespec_graylog_cluster_stats_mongodb():
    """Levels for Graylog cluster MongoDB statistics (object counts and sizes).

    Fixes the grammar in eight user-visible titles ("if less then" ->
    "if less than"); keys and thresholds semantics are unchanged.
    """

    def _count_lower(ident, title, unit):
        # Lower thresholds on an integer count metric.
        return (ident, Tuple(
            title=title,
            elements=[
                Integer(title=_("Warning if less than"), unit=unit),
                Integer(title=_("Critical if less than"), unit=unit),
            ],
        ))

    def _count_upper(ident, title, unit):
        # Upper thresholds on an integer count metric.
        return (ident, Tuple(
            title=title,
            elements=[
                Integer(title=_("Warning at"), unit=unit),
                Integer(title=_("Critical at"), unit=unit),
            ],
        ))

    def _size_upper(ident, title):
        # Upper thresholds on a byte-size metric.
        return (ident, Tuple(
            title=title,
            elements=[
                Filesize(title=_("Warning at")),
                Filesize(title=_("Critical at")),
            ],
        ))

    return Dictionary(elements=[
        _count_lower("indexes_lower", _("Total number of indexes lower level"), "indexes"),
        _count_upper("indexes_upper", _("Total number of indexes upper level"), "indexes"),
        _size_upper("storage_size_upper", _("Upper levels for allocated storage size")),
        _size_upper("index_size_upper", _("Upper levels for total index size")),
        _size_upper("data_size_upper", _("Upper levels for total uncompressed data size")),
        _size_upper("file_size_upper", _("Upper levels for data file size")),
        _size_upper("ns_size_mb_upper", _("Upper levels for total namespace size")),
        _size_upper("avg_obj_size_upper", _("Upper levels for average document size")),
        _count_lower("num_extents_lower", _("Total number of extents lower level"), "extents"),
        _count_upper("num_extents_upper", _("Total number of extents upper level"), "extents"),
        _count_lower("collections_lower", _("Total number of collections lower level"), "collections"),
        _count_upper("collections_upper", _("Total number of collections upper level"), "collections"),
        # NOTE(review): "ojects_lower" looks like a typo for "objects_lower",
        # but the key must match what the check plugin and existing saved
        # rules use, so it is deliberately kept as-is.
        _count_lower("ojects_lower", _("Total number of objects lower level"), "objects"),
        _count_upper("objects_upper", _("Total number of objects upper level"), "objects"),
    ])