default="http://localhost") check_api.add_option("a", "action", "The status check to perform. (nodeup, health)", default="health") check_api.add_option( "n", "node", "Check if a specified node is up. Used with `nodeup` action. (default: celery.ubuntu)", default="celery.ubuntu") check_api.add_option("l", "limit", "Number of tasks in the past to check. (default: 1000)", default=1000) check_api.enable_status("warning") check_api.enable_status("critical") check_api.start() if check_api.options.action not in ("nodeup", "health"): check_api.unknown_error("unknown action specified %s." % check_api.options.action) response = requests.get("%s:%d/api/workers" % (check_api.options.host, int(check_api.options.port))) try: response.raise_for_status() except Exception as e: print "Status Critical, flower API not reachable"
#!/usr/bin/python
"""Nagios plugin: percentage of reclaimable memory for a host, via collectd.

Connects to a collectd unix socket and evaluates a "calculate" formula
that expresses free + cached + buffered memory as a percentage of total
memory (free + cached + buffered + used), then reports it to Nagios as
the perfdata value ``mem_free``.
"""
import collectd
import string
import sys
from NagAconda import Plugin

MyPlugin = Plugin("Plugin to check memory usage from collectd", "1.0")
MyPlugin.add_option('H', 'host', 'host to check.', required=True)
MyPlugin.add_option('S', 'socket',
                    'Socket to connect to. (default=/var/run/collectd-unixsock)',
                    required=False, default='/var/run/collectd-unixsock')
MyPlugin.enable_status('warning')
MyPlugin.enable_status('critical')
MyPlugin.start()

# collectd metric references have the form "#<host>/memory/memory-<kind> 0#".
# BUG FIX: the original formula hard-coded the hostname "t430s-fpg" for the
# cached term of the denominator, so the percentage was wrong for any other
# host.  Every term is now derived from the -H option.
host = MyPlugin.options.host
free = "#" + host + "/memory/memory-free 0#"
cached = "#" + host + "/memory/memory-cached 0#"
buffered = "#" + host + "/memory/memory-buffered 0#"
used = "#" + host + "/memory/memory-used 0#"
formula = ("(" + free + " + " + cached + " + " + buffered + " ) / ( "
           + free + " + " + cached + " + " + buffered + " + " + used
           + ")*100")

# Ask the collectd daemon (via its unix socket) to evaluate the formula.
c = collectd.Collect(MyPlugin.options.socket)
val = c.calculate(formula)

# Report as a percentage; NagAconda applies the -w/-c thresholds.
MyPlugin.set_value('mem_free', val, scale='%')
MyPlugin.finish()
"""Nagios check for the status of a Celery worker, via the Celerymon API.

This script is placed in the following directory on the host machine:
/usr/local/nagios/libexec/
"""
import sys
import requests
import simplejson as json
from NagAconda import Plugin

check_api = Plugin("Used to determine the status of a Celery worker.", "1.0")
check_api.add_option("p", "port", "Port of the Celery host machine serving the Celerymon API. (default: 8989)", default=8989)
check_api.add_option("h", "host", "Host of the Celery worker instance. (default: http://localhost)", default="http://localhost")
check_api.add_option("a", "action", "The status check to perform. (nodeup, health)", default="health")
check_api.add_option("n", "node", "Check if a specified node is up. Used with `nodeup` action. (default: celery.ubuntu)", default="celery.ubuntu")
check_api.add_option("l", "limit", "Number of tasks in the past to check. (default: 100)", default=100)
check_api.enable_status("warning")
check_api.enable_status("critical")
check_api.start()

# Reject unsupported actions up front with a Nagios UNKNOWN status.
if check_api.options.action not in ("nodeup", "health"):
    check_api.unknown_error("unknown action specified %s." % check_api.options.action)

# BUG FIX: the GET itself can raise (connection refused, DNS failure,
# timeout), not only raise_for_status(), so the request must be inside
# the try block for those failures to produce a CRITICAL instead of a
# traceback.  print is also written in the form valid on both Python 2
# and Python 3.
try:
    response = requests.get("%s:%d/api/worker/"
                            % (check_api.options.host,
                               int(check_api.options.port)))
    response.raise_for_status()
except Exception as e:
    # Exit code 2 == Nagios CRITICAL.
    print("Status Critical, celerymon API not reachable")
    sys.exit(2)
plugin.add_option('s', 'cache_ttl_splay', 'Cache TTL splay factor (default: 0.5)', default=0.5) plugin.add_option('g', 'cache_grace', 'Cache grace period in seconds (default: 60)', default=60) plugin.add_option('r', 'metrics_max_age', 'Metrics maximum age in seconds (default: 300)', default=300) plugin.add_option('a', 'metric_host', 'Metric host address', required=True) plugin.add_option('m', 'metric_name', 'Metric name', required=True) plugin.enable_status('warning') plugin.enable_status('critical') plugin.start() # Execute check try: value = GangliaMetrics(gmetad_host=plugin.options.gmetad_host, gmetad_port=plugin.options.gmetad_port, gmetad_timeout=plugin.options.gmetad_timeout, cache_path=plugin.options.cache_path, cache_ttl=plugin.options.cache_ttl, cache_ttl_splay=plugin.options.cache_ttl_splay, cache_grace=plugin.options.cache_grace, metrics_max_age=plugin.options.metrics_max_age, debug_level=plugin.options.verbose).get_value(
#!/usr/bin/python # Von https://pythonhosted.org/NagAconda/plugin.html from NagAconda import Plugin import os import subprocess from pyparsing import * btrfs_check = Plugin("Plugin to show disk usage of btrfs.", "0.1") btrfs_check.add_option('m', 'mountpoint', 'mountpoint for btrfs', required=True) btrfs_check.enable_status('warning') btrfs_check.enable_status('critical') btrfs_check.start() btrfs_output = subprocess.check_output(["btrfs", "fi", "df", btrfs_check.options.mountpoint]) # DEBUG: # print btrfs_output # PyParsing definitions # # Output is something like: # Data, RAID1: total=222.72GB, used=197.44GB # System, RAID1: total=64.00MB, used=40.00KB # System: total=4.00MB, used=0.00 # Metadata, RAID1: total=10.00GB, used=5.40GB # Parse Byte values with units byteDefs=["B", "KB", "MB", "GB", "TB", "PB", "EB"] byteDef = oneOf(byteDefs)
return (float(sum(vals))/float(len(vals))) else: return (None) else: # non 200 return code return (None) g = Plugin("Graphite Nagios Plugin.", "0.9") # FIXME: http vs https support, user auth? g.add_option('t', 'target', 'Graphite Target', required=True) g.add_option('h', 'host', 'Graphite Host', required=True) g.add_option('w', 'window', 'Time Window', default='-5minutes') g.add_option('u', 'units', 'Metric units', default='percent') g.enable_status('critical') g.enable_status('warning') g.start() # Bounds checking on crit and warn if g.options.raw_critical < g.options.raw_warning: g.unknown_error("ERROR: Critical level (%s) is set LOWER than Warning level (%s)" % ( g.options.raw_critical, g.options.raw_warning, )) # Build url # FIXME: pickle seems efficient, but maybe harder to debug? url = 'http://%s/render?from=%s&target=%s&format=pickle' % ( g.options.host,
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. """ import math import urllib2 from NagAconda import Plugin graphite = Plugin("Plugin to retrieve data from graphite", "1.0") graphite.add_option("u", "url", "URL to query for data", required=False) graphite.add_option("m", "minute", "period of data to get", required=False) graphite.add_option("-hM", "hostMafia", "host of Mafia server", required=False) graphite.add_option("-kM", "keyMafia", "key string", required=False) graphite.enable_status("warning") graphite.enable_status("critical") graphite.start() url = graphite.options.url if not url: url = ''.join([ 'http://localhost/render?format=raw', '&from=-', graphite.options.minute, 'minutes', '&target=servers.', graphite.options.hostMafia.replace('.', '_'), '_9400.', graphite.options.keyMafia, ]) try: usock = urllib2.urlopen(url) data = usock.read()