def main():
    # Nagios/pynag plugin entry point: check free memory against optional
    # -w/-c thresholds, given either as a percentage ("10%") or as an
    # absolute amount.
    helper = PluginHelper()
    helper.parser.add_option('-w', help='warning free (X% or XM)', dest='warning')
    helper.parser.add_option('-c', help='critical free (X% or XM)', dest='critical')
    helper.parse_arguments()

    memory = getMemory()

    def _to_absolute(threshold):
        """Convert a threshold option into an absolute amount (as a string).

        "NN%" is converted relative to memory['total']; a plain number is
        kept as-is; a missing option becomes '0' (never triggers).
        """
        if threshold is None:
            return '0'
        if re.match(r'.*%$', threshold):
            # FIX: the original parsed warn and crit with two duplicated
            # copies of this logic (and assigned helper.options.warning to
            # warn twice); the conversion now lives in one place.
            return str(memory['total'] * int(re.search(r'\d*', threshold).group(0)) / 100)
        return threshold

    warn = _to_absolute(helper.options.warning)
    crit = _to_absolute(helper.options.critical)

    # Start from OK and escalate if free memory drops to/below a threshold.
    helper.status(ok)
    status = "OK"
    if memory['totalfree'] <= int(warn):
        helper.status(warning)
        status = "WARNING"
    if memory['totalfree'] <= int(crit):
        helper.status(critical)
        status = "CRITICAL"

    helper.add_summary(status + ': Memory free: %(totalfree)s %% (%(free)s %% including buffers/cached)' % {
        'totalfree': (round((float(memory['totalfree']) / float(memory['total']) * 100), 1)),
        'free': (round((float(memory['free']) / float(memory['total']) * 100), 1))})

    # Performance data; only 'totalfree' carries the alerting ranges.
    helper.add_metric(label='total', value=memory['total'])
    helper.add_metric(label='free', value=memory['free'])
    helper.add_metric(label='totalfree', value=memory['totalfree'], warn=warn + '..0', crit=crit + '..0')
    helper.add_metric(label='used', value=memory['used'])
    helper.add_metric(label='buffers', value=memory['buffers'])
    helper.add_metric(label='cached', value=memory['cached'])
    helper.add_metric(label='swapcached', value=memory['swapcached'])

    helper.check_all_metrics()
    helper.exit()
# Nagios plugin chunk: scrape the vedur.is avalanche-forecast page and
# classify the threat level.  (The script continues beyond this chunk;
# string, random, requests and BeautifulSoup are imported elsewhere.)
from pynag.Plugins import PluginHelper, ok, warning, critical, unknown

p = PluginHelper()

# Random 4-character token, presumably meant as a cache-buster appended to
# the URL.  It is not used within this chunk -- TODO confirm it is used
# further down in the file.
chars = string.letters + string.digits
randomstring = ''.join([random.choice(chars) for i in xrange(4)])  # avoid cache

default_url = 'http://www.vedur.is/ofanflod/snjoflodaspa'
p.parser.add_option('--url', dest='url', default=default_url)
p.parse_arguments()
p.show_legacy = True

# Fetch the forecast page.
html = requests.get(p.options.url).content

# Initial Status is OK, unless avalanche threats detected.
p.status(ok)

# We are going to parse the html and look for certain divs, according to the vedur.is page
# it should have the following threat codes:
# <div class="lev1"> <!-- Low risk -->
# <div class="lev2"> <!-- some risk-->
# <div class="lev3"> <!-- Considerable risk -->
# <div class="lev4"> <!-- High risk -->
# <div class="lev5"> <!-- Very high risk -->
soup = BeautifulSoup(html)
lev1 = soup.findAll('div', {'class': 'lev1'})
lev2 = soup.findAll('div', {'class': 'lev2'})
lev3 = soup.findAll('div', {'class': 'lev3'})
lev4 = soup.findAll('div', {'class': 'lev4'})
lev5 = soup.findAll('div', {'class': 'lev5'})
# Nagios plugin: report whether an Icelandic volcano is currently erupting,
# by scraping the yes/no headline from isanicelandicvolcanoerupting.com.
# Mapping: "yes" -> WARNING, "no" -> OK, anything else -> UNKNOWN.
import string
import sys
# BUG FIX: requests.get() is called below but `requests` was never imported
# in this script (compare the sibling copy of this plugin, which imports it).
import requests

reload(sys)
sys.setdefaultencoding('utf-8')

from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper, ok, warning, critical, unknown

p = PluginHelper()
default_url = 'http://www.isanicelandicvolcanoerupting.com'
p.parser.add_option('--url', dest='url', default=default_url)
p.parse_arguments()
p.show_legacy = True

html = requests.get(p.options.url).content
soup = BeautifulSoup(html)
# The page answers the question in its first <h3> element.
answer = soup.find('h3').text

p.add_summary('Source says: "%s"' % answer)
if 'yes' in answer.lower():
    p.status(warning)
elif 'no' in answer.lower():
    p.status(ok)
else:
    p.status(unknown)
p.check_all_metrics()
p.exit()
# SNMP check chunk (Ubiquiti/MikroTik style OIDs): human-readable
# descriptions of the checkable values, the matching OIDs, and the unit
# displayed with each value.
descriptions=["Uptime",
              "Signal Strength",
              "CPU usage (1 Minute Average)",
              "CPU usage (5 Minute Average)",
              "CPU usage (15 Minute Average)",
              "Total memory",
              "Free memory",
              "Tx Rate",
              "Rx Rate"
              ]

oids=[".1.3.6.1.2.1.1.3.0",
      ".1.3.6.1.4.1.14988.1.1.1.1.1.4",
      ".1.3.6.1.4.1.10002.1.1.1.4.2.1.3.1",
      ".1.3.6.1.4.1.10002.1.1.1.4.2.1.3.2",
      ".1.3.6.1.4.1.10002.1.1.1.4.2.1.3.3",
      ".1.3.6.1.4.1.10002.1.1.1.1.1.0",
      ".1.3.6.1.4.1.10002.1.1.1.1.2.0",
      ".1.3.6.1.4.1.14988.1.1.1.1.1.2",
      ".1.3.6.1.4.1.14988.1.1.1.1.1.3"]

# NOTE(review): this list has 10 entries while descriptions/oids have 9,
# and the '%' entries do not line up with the three CPU descriptions --
# looks like a possible off-by-one; confirm against where `units` is used.
units =['', '', '', '%', '%', '%', '', 'Byte', '', '' ]

##############
##   Main   ##
##############

if __name__ == '__main__':
    # The default return value should be always OK
    helper.status(ok)

    # shows the list of possible types if the flag is set
    # (helper, flag_list and names are defined earlier in the file)
    if flag_list == True:
        for w,v in zip(names, descriptions):
            print w + ' = ' + v
        helper.status(unknown)
        helper.exit(summary='This is just a list and not a check!')

    # verify that a hostname is set
    verify_host(host, helper)

    # open session after validated host
    sess = netsnmp.Session(Version=version, DestHost=host, Community=community)

    # verify, that status(/type) parameter is not empty
    # (chunk truncated here; the verification code follows below this view)
# Scrape the einkamal.is front page and report the number of active accounts
# and currently logged-in users as performance metrics.
# (sys, requests, BeautifulSoup and PluginHelper are imported earlier in the
# file -- this chunk begins after the import block.)
reload(sys)
sys.setdefaultencoding('utf-8')

helper = PluginHelper()
helper.parse_arguments()

url = 'http://www.einkamal.is'
html = requests.get(url).content
soup = BeautifulSoup(html)

# FIX: removed dead `now = time.time()` -- the timestamp was never used.
# NOTE(review): `tables`/`p` below are also unused, but the lookup doubles
# as a sanity check (it raises if the welcomemsg div is missing), so it is
# deliberately kept.
tables = soup.find('div', {'class': 'welcomemsg'})
p = tables.findAll('p')

# Counts are rendered with "1.234"-style thousand separators; strip the
# dots before reporting them as metric values.
li = soup.find('li', {'class': 'accounts'})
active_accounts = li.find('b').text
active_accounts = active_accounts.replace('.', '')

li = soup.find('li', {'class': 'active'})
logged_in = li.find('b').text
logged_in = logged_in.replace('.', '')

helper.add_metric('active users', active_accounts)
helper.add_metric('logged in users', logged_in)
helper.status(ok)
helper.add_summary("%s logged in users. %s active accounts" % (logged_in, active_accounts))
helper.exit()
if __name__ == '__main__':
    # ZooKeeper health check: connect, then verify the 'ruok' four-letter
    # word answers 'imok'.  Any connection problem is CRITICAL.
    plugin = PluginHelper()
    plugin.parser.add_option("-H", "--hostname", help="Zookeeper's host", default='127.0.0.1')
    plugin.parser.add_option("-p", "--port", help="Zookeeper's port", default='2181')
    plugin.parse_arguments()

    try:
        zk = ZkClient(plugin.options.hostname, plugin.options.port)
    except socket.error:
        plugin.status(critical)
        plugin.add_summary("Can't connect to {}:{}".format(
            plugin.options.hostname, plugin.options.port))
        plugin.exit()

    try:
        if zk.cmd('ruok') != 'imok':
            plugin.status(critical)
            plugin.add_summary("Command 'ruok' failed")
            plugin.exit()
    # BUG FIX: the original `except socket.error, socket.timeout:` is the
    # Python 2 `except TYPE, name` form -- it caught ONLY socket.error and
    # rebound the name socket.timeout to the exception instance, so a real
    # timeout escaped as an unhandled traceback.  The tuple form below
    # catches both.
    except (socket.error, socket.timeout):
        plugin.status(critical)
        plugin.add_summary("Can't connect to {}:{}".format(
            plugin.options.hostname, plugin.options.port))
        plugin.exit()
# Tail of the value-description list for a power-meter SNMP check (the
# opening bracket of the list literal is above this chunk).
# NOTE(review): the three "Summe" entries all say "in Watt", although the
# reactive/apparent sums would normally be VAr/VA -- confirm before relying
# on these labels.
    'Current Phase L6 in 1mA',
    'Real Power L1 in Watt',
    'Real Power L2 in Watt',
    'Real Power L3 in Watt',
    'Reaktiv Power L1 in VAr',
    'Reaktiv Power L2 in VAr',
    'Reaktiv Power L3 in VAr',
    'Power L1 in VA',
    'Power L2 in VA',
    'Power L3 in VA',
    'Cos(Phi) L1 * 0.001',
    'Cos(Phi) L2 * 0.001',
    'Cos(Phi) L3 * 0.001',
    'Real Power Summe L1..L3 in Watt',
    'Reaktiv Power Summe L1..L3 in Watt',
    'Power Summe L1..L3 in Watt',
    'Voltage L1-L2',
    'Voltage L2-L3',
    'Voltage L3-L1'
    ]

##############
##   Main   ##
##############

if __name__ == '__main__':
    # The default return value should be always OK
    helper.status(ok)

    # shows the list of possible types if the flag is set
    # (helper, flag_list and names are defined earlier in the file)
    if flag_list == True:
        for w, v in zip(names, descriptions):
            print w + ' = ' + v
        helper.status(unknown)
        helper.exit(summary='This is just a list and not a check!')

    # verify that a hostname is set
    verify_host(host, helper)

    # open session after validated host
    sess = netsnmp.Session(Version=version, DestHost=host, Community=community)

    # verify, that status(/type) parameter is not empty
    # (chunk truncated here; the verification code follows below this view)
# Now lets find those keyfigures, the content of textdata is dynamic so # some guesswork is required if 'Mannfj' in textdata: p.add_metric(label="mannfjoldi", value=numberdata) elif "Hagv" in textdata: p.add_metric(label="hagvoxtur", value=numberdata) elif "VLF" in textdata: p.add_metric("verg landsframleidsla", value=numberdata, uom="Mkr") elif "VNV" in textdata: p.add_metric(label="VNV", value=numberdata) elif "Launav" in textdata: p.add_metric(label="launavisitala", value=numberdata) elif "Bygg.v" in textdata: p.add_metric(label="byggingavisitala", value=numberdata) elif "sit. framl" in textdata: p.add_metric(label="visitala framleidsluverds", value=numberdata) elif "Fiskafli" in textdata: p.add_metric(label="fiskafli", value=numberdata, uom="tonn") elif "ruskipti" in textdata: p.add_metric(label="voruskipti", value=numberdata, uom="Mkr") summary = "%s metrics collected from hagstofan" % (len(p._perfdata.metrics)) p.add_summary(summary) p.status(ok) p.check_all_metrics() p.exit()
# Nagios plugin chunk: scrape the vedur.is avalanche-forecast page and
# classify the threat level.  (The script continues beyond this chunk;
# string, random and requests are imported elsewhere in the file.)
from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper,ok,warning,critical,unknown

p = PluginHelper()

# Random 4-character token, presumably meant as a cache-buster appended to
# the URL.  Not used within this chunk -- TODO confirm it is used below.
chars = string.letters + string.digits
randomstring= ''.join([random.choice(chars) for i in xrange(4)])  # avoid cache

default_url = 'http://www.vedur.is/ofanflod/snjoflodaspa'
p.parser.add_option('--url', dest='url', default=default_url)
p.parse_arguments()
p.show_legacy = True

# Fetch the forecast page.
html = requests.get(p.options.url).content

# Initial Status is OK, unless avalanche threats detected.
p.status(ok)

# We are going to parse the html and look for certain divs, according to the vedur.is page
# it should have the following threat codes:
# <div class="lev1"> <!-- Low risk -->
# <div class="lev2"> <!-- some risk-->
# <div class="lev3"> <!-- Considerable risk -->
# <div class="lev4"> <!-- High risk -->
# <div class="lev5"> <!-- Very high risk -->
soup = BeautifulSoup(html)
lev1 = soup.findAll('div', {'class':'lev1'})
lev2 = soup.findAll('div', {'class':'lev2'})
lev3 = soup.findAll('div', {'class':'lev3'})
lev4 = soup.findAll('div', {'class':'lev4'})
lev5 = soup.findAll('div', {'class':'lev5'})
# Configure logging from the command line: --show-debug enables full DEBUG
# output, otherwise everything up to ERROR is silenced.
if helper.options.show_debug:
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)
else:
    logging.disable(logging.ERROR)

# Fetch the monitored URL; each failure mode maps to CRITICAL with its own
# summary.  NOTE: verify=False disables TLS certificate validation.
# NOTE(review): after a handled exception `response` remains undefined; the
# code below this chunk must exit before touching it -- confirm.
try:
    response = requests.get(url, auth=(username, password), verify=False,
                            timeout=20)
except requests.exceptions.Timeout as e:
    logging.debug(e, exc_info=1)
    helper.add_summary('Could not establish connection')
    helper.add_long_output(str(e))
    helper.status(critical)
except requests.exceptions.ConnectionError as e:
    logging.debug(e, exc_info=1)
    helper.add_summary('Connection error')
    helper.add_long_output('Connection error' + str(e))
    helper.status(critical)
except requests.exceptions.HTTPError as e:
    logging.debug(e, exc_info=1)
    helper.add_summary('HTTP error')
    helper.add_long_output(str(e))
    helper.status(critical)
# Catch-all for any other requests failure (base class of the above).
except requests.exceptions.RequestException as e:
    logging.debug(e, exc_info=1)
    helper.add_summary('Unknown error')
    # (chunk truncated here; the handler continues below this view)
# Tail of the sensor-unit lookup table (Raritan PDU2-MIB unit codes); the
# opening of the dict literal is above this chunk.
    9 : "%",
    10: "ms",
    11: "Pa",
    12: "psi",
    13: "g",
    14: "F",
    15: "feet",
    16: "inches",
    17: "cm",
    18: "meters",
    19: "rpm",
    20: "degrees",
    }

# The default return value should be always OK
helper.status(ok)

######
## here we check the inlet
######
if typ.lower() == "inlet":
    # OIDs for Inlet from PDU2-MIB
    oid_inlet_value = '.1.3.6.1.4.1.13742.6.5.2.3.1.4'             # the value from the sensor (must be divided by the digit)
    oid_inlet_unit = '.1.3.6.1.4.1.13742.6.3.3.4.1.6'              # the unit of the value
    oid_inlet_digits = '.1.3.6.1.4.1.13742.6.3.3.4.1.7'            # the digit count used to scale the raw value into the real value
    oid_inlet_state = '.1.3.6.1.4.1.13742.6.5.2.3.1.3'             # the state if this is ok or not ok
    oid_inlet_warning_upper = '.1.3.6.1.4.1.13742.6.3.3.4.1.24'    # warning upper threshold (must be divided by the digit)
    oid_inlet_critical_upper = '.1.3.6.1.4.1.13742.6.3.3.4.1.23'   # critical upper threshold (must be divided by the digit)
    oid_inlet_warning_lower = '.1.3.6.1.4.1.13742.6.3.3.4.1.22'    # warning lower threshold
    oid_inlet_critical_lower = '.1.3.6.1.4.1.13742.6.3.3.4.1.21'   # critical lower threshold
# Report the apache mod_status figures gathered in `results` (populated
# earlier in the file) as pynag performance metrics, then evaluate any
# thresholds and exit Nagios-style.
my_plugin.add_summary(
    "%s seconds response time" % results['ResponseTime'])

# Metric specification: (label, value, extra add_metric keyword arguments),
# emitted in this exact order.
_metric_spec = [
    ('Total Accesses', results['Total Accesses'], {'uom': 'c'}),
    ('Total kBytes', results['Total kBytes'], {'uom': 'kb'}),
    ('CPULoad', float(results['CPULoad']) * 100, {'uom': '%'}),
    ('Uptime', results['Uptime'], {'uom': 'c'}),
    ('ReqPerSec', results['ReqPerSec'], {}),
    ('BytesPerSec', results['BytesPerSec'], {'uom': 'b'}),
    ('BytesPerReq', results['BytesPerReq'], {'uom': 'b'}),
    ('BusyWorkers', results['BusyWorkers'], {}),
    ('IdleWorkers', results['IdleWorkers'], {}),
    # Response time is the only metric carrying the -w/-c thresholds.
    ('ResponseTime', results['ResponseTime'],
     {'uom': 's',
      'warn': my_plugin.options.warning,
      'crit': my_plugin.options.critical}),
    ('Open slots', results['OpenSlots'], {}),
]
for _label, _value, _extra in _metric_spec:
    my_plugin.add_metric(label=_label, value=_value, **_extra)

# By default assume everything is ok. Any thresholds specified with
# --threshold can overwrite this status:
my_plugin.status(ok)

# Here all metrics will be checked against thresholds that are either
# built-in or added via --threshold from the command-line
my_plugin.check_all_metrics()

# Print out plugin information and exit nagios-style
my_plugin.exit()
#!/usr/bin/env python
# Nagios plugin: number of Reykjavik buses currently running, according to
# the apis.is realtime feed.
import requests
from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper, ok, warning, critical, unknown
import simplejson as json

p = PluginHelper()
p.parser.add_option('--url', dest='url', default='http://apis.is/bus/realtime')
p.parse_arguments()

html = requests.get(p.options.url).content
# BUG FIX: the decoded payload was previously bound to the name `json`,
# shadowing the `simplejson as json` module for the rest of the script.
data = json.loads(html)

buses_running = len(data['results'])
p.add_metric('buses running', buses_running)

# NOTE(review): the payload is JSON, so BeautifulSoup will normally find no
# <div class="warning"> elements here; kept for parity with the original
# behavior -- confirm whether this warning scan is still wanted.
soup = BeautifulSoup(html)
warnings = soup.findAll('div', {'class': 'warning'})

p.add_summary('%s buses are currently running' % (buses_running))
for i in warnings:
    p.status(warning)
    p.add_long_output(i.text)
p.check_all_metrics()
p.exit()
# Tail of ZkClient.cmd (its def line is above this chunk): send one
# four-letter word to ZooKeeper over telnet and return the raw reply.
        tn = Telnet(self.host, self.port, self.timeout)
        tn.write('{}\n'.format(word))
        return tn.read_all()


if __name__ == '__main__':
    # ZooKeeper health check: connect, then probe with 'ruok' and 'isro'.
    plugin = PluginHelper()
    plugin.parser.add_option("-H","--hostname", help="Zookeeper's host", default='127.0.0.1')
    plugin.parser.add_option("-p","--port", help="Zookeeper's port", default='2181')
    plugin.parse_arguments()

    try:
        zk = ZkClient(plugin.options.hostname, plugin.options.port)
    except socket.error:
        plugin.status(critical)
        plugin.add_summary("Can't connect to {}:{}".format(plugin.options.hostname, plugin.options.port))
        plugin.exit()

    try:
        if zk.cmd('ruok') != 'imok':
            plugin.status(critical)
            plugin.add_summary("Command 'ruok' failed")
            plugin.exit()
    # NOTE(review): BUG -- `except socket.error, socket.timeout:` is the
    # Python 2 `except TYPE, name` form: it catches ONLY socket.error and
    # rebinds the name socket.timeout to the caught exception.  It should
    # read `except (socket.error, socket.timeout):`.
    except socket.error, socket.timeout:
        plugin.status(critical)
        plugin.add_summary("Can't connect to {}:{}".format(plugin.options.hostname, plugin.options.port))
        plugin.exit()

    # 'isro' should answer 'rw' for a read-write server.
    try:
        if zk.cmd('isro') != 'rw':
            # (chunk truncated here; the handler continues below this view)
service_ascii = [ord(c) for c in s] # we need the length of the service name length = str(len(s)) # make the oid oid = base_oid + "." + length + "." + ".".join(str(x) for x in service_ascii) return oid if __name__ == "__main__": # verify that a hostname is set verify_host(host, helper) sess = netsnmp.Session(Version=version, DestHost=host, Community=community) # The default return value should be always OK helper.status(ok) # if no partition / disk is set, we will do a scan if service == "" or service is None: scan = True ########## # Here we do a scan ########## if scan: services = walk_data(sess, base_oid, helper)[0] if not services: print "No services found - SNMP disabled?" quit()
# Nagios plugin chunk: query the EVE Online server-status API.
# (requests is imported earlier in the file; the script continues below.)
from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper,ok,warning,critical,unknown

p = PluginHelper()
default_url = 'https://api.eveonline.com/server/ServerStatus.xml.aspx/'
p.parser.add_option('--url', dest='url', default=default_url)
p.parse_arguments()
p.show_legacy = True

# Any failure reaching the API is reported as UNKNOWN instead of crashing;
# the broad catch is deliberate here, since the exception type is included
# in the summary.
try:
    html = requests.get(p.options.url).content
except Exception, e:
    p.status(unknown)
    p.add_summary("%s error encountered while trying to connect to EVE api: %s" % (type(e), e))
    p.exit()

# The endpoint returns XML; the lowercase tag names presumably match how
# BeautifulSoup normalizes them -- confirm against the parser version used.
soup = BeautifulSoup(html)
serverOpen = soup.findAll('serveropen')
onlinePlayers = soup.findAll('onlineplayers')

# Missing either element means the response was not the expected document.
if not serverOpen or not onlinePlayers:
    p.status(unknown)
    p.add_summary("Failed to get all metrics from EVE API")
    p.add_long_output("HTTP request returned:")
    p.add_long_output(html)
    p.exit()
# Scrape the Landspitali activity-numbers page and report each activity
# counter as a performance metric.  (p, default_url, requests and
# BeautifulSoup come from earlier in the file.)
p.parser.add_option('--url', dest='url', default=default_url)
p.parse_arguments()
p.show_legacy = True

html = requests.get(p.options.url).content
soup = BeautifulSoup(html)

activitylist = soup.find('div', {'class': 'activityNumbers activityNumbersNew'})
activities = activitylist.findAll('div', recursive=False)

# Alert if suspiciously few metric divs were found (warn range 0..1).
p.add_metric('metrics_found', value=len(activities), warn='0..1')
p.add_summary('%s metrics found on landspitali website' % (len(activities)))

for i in activities:
    metric_name = i.get('class')
    metric_value = i.find('div', {'class': "todaysCount"}).text
    heading = i.find('div', {'class': 'heading'})
    text = i.find('div', {'class': 'todaysText'})
    # If string dag... is found, this is a counter for the whole day
    if 'dag...' in heading.text:
        uom = 'c'
    else:
        uom = ''
    p.add_metric(metric_name, metric_value, uom=uom)
    p.add_long_output("%s: %s %s %s" % (metric_name, heading.text, metric_value, text.text))

p.status(ok)
# BUG FIX: check_all_metrics() used to run immediately after
# parse_arguments(), before any metric had been added, so --threshold rules
# were never evaluated.  It now runs after all metrics are collected.
p.check_all_metrics()
p.exit()
# DNS-lookup check: resolve the hostname given with -H and, if -a was
# supplied, compare the result against the expected address.
hostname = my_plugin.options.hostname
address = my_plugin.options.address
if hostname is None:
    my_plugin.parser.error('-H argument is required')

# Here comes the specific check logic
try:
    # Time the lookup so slow resolution can be alerted on as a metric.
    start_time = time.time()
    result = socket.gethostbyname( hostname ) # result will contain the ip address resolved
    end_time = time.time()

    # If no address was specified with -a, then we return
    # OK if hostname resolved to anything at all
    if address is None or address == result:
        my_plugin.status(ok)
        my_plugin.add_summary("%s resolves to %s" % (hostname, result))
    else:
        my_plugin.status(critical)
        my_plugin.add_summary("%s resolves to %s but should resolve to %s" % (hostname,result,address))

    # Add run_time metric, so we can also alert if lookup takes too long
    run_time = end_time - start_time
    my_plugin.add_metric('run_time', run_time)
except gaierror:
    # Name resolution failed (gaierror is presumably imported from socket
    # earlier in the file -- confirm) -> CRITICAL.
    my_plugin.status(critical)
    my_plugin.add_summary('Could not resolve host "%s"' % hostname )

# when check_all_metrics() is run, any metrics we have added with add_metric() will be processed against
# Thresholds (like --threshold). This part will allow our plugin users to alert on lookup_time
# Query one named value from the SNMP Manager wrapper and report it both in
# the summary and as a performance metric.
m = Manager(helper.options.host, helper.options.community, int(helper.options.version))
formatstring = helper.options.value + ': %s'

# SECURITY NOTE(review): --value is concatenated into a Python expression
# and executed with eval(), so a caller can run arbitrary code.  Consider
# getattr(m, helper.options.value) or an explicit whitelist instead.
commandstring = "m." + helper.options.value
content = eval(commandstring)
helper.add_summary(formatstring % content)

# Read metrics from /proc/loadavg and add them as performance metrics
#load1,load5,load15,processes,last_proc_id = content.split()
#running,total = processes.split('/')

# If we so desire we can set default thresholds by adding warn attribute here
# However we decide that there are no thresholds by default and they have to be
# applied on runtime with the --threshold option
helper.add_metric(label=helper.options.value, value=content)
#helper.add_metric(label='load5',value=load5)
#helper.add_metric(label='load15',value=load15)
#helper.add_metric(label='running_processes',value=running)
#helper.add_metric(label='total_processes',value=total)

# By default assume everything is ok. Any thresholds specified with --threshold can overwrite this status:
helper.status(ok)

# Here all metrics will be checked against thresholds that are either
# built-in or added via --threshold from the command-line
helper.check_all_metrics()

# Print out plugin information and exit nagios-style
helper.exit()
#!/usr/bin/env python
# Nagios plugin: go WARNING when vedur.is is displaying weather warnings;
# each warning box becomes one line of long output.
import requests
from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper,ok,warning,critical,unknown

p = PluginHelper()
p.parser.add_option('--url', dest='url', default='http://www.vedur.is')
p.parse_arguments()

# Fetch and parse the front page, then collect every warning box.
page = requests.get(p.options.url).content
parsed = BeautifulSoup(page)
alerts = parsed.findAll('div', {'class':'warning'})

p.add_summary('%s warnings are being displayed on vedur.is' % len(alerts))
for alert in alerts:
    p.status(warning)
    p.add_long_output( alert.text )

# NOTE: presumably pynag keeps the most severe state seen, so this trailing
# OK does not downgrade a WARNING set above -- confirm against pynag docs.
p.status(ok)
p.check_all_metrics()
p.exit()
# (Chunk starts inside a loop over measurements; the enclosing `for` and the
# `if` whose else-branch appears below are above this view -- structure
# reconstructed, confirm against the full file.)
        # Bucket HTTP response counters by the status code embedded in the
        # metric key (third dot-separated component).
        status = key.split('.', 4)[2]
        http_status_counter[status] = (
            http_status_counter.get(status, 0) + measurement['value'])
    else:
        # Every other measurement becomes a metric named "<key>.<statistic>".
        helper.add_metric(label="%s.%s" % (key, measurement['statistic'].lower()),
                          value=measurement['value'])
        helper.add_summary('{} is {}'.format(key, measurement['value']))

# Emit one metric per collected HTTP status bucket.
for status in http_status_counter:
    helper.add_metric(label='http{}'.format(status),
                      value=http_status_counter[status])
    # NOTE(review): BUG? -- this summary reuses `key` and `measurement`
    # left over from the loop above, so it repeats the last measurement
    # once per status code; it probably should report http{status} and
    # its count instead.
    helper.add_summary('{} is {}'.format(key, measurement['value']))

# Fetch the health endpoint and map its status onto Nagios states.
json_data, version, err = request_data(health_endpoint, **get_args)
if json_data is None:
    if err is None:
        helper.status(unknown)
        helper.add_summary('no health data available')
    else:
        helper.status(critical)
        helper.add_summary('could not fetch health data: {}'.format(err))
else:
    # Only check health if there are no metrics specified in check
    if helper.options.metrics is None:
        status = json_data['status']
        if status == 'UP':
            helper.status(ok)
        elif status in ('DOWN', 'OUT_OF_SERVICE'):
            helper.status(critical)
        else:
            helper.status(unknown)
        helper.add_summary('global status is {}'.format(status))
# Nagios plugin: ask isanicelandicvolcanoerupting.com whether an Icelandic
# volcano is erupting, and map its headline onto Nagios states:
# "yes" -> WARNING, "no" -> OK, anything else -> UNKNOWN.
import requests
import string
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper, ok, warning, critical, unknown

p = PluginHelper()
p.parser.add_option('--url', dest='url',
                    default='http://www.isanicelandicvolcanoerupting.com')
p.parse_arguments()
p.show_legacy = True

# The site answers the question in its first <h3> element.
page = requests.get(p.options.url).content
headline = BeautifulSoup(page).find('h3').text
p.add_summary('Source says: "%s"' % headline)

# Check "yes" before "no" (same precedence as the original).
normalized = headline.lower()
if 'yes' in normalized:
    p.status(warning)
elif 'no' in normalized:
    p.status(ok)
else:
    p.status(unknown)

p.check_all_metrics()
p.exit()