def main():
    """Check the systemd unit named in sys.argv[1], nagios-style.

    Exits UNKNOWN (3) when the unit is not loaded; otherwise maps the
    unit's 'Active' header onto a plugin status and prints the raw
    systemctl output as long output.
    """
    plugin = PluginHelper()
    # Policy: an inactive or failed unit raises a WARNING (status 2)
    level = 2
    service_status = get_service_status(sys.argv[1])
    if loaded(service_status)[0] is False:
        # Unit could not be loaded at all -> UNKNOWN with the loader's message
        plugin.exit(3,
                    "%s - %s" % (service_status['name'], loaded(service_status)[1]),
                    "\n" + service_status['unparsed'])
    active = service_status['headers']['Active'][0]
    if active.startswith("inactive") or active.startswith('failed'):
        plugin.add_status(level)
    elif active.startswith("active"):
        plugin.add_status(0)
    else:
        # Unrecognised state string -> UNKNOWN
        plugin.add_status(3)
    plugin.add_summary("%s - %s" % (service_status['name'], active))
    plugin.add_long_output("\n" + service_status['unparsed'])
    plugin.exit()
def main():
    """Nagios-style check of the systemd unit given as the first CLI argument."""
    helper = PluginHelper()
    level = 2  # exit level used when the unit is inactive/failed (warn-on-inactive)
    service_status = get_service_status(sys.argv[1])
    load_result = loaded(service_status)
    if load_result[0] is False:
        # Not a loaded unit: bail out UNKNOWN with the raw systemctl text attached
        helper.exit(3,
                    "%s - %s" % (service_status['name'], load_result[1]),
                    "\n" + service_status['unparsed'])
    active = service_status['headers']['Active'][0]
    if active.startswith(("inactive", "failed")):
        helper.add_status(level)
    elif active.startswith("active"):
        helper.add_status(0)
    else:
        helper.add_status(3)  # unexpected state -> unknown
    helper.add_summary("%s - %s" % (service_status['name'], active))
    helper.add_long_output("\n" + service_status['unparsed'])
    helper.exit()
distance = columns[7].text.strip() direction = columns[8].text location = columns[9].text depth = depth.replace(',','.') scale = scale.replace(',','.') quality = quality.replace(',','.') latitude = latitude.replace(',','.') longitude = longitude.replace(',','.') distance = distance.replace(',','.') # manipulate location, well.. at least remove spaces location = location.replace(' ','_') datetimestr = str_date + " " + str_time.split(',',1)[0] timestamp = time.mktime( parse(datetimestr).timetuple() ) timestamp = int(timestamp) timesince = now-timestamp if timesince > 60*60: # Less than one hour since earthquake continue if row.find('ATHUGI') > 0: major_earthquakes += 1 recent_earthquakes += 1 helper.add_long_output("%s %s: scale=%s depth=%s quality=%s %s %s" % (str_date, str_time, scale, depth, quality, distance, location)) helper.add_summary('%s major earthquakes. %s total earthquakes' % (major_earthquakes, recent_earthquakes)) helper.add_metric('major earthquakes', value=major_earthquakes, crit='1..inf') helper.add_metric('recent earthquakes', value=recent_earthquakes, warn='3..inf') helper.check_all_metrics() helper.exit()
# Calculate UTC time from the device's local time and its reported UTC offset
if remote_time_utc_dir == '+':
    remote_timestamp -= datetime.timedelta(hours=remote_time_hours_offset,
                                           minutes=remote_time_minutes_offset)
elif remote_time_utc_dir == '-':
    remote_timestamp += datetime.timedelta(hours=remote_time_hours_offset,
                                           minutes=remote_time_minutes_offset)

try:
    # Windows returns local time (not UTC), so compare against local time.
    # '-l' / '--localtime' on the command line forces this behaviour too.
    if windows or use_local:
        local_timestamp = datetime.datetime.now()
        time_type = 'Remote (Local)'
    else:
        # Usually we need the UTC time
        local_timestamp = datetime.datetime.utcnow()
        time_type = 'Remote (UTC)'

    # Offset between local and remote clocks in seconds, adjusted by the
    # timezone-offset option (minutes)
    offset = (time.mktime(local_timestamp.timetuple())
              - time.mktime(remote_timestamp.timetuple())
              + 60 * o_tzoff)
    helper.add_metric(label='offset', value=offset, uom='s')
    helper.check_all_metrics()
except IndexError:
    helper.exit(summary='remote device does not return a time value',
                exit_code=unknown, perfdata='')

# Print out plugin information and exit nagios-style
helper.add_summary('%s: ' % (time_type)
                   + datetime.datetime.fromtimestamp(
                       time.mktime(remote_timestamp.timetuple())).strftime('%H:%M:%S')
                   + '. Offset = %d s' % offset)
helper.add_long_output('%s: ' % (time_type)
                       + datetime.datetime.fromtimestamp(
                           time.mktime(remote_timestamp.timetuple())).strftime('%Y.%m.%d %H:%M:%S'))
helper.exit()
scale = scale.replace(',', '.') quality = quality.replace(',', '.') latitude = latitude.replace(',', '.') longitude = longitude.replace(',', '.') distance = distance.replace(',', '.') # manipulate location, well.. at least remove spaces location = location.replace(' ', '_') datetimestr = str_date + " " + str_time.split(',', 1)[0] timestamp = time.mktime(parse(datetimestr).timetuple()) timestamp = int(timestamp) timesince = now - timestamp if timesince > 60 * 60: # Less than one hour since earthquake continue if row.find('ATHUGI') > 0: major_earthquakes += 1 recent_earthquakes += 1 helper.add_long_output( "%s %s: scale=%s depth=%s quality=%s %s %s" % (str_date, str_time, scale, depth, quality, distance, location)) helper.add_summary('%s major earthquakes. %s total earthquakes' % (major_earthquakes, recent_earthquakes)) helper.add_metric('major earthquakes', value=major_earthquakes, crit='1..inf') helper.add_metric('recent earthquakes', value=recent_earthquakes, warn='3..inf') helper.check_all_metrics() helper.exit()
#!/usr/bin/env python
"""Nagios-style check: count weather warnings displayed on vedur.is."""
import requests
from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper, ok, warning, critical, unknown

p = PluginHelper()
p.parser.add_option('--url', dest='url', default='http://www.vedur.is')
p.parse_arguments()

# Fetch the page and collect every <div class="warning"> element
html = requests.get(p.options.url).content
soup = BeautifulSoup(html)
warnings = soup.findAll('div', {'class': 'warning'})

p.add_summary('%s warnings are being displayed on vedur.is' % len(warnings))
for warning_div in warnings:
    # Any displayed warning raises the plugin status to WARNING
    p.status(warning)
    p.add_long_output(warning_div.text)
p.status(ok)
p.check_all_metrics()
p.exit()
# Probe the destination via the device; options come from the CLI parser
result = ping(device=device,
              destination=helper.options.destination,
              source=helper.options.source or None,
              # ttl=helper.options.ttl,
              timeout=helper.options.probe_timeout,
              size=helper.options.size,
              count=helper.options.count,
              debug=helper.options.show_debug)
if helper.options.show_debug:
    print(result)

if 'error' in result:
    # Probe failed outright -> CRITICAL with the error text
    helper.status(critical)
    helper.add_summary('%s: unable to ping' % helper.options.destination)
    helper.add_long_output(result['error'])
elif 'success' in result:
    success = result['success']
    helper.status(ok)
    helper.add_summary('%s: rta %.1fms, pl %d%%'
                       % (helper.options.destination,
                          success['rtt_avg'],
                          success['packet_loss']))
    helper.add_metric('pl', success['packet_loss'], uom='%')
    helper.add_metric('rta', success['rtt_avg'], uom='ms')
else:
    # Neither key present: the ping helper returned something unexpected
    helper.status(unknown)
    helper.add_summary('Unrecognized result from ping function')
    helper.add_long_output(str(result))
helper.check_all_metrics()
helper.exit()
if helper.options.show_debug:
    # Verbose mode: enable full debug logging
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)
else:
    # Quiet mode: silence everything up to and including ERROR
    logging.disable(logging.ERROR)

try:
    # verify=False: certificate validation deliberately skipped
    # (appliances commonly present self-signed certificates)
    response = requests.get(url, auth=(username, password), verify=False,
                            timeout=20)
except requests.exceptions.Timeout as e:
    logging.debug(e, exc_info=1)
    helper.add_summary('Could not establish connection')
    helper.add_long_output(str(e))
    helper.status(critical)
except requests.exceptions.ConnectionError as e:
    logging.debug(e, exc_info=1)
    helper.add_summary('Connection error')
    helper.add_long_output('Connection error' + str(e))
    helper.status(critical)
except requests.exceptions.HTTPError as e:
    logging.debug(e, exc_info=1)
    helper.add_summary('HTTP error')
    helper.add_long_output(str(e))
    helper.status(critical)
except requests.exceptions.RequestException as e:
    logging.debug(e, exc_info=1)
    # NOTE(review): this handler appears truncated in this chunk —
    # it most likely continues past the visible source
# Scale the raw SNMP threshold values by the device's reported digit count
inlet_critical_upper = real_value(inlet_critical_uppers[x], inlet_digit)
inlet_warning_lower = real_value(inlet_warning_lowers[x], inlet_digit)
inlet_critical_lower = real_value(inlet_critical_lowers[x], inlet_digit)

if inlet_state == "belowLowerCritical" or inlet_state == "aboveUpperCritical":
    # we don't want to use the thresholds. we rely on the state value of the device
    # BUG FIX: the original format string was "%s is %s" (two placeholders)
    # with three arguments, which raises TypeError at runtime; it now matches
    # the warning branch below.
    helper.add_summary("%s %s is %s" % (inlet_value, inlet_unit, inlet_state))
    helper.status(critical)
if inlet_state == "belowLowerWarning" or inlet_state == "aboveUpperWarning":
    helper.add_summary("%s %s is %s" % (inlet_value, inlet_unit, inlet_state))
    helper.status(warning)

# we always want to see the values in the long output and in the perf data
helper.add_summary("%s %s" % (inlet_value, inlet_unit))
helper.add_long_output("%s %s: %s" % (inlet_value, inlet_unit, inlet_state))
helper.add_metric("Sensor " + str(x), inlet_value,
                  inlet_warning_lower + ":" + inlet_warning_upper,
                  inlet_critical_lower + ":" + inlet_critical_upper,
                  "", "", inlet_unit)

######
# here we check the outlets
######
if typ.lower() == "outlet":
    # here we need the id
    base_oid_outlet_name = '.1.3.6.1.4.1.13742.6.3.5.3.1.3.1'  # Name
    base_oid_outlet_state = '.1.3.6.1.4.1.13742.6.5.4.3.1.3.1'  # Value
    oid_outlet_name = base_oid_outlet_name + "." + id  # here we add the id, to get the name
    oid_outlet_state = base_oid_outlet_state + "." + id + ".14"  # here we add the id, to get the state
    # we just want to receive the status of one sensor
for row in rows:
    textdata = row.find('td', {'class': 'textdata'})
    numberdata = row.find('td', {'class': 'numberdata'})
    # Rows without both cells carry no keyfigure — skip them
    if not textdata or not numberdata:
        continue
    # Get the text content out of the <td> cells
    textdata = textdata.text
    numberdata = numberdata.text
    # clear some formatting
    numberdata = numberdata.replace('.', '').replace(',', '')
    # Add the keyfigure data to longoutput
    output = "%-30s %s" % (textdata, numberdata)
    p.add_long_output(output)
    # Now lets find those keyfigures, the content of textdata is dynamic so
    # some guesswork is required
    if 'Mannfj' in textdata:
        p.add_metric(label="mannfjoldi", value=numberdata)
    elif "Hagv" in textdata:
        p.add_metric(label="hagvoxtur", value=numberdata)
    elif "VLF" in textdata:
        p.add_metric("verg landsframleidsla", value=numberdata, uom="Mkr")
    elif "VNV" in textdata:
        p.add_metric(label="VNV", value=numberdata)
    elif "Launav" in textdata:
        p.add_metric(label="launavisitala", value=numberdata)
    elif "Bygg.v" in textdata:
        p.add_metric(label="byggingavisitala", value=numberdata)
#!/usr/bin/env python
"""Nagios-style check: number of buses currently running (apis.is realtime feed)."""
import requests
from BeautifulSoup import BeautifulSoup
from pynag.Plugins import PluginHelper, ok, warning, critical, unknown
import simplejson as json

p = PluginHelper()
p.parser.add_option('--url', dest='url', default='http://apis.is/bus/realtime')
p.parse_arguments()

html = requests.get(p.options.url).content
# Parse the JSON payload; 'results' holds one entry per running bus
data = json.loads(html)
buses_running = len(data['results'])
p.add_metric('buses running', buses_running)

# Also scan the response for warning divs (raises status if any show up)
soup = BeautifulSoup(html)
warnings = soup.findAll('div', {'class': 'warning'})
p.add_summary('%s buses are currently running' % (buses_running))
for warning_div in warnings:
    p.status(warning)
    p.add_long_output(warning_div.text)
p.check_all_metrics()
p.exit()
p.parser.add_option('--url', dest='url', default=default_url)
p.parse_arguments()
p.check_all_metrics()
p.show_legacy = True

# Fetch the hospital activity page and pick out the activity counters
html = requests.get(p.options.url).content
soup = BeautifulSoup(html)
activitylist = soup.find('div', {'class': 'activityNumbers activityNumbersNew'})
activities = activitylist.findAll('div', recursive=False)

p.add_metric('metrics_found', value=len(activities), warn='0..1')
p.add_summary('%s metrics found on landspitali website' % (len(activities)))

for activity in activities:
    metric_name = activity.get('class')
    metric_value = activity.find('div', {'class': "todaysCount"}).text
    heading = activity.find('div', {'class': 'heading'})
    text = activity.find('div', {'class': 'todaysText'})
    # If string dag... is found, this is a counter for the whole day
    uom = 'c' if 'dag...' in heading.text else ''
    p.add_metric(metric_name, metric_value, uom=uom)
    p.add_long_output("%s: %s %s %s"
                      % (metric_name, heading.text, metric_value, text.text))

p.status(ok)
p.exit()
# Apply the device-reported UTC offset ('-' direction adds the offset back)
remote_timestamp += datetime.timedelta(hours=remote_time_hours_offset,
                                       minutes=remote_time_minutes_offset)
try:
    # Windows will return the local time (not UTC), so we need to use the
    # local time to compare; '-l' / '--localtime' forces this as well
    if windows or use_local:
        local_timestamp = datetime.datetime.now()
        time_type = 'Remote (Local)'
    else:
        # usually the we need the UTC time
        local_timestamp = datetime.datetime.utcnow()
        time_type = 'Remote (UTC)'

    # Offset between local and remote clocks in seconds, adjusted by the
    # timezone-offset option (minutes)
    offset = (time.mktime(local_timestamp.timetuple())
              - time.mktime(remote_timestamp.timetuple())
              + 60 * o_tzoff)
    helper.add_metric(label='offset', value=offset, uom='s')
    helper.check_all_metrics()
except IndexError:
    helper.exit(summary='remote device does not return a time value',
                exit_code=unknown, perfdata='')

# Print out plugin information and exit nagios-style
helper.add_summary('%s: ' % (time_type)
                   + datetime.datetime.fromtimestamp(
                       time.mktime(remote_timestamp.timetuple())).strftime('%H:%M:%S')
                   + '. Offset = %d s' % offset)
helper.add_long_output('%s: ' % (time_type)
                       + datetime.datetime.fromtimestamp(
                           time.mktime(remote_timestamp.timetuple())).strftime('%Y.%m.%d %H:%M:%S'))
helper.exit()
p.parser.add_option('--url', dest='url', default=default_url)
p.parse_arguments()
p.check_all_metrics()
p.show_legacy = True

html = requests.get(p.options.url).content
soup = BeautifulSoup(html)
# The activity counters live in one wrapper div; each direct child is a metric
activitylist = soup.find('div',
                         {'class': 'activityNumbers activityNumbersNew'})
activities = activitylist.findAll('div', recursive=False)

p.add_metric('metrics_found', value=len(activities), warn='0..1')
p.add_summary('%s metrics found on landspitali website' % (len(activities)))

for i in activities:
    metric_name = i.get('class')
    metric_value = i.find('div', {'class': "todaysCount"}).text
    heading = i.find('div', {'class': 'heading'})
    text = i.find('div', {'class': 'todaysText'})
    # If string dag... is found, this is a counter for the whole day
    if 'dag...' in heading.text:
        uom = 'c'
    else:
        uom = ''
    p.add_metric(metric_name, metric_value, uom=uom)
    p.add_long_output("%s: %s %s %s"
                      % (metric_name, heading.text, metric_value, text.text))

p.status(ok)
p.exit()
try:
    html = requests.get(p.options.url).content
except Exception as e:
    # FIX: modernised 'except Exception, e' (py2-only, a syntax error on
    # python 3) to the 'as' form, valid on python 2.6+ and python 3.
    # Broad catch is deliberate: any connection problem -> UNKNOWN.
    p.status(unknown)
    p.add_summary("%s error encountered while trying to connect to EVE api: %s"
                  % (type(e), e))
    p.exit()

soup = BeautifulSoup(html)
serverOpen = soup.findAll('serveropen')
onlinePlayers = soup.findAll('onlineplayers')

if not serverOpen or not onlinePlayers:
    # API response did not contain the expected tags
    p.status(unknown)
    p.add_summary("Failed to get all metrics from EVE API")
    p.add_long_output("HTTP request returned:")
    p.add_long_output(html)
    p.exit()

server_status = serverOpen[0].text
num_players = onlinePlayers[0].text

p.add_summary('Server open: %s' % (server_status))
if server_status != 'True':
    # Anything other than the literal 'True' means the server is down
    p.status(critical)
p.add_metric(label='online players', value=num_players)
conf = { 'device_type': 'cisco_ios', 'host': helper.options.host, 'username': helper.options.username, 'password': helper.options.password } device = netmiko.ConnectHandler(**conf) result = get_peers(device=device, debug=helper.options.show_debug) if helper.options.show_debug: print result if 'error' in result: helper.status(unknown) helper.add_summary('%s: unable to check') helper.add_long_output(result['error']) elif 'success' in result: if not result['success']['peers']: helper.status(warning) helper.add_summary('No IPv6 BGP peers configured') not_ok_count = result['success']['counts']['active'] + result['success'][ 'counts']['idle'] if not_ok_count: helper.status(critical) helper.add_summary('%s peers in non OK state' % not_ok_count) else: helper.status(ok) helper.add_summary('All %s peers OK' % result['success']['counts']['total']) helper.add_long_output(get_stats_line(result['success']['counts'])) for peer in result['success']['peers']: