def main(): """ Main function to run the check """ args = parse_args() zagg_sender = ZaggSender(verbose=args.verbose, debug=args.debug) discovery_key_disk = 'disc.disk' interval = 3 pcp_disk_dev_metrics = ['disk.dev.total', 'disk.dev.avactive'] item_prototype_macro_disk = '#OSO_DISK' item_prototype_key_tps = 'disc.disk.tps' item_prototype_key_putil = 'disc.disk.putil' disk_metrics = pminfo.get_sampled_data(pcp_disk_dev_metrics, interval, 2) pcp_metrics_divided = {} for metric in pcp_disk_dev_metrics: pcp_metrics_divided[metric] = { k: v for k, v in disk_metrics.items() if metric in k } # do TPS checks; use disk.dev.total filtered_disk_totals = clean_up_metric_dict( pcp_metrics_divided[pcp_disk_dev_metrics[0]], pcp_disk_dev_metrics[0] + '.') # Add dynamic items zagg_sender.add_zabbix_dynamic_item(discovery_key_disk, item_prototype_macro_disk, filtered_disk_totals.keys()) # calculate the TPS and add them to the ZaggSender for disk, totals in filtered_disk_totals.iteritems(): disk_tps = (totals[1] - totals[0]) / interval zagg_sender.add_zabbix_keys( {'%s[%s]' % (item_prototype_key_tps, disk): disk_tps}) # do % Util checks; use disk.dev.avactive filtered_disk_totals = clean_up_metric_dict( pcp_metrics_divided[pcp_disk_dev_metrics[1]], pcp_disk_dev_metrics[1] + '.') # calculate the % Util and add them to the ZaggSender for disk, totals in filtered_disk_totals.iteritems(): total_active = (float)(totals[1] - totals[0]) / 1000.0 putil = 100 * total_active / interval zagg_sender.add_zabbix_keys( {'%s[%s]' % (item_prototype_key_putil, disk): putil}) zagg_sender.send_metrics()
def main(): """ Main function to run the check """ args = parse_args() zagg_sender = ZaggSender(verbose=args.verbose, debug=args.debug) discovery_key_network = 'disc.network' pcp_network_dev_metrics = [ 'network.interface.in.bytes', 'network.interface.out.bytes' ] item_proto_macro_network = '#OSO_NET_INTERFACE' item_proto_key_in_bytes = 'disc.network.in.bytes' item_proto_key_out_bytes = 'disc.network.out.bytes' network_metrics = pminfo.get_metrics(pcp_network_dev_metrics) pcp_metrics_divided = {} for metric in pcp_network_dev_metrics: pcp_metrics_divided[metric] = { k: v for k, v in network_metrics.items() if metric in k } # do Network In; use network.interface.in.bytes filtered_network_totals = clean_up_metric_dict( pcp_metrics_divided[pcp_network_dev_metrics[0]], pcp_network_dev_metrics[0] + '.') # Add dynamic items zagg_sender.add_zabbix_dynamic_item(discovery_key_network, item_proto_macro_network, filtered_network_totals.keys()) # Report Network IN bytes; them to the ZaggSender for interface, total in filtered_network_totals.iteritems(): zagg_sender.add_zabbix_keys( {'%s[%s]' % (item_proto_key_in_bytes, interface): total}) # Report Network OUT Bytes; use network.interface.out.bytes filtered_network_totals = clean_up_metric_dict( pcp_metrics_divided[pcp_network_dev_metrics[1]], pcp_network_dev_metrics[1] + '.') # calculate the % Util and add them to the ZaggSender for interface, total in filtered_network_totals.iteritems(): zagg_sender.add_zabbix_keys( {'%s[%s]' % (item_proto_key_out_bytes, interface): total}) zagg_sender.send_metrics()
def main():
    ''' Gather and send details on all visible S3 buckets '''
    logger.info("start")

    discovery_key = "disc.aws"
    discovery_macro = "#S3_BUCKET"
    prototype_s3_size = "disc.aws.size"
    prototype_s3_count = "disc.aws.objects"

    args = parse_args()
    if args.verbose:
        logger.setLevel(logging.DEBUG)
        logger.debug("verbose flag set")

    # Pull the registry's S3 credentials out of the docker-registry DC's
    # config secret
    ocutil = OCUtil()
    dc_yaml = ocutil.get_dc('docker-registry')
    registry_config_secret = get_registry_config_secret(dc_yaml)
    oc_yaml = ocutil.get_secrets(registry_config_secret)

    aws_access, aws_secret = get_aws_creds(oc_yaml)
    awsutil = AWSUtil(aws_access, aws_secret, args.debug)

    bucket_list = awsutil.get_bucket_list(args.debug)

    bucket_stats = {}
    for bucket in bucket_list:
        s3_size, s3_objects = awsutil.get_bucket_info(bucket, args.debug)
        bucket_stats[bucket] = {"size": s3_size, "objects": s3_objects}

    # FIX: parenthesized print (the Python-2-only statement form breaks
    # under Python 3; the single-argument call form works on both)
    if args.debug:
        print("Bucket stats: " + str(bucket_stats))

    if args.test:
        print("Test-only. Received results: " + str(bucket_stats))
    else:
        zgs = ZaggSender(verbose=args.debug)
        zgs.add_zabbix_dynamic_item(discovery_key, discovery_macro, bucket_list)
        for bucket, stats in bucket_stats.items():
            zab_key = "{}[{}]".format(prototype_s3_size, bucket)
            # size is rounded to an int because the Zabbix item is numeric
            zgs.add_zabbix_keys({zab_key: int(round(stats["size"]))})
            zab_key = "{}[{}]".format(prototype_s3_count, bucket)
            zgs.add_zabbix_keys({zab_key: stats["objects"]})
        zgs.send_metrics()
def send_zagg_data(bucket_list, bucket_stats, args):
    '''send data to zabbix '''
    discovery_key = "disc.aws"
    discovery_macro = "#S3_BUCKET"
    prototype_s3_size = "disc.aws.size"
    prototype_s3_count = "disc.aws.objects"

    sender = ZaggSender(verbose=args.debug)

    # Register every bucket as a discovered entity first
    sender.add_zabbix_dynamic_item(discovery_key, discovery_macro, bucket_list)

    # Then attach a size and an object-count value to each bucket's
    # prototype keys
    for bucket_name, stats in bucket_stats.items():
        size_key = "{}[{}]".format(prototype_s3_size, bucket_name)
        sender.add_zabbix_keys({size_key: int(round(stats["size"]))})
        count_key = "{}[{}]".format(prototype_s3_count, bucket_name)
        sender.add_zabbix_keys({count_key: stats["objects"]})

    sender.send_metrics()
def report_to_zabbix(self, disc_key, disc_macro, item_proto_key, value):
    """ Sends the commands exit code to zabbix. """
    zs = ZaggSender()

    # Register the dynamic (low-level discovery) item first
    self.verbose_print("Adding the dynamic item to Zabbix - %s, %s, [%s]"
                       % (disc_key, disc_macro, self.args.name))
    zs.add_zabbix_dynamic_item(disc_key, disc_macro, [self.args.name])

    # Then attach the value to that item's prototype key
    item_key = '%s[%s]' % (item_proto_key, self.args.name)
    self.verbose_print("Sending metric to Zabbix - %s[%s]: %s"
                       % (item_proto_key, self.args.name, value))
    zs.add_zabbix_keys({item_key: value})

    # Actually send them
    zs.send_metrics()
def report_to_zabbix(self, disc_key, disc_macro, item_proto_key, value):
    """ Sends the commands exit code to zabbix. """
    sender = ZaggSender()
    name = self.args.name

    # First the discovery registration, then the metric itself, then one
    # combined send at the end.
    self.verbose_print("Adding the dynamic item to Zabbix - %s, %s, [%s]" %
                       (disc_key, disc_macro, name))
    sender.add_zabbix_dynamic_item(disc_key, disc_macro, [name])

    self.verbose_print("Sending metric to Zabbix - %s[%s]: %s" %
                       (item_proto_key, name, value))
    sender.add_zabbix_keys({'%s[%s]' % (item_proto_key, name): value})

    sender.send_metrics()
def main(): """ Main function to run the check """ args = parse_args() zagg_sender = ZaggSender(verbose=args.verbose, debug=args.debug) discovery_key_disk = 'disc.disk' interval = 3 pcp_disk_dev_metrics = ['disk.dev.total', 'disk.dev.avactive'] item_prototype_macro_disk = '#OSO_DISK' item_prototype_key_tps = 'disc.disk.tps' item_prototype_key_putil = 'disc.disk.putil' disk_metrics = pminfo.get_sampled_data(pcp_disk_dev_metrics, interval, 2) pcp_metrics_divided = {} for metric in pcp_disk_dev_metrics: pcp_metrics_divided[metric] = {k: v for k, v in disk_metrics.items() if metric in k} # do TPS checks; use disk.dev.total filtered_disk_totals = clean_up_metric_dict(pcp_metrics_divided[pcp_disk_dev_metrics[0]], pcp_disk_dev_metrics[0] + '.') # Add dynamic items zagg_sender.add_zabbix_dynamic_item(discovery_key_disk, item_prototype_macro_disk, filtered_disk_totals.keys()) # calculate the TPS and add them to the ZaggSender for disk, totals in filtered_disk_totals.iteritems(): disk_tps = (totals[1] - totals[0]) / interval zagg_sender.add_zabbix_keys({'%s[%s]' % (item_prototype_key_tps, disk): disk_tps}) # do % Util checks; use disk.dev.avactive filtered_disk_totals = clean_up_metric_dict(pcp_metrics_divided[pcp_disk_dev_metrics[1]], pcp_disk_dev_metrics[1] + '.') # calculate the % Util and add them to the ZaggSender for disk, totals in filtered_disk_totals.iteritems(): total_active = (float)(totals[1] - totals[0]) / 1000.0 putil = 100 * total_active / interval zagg_sender.add_zabbix_keys({'%s[%s]' % (item_prototype_key_putil, disk): putil}) zagg_sender.send_metrics()
def main():
    ''' Gather and send details on all visible GCS buckets '''
    discovery_key = "disc.gcp"
    discovery_macro = "#GCS_BUCKET"
    prototype_bucket_size = "disc.gcp.size"
    prototype_bucket_count = "disc.gcp.objects"

    args = parse_args()

    ocutil = OCUtil()
    dc_yaml = ocutil.get_dc('docker-registry')
    registry_config_secret = get_registry_config_secret(dc_yaml)
    oc_yaml = ocutil.get_secrets(registry_config_secret)

    # FIX: the return value used to be bound to 'bucket', which was then
    # silently shadowed by the loop variable below -- a dead binding.
    # The call is kept for its side effects (presumably it configures
    # gcloud credentials from the registry secret; TODO confirm).
    get_gcp_info(oc_yaml)

    gsutil = GcloudUtil(verbose=args.debug)

    bucket_list = gsutil.get_bucket_list()

    bucket_stats = {}
    for bucket in bucket_list:
        size, objects = gsutil.get_bucket_info(bucket)
        bucket_stats[bucket] = {"size": size, "objects": objects}

    # FIX: parenthesized print (Python-2-only statement form)
    if args.debug:
        print("Bucket stats: " + str(bucket_stats))

    if args.test:
        print("Test-only. Received results: " + str(bucket_stats))
    else:
        zgs = ZaggSender(verbose=args.debug)
        zgs.add_zabbix_dynamic_item(discovery_key, discovery_macro, bucket_list)
        for bucket, stats in bucket_stats.items():
            zab_key = "{}[{}]".format(prototype_bucket_size, bucket)
            zgs.add_zabbix_keys({zab_key: int(round(stats["size"]))})
            zab_key = "{}[{}]".format(prototype_bucket_count, bucket)
            zgs.add_zabbix_keys({zab_key: stats["objects"]})
        zgs.send_metrics()
def main(): """ Gather and send details on all visible S3 buckets """ discovery_key = "disc.aws" discovery_macro = "#S3_BUCKET" prototype_s3_size = "disc.aws.size" prototype_s3_count = "disc.aws.objects" args = parse_args() ocutil = OCUtil() oc_yaml = ocutil.get_secrets("dockerregistry") aws_access, aws_secret = get_aws_creds(oc_yaml) awsutil = AWSUtil(aws_access, aws_secret, args.debug) bucket_list = awsutil.get_bucket_list(args.debug) bucket_stats = {} for bucket in bucket_list: s3_size, s3_objects = awsutil.get_bucket_info(bucket, args.debug) bucket_stats[bucket] = {"size": s3_size, "objects": s3_objects} if args.debug: print "Bucket stats: " + str(bucket_stats) if args.test: print "Test-only. Received results: " + str(bucket_stats) else: zgs = ZaggSender(verbose=args.debug) zgs.add_zabbix_dynamic_item(discovery_key, discovery_macro, bucket_list) for bucket in bucket_stats.keys(): zab_key = "{}[{}]".format(prototype_s3_size, bucket) zgs.add_zabbix_keys({zab_key: int(round(bucket_stats[bucket]["size"]))}) zab_key = "{}[{}]".format(prototype_s3_count, bucket) zgs.add_zabbix_keys({zab_key: bucket_stats[bucket]["objects"]}) zgs.send_metrics()
def main(): """ Main function to run the check """ args = parse_args() zagg_sender = ZaggSender(verbose=args.verbose, debug=args.debug) discovery_key_network = 'disc.network' pcp_network_dev_metrics = ['network.interface.in.bytes', 'network.interface.out.bytes'] item_proto_macro_network = '#OSO_NET_INTERFACE' item_proto_key_in_bytes = 'disc.network.in.bytes' item_proto_key_out_bytes = 'disc.network.out.bytes' network_metrics = pminfo.get_metrics(pcp_network_dev_metrics) pcp_metrics_divided = {} for metric in pcp_network_dev_metrics: pcp_metrics_divided[metric] = {k: v for k, v in network_metrics.items() if metric in k} # do Network In; use network.interface.in.bytes filtered_network_totals = clean_up_metric_dict(pcp_metrics_divided[pcp_network_dev_metrics[0]], pcp_network_dev_metrics[0] + '.') # Add dynamic items zagg_sender.add_zabbix_dynamic_item(discovery_key_network, item_proto_macro_network, filtered_network_totals.keys()) # Report Network IN bytes; them to the ZaggSender for interface, total in filtered_network_totals.iteritems(): zagg_sender.add_zabbix_keys({'%s[%s]' % (item_proto_key_in_bytes, interface): total}) # Report Network OUT Bytes; use network.interface.out.bytes filtered_network_totals = clean_up_metric_dict(pcp_metrics_divided[pcp_network_dev_metrics[1]], pcp_network_dev_metrics[1] + '.') # calculate the % Util and add them to the ZaggSender for interface, total in filtered_network_totals.iteritems(): zagg_sender.add_zabbix_keys({'%s[%s]' % (item_proto_key_out_bytes, interface): total}) zagg_sender.send_metrics()
def main(): """ Main function to run the check """ args = parse_args() zagg_sender = ZaggSender(verbose=args.verbose, debug=args.debug) filesys_full_metric = ['filesys.full'] filesys_inode_derived_metrics = {'filesys.inodes.pused' : 'filesys.usedfiles / (filesys.usedfiles + filesys.freefiles) * 100' } discovery_key_fs = 'disc.filesys' item_prototype_macro_fs = '#OSO_FILESYS' item_prototype_key_full = 'disc.filesys.full' item_prototype_key_inode = 'disc.filesys.inodes.pused' # Get the disk space filesys_full_metrics = pminfo.get_metrics(filesys_full_metric) filtered_filesys_metrics = filter_out_docker_filesystems(filesys_full_metrics, 'filesys.full.') zagg_sender.add_zabbix_dynamic_item(discovery_key_fs, item_prototype_macro_fs, filtered_filesys_metrics.keys()) for filesys_name, filesys_full in filtered_filesys_metrics.iteritems(): zagg_sender.add_zabbix_keys({'%s[%s]' % (item_prototype_key_full, filesys_name): filesys_full}) # Get filesytem inode metrics filesys_inode_metrics = pminfo.get_metrics(derived_metrics=filesys_inode_derived_metrics) filtered_filesys_inode_metrics = filter_out_docker_filesystems(filesys_inode_metrics, 'filesys.inodes.pused.') for filesys_name, filesys_inodes in filtered_filesys_inode_metrics.iteritems(): zagg_sender.add_zabbix_keys({'%s[%s]' % (item_prototype_key_inode, filesys_name): filesys_inodes}) zagg_sender.send_metrics()
def report_to_zabbix(self, total_snapshottable_vols, total_snapshots_created,
                     total_snapshot_creation_errors):
    """ Sends the commands exit code to zabbix. """
    zs = ZaggSender(verbose=True)
    schedule = self.args.with_schedule

    # Populate EBS_SNAPSHOTTER_DISC_SCHEDULE_MACRO with the schedule
    zs.add_zabbix_dynamic_item(EBS_SNAPSHOTTER_DISC_KEY,
                               EBS_SNAPSHOTTER_DISC_SCHEDULE_MACRO,
                               [schedule])

    # Report each per-schedule prototype item key and its value, one
    # add_zabbix_keys call apiece (same order as before)
    per_schedule_items = (
        (EBS_SNAPSHOTTER_SNAPSHOTTABLE_VOLUMES_KEY, total_snapshottable_vols),
        (EBS_SNAPSHOTTER_SNAPSHOTS_CREATED_KEY, total_snapshots_created),
        (EBS_SNAPSHOTTER_SNAPSHOT_CREATION_ERRORS_KEY,
         total_snapshot_creation_errors),
    )
    for proto_key, value in per_schedule_items:
        zs.add_zabbix_keys({'%s[%s]' % (proto_key, schedule): value})

    # Actually send them
    zs.send_metrics()
class OpenshiftMasterZaggClient(object):
    """ Checks for the Openshift Master """

    def __init__(self):
        self.args = None                # argparse.Namespace, set in parse_args()
        self.zagg_sender = None         # ZaggSender, set in run()
        self.ora = None                 # OpenshiftRestApi client, set in run()
        self.zabbix_api_key = None      # item key for API up/down (local vs remote)
        self.zabbix_healthz_key = None  # item key for the /healthz result

    def run(self):
        """ Main function to run the check """
        self.parse_args()
        self.zagg_sender = ZaggSender(verbose=self.args.verbose,
                                      debug=self.args.debug)

        if self.args.local:
            # Local mode: talk to the loopback API and force the two basic checks
            self.ora = OpenshiftRestApi()
            self.args.api_ping = True
            self.args.healthz = True
            self.zabbix_api_key = 'openshift.master.local.api.ping'
            self.zabbix_healthz_key = 'openshift.master.local.api.healthz'
        else:
            master_cfg_from_yaml = []
            with open('/etc/origin/master/master-config.yaml', 'r') as yml:
                # FIX: yaml.safe_load instead of yaml.load -- the unsafe
                # loader can construct arbitrary Python objects
                master_cfg_from_yaml = yaml.safe_load(yml)
            self.ora = OpenshiftRestApi(
                host=master_cfg_from_yaml['oauthConfig']['masterURL'],
                verify_ssl=True)
            self.zabbix_api_key = 'openshift.master.api.ping'
            self.zabbix_healthz_key = 'openshift.master.api.healthz'

        try:
            if self.args.healthz or self.args.all_checks:
                self.healthz_check()
        except Exception as ex:
            # FIX: str(ex) instead of ex.message (removed in Python 3,
            # deprecated in 2.6+); report 'false' so Zabbix alerts instead
            # of the item going stale
            print("Problem performing healthz check: %s " % str(ex))
            self.zagg_sender.add_zabbix_keys({self.zabbix_healthz_key: 'false'})

        try:
            if self.args.api_ping or self.args.all_checks:
                self.api_ping()

            if self.args.project_count or self.args.all_checks:
                self.project_count()

            if self.args.pod_count or self.args.all_checks:
                self.pod_count()

            if self.args.user_count or self.args.all_checks:
                self.user_count()

            if self.args.pv_info or self.args.all_checks:
                self.pv_info()

            if self.args.nodes_not_ready or self.args.all_checks:
                self.nodes_not_ready()

        except Exception as ex:
            print("Problem Openshift API checks: %s " % str(ex))
            # Openshift API is down
            self.zagg_sender.add_zabbix_keys({self.zabbix_api_key: 0})

        try:
            if self.args.metrics or self.args.all_checks:
                self.metric_check()
        except Exception as ex:
            print("Problem getting Openshift metrics at /metrics: %s " % str(ex))
            # Openshift Metrics are down
            self.zagg_sender.add_zabbix_keys({'openshift.master.metric.ping': 0})

        self.zagg_sender.send_metrics()

    def parse_args(self):
        """ parse the args from the cli """
        # NOTE(review): the description 'Network metric sender' looks like a
        # copy/paste from another checker; left unchanged so --help output
        # does not silently change.
        parser = argparse.ArgumentParser(description='Network metric sender')
        parser.add_argument('-v', '--verbose', action='store_true',
                            default=None, help='Verbose?')
        parser.add_argument('--debug', action='store_true',
                            default=None, help='Debug?')
        parser.add_argument('-l', '--local', action='store_true', default=False,
                            help='Run local checks against the local API (https://127.0.0.1)')

        master_check_group = parser.add_argument_group('Different Checks to Perform')
        master_check_group.add_argument('--all-checks', action='store_true', default=None,
                                        help='Do all of the checks')
        master_check_group.add_argument('--api-ping', action='store_true', default=None,
                                        help='Verify the Openshift API is alive')
        master_check_group.add_argument('--healthz', action='store_true', default=None,
                                        help='Query the Openshift Master API /healthz')
        master_check_group.add_argument('--metrics', action='store_true', default=None,
                                        help='Query the Openshift Master Metrics at /metrics')
        master_check_group.add_argument('--project-count', action='store_true', default=None,
                                        help='Query the Openshift Master for Number of Pods')
        master_check_group.add_argument('--pod-count', action='store_true', default=None,
                                        help='Query the Openshift Master for Number of Running Pods')
        master_check_group.add_argument('--user-count', action='store_true', default=None,
                                        help='Query the Openshift Master for Number of Users')
        master_check_group.add_argument('--pv-info', action='store_true', default=None,
                                        help='Query the Openshift Master for Persistent Volumes Info')
        master_check_group.add_argument('--nodes-not-ready', action='store_true', default=None,
                                        help='Query the Openshift Master for number of nodes not in Ready state')

        self.args = parser.parse_args()

    def api_ping(self):
        """ Verify the Openshift API health is responding correctly """
        print("\nPerforming Openshift API ping check...")
        response = self.ora.get('/api/v1/nodes')
        print("\nOpenshift API ping is alive")
        print("Number of nodes in the Openshift cluster: %s" % len(response['items']))
        self.zagg_sender.add_zabbix_keys(
            {self.zabbix_api_key: 1,
             'openshift.master.node.count': len(response['items'])})

    def healthz_check(self):
        """ check the /healthz API call """
        print("\nPerforming /healthz check...")
        response = self.ora.get('/healthz', rtype='text')
        print("healthz check returns: %s " % response)
        # sent as the lowercase string 'true'/'false'
        self.zagg_sender.add_zabbix_keys(
            {self.zabbix_healthz_key: str('ok' in response).lower()})

    def metric_check(self):
        """ collect certain metrics from the /metrics API call """
        print("\nPerforming /metrics check...")
        response = self.ora.get('/metrics', rtype='text')

        for metric_type in text_string_to_metric_families(response):

            # Collect apiserver_request_latencies_summary samples with
            # resource="pods" and verb LIST/WATCHLIST, per quantile
            if metric_type.name == 'apiserver_request_latencies_summary':
                key_str = 'openshift.master.apiserver.latency.summary'
                for sample in metric_type.samples:
                    # sample is (name, labels_dict, value);
                    # FIX: 'in' instead of the Python-2-only dict.has_key()
                    if (sample[1]['resource'] == 'pods'
                            and 'quantile' in sample[1]
                            and 'LIST' in sample[1]['verb']):
                        curr_key_str = key_str + ".pods.quantile.%s.%s" % (
                            sample[1]['verb'],
                            sample[1]['quantile'].split('.')[1])
                        # NaN means no observations yet; report 0 instead
                        if math.isnan(sample[2]):
                            value = 0
                        else:
                            value = sample[2]
                        # microseconds -> milliseconds
                        self.zagg_sender.add_zabbix_keys(
                            {curr_key_str.lower(): int(value / 1000)})

            # Collect scheduler_e2e_scheduling_latency_microseconds quantiles
            if metric_type.name == 'scheduler_e2e_scheduling_latency_microseconds':
                for sample in metric_type.samples:
                    if 'quantile' in sample[1]:
                        key_str = 'openshift.master.scheduler.e2e.scheduling.latency'
                        curr_key_str = key_str + ".quantile.%s" % (
                            sample[1]['quantile'].split('.')[1])
                        if math.isnan(sample[2]):
                            value = 0
                        else:
                            value = sample[2]
                        self.zagg_sender.add_zabbix_keys(
                            {curr_key_str.lower(): int(value / 1000)})

        self.zagg_sender.add_zabbix_keys({'openshift.master.metric.ping': 1})

    def project_count(self):
        """ check the number of projects in Openshift """
        print("\nPerforming project count check...")

        # infra/system projects are not counted
        excluded_names = ['openshift', 'openshift-infra', 'default', 'ops-monitor']
        response = self.ora.get('/oapi/v1/projects')

        project_names = [project['metadata']['name'] for project in response['items']]
        valid_names = set(project_names) - set(excluded_names)

        print("Project count: %s" % len(valid_names))
        self.zagg_sender.add_zabbix_keys({'openshift.project.count': len(valid_names)})

    def pod_count(self):
        """ check the number of pods in Openshift """
        print("\nPerforming pod count check...")
        response = self.ora.get('/api/v1/pods')

        # Get running pod count (the first container's state stands in for
        # the pod's state)
        running_pod_count = 0
        for i in response['items']:
            if 'containerStatuses' in i['status']:
                if 'running' in i['status']['containerStatuses'][0]['state']:
                    running_pod_count += 1

        # Get running pod count on compute only nodes (non-infra)
        running_user_pod_count = 0
        for i in response['items']:
            if 'containerStatuses' in i['status']:
                if 'running' in i['status']['containerStatuses'][0]['state']:
                    if 'nodeSelector' in i['spec']:
                        if i['spec']['nodeSelector']['type'] == 'compute':
                            running_user_pod_count += 1

        print("Total pod count: %s" % len(response['items']))
        print("Running pod count: %s" % running_pod_count)
        print("User Running pod count: %s" % running_user_pod_count)

        self.zagg_sender.add_zabbix_keys(
            {'openshift.master.pod.running.count': running_pod_count,
             'openshift.master.pod.user.running.count': running_user_pod_count,
             'openshift.master.pod.total.count': len(response['items'])})

    def user_count(self):
        """ check the number of users in Openshift """
        print("\nPerforming user count check...")
        response = self.ora.get('/oapi/v1/users')
        print("Total user count: %s" % len(response['items']))
        self.zagg_sender.add_zabbix_keys(
            {'openshift.master.user.count': len(response['items'])})

    def pv_info(self):
        """ Gather info about the persistent volumes in Openshift """
        print("\nPerforming user persistent volume count...\n")
        response = self.ora.get('/api/v1/persistentvolumes')

        pv_capacity_total = 0
        pv_capacity_available = 0
        pv_types = {'Available': 0, 'Bound': 0, 'Released': 0, 'Failed': 0}

        # Dynamic items variables
        discovery_key_pv = 'disc.pv'
        item_prototype_macro_pv = '#OSO_PV'
        item_prototype_key_count = 'disc.pv.count'
        item_prototype_key_available = 'disc.pv.available'
        dynamic_pv_count = defaultdict(int)
        dynamic_pv_available = defaultdict(int)

        for item in response['items']:
            # gather dynamic pv counts, keyed by capacity string (e.g. '10Gi')
            dynamic_pv_count[item['spec']['capacity']['storage']] += 1

            # get count of each pv type available
            pv_types[item['status']['phase']] += 1

            # get info for the capacity and capacity available
            # NOTE(review): assumes capacities are always expressed in 'Gi'
            capacity = item['spec']['capacity']['storage']
            if item['status']['phase'] == 'Available':
                # get total available capacity
                pv_capacity_available = pv_capacity_available + int(capacity.replace('Gi', ''))
                # gather dynamic pv available counts
                dynamic_pv_available[item['spec']['capacity']['storage']] += 1
            pv_capacity_total = pv_capacity_total + int(capacity.replace('Gi', ''))

        print("Total Persistent Volume Total count: %s" % len(response['items']))
        print('Total Persistent Volume Capacity: %s' % pv_capacity_total)
        # FIX: "Persisten" typo in the printed message
        print('Total Persistent Volume Available Capacity: %s' % pv_capacity_available)

        self.zagg_sender.add_zabbix_keys(
            {'openshift.master.pv.total.count': len(response['items']),
             'openshift.master.pv.space.total': pv_capacity_total,
             'openshift.master.pv.space.available': pv_capacity_available})

        # FIX: .items() instead of the Python-2-only .iteritems()
        for key, value in pv_types.items():
            print("Total Persistent Volume %s count: %s" % (key, value))
            self.zagg_sender.add_zabbix_keys(
                {'openshift.master.pv.%s.count' % key.lower(): value})

        # Add dynamic items
        self.zagg_sender.add_zabbix_dynamic_item(discovery_key_pv,
                                                 item_prototype_macro_pv,
                                                 dynamic_pv_count.keys())
        for size, count in dynamic_pv_count.items():
            print("")
            print("Total Persistent Volume %s count: %s" % (size, count))
            print("Total Persistent Volume available %s count: %s"
                  % (size, dynamic_pv_available[size]))
            self.zagg_sender.add_zabbix_keys(
                {"%s[%s]" % (item_prototype_key_count, size): count,
                 "%s[%s]" % (item_prototype_key_available, size): dynamic_pv_available[size]})

    def nodes_not_ready(self):
        """ check the number of nodes in the cluster that are not ready"""
        print("\nPerforming nodes not ready check...")
        response = self.ora.get('/api/v1/nodes')

        nodes_not_schedulable = []
        for n in response['items']:
            if "unschedulable" in n['spec']:
                nodes_not_schedulable.append(n)

        nodes_not_ready = []
        for n in response['items']:
            has_ready_status = False
            for cond in n['status']['conditions']:
                if cond['reason'] == "KubeletReady":
                    has_ready_status = True
                    if cond['status'].lower() != "true":
                        nodes_not_ready.append(n)
            # FIX: 'not x' instead of 'x == False'; a node with no
            # KubeletReady condition at all also counts as not ready
            if not has_ready_status:
                nodes_not_ready.append(n)

        print("Count of nodes not schedulable: %s" % len(nodes_not_schedulable))
        print("Count of nodes not ready: %s" % len(nodes_not_ready))

        self.zagg_sender.add_zabbix_keys(
            {'openshift.master.nodesnotready.count': len(nodes_not_ready)})
        self.zagg_sender.add_zabbix_keys(
            {'openshift.master.nodesnotschedulable.count': len(nodes_not_schedulable)})
class OpsZaggClient(object):
    """ class to send data to zagg """

    def __init__(self):
        self.zagg_sender = None  # ZaggSender, built in config_zagg_sender()
        self.args = None         # argparse.Namespace, set in parse_args()
        self.config = None       # parsed yaml config dict
        self.pcp_metrics = []
        self.heartbeat = None

    def run(self):
        """ main function to run the script """
        self.parse_args()
        self.parse_config(self.args.config_file)
        self.config_zagg_sender()

        if self.args.send_pcp_metrics:
            self.add_pcp_metrics()

        if self.args.send_heartbeat:
            self.add_heartbeat()

        if self.args.key and self.args.value:
            self.add_zabbix_key()

        if self.args.discovery_key and self.args.macro_string and self.args.macro_names:
            self.add_zabbix_dynamic_item()

        self.zagg_sender.send_metrics()

    def parse_args(self):
        """ parse the args from the cli """
        parser = argparse.ArgumentParser(description='Zagg metric sender')
        parser.add_argument('--send-pcp-metrics', help="send pcp metrics to zagg",
                            action="store_true")
        parser.add_argument('--send-heartbeat', help="send heartbeat metric to zagg",
                            action="store_true")
        parser.add_argument('-s', '--host',
                            help='specify host name as registered in Zabbix')
        parser.add_argument('-z', '--zagg-url', help='url of Zagg server')
        parser.add_argument('--zagg-user', help='username of the Zagg server')
        parser.add_argument('--zagg-pass', help='Password of the Zagg server')
        parser.add_argument('--zagg-ssl-verify', default=None,
                            help='Whether to verify ssl certificates.')
        parser.add_argument('-v', '--verbose', action='store_true',
                            default=None, help='Verbose?')
        parser.add_argument('--debug', action='store_true',
                            default=None, help='Debug?')
        parser.add_argument('-c', '--config-file', help='ops-zagg-client config file',
                            default='/etc/openshift_tools/zagg_client.yaml')

        key_value_group = parser.add_argument_group('Sending a Key-Value Pair')
        key_value_group.add_argument('-k', '--key', help='zabbix key')
        key_value_group.add_argument('-o', '--value', help='zabbix value')

        low_level_discovery_group = parser.add_argument_group(
            'Sending a Low Level Discovery Item')
        low_level_discovery_group.add_argument('--discovery-key', help='discovery key')
        low_level_discovery_group.add_argument('--macro-string', help='macro string')
        low_level_discovery_group.add_argument(
            '--macro-names', help='comma separated list of macro names')

        self.args = parser.parse_args()

    def parse_config(self, config_file):
        """ parse config file """
        # FIX: yaml.safe_load + context-managed open() -- yaml.load can
        # construct arbitrary Python objects, and file() is a Python-2-only
        # builtin whose handle was never closed
        with open(config_file) as cfg:
            self.config = yaml.safe_load(cfg)

    def config_zagg_sender(self):
        """ configure the zagg_sender (CLI args override config-file values) """
        zagg_url = self.args.zagg_url if self.args.zagg_url else self.config['zagg']['url']
        zagg_user = self.args.zagg_user if self.args.zagg_user else self.config['zagg']['user']
        zagg_password = self.args.zagg_pass if self.args.zagg_pass else self.config['zagg']['pass']
        zagg_verbose = self.args.verbose if self.args.verbose else self.config['zagg']['verbose']
        zagg_debug = self.args.debug if self.args.debug else self.config['zagg']['debug']
        zagg_ssl_verify = self.args.zagg_ssl_verify if self.args.zagg_ssl_verify \
            else self.config['zagg']['ssl_verify']
        host = self.args.host if self.args.host else self.config['host']['name']

        # config-file values may arrive as the strings 'True'/'False';
        # normalize them to real booleans
        if isinstance(zagg_verbose, str):
            zagg_verbose = (zagg_verbose == 'True')

        if isinstance(zagg_debug, str):
            zagg_debug = (zagg_debug == 'True')

        if isinstance(zagg_ssl_verify, str):
            zagg_ssl_verify = (zagg_ssl_verify == 'True')

        zagg_conn = ZaggConnection(url=zagg_url,
                                   user=zagg_user,
                                   password=zagg_password,
                                   ssl_verify=zagg_ssl_verify,
                                   debug=zagg_debug,
                                  )

        self.zagg_sender = ZaggSender(host, zagg_conn, zagg_verbose, zagg_debug)

    def add_heartbeat(self):
        """ create a heartbeat metric """
        heartbeat = ZaggHeartbeat(templates=self.config['heartbeat']['templates'],
                                  hostgroups=self.config['heartbeat']['hostgroups'],
                                 )
        self.zagg_sender.add_heartbeat(heartbeat)

    def add_pcp_metrics(self):
        """ collect pcp metrics to send to ZaggSender """
        self.zagg_sender.add_pcp_metrics(self.config['pcp']['metrics'])

    def add_zabbix_key(self):
        """ send zabbix key/value pair to zagg """
        self.zagg_sender.add_zabbix_keys({self.args.key: self.args.value})

    def add_zabbix_dynamic_item(self):
        """ send zabbix low level discovery item to zagg """
        self.zagg_sender.add_zabbix_dynamic_item(self.args.discovery_key,
                                                 self.args.macro_string,
                                                 self.args.macro_names.split(','),
                                                )
class OpsZaggClient(object):
    """ Command line client that sends metrics, heartbeats, key/value pairs
    and low level discovery items to a zagg server.
    """

    def __init__(self):
        self.zagg_sender = None
        self.args = None
        self.config = None
        self.pcp_metrics = []
        self.heartbeat = None

    def run(self):
        """ main function to run the script """
        self.parse_args()
        self.parse_config(self.args.config_file)
        self.config_zagg_sender()

        if self.args.send_pcp_metrics:
            self.add_pcp_metrics()

        if self.args.send_heartbeat:
            self.add_heartbeat()

        if self.args.key and self.args.value:
            self.add_zabbix_key()

        if self.args.discovery_key and self.args.macro_string and self.args.macro_names:
            self.add_zabbix_dynamic_item()

        self.zagg_sender.send_metrics()

    def parse_args(self):
        """ parse the args from the cli into self.args """
        parser = argparse.ArgumentParser(description='Zagg metric sender')
        parser.add_argument('--send-pcp-metrics', help="send pcp metrics to zagg", action="store_true")
        parser.add_argument('--send-heartbeat', help="send heartbeat metric to zagg", action="store_true")
        parser.add_argument('-s', '--host', help='specify host name as registered in Zabbix')
        parser.add_argument('-z', '--zagg-url', help='url of Zagg server')
        parser.add_argument('--zagg-user', help='username of the Zagg server')
        parser.add_argument('--zagg-pass', help='Password of the Zagg server')
        parser.add_argument('--zagg-ssl-verify', default=None, help='Whether to verify ssl certificates.')
        parser.add_argument('-v', '--verbose', action='store_true', default=None, help='Verbose?')
        parser.add_argument('--debug', action='store_true', default=None, help='Debug?')
        parser.add_argument('-c', '--config-file', help='ops-zagg-client config file',
                            default='/etc/openshift_tools/zagg_client.yaml')

        key_value_group = parser.add_argument_group('Sending a Key-Value Pair')
        key_value_group.add_argument('-k', '--key', help='zabbix key')
        key_value_group.add_argument('-o', '--value', help='zabbix value')

        low_level_discovery_group = parser.add_argument_group('Sending a Low Level Discovery Item')
        low_level_discovery_group.add_argument('--discovery-key', help='discovery key')
        low_level_discovery_group.add_argument('--macro-string', help='macro string')
        low_level_discovery_group.add_argument('--macro-names', help='comma separated list of macro names')

        self.args = parser.parse_args()

    def parse_config(self, config_file):
        """ parse the yaml config file into self.config

        Uses yaml.safe_load because the config holds plain data (no need to
        allow arbitrary object construction), and a context manager so the
        file handle is always closed (the previous file() builtin leaked the
        handle and does not exist on python 3).
        """
        with open(config_file) as config:
            self.config = yaml.safe_load(config)

    def config_zagg_sender(self):
        """ configure the zagg_sender; CLI arguments override config values """
        zagg_url = self.args.zagg_url if self.args.zagg_url else self.config['zagg']['url']
        zagg_user = self.args.zagg_user if self.args.zagg_user else self.config['zagg']['user']
        zagg_password = self.args.zagg_pass if self.args.zagg_pass else self.config['zagg']['pass']
        zagg_verbose = self.args.verbose if self.args.verbose else self.config['zagg']['verbose']
        zagg_debug = self.args.debug if self.args.debug else self.config['zagg']['debug']
        zagg_ssl_verify = self.args.zagg_ssl_verify if self.args.zagg_ssl_verify else self.config['zagg']['ssl_verify']
        host = self.args.host if self.args.host else self.config['host']['name']

        # Config-file values may arrive as the strings 'True'/'False';
        # normalize them to real booleans.
        if isinstance(zagg_verbose, str):
            zagg_verbose = (zagg_verbose == 'True')

        if isinstance(zagg_debug, str):
            zagg_debug = (zagg_debug == 'True')

        if isinstance(zagg_ssl_verify, str):
            zagg_ssl_verify = (zagg_ssl_verify == 'True')

        zagg_conn = ZaggConnection(url=zagg_url,
                                   user=zagg_user,
                                   password=zagg_password,
                                   ssl_verify=zagg_ssl_verify,
                                   debug=zagg_debug,
                                  )

        self.zagg_sender = ZaggSender(host, zagg_conn, zagg_verbose, zagg_debug)

    def add_heartbeat(self):
        """ create a heartbeat metric and queue it on the zagg_sender """
        heartbeat = ZaggHeartbeat(templates=self.config['heartbeat']['templates'],
                                  hostgroups=self.config['heartbeat']['hostgroups'],
                                 )
        self.zagg_sender.add_heartbeat(heartbeat)

    def add_pcp_metrics(self):
        """ collect pcp metrics (named in the config file) to send to ZaggSender """
        self.zagg_sender.add_pcp_metrics(self.config['pcp']['metrics'])

    def add_zabbix_key(self):
        """ queue the --key / --value zabbix pair for sending to zagg """
        self.zagg_sender.add_zabbix_keys({self.args.key: self.args.value})

    def add_zabbix_dynamic_item(self):
        """ queue a zabbix low level discovery item built from the CLI args """
        # --macro-names is a comma separated string; split it into a list here.
        self.zagg_sender.add_zabbix_dynamic_item(self.args.discovery_key,
                                                 self.args.macro_string,
                                                 self.args.macro_names.split(','),
                                                )
ITEM_PROTOTYPE_KEY_INODE = 'disc.filesys.inodes.pused'

def filter_out_docker_filesystems(metric_dict, filesystem_filter):
    """ Strip the metric-name prefix from each key and drop docker-managed
    filesystems.

    metric_dict       -- mapping of full pcp metric name -> value
    filesystem_filter -- prefix to remove from each key (e.g. 'filesys.full.')
    """
    # .items() instead of the python-2-only .iteritems(): identical
    # iteration on python 2 and also valid on python 3.
    filtered_dict = {k.replace(filesystem_filter, ''): v
                     for (k, v) in metric_dict.items()
                     if 'docker' not in k}
    return filtered_dict

# Get the disk space (percent full) per filesystem.
FILESYS_FULL_METRICS = pminfo.get_metrics(FILESYS_FULL_METRIC)

FILTERED_FILESYS_METRICS = filter_out_docker_filesystems(FILESYS_FULL_METRICS, 'filesys.full.')

# Register each filesystem as a dynamic (low level discovery) item.
ZS.add_zabbix_dynamic_item(DISCOVERY_KEY_FS, ITEM_PROTOTYPE_MACRO_FS, FILTERED_FILESYS_METRICS.keys())

for filesys_name, filesys_full in FILTERED_FILESYS_METRICS.items():
    ZS.add_zabbix_keys({'%s[%s]' % (ITEM_PROTOTYPE_KEY_FULL, filesys_name): filesys_full})

# Get filesystem inode (percent used) metrics.
FILESYS_INODE_METRICS = pminfo.get_metrics(derived_metrics=FILESYS_INODE_DERIVED_METRICS)

FILTERED_FILESYS_INODE_METRICS = filter_out_docker_filesystems(FILESYS_INODE_METRICS, 'filesys.inodes.pused.')

for filesys_name, filesys_inodes in FILTERED_FILESYS_INODE_METRICS.items():
    ZS.add_zabbix_keys({'%s[%s]' % (ITEM_PROTOTYPE_KEY_INODE, filesys_name): filesys_inodes})

ZS.send_metrics()
class DockerContainerUsageCli(object):
    ''' This is the class that actually pulls eveyrthing together into a cli script.

    Collects per-container cpu/memory stats from the local docker daemon and
    queues them on a ZaggSender as zabbix keys and dynamic (low level
    discovery) items.
    '''
    def __init__(self, config_file=None):
        # config_file: optional path to the yaml config; falls back to the
        # default location under /etc/openshift_tools.
        if not config_file:
            self.config_file = '/etc/openshift_tools/container_metrics.yml'
        else:
            self.config_file = config_file

        self.config = None
        self.parse_config()

        # Talk to the local docker daemon over its unix socket.
        self.cli = AutoVersionClient(base_url='unix://var/run/docker.sock', timeout=120)
        self.docker_util = DockerUtil(self.cli)
        self.zagg_sender = ZaggSender(verbose=True)

    def parse_config(self):
        """ parse config file """
        # Only parse once; raise early with a clear message if it's missing.
        # NOTE(review): yaml.load() without an explicit Loader and the
        # Python-2-only file() builtin (handle never closed); the config is
        # presumably a trusted local file.
        if not self.config:
            if not os.path.exists(self.config_file):
                raise IOError(self.config_file + " does not exist.")

            self.config = yaml.load(file(self.config_file))

    def format_ctr_name(self, ctr_name):
        ''' Takes a container name and if there's a name_format_regex specified, it applies it

        Returns the reformatted name for the first usage_checks entry whose
        name_match_regex matches; otherwise returns ctr_name unchanged.
        '''
        for item in self.config['usage_checks']:
            name_match_regex = item['name_match_regex']
            if item.has_key('name_format_regex') and re.match(name_match_regex, ctr_name):
                try:
                    name_format_regex = item['name_format_regex']
                    new_name = re.sub(name_match_regex, name_format_regex, ctr_name)
                    return new_name
                except sre_constants.error as ex:
                    # Just use the full name (we don't want to die because of name formatting)
                    print "\nError: %s: [%s]. Using full name [%s].\n" % (ex.message, name_format_regex, ctr_name)
                    return ctr_name

        return ctr_name

    def main(self):
        ''' The main entrypoint of the cli '''
        # One regex per configured usage check; containers matching any of
        # them are collected.
        ctr_regexes = [uchk['name_match_regex'] for uchk in self.config['usage_checks']]
        use_cgroups = self.config.get('use_cgroups', False)
        ctrs = self.docker_util.get_ctrs_matching_names(ctr_regexes)

        for ctr_name, ctr in ctrs.iteritems():
            (cpu_stats, mem_stats) = self.docker_util.get_ctr_stats(ctr, use_cgroups=use_cgroups)
            formatted_ctr_name = self.format_ctr_name(ctr_name)

            # Add the container hostnames as macros for the dynamic item.
            self.zagg_sender.add_zabbix_dynamic_item(ZBX_DOCKER_DISC_KEY, ZBX_DOCKER_DISC_MACRO, [formatted_ctr_name])

            # One zabbix key per stat, parameterized by the container name.
            data = {
                '%s[%s]' % (ZBX_CTR_CPU_USED_PCT_KEY, formatted_ctr_name): cpu_stats.used_pct,
                '%s[%s]' % (ZBX_CTR_MEM_USED_KEY, formatted_ctr_name): mem_stats.used,
                '%s[%s]' % (ZBX_CTR_MEM_LIMIT_KEY, formatted_ctr_name): mem_stats.limit,
                '%s[%s]' % (ZBX_CTR_MEM_LIMIT_USED_PCT_KEY, formatted_ctr_name): mem_stats.limit_used_pct,
                '%s[%s]' % (ZBX_CTR_MEM_FAILCNT_KEY, formatted_ctr_name): mem_stats.failcnt,
            }

            # Echo what will be sent, for operator visibility.
            print "%s:" % formatted_ctr_name
            for k, v in data.iteritems():
                print " %s: %s" % (k, v)
            print

            self.zagg_sender.add_zabbix_keys(data)

        # Actually send the metrics
        self.zagg_sender.send_metrics()
class OpsZaggClient(object):
    """ Command line client that sends heartbeats, key/value pairs and low
    level discovery items to a zagg server.
    """

    def __init__(self):
        self.zagg_sender = None
        self.args = None
        self.config = None
        self.heartbeat = None

    def run(self):
        """ main function to run the script """
        self.parse_args()
        self.parse_config(self.args.config_file)
        self.config_zagg_sender()

        if self.args.send_heartbeat:
            self.add_heartbeat()

        if self.args.key and self.args.value:
            self.add_zabbix_key()

        if self.args.discovery_key and self.args.macro_string and self.args.macro_names:
            self.add_zabbix_dynamic_item()

        self.zagg_sender.send_metrics()

    def parse_args(self):
        """ parse the args from the cli into self.args """
        parser = argparse.ArgumentParser(description="Zagg metric sender")
        parser.add_argument("--send-heartbeat", help="send heartbeat metric to zagg", action="store_true")
        parser.add_argument("-s", "--host", help="specify host name as registered in Zabbix")
        parser.add_argument("-z", "--zagg-url", help="url of Zagg server")
        parser.add_argument("--zagg-user", help="username of the Zagg server")
        parser.add_argument("--zagg-pass", help="Password of the Zagg server")
        parser.add_argument("--zagg-ssl-verify", default=None, help="Whether to verify ssl certificates.")
        parser.add_argument("-v", "--verbose", action="store_true", default=None, help="Verbose?")
        parser.add_argument("--debug", action="store_true", default=None, help="Debug?")
        parser.add_argument(
            "-c", "--config-file", help="ops-zagg-client config file",
            default="/etc/openshift_tools/zagg_client.yaml"
        )

        key_value_group = parser.add_argument_group("Sending a Key-Value Pair")
        key_value_group.add_argument("-k", "--key", help="zabbix key")
        key_value_group.add_argument("-o", "--value", help="zabbix value")

        low_level_discovery_group = parser.add_argument_group("Sending a Low Level Discovery Item")
        low_level_discovery_group.add_argument("--discovery-key", help="discovery key")
        low_level_discovery_group.add_argument("--macro-string", help="macro string")
        low_level_discovery_group.add_argument("--macro-names", help="comma separated list of macro names")

        self.args = parser.parse_args()

    def parse_config(self, config_file):
        """ parse the yaml config file into self.config

        Uses yaml.safe_load because the config holds plain data (no need to
        allow arbitrary object construction), and a context manager so the
        file handle is always closed (the previous file() builtin leaked the
        handle and does not exist on python 3).
        """
        with open(config_file) as config:
            self.config = yaml.safe_load(config)

    def config_zagg_sender(self):
        """ configure the zagg_sender; CLI arguments override config values """
        zagg_url = self.args.zagg_url if self.args.zagg_url else self.config["zagg"]["url"]
        zagg_user = self.args.zagg_user if self.args.zagg_user else self.config["zagg"]["user"]
        zagg_password = self.args.zagg_pass if self.args.zagg_pass else self.config["zagg"]["pass"]
        zagg_verbose = self.args.verbose if self.args.verbose else self.config["zagg"]["verbose"]
        zagg_debug = self.args.debug if self.args.debug else self.config["zagg"]["debug"]
        zagg_ssl_verify = self.args.zagg_ssl_verify if self.args.zagg_ssl_verify else self.config["zagg"]["ssl_verify"]
        host = self.args.host if self.args.host else self.config["host"]["name"]

        # Config-file values may arrive as the strings "True"/"False";
        # normalize them to real booleans.
        if isinstance(zagg_verbose, str):
            zagg_verbose = zagg_verbose == "True"

        if isinstance(zagg_debug, str):
            zagg_debug = zagg_debug == "True"

        if isinstance(zagg_ssl_verify, str):
            zagg_ssl_verify = zagg_ssl_verify == "True"

        zagg_conn = ZaggConnection(
            url=zagg_url, user=zagg_user, password=zagg_password,
            ssl_verify=zagg_ssl_verify, debug=zagg_debug
        )

        self.zagg_sender = ZaggSender(host, zagg_conn, zagg_verbose, zagg_debug)

    def add_heartbeat(self):
        """ create a heartbeat metric and queue it on the zagg_sender """
        heartbeat = ZaggHeartbeat(
            templates=self.config["heartbeat"]["templates"],
            hostgroups=self.config["heartbeat"]["hostgroups"]
        )
        self.zagg_sender.add_heartbeat(heartbeat)

    def add_zabbix_key(self):
        """ queue the --key / --value zabbix pair for sending to zagg """
        self.zagg_sender.add_zabbix_keys({self.args.key: self.args.value})

    def add_zabbix_dynamic_item(self):
        """ queue a zabbix low level discovery item built from the CLI args """
        # --macro-names is a comma separated string; split it into a list here.
        self.zagg_sender.add_zabbix_dynamic_item(
            self.args.discovery_key, self.args.macro_string, self.args.macro_names.split(",")
        )
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#This is not a module, but pylint thinks it is. This is a command.
#pylint: disable=invalid-name

from openshift_tools.monitoring.zagg_sender import ZaggSender
from openshift_tools.monitoring import pminfo

FILESYSTEM_METRIC = ['filesys.full']
DISCOVERY_KEY_FS = 'disc.filesys'
ITEM_PROTOTYPE_MACRO_FS = '#OSO_FILESYS'
ITEM_PROTOTYPE_KEY_FULL = 'disc.filesys.full'

# Get the percent-full value for every mounted filesystem from pcp.
FILESYS_METRICS = pminfo.get_metrics(FILESYSTEM_METRIC)

# Strip the 'filesys.full.' prefix from the metric names and drop
# docker-managed filesystems.
# .items() instead of the python-2-only .iteritems(): identical iteration
# on python 2 and also valid on python 3.
FILTERED_FILESYS_METRICS = {k.replace('filesys.full.', ''): v
                            for (k, v) in FILESYS_METRICS.items()
                            if 'docker' not in k}

ZS = ZaggSender()

# Register each filesystem as a dynamic (low level discovery) item.
ZS.add_zabbix_dynamic_item(DISCOVERY_KEY_FS, ITEM_PROTOTYPE_MACRO_FS, FILTERED_FILESYS_METRICS.keys())

for filesys_name, filesys_full in FILTERED_FILESYS_METRICS.items():
    ZS.add_zabbix_keys({'%s[%s]' % (ITEM_PROTOTYPE_KEY_FULL, filesys_name): filesys_full})

ZS.send_metrics()
class CertificateReporting(object):
    ''' class with ability to parse through x509 certificates to extract
        and report to zabbix the expiration date assocated with the cert
    '''

    def __init__(self):
        ''' constructor '''
        self.args = None
        self.current_date = datetime.datetime.today()
        self.parse_args()
        self.zsend = ZaggSender(debug=self.args.debug)

    def dprint(self, msg):
        ''' debug printer: only emits when --debug was given '''
        # print(msg) instead of the py2 statement form: identical output on
        # python 2 for a single argument and also valid on python 3.
        if self.args.debug:
            print(msg)

    def parse_args(self):
        ''' parse command line args into self.args '''
        argparser = argparse.ArgumentParser(description='certificate checker')
        argparser.add_argument('--debug', default=False, action='store_true')
        argparser.add_argument(
            '--cert-list', default="/etc/origin", type=str,
            help='comma-separated list of dirs/certificates')
        self.args = argparser.parse_args()

    def days_to_expiration(self, cert_file):
        ''' return days to expiration for a certificate

        cert_file -- path to a PEM-encoded x509 certificate
        '''
        crypto = OpenSSL.crypto
        # Context manager so the certificate file handle is always closed
        # (the previous bare open(...).read() leaked it).
        with open(cert_file) as cert_fh:
            cert = cert_fh.read()
        certificate = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
        expiration_date_asn1 = certificate.get_notAfter()

        # expiration returned in ASN.1 GENERALIZEDTIME format
        # YYYYMMDDhhmmss with a trailing 'Z'
        expiration_date = parser.parse(expiration_date_asn1).replace(tzinfo=None)

        delta = expiration_date - self.current_date
        return delta.days

    def process_certificates(self):
        ''' check through the --cert-list of certificates/directories '''
        for cert in self.args.cert_list.split(','):
            if not os.path.exists(cert):
                self.dprint("{} does not exist. skipping.".format(cert))
                continue

            mode = os.stat(cert).st_mode
            if S_ISDIR(mode):
                # Directories are walked recursively for *.crt files.
                self.all_certs_in_dir(cert)
            elif S_ISREG(mode):
                days = self.days_to_expiration(cert)
                self.dprint("{} in {} days".format(cert, days))
                self.add_to_zabbix(cert, days)
            else:
                self.dprint("not a file. not a directory. skipping.")

        # now push out all queued up item(s) to zabbix
        self.zsend.send_metrics()

    def add_to_zabbix(self, certificate, days_to_expiration):
        ''' queue up item for submission to zabbix '''
        self.zsend.add_zabbix_dynamic_item(CERT_DISC_KEY, CERT_DISC_MACRO, [certificate])
        zbx_key = "{}[{}]".format(CERT_DISC_KEY, certificate)
        self.zsend.add_zabbix_keys({zbx_key: days_to_expiration})

    def all_certs_in_dir(self, directory):
        ''' recursively go through all *.crt files in 'directory' '''
        for root, _, filenames in os.walk(directory):
            for filename in filenames:
                if filename.endswith('.crt'):
                    full_path = os.path.join(root, filename)
                    days = self.days_to_expiration(full_path)
                    self.dprint("{} in {} days".format(full_path, days))
                    self.add_to_zabbix(full_path, days)