def test_metrics_group_hash(self):
    """Metric creation timestamps must not influence a metrics group's hash."""
    timestamp = round(time.time(), METRICS_TIMESTAMP_PRECISION)
    group_a = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
    group_b = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
    shared_dimension = PanoptesMetricDimension(u'if_alias', u'bar')
    group_a.add_dimension(shared_dimension)
    group_b.add_dimension(shared_dimension)
    # Identical dimensions and no metrics yet: hashes must agree.
    self.assertEqual(group_a.__hash__(), group_b.__hash__())
    group_a.add_metric(PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                                      metric_creation_timestamp=timestamp))
    group_b.add_metric(PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                                      metric_creation_timestamp=timestamp + 0.01))
    # Same metric name/value/type with differing creation times: hashes still agree.
    self.assertEqual(group_a.__hash__(), group_b.__hash__())
def _get_storage_metrics(self):
    """
    Poll flash/virtual-memory storage usage via the SNMP Host Resources MIB and
    publish one 'storage' PanoptesMetricsGroup per storage entity.

    Any SNMP/parsing failure is reported through the polling status rather than
    raised; a second, independent try block emits whatever was collected.
    """
    self._storage_metrics = dict()
    try:
        # Only flash and virtual memory storage entries are of interest here.
        host_resource_storage_indices = \
            self._get_host_resource_indices(oid_filter=hrStorageType,
                                            host_resource_strings=[hrStorageFlashMemory, hrStorageVirtualMemory])
        for index in host_resource_storage_indices:
            storage_descriptor = self.host_resources_map[hrStorageDescr + u'.' + index]  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor] = dict()
            # hrStorage* values are expressed in allocation units; multiply to get absolute sizes.
            allocation_units = int(
                self.host_resources_map[hrStorageAllocationUnits + u'.' + index])  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor][u'storage_used'] = \
                int(self.host_resources_map[hrStorageUsed + u'.' + index]) * allocation_units  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor][u'storage_total'] = \
                int(self.host_resources_map[hrStorageSize + u'.' + index]) * allocation_units  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor][u'storage_type'] = \
                STORAGE_TYPE_REVERSE_MAP[self.host_resources_map[hrStorageType + u'.' + index]]  # pylint: disable=E1136
    except Exception as e:
        self._polling_status.handle_exception(u'storage', e)
    # todo Do we need to pop the stats from self._storage_metrics?
    # NOTE(review): sibling collectors (_get_memory_metrics/_get_system_cpu_metrics) pop their
    # partially-filled entry on exception; a partially-filled entry here could be emitted below - confirm.
    try:
        if len(self._storage_metrics) > 0:
            for storage_entity in self._storage_metrics:
                storage_metrics_group = PanoptesMetricsGroup(self._device, u'storage', self._execute_frequency)
                storage_metrics_group.add_dimension(
                    PanoptesMetricDimension(u'storage_type',
                                            self._storage_metrics[storage_entity][u'storage_type']))
                storage_metrics_group.add_dimension(PanoptesMetricDimension(u'storage_entity', storage_entity))
                storage_metrics_group.add_metric(
                    PanoptesMetric(u'storage_used',
                                   self._storage_metrics[storage_entity][u'storage_used'],
                                   PanoptesMetricType.GAUGE))
                storage_metrics_group.add_metric(
                    PanoptesMetric(u'storage_total',
                                   self._storage_metrics[storage_entity][u'storage_total'],
                                   PanoptesMetricType.GAUGE))
                self._arista_device_metrics.add(storage_metrics_group)
            self._polling_status.handle_success(u'storage')
            self._logger.debug(u'Found Storage metrics "%s" for %s: %s' % (
                self._storage_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'storage', e)
def run(self, context):
    """
    Ping the device described by the plugin context and return its ping metrics.

    Args:
        context: plugin context supplying the logger, the target resource
            (`context.data`) and the `[main]` plugin configuration.

    Returns:
        PanoptesMetricsGroupSet: a single 'ping' metrics group containing the
        PING_METRICS gauges plus a 'ping_status' gauge.

    Raises:
        PanoptesPollingPluginConfigurationError: if the configured 'count' or
            'timeout' is present but not an integer.
    """
    logger = context.logger
    resource = context.data
    host = resource.resource_endpoint
    config = context.config[u'main']
    execute_frequency = int(config[u'execute_frequency'])
    start_time = time()
    # 'count' and 'timeout' are optional; fall back to defaults when absent,
    # but fail loudly on a malformed value.
    try:
        count = int(config[u'count'])
    except KeyError:
        count = DEFAULT_PING_COUNT
        logger.info(u'For device {}, count not set - setting it to {}'.format(host, DEFAULT_PING_COUNT))
    except ValueError:
        raise PanoptesPollingPluginConfigurationError(
            u'For device {}, configured count is not an integer: {}'.format(host, config[u'count']))
    try:
        timeout = int(config[u'timeout'])
    except KeyError:
        timeout = DEFAULT_PING_TIMEOUT
        logger.info(u'For device {}, timeout not set - setting it to {}s'.format(host, DEFAULT_PING_TIMEOUT))
    except ValueError:
        raise PanoptesPollingPluginConfigurationError(
            u'For device {}, configured timeout is not an integer: {}'.format(host, config[u'timeout']))
    ping_metrics_group = PanoptesMetricsGroup(resource, u'ping', execute_frequency)
    try:
        panoptes_ping = PanoptesPing(hostname=host, count=count, timeout=timeout)
        for metric, object_property in list(PING_METRICS.items()):
            ping_metrics_group.add_metric(PanoptesMetric(metric,
                                                         getattr(panoptes_ping, object_property),
                                                         PanoptesMetricType.GAUGE))
        # 100% packet loss counts as a ping failure even though the ping call itself succeeded.
        if panoptes_ping.packet_loss_pct == 100.0:
            ping_status = DEVICE_METRICS_STATES.PING_FAILURE
        else:
            ping_status = DEVICE_METRICS_STATES.SUCCESS
    except Exception as e:
        # logger.warn is a deprecated alias of logger.warning - use the supported name.
        logger.warning(u'For device {}, ping failed: {}'.format(host, repr(e)))
        ping_status = DEVICE_METRICS_STATES.PING_FAILURE
    ping_metrics_group.add_metric(PanoptesMetric(u'ping_status', ping_status, PanoptesMetricType.GAUGE))
    logger.debug(u'For device {}, ping results are: {}'.format(host, str(ping_metrics_group.json)))
    ping_metrics_group_set = PanoptesMetricsGroupSet()
    ping_metrics_group_set.add(ping_metrics_group)
    end_time = time()
    logger.info(u'Done pinging device "{}" in {} seconds, {} metric groups'.format(
        host, round(end_time - start_time, 2), len(ping_metrics_group_set)))
    return ping_metrics_group_set
def _get_crypto_metrics(self):
    """
    Poll per-'Crypto Asic' CPU packet counters over SNMP and publish one
    'crypto' PanoptesMetricsGroup per crypto CPU.

    Failures in either phase (collection, emission) are routed to the polling
    status instead of being raised.
    """
    self._crypto_metrics = dict()
    try:
        # Entity indices whose description matches 'Crypto Asic'; only the last
        # OID component identifies the CPU entry.
        crypto_cpu_entry_indices = set([
            x.split(u'.')[-1]
            for x in self._get_entity_indices(ent_physical_class=u'cpu', ent_strings=[u'Crypto Asic'])
        ])
        interval = self._get_crypto_cpu_interval()
        for index in crypto_cpu_entry_indices:
            self._crypto_metrics[index] = dict()
            packets_in = int(
                self._snmp_connection.get(oid=cepStatsMeasurement + u'.' + index + u'.' + interval + pktsIn).value)
            packets_out = int(
                self._snmp_connection.get(oid=cepStatsMeasurement + u'.' + index + u'.' + interval + pktsOut).value)
            self._crypto_metrics[index][u'packets_in'] = packets_in
            self._crypto_metrics[index][u'packets_out'] = packets_out
            self._crypto_metrics[index][u'cpu_name'] = self._get_cpu_name(index)
    except Exception as e:
        self._polling_status.handle_exception(u'crypto', e)
    try:
        if self._crypto_metrics:
            for cpu_id in self._crypto_metrics:
                crypto_metrics_group = PanoptesMetricsGroup(self._device, u'crypto', self._execute_frequency)
                crypto_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_no', cpu_id))
                crypto_metrics_group.add_dimension(
                    PanoptesMetricDimension(u'cpu_name', self._crypto_metrics[cpu_id][u'cpu_name']))
                # Packet counts are monotonically increasing, hence COUNTER type.
                crypto_metrics_group.add_metric(
                    PanoptesMetric(u'packets_in', self._crypto_metrics[cpu_id][u'packets_in'],
                                   PanoptesMetricType.COUNTER))
                crypto_metrics_group.add_metric(
                    PanoptesMetric(u'packets_out', self._crypto_metrics[cpu_id][u'packets_out'],
                                   PanoptesMetricType.COUNTER))
                self._asr_device_metrics.add(crypto_metrics_group)
            self._polling_status.handle_success(u'crypto')
            self._logger.debug(u'Found crypto metrics "%s" for %s: %s' % (
                self._crypto_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'crypto', e)
def test_panoptes_metric(self):
    """Exercise PanoptesMetric constructor validation and equality semantics."""
    # Constructor argument combinations that must fail the internal assertions.
    for bad_args in ((None, 0, PanoptesMetricType.GAUGE),
                     ('test_metric', None, PanoptesMetricType.GAUGE),
                     ('test_metric', 0, None),
                     ('test_metric', True, PanoptesMetricType.GAUGE)):
        with self.assertRaises(AssertionError):
            PanoptesMetric(*bad_args)
    # '1' is rejected as an invalid metric name.
    with self.assertRaises(ValueError):
        PanoptesMetric('1', 0, PanoptesMetricType.GAUGE)
    gauge = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE)
    self.assertEqual(gauge.metric_name, 'test_metric')
    self.assertEqual(gauge.metric_value, 0)
    self.assertEqual(gauge.metric_type, PanoptesMetricType.GAUGE)
    self.assertNotEqual(gauge, None)
    # Equal name/value/type compares equal; a different value or type does not.
    self.assertEqual(gauge, PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE))
    self.assertNotEqual(gauge, PanoptesMetric('test_metric', 1, PanoptesMetricType.GAUGE))
    self.assertNotEqual(gauge, PanoptesMetric('test_metric', 1, PanoptesMetricType.COUNTER))
def _get_load_metrics(self):
    """Poll the QFP processing load over SNMP and emit it as a 'load' metrics group."""
    try:
        qfp_interval = self._get_qfp_interval()
        # n.b. There should only be one qfp entry per crypto device.
        qfp_index = self._get_entity_indices(ent_physical_class=u'cpu',
                                             ent_strings=[u'qfp', u'QFP'])[0].split(u'.')[-1]
        self._load_metrics = dict()
        self._load_metrics[u'processing_load'] = int(
            self._snmp_connection.get(
                oid=ceqfpUtilProcessingLoad + u'.' + qfp_index + u'.' + qfp_interval).value)
    except Exception as e:
        self._polling_status.handle_exception(u'load', e)
    try:
        if self._load_metrics:
            load_metrics_group = PanoptesMetricsGroup(self._device, u'load', self._execute_frequency)
            load_metrics_group.add_metric(PanoptesMetric(u'processing_load',
                                                         self._load_metrics[u'processing_load'],
                                                         PanoptesMetricType.GAUGE))
            self._asr_device_metrics.add(load_metrics_group)
            self._polling_status.handle_success(u'load')
            self._logger.debug(u'Found load metrics "%s" for %s: %s' % (
                self._load_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'load', e)
def test_panoptes_metric_dimension(self):
    """Validate PanoptesMetricDimension construction, serialization and equality."""
    # Disallowed characters in name or value are rejected.
    with self.assertRaises(ValueError):
        PanoptesMetricDimension(u'contain$_invalid_character$', u'bar')
    with self.assertRaises(ValueError):
        PanoptesMetricDimension(u'foo', u'contains_pipe|')
    base_dimension = PanoptesMetricDimension(u'if_alias', u'bar')
    self.assertEqual(base_dimension.json,
                     u'{"dimension_name": "if_alias", "dimension_value": "bar"}')
    self.assertEqual(repr(base_dimension), u'PanoptesMetricDimension[if_alias|bar]')
    gauge_metric = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                                  metric_creation_timestamp=mock_time.return_value)
    # A dimension is never equal to a metric...
    with self.assertRaises(AssertionError):
        assert base_dimension == gauge_metric
    # ...nor to a dimension with the same name but a different value.
    with self.assertRaises(AssertionError):
        assert base_dimension == PanoptesMetricDimension(u'if_alias', u'foo')
    # Same name and value: equal.
    assert base_dimension == PanoptesMetricDimension(u'if_alias', u'bar')
def test_panoptes_metric(self):
    """Exercise PanoptesMetric validation, accessors, repr and equality."""
    # Constructor argument combinations that must fail the internal assertions.
    for invalid_args in ((None, 0, PanoptesMetricType.GAUGE),
                         (u'test_metric', None, PanoptesMetricType.GAUGE),
                         (u'test_metric', 0, None),
                         (u'test_metric', True, PanoptesMetricType.GAUGE)):
        with self.assertRaises(AssertionError):
            PanoptesMetric(*invalid_args)
    # u'1' is rejected as an invalid metric name.
    with self.assertRaises(ValueError):
        PanoptesMetric(u'1', 0, PanoptesMetricType.GAUGE)
    metric = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                            metric_creation_timestamp=mock_time.return_value)
    self.assertEqual(metric.metric_name, u'test_metric')
    self.assertEqual(metric.metric_value, 0)
    self.assertEqual(metric.metric_timestamp, mock_time.return_value)
    self.assertEqual(metric.metric_type, PanoptesMetricType.GAUGE)
    self.assertEqual(repr(metric),
                     u"PanoptesMetric[test_metric|0|GAUGE|{}]".format(mock_time.return_value))
    self.assertNotEqual(metric, None)
    # Check PanoptesMetric.__eq__: creation timestamp is ignored, everything else is not.
    assert metric == PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE)
    for unequal in (PanoptesMetricDimension(u"test", u"value"),
                    PanoptesMetric(u'different_name', 0, PanoptesMetricType.GAUGE),
                    PanoptesMetric(u'test_metric', 1, PanoptesMetricType.GAUGE),
                    PanoptesMetric(u'test_metric', 0, PanoptesMetricType.COUNTER)):
        with self.assertRaises(AssertionError):
            assert metric == unequal
def test_panoptes_metric(self):
    """Exercise PanoptesMetric validation, accessors, dict-style repr and equality."""
    # Constructor argument combinations that must fail the internal assertions.
    for invalid_args in ((None, 0, PanoptesMetricType.GAUGE),
                         ('test_metric', None, PanoptesMetricType.GAUGE),
                         ('test_metric', 0, None),
                         ('test_metric', True, PanoptesMetricType.GAUGE)):
        with self.assertRaises(AssertionError):
            PanoptesMetric(*invalid_args)
    # '1' is rejected as an invalid metric name.
    with self.assertRaises(ValueError):
        PanoptesMetric('1', 0, PanoptesMetricType.GAUGE)
    metric = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE,
                            metric_creation_timestamp=mock_time.return_value)
    self.assertEqual(metric.metric_name, 'test_metric')
    self.assertEqual(metric.metric_value, 0)
    self.assertEqual(metric.metric_timestamp, mock_time.return_value)
    self.assertEqual(metric.metric_type, PanoptesMetricType.GAUGE)
    self.assertEqual(
        repr(metric),
        "{{'metric_creation_timestamp': {}, 'metric_type': 'gauge', 'metric_name': 'test_metric', "
        "'metric_value': 0}}".format(mock_time.return_value))
    self.assertNotEqual(metric, None)
    # Check PanoptesMetric.__eq__: creation timestamp is ignored, everything else is not.
    assert metric == PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE)
    for unequal in (PanoptesMetricDimension("test", "value"),
                    PanoptesMetric('different_name', 0, PanoptesMetricType.GAUGE),
                    PanoptesMetric('test_metric', 1, PanoptesMetricType.GAUGE),
                    PanoptesMetric('test_metric', 0, PanoptesMetricType.COUNTER)):
        with self.assertRaises(AssertionError):
            assert metric == unequal
def run(self, context):
    """Emit a single constant gauge metric wrapped in a one-group metrics set."""
    test_group = PanoptesMetricsGroup(self.panoptes_resource, "Test", _TEST_INTERVAL)
    test_group.add_metric(PanoptesMetric("test", 0.0, PanoptesMetricType.GAUGE))
    result_set = PanoptesMetricsGroupSet()
    result_set.add(test_group)
    return result_set
def test_panoptes_metric_json_and_repr(self):
    """The JSON serialization of a metric must round-trip to the expected dict."""
    gauge = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE, mock_time.return_value)
    expected_payload = {
        "metric_name": "test_metric",
        "metric_value": 0,
        "metric_type": "gauge",
        "metric_creation_timestamp": mock_time.return_value,
    }
    self.assertEqual(ordered(json.loads(gauge.json)), ordered(expected_payload))
def _get_memory_metrics(self):
    """
    Compute DRAM usage from SNMP Host Resources storage entries and publish a
    'memory' PanoptesMetricsGroup per memory type.

    Storage indices .1 and .3 are hard-coded here; presumably .1 is total RAM
    and .3 the cache entry on this platform - verify against the device MIB.
    """
    self._memory_metrics = dict()
    self._memory_metrics[u'dram'] = dict()
    try:
        allocation_units = int(
            self.host_resources_map[hrStorageAllocationUnits + u'.1'])  # pylint: disable=E1136
        # Subtract the cached entry so 'used' reflects real consumption.
        memory_used = (
            int(self.host_resources_map[hrStorageUsed + u'.1']) -
            int(self.host_resources_map[hrStorageUsed + u'.3'])
        ) * allocation_units  # total - cached
        self._memory_metrics[u'dram'][u'memory_used'] = memory_used
        memory_total = \
            int(self.host_resources_map[hrStorageSize + u'.1']) * allocation_units  # pylint: disable=E1136
        self._memory_metrics[u'dram'][u'memory_total'] = memory_total
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
        # Drop the partially-populated entry so no incomplete group is emitted below.
        self._memory_metrics.pop(u'dram')
    try:
        if len(self._memory_metrics) > 0:
            for memory_type in self._memory_metrics:
                memory_metrics_group = PanoptesMetricsGroup(self._device, u'memory', self._execute_frequency)
                memory_metrics_group.add_dimension(PanoptesMetricDimension(u'memory_type', memory_type))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_used', self._memory_metrics[memory_type][u'memory_used'],
                                   PanoptesMetricType.GAUGE))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_total', self._memory_metrics[memory_type][u'memory_total'],
                                   PanoptesMetricType.GAUGE))
                self._arista_device_metrics.add(memory_metrics_group)
            self._polling_status.handle_success(u'memory')
            self._logger.debug(u'Found Memory metrics "%s" for %s: %s' % (
                self._memory_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
def _get_system_cpu_metrics(self):
    """
    Poll per-core CPU utilization via the SNMP Host Resources processor table
    and publish one 'cpu' PanoptesMetricsGroup per core.
    """
    self._cpu_metrics = dict()
    self._cpu_metrics[u'ctrl'] = dict()
    try:
        cpus = self._snmp_connection.bulk_walk(oid=hrProcessorLoad,
                                               non_repeaters=0,
                                               max_repetitions=_MAX_REPETITIONS)
        if len(cpus) == 0:
            raise PanoptesMetricsNullException
        for cpu in cpus:
            # The last int for each cpu is a temporary index we will append to hrDeviceDescription to get the name
            temp_id = int(cpu.index.rsplit(u'.', 1)[-1])  # last object
            if temp_id != 1:  # only include individual core info
                self._cpu_metrics[u'ctrl'][temp_id] = dict()
                self._cpu_metrics[u'ctrl'][temp_id][u'cpu_util'] = int(cpu.value)
                self._cpu_metrics[u'ctrl'][temp_id][u'cpu_name'] = self._get_cpu_name(temp_id)
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
        # Drop the partially-populated entry so no incomplete group is emitted below.
        self._cpu_metrics.pop(u'ctrl')
    try:
        if len(self._cpu_metrics) > 0:
            for cpu_type in self._cpu_metrics:
                for cpu_id in list(self._cpu_metrics[cpu_type].keys()):
                    cpu_metrics_group = PanoptesMetricsGroup(self._device, u'cpu', self._execute_frequency)
                    cpu_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_type', cpu_type))
                    cpu_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_no', u'1.' + str(cpu_id)))
                    cpu_metrics_group.add_dimension(
                        PanoptesMetricDimension(u'cpu_name',
                                                self._cpu_metrics[cpu_type][cpu_id][u'cpu_name']))
                    cpu_metrics_group.add_metric(
                        PanoptesMetric(u'cpu_utilization',
                                       self._cpu_metrics[cpu_type][cpu_id][u'cpu_util'],
                                       PanoptesMetricType.GAUGE))
                    self._arista_device_metrics.add(cpu_metrics_group)
            self._polling_status.handle_success(u'cpu')
            self._logger.debug(u'Found CPU metrics "%s" for %s: %s' % (
                self._cpu_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
def test_panoptes_metric_set(self):
    """Exercise PanoptesMetricSet add/remove/iteration and de-duplication."""
    metric_set = PanoptesMetricSet()
    first = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                           metric_creation_timestamp=mock_time.return_value)
    duplicate = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                               metric_creation_timestamp=mock_time.return_value)
    metric_set.add(first)
    metric_set.add(duplicate)
    # Equal metrics collapse to a single entry.
    assert len(metric_set) == 1
    self.assertIn(first, metric_set.metrics)
    # Test PanoptesMetricSet.__repr__
    expected_repr = u"PanoptesMetricSet[PanoptesMetric[test_metric|0|GAUGE|{}]]".format(
        mock_time.return_value)
    self.assertEqual(repr(metric_set), expected_repr)
    # Removing an object that was never added raises.
    with self.assertRaises(Exception):
        metric_set.remove(self.__panoptes_resource)
    metric_set.remove(first)
    assert len(metric_set) == 0
    # Test PanoptesMetricSet.__iter__ and 'next'
    seen = 0
    iterator = iter(metric_set)
    for _ in metric_set:
        self.assertIn(next(iterator), metric_set.metrics)
        seen += 1
    assert len(metric_set) == seen
    # An exhausted iterator raises on the next call.
    with self.assertRaises(Exception):
        next(iterator)
def test_panoptes_metric_set(self):
    """
    Exercise PanoptesMetricSet add/remove/iteration and de-duplication.

    Fixed: the iterator was advanced with the Python-2-only `.next()` method,
    which raises AttributeError on Python 3; the builtin `next()` works on both.
    """
    metric_set = PanoptesMetricSet()
    metric1 = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE,
                             metric_creation_timestamp=mock_time.return_value)
    metric2 = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE,
                             metric_creation_timestamp=mock_time.return_value)
    metric_set.add(metric1)
    metric_set.add(metric2)
    # Equal metrics collapse to a single entry.
    assert len(metric_set) == 1
    self.assertIn(metric1, metric_set.metrics)
    # Test PanoptesMetricSet.__repr__
    _METRIC_SET_REPR = "set([{{'metric_creation_timestamp': {}, 'metric_type': 'gauge', " \
                       "'metric_name': 'test_metric', 'metric_value': 0}}])".format(mock_time.return_value)
    self.assertEqual(repr(metric_set), _METRIC_SET_REPR)
    with self.assertRaises(Exception):
        metric_set.remove(self.__panoptes_resource)
    metric_set.remove(metric1)
    assert len(metric_set) == 0
    # Test PanoptesMetricSet.__iter__ and 'next'
    metric_count = 0
    metric_set_iterator = iter(metric_set)
    for _ in metric_set:
        self.assertIn(next(metric_set_iterator), metric_set.metrics)
        metric_count += 1
    assert len(metric_set) == metric_count
    with self.assertRaises(Exception):
        next(metric_set_iterator)
def prepare_panoptes_metrics_group_set(self, file_path=None):
    """
    Build a PanoptesMetricsGroupSet from a JSON results file.

    Args:
        file_path (str): path to the JSON results file; defaults to the
            module-level `plugin_results_file` when None.

    Returns:
        PanoptesMetricsGroupSet: one metrics group per object in the file,
        with its resource, dimensions and metrics reconstructed.
    """
    panoptes_metric_group_set = PanoptesMetricsGroupSet()
    path_to_metrics_file = plugin_results_file if file_path is None else file_path
    with open(path_to_metrics_file) as results_file:
        panoptes_json_data = json.load(results_file)
        for panoptes_data_object in panoptes_json_data:
            resource = panoptes_data_object[u'resource']
            panoptes_resource = PanoptesResource(
                resource_site=resource[u'resource_site'],
                resource_class=resource[u'resource_class'],
                resource_subclass=resource[u'resource_subclass'],
                resource_type=resource[u'resource_type'],
                resource_id=resource[u'resource_id'],
                resource_endpoint=resource[u'resource_endpoint'],
                resource_plugin=resource[u'resource_plugin'],
                resource_creation_timestamp=0)
            panoptes_metric_group = PanoptesMetricsGroup(
                resource=panoptes_resource,
                group_type=panoptes_data_object[u'metrics_group_type'],
                interval=panoptes_data_object[u'metrics_group_interval'])
            for dimension in panoptes_data_object[u'dimensions']:
                panoptes_metric_group.add_dimension(
                    PanoptesMetricDimension(
                        name=dimension[u'dimension_name'],
                        value=dimension[u'dimension_value']))
            for metric in panoptes_data_object[u'metrics']:
                # Use the class attributes directly instead of instantiating
                # PanoptesMetricType for every metric, matching usage elsewhere.
                panoptes_metric_group.add_metric(
                    PanoptesMetric(
                        metric_name=metric[u'metric_name'],
                        metric_value=metric[u'metric_value'],
                        metric_type=PanoptesMetricType.GAUGE
                        if metric[u'metric_type'] == u'gauge'
                        else PanoptesMetricType.COUNTER,
                        metric_creation_timestamp=metric[u'metric_creation_timestamp']))
            panoptes_metric_group_set.add(panoptes_metric_group)
    return panoptes_metric_group_set
def testMetricsGroup(self):
    """
    Exercise PanoptesMetricsGroup dimension/metric operations and equality.

    Fixed: `assertEquals` is a deprecated alias removed in Python 3.12;
    replaced with `assertEqual`.
    """
    now = round(time.time(), METRICS_TIMESTAMP_PRECISION)
    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
    self.assertEqual(metrics_group.group_type, 'test')
    self.assertEqual(metrics_group.interval, 120)
    self.assertEqual(metrics_group.schema_version, '0.2')
    self.assertGreaterEqual(metrics_group.creation_timestamp, now)
    dimension_one = PanoptesMetricDimension('if_alias', 'bar')
    dimension_two = PanoptesMetricDimension('if_alias', 'foo')
    metrics_group.add_dimension(dimension_one)
    # Adding a second dimension with the same name must raise.
    with self.assertRaises(KeyError):
        metrics_group.add_dimension(dimension_two)
    self.assertEqual(len(metrics_group.dimensions), 1)
    self.assertEqual(metrics_group.contains_dimension_by_name('if_alias'), True)
    self.assertEqual(metrics_group.contains_dimension_by_name('baz'), False)
    metrics_group.delete_dimension_by_name('if_alias')
    self.assertEqual(metrics_group.contains_dimension_by_name('if_alias'), False)
    self.assertEqual(len(metrics_group.dimensions), 0)
    self.assertEqual(metrics_group.get_dimension_by_name('foo'), None)
    metrics_group.add_dimension(dimension_two)
    # upsert replaces the existing same-named dimension instead of raising.
    dimension_three = PanoptesMetricDimension('if_alias', 'bar')
    metrics_group.upsert_dimension(dimension_three)
    self.assertEqual(len(metrics_group.dimensions), 1)
    self.assertEqual(metrics_group.get_dimension_by_name('if_alias').value, 'bar')
    dimension_four = PanoptesMetricDimension('if_name', 'eth0')
    metrics_group.upsert_dimension(dimension_four)
    self.assertEqual(len(metrics_group.dimensions), 2)
    with self.assertRaises(AssertionError):
        metrics_group.add_metric(None)
    metric = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE)
    metrics_group.add_metric(metric)
    to_json = metrics_group.json
    metrics = PanoptesMetricsGroup.flatten_metrics(json.loads(to_json)['metrics'])
    self.assertEqual(metrics['gauge']['test_metric']['value'], 0)
    # A group built with the same dimensions and metrics compares equal.
    metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
    metrics_group_two.add_dimension(dimension_two)
    metrics_group_two.upsert_dimension(dimension_three)
    metrics_group_two.upsert_dimension(dimension_four)
    metrics_group_two.add_metric(metric)
    self.assertEqual(metrics_group, metrics_group_two)
def get_metrics(self):
    """Build and return the heartbeat metrics group set for this device."""
    try:
        heartbeat_group = PanoptesMetricsGroup(self._device_resource, u'heartbeat',
                                               self._execute_frequency)
        # Constant heartbeat status gauge plus the enrichment timestamp.
        heartbeat_group.add_metric(PanoptesMetric(u'status', 1, PanoptesMetricType.GAUGE))
        heartbeat_group.add_metric(PanoptesMetric(u'heartbeat_enrichment_timestamp',
                                                  self._get_enrichment_ts(),
                                                  PanoptesMetricType.GAUGE))
        self._device_heartbeat_metrics.add(heartbeat_group)
        self._logger.debug(u'Heartbeat metrics for host {} PanoptesMetricsGroupSet {}'.format(
            self._device_fqdn, self._device_heartbeat_metrics))
        return self._device_heartbeat_metrics
    except Exception as e:
        # Any failure is surfaced as a plugin error carrying the host and cause.
        raise PanoptesPollingPluginError(
            u'Failed to get timestamp metrics for the host "%s": %s' % (
                self._device_fqdn, repr(e)))
def device_status_metrics_group(self):
    """
    Creates device_status_metrics_group

    If pinging is enabled and the current device status is one of the
    _PING_STATES, a ping is attempted; 100% packet loss (or any ping error)
    downgrades the status to PING_FAILURE.

    Returns:
        PanoptesMetricsGroup: The PanoptesMetricsGroup for the status of this device
    """
    if self._ping and self.device_status in _PING_STATES:
        try:
            panoptes_ping = PanoptesPing(hostname=self.device_name)
            if panoptes_ping.packet_loss_pct == 100.0:
                self._device_status = DEVICE_METRICS_STATES.PING_FAILURE
        except Exception:
            # Was a bare 'except:', which would also swallow SystemExit and
            # KeyboardInterrupt; any ping failure still maps to PING_FAILURE.
            self._device_status = DEVICE_METRICS_STATES.PING_FAILURE
    self._device_status_metrics_group.add_metric(
        PanoptesMetric(self._metric_name, self._device_status, PanoptesMetricType.GAUGE))
    return self._device_status_metrics_group
def _transformation_rate(context, metrics_group, inputs):
    """
    Convert the counter metrics named in `inputs` into per-second gauge rates.

    The previous counter value and timestamp are persisted in a key-value store
    keyed on the metrics group identity, so rates are computed across plugin
    invocations. Counters that cannot be converted (first sample, stale sample,
    negative/zero time delta, counter reset) are skipped.

    Args:
        context: plugin context providing the KV store and logger
        metrics_group (PanoptesMetricsGroup): group whose counters to convert
        inputs: metric names eligible for conversion

    Returns:
        PanoptesMetricsGroup: a copy of `metrics_group` with the computed rate
        gauges added.
    """
    kv_store = context.get_kv_store(PanoptesMetricsKeyValueStore)
    logger = context.logger
    output_metrics_group = metrics_group.copy()
    for metric in metrics_group.metrics:
        if metric.metric_name in inputs:
            key = const.KV_STORE_DELIMITER.join([_make_key(metrics_group), metric.metric_name])
            try:
                # Atomically store the current sample while fetching the previous one.
                new_stored_value = const.KV_STORE_DELIMITER.join(
                    [str(metric.metric_value), str(metric.metric_timestamp)])
                stored_value = kv_store.getset(
                    key, new_stored_value,
                    const.METRICS_KV_STORE_TTL_MULTIPLE * metrics_group.interval)
            except Exception as e:
                context.logger.error(
                    u'Error trying to fetch/store/convert for key "%s": %s, skipping conversion' % (key, repr(e)))
                continue
            if stored_value is None:
                # First sample for this key - nothing to diff against yet.
                logger.debug(u'Could not find existing value for key "%s", skipping conversion' % key)
                continue
            logger.debug(u'Calculating rate for %s' % key)
            value, timestamp = stored_value.split(const.KV_STORE_DELIMITER)
            time_difference = metric.metric_timestamp - float(timestamp)
            if time_difference < 0:
                logger.debug(
                    u'Time difference is negative for key "%s": (%.2f), skipping conversion' % (
                        key, time_difference))
                continue
            elif time_difference == 0:
                logger.debug(u'Time difference is zero for key "%s", skipping conversion' % key)
                continue
            elif time_difference > (metrics_group.interval * const.METRICS_KV_STORE_TTL_MULTIPLE):
                # Previous sample is too old to produce a meaningful rate.
                logger.debug(
                    u'Time difference is greater than TTL multiple for key "%s": (%.2f), skipping conversion' % (
                        key, time_difference))
                continue
            # Confidence = expected polling interval / observed sample spacing;
            # low confidence is only warned about, not skipped.
            confidence = round(old_div(metrics_group.interval, time_difference), 2)
            if confidence < const.METRICS_CONFIDENCE_THRESHOLD:
                logger.warn(
                    u'Confidence for key "%s" is %.2f, which is below the threshold of %.2f' % (
                        key, confidence, const.METRICS_CONFIDENCE_THRESHOLD))
            value = float(value)
            counter_difference = metric.metric_value - value
            if counter_difference >= 0:
                rate = int(old_div(counter_difference, time_difference))
                logger.debug(u'Rate for %s is %d' % (key, rate))
                try:
                    output_metrics_group.add_metric(
                        PanoptesMetric(metric.metric_name, rate, PanoptesMetricType.GAUGE))
                except KeyError:
                    # A gauge with this name already exists in the copied group.
                    logger.warn(
                        u'Metric %s already present as gauge in %s, skipping' % (
                            metric.metric_name, metrics_group.type))
            else:
                # New value below the stored one: counter reset/wrap - skip.
                logger.debug(
                    u'New counter value (%.2f) is less than current counter (%.2f) for key "%s": (%.2f), '
                    u'skipping conversion' % (metric.metric_value, value, key, counter_difference))
    return output_metrics_group
def testMetricsGroup(self):
    """
    Exercise PanoptesMetricsGroup dimension/metric operations, equality,
    repr and the flatten helpers under a mocked clock.

    Fixed: `assertEquals` is a deprecated alias removed in Python 3.12;
    replaced with `assertEqual`.
    """
    now = round(time.time(), METRICS_TIMESTAMP_PRECISION)
    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
    self.assertEqual(metrics_group.group_type, u'test')
    self.assertEqual(metrics_group.interval, 120)
    self.assertEqual(metrics_group.schema_version, u'0.2')
    self.assertGreaterEqual(metrics_group.creation_timestamp, now)
    # Re-create the group under a mocked clock so creation_timestamp is deterministic.
    with patch(u'yahoo_panoptes.framework.metrics.time', mock_time):
        metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
        dimension_one = PanoptesMetricDimension(u'if_alias', u'bar')
        dimension_two = PanoptesMetricDimension(u'if_alias', u'foo')
        metrics_group.add_dimension(dimension_one)
        with self.assertRaises(KeyError):
            metrics_group.add_dimension(dimension_two)
        # Test basic dimension operations
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertTrue(metrics_group.contains_dimension_by_name(u'if_alias'))
        self.assertFalse(metrics_group.contains_dimension_by_name(u'baz'))
        self.assertEqual(metrics_group.get_dimension_by_name(u'if_alias').value, u'bar')
        metrics_group.delete_dimension_by_name(u'if_alias')
        self.assertFalse(metrics_group.contains_dimension_by_name(u'if_alias'))
        self.assertEqual(len(metrics_group.dimensions), 0)
        self.assertEqual(metrics_group.get_dimension_by_name(u'foo'), None)
        metrics_group.add_dimension(dimension_two)
        dimension_three = PanoptesMetricDimension(u'if_alias', u'test')
        metrics_group.upsert_dimension(dimension_three)
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertEqual(metrics_group.get_dimension_by_name(u'if_alias').value, u'test')
        dimension_four = PanoptesMetricDimension(u'if_name', u'eth0')
        metrics_group.upsert_dimension(dimension_four)
        self.assertEqual(len(metrics_group.dimensions), 2)
        # Test basic metric operations
        with self.assertRaises(AssertionError):
            metrics_group.add_metric(None)
        metric = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE)
        metrics_group.add_metric(metric)
        with self.assertRaises(KeyError):
            metrics_group.add_metric(metric)
        to_json = metrics_group.json
        metrics = PanoptesMetricsGroup.flatten_metrics(json.loads(to_json)[u'metrics'])
        self.assertEqual(metrics[u'gauge'][u'test_metric'][u'value'], 0)
        metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
        metrics_group_two.add_dimension(dimension_two)
        metrics_group_two.upsert_dimension(dimension_three)
        metrics_group_two.upsert_dimension(dimension_four)
        metrics_group_two.add_metric(metric)
        self.assertEqual(metrics_group, metrics_group_two)
        # Check PanoptesMetricsGroup.__eq__
        panoptes_resource_two = PanoptesResource(resource_site=u'test2',
                                                 resource_class=u'test2',
                                                 resource_subclass=u'test2',
                                                 resource_type=u'test2',
                                                 resource_id=u'test2',
                                                 resource_endpoint=u'test2',
                                                 resource_plugin=u'test2')
        metrics_group_two = PanoptesMetricsGroup(panoptes_resource_two, u'test', 120)
        metrics_group_three = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
        with self.assertRaises(AssertionError):
            assert metrics_group_two == metrics_group_three
        metrics_group_three = metrics_group.copy()
        with self.assertRaises(AssertionError):
            assert metrics_group == dimension_one
        assert metrics_group == metrics_group_three
        metrics_group_three.delete_dimension_by_name(u"if_name")
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three
        metrics_group_three.upsert_dimension(dimension_four)
        assert metrics_group == metrics_group_three
        metric_two = PanoptesMetric(u'test_metric_2', 1, PanoptesMetricType.GAUGE)
        metrics_group_three.add_metric(metric_two)
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three
        # Test PanoptesMetricsGroup.__repr__
        _METRICS_GROUP_REPR = u'PanoptesMetricsGroup[' \
                              u'resource:plugin|test|site|test|class|test|subclass|test|type|test|id|' \
                              u'test|endpoint|test,' \
                              u'interval:120,schema_version:0.2,group_type:test,creation_timestamp:{},' \
                              u'dimensions:[PanoptesMetricDimension[if_alias|test],' \
                              u'PanoptesMetricDimension[if_name|eth0]],' \
                              u'metrics:[PanoptesMetric[test_metric|0|GAUGE|{}]]]'.format(
                                  mock_time.return_value, mock_time.return_value)
        self.assertEqual(repr(metrics_group), _METRICS_GROUP_REPR)
        dimensions_as_dicts = [{u'dimension_name': dimension.name,
                                u'dimension_value': dimension.value}
                               for dimension in metrics_group.dimensions]
        self.assertEqual(PanoptesMetricsGroup.flatten_dimensions(dimensions_as_dicts),
                         {u'if_alias': u'test', u'if_name': u'eth0'})
def get_results(self):
    """
    Poll dot3Stats/ifTable/ifXTable over SNMP, merge the per-interface results
    and emit one PanoptesMetricsGroup per interface.

    Returns:
        PanoptesMetricsGroupSet: the interface metrics groups plus the
        polling-status metrics group (the status group is always added,
        success or failure).
    """
    self._polling_status = PanoptesPollingStatus(
        resource=self.resource,
        execute_frequency=self.execute_frequency,
        logger=self.logger,
        metric_name=u'interface_polling_status')
    interface_metrics = dict()
    try:
        start_time = time.time()
        # Fetch all three SNMP tables up front; total wall time is logged below.
        self._build_dot3stats_map()
        self._build_if_table_stats_map()
        self._build_ifx_table_stats_map()
        end_time = time.time()
        self._logger.info(
            u'SNMP calls for device %s completed in %.2f seconds' %
            (self.host, end_time - start_time))
        interface_metrics.update(self._getdot3stats())
        if_interface_metrics = self._getif_table_stats()
        ifx_interface_metrics = self._getifx_table_stats()
        if self._plugin_config.get('dimension', {}).get('include_interface_index', 0):
            """
            #Interface indexes are ephemeral and can change after the restart of a device or the snmp agent.
            To add this field as a dimension include the following in the .panoptes-plugin configuration file.

            [dimension]
            include_interface_index = 1
            """
            self._DIMENSION_MAP.update({'interface_index': lambda x: str(x)})
        # https://github.com/PyCQA/pylint/issues/1694
        for i in self.interface_indices:  # pylint: disable=E1133
            if i not in interface_metrics:
                interface_metrics[i] = dict()
            # ifXTable values are applied first; ifTable values overwrite on overlap.
            interface_metrics[i].update(ifx_interface_metrics[i])
            interface_metrics[i].update(if_interface_metrics[i])
        for interface_index in list(interface_metrics.keys()):
            self._interface_metrics_group = PanoptesMetricsGroup(
                self.resource, u'interface', self.execute_frequency)
            interface = interface_metrics[interface_index]
            for dimension_name, dimension_method in list(self._DIMENSION_MAP.items()):
                self._smart_add_dimension(method=dimension_method,
                                          dimension_name=dimension_name,
                                          index=interface_index)
            for metric in list(interface.keys()):
                metric_type = _METRIC_TYPE_MAP[metric]
                # Non-numeric values are reported as the _MISSING_METRIC_VALUE
                # sentinel so the series stays present downstream.
                if not isinstance(interface[metric], numbers.Number):
                    self._interface_metrics_group.add_metric(
                        PanoptesMetric(str(metric), _MISSING_METRIC_VALUE, metric_type))
                else:
                    self._interface_metrics_group.add_metric(
                        PanoptesMetric(str(metric), interface[metric], metric_type))
            self._device_interface_metrics.add(self._interface_metrics_group)
        self._polling_status.handle_success(u'interface')
        self._logger.debug(
            u'Found interface metrics: "%s" for device "%s"' %
            (interface_metrics, self.host))
    except Exception as e:
        self._polling_status.handle_exception(u'interface', e)
    finally:
        # Always report polling status, even when interface polling failed.
        self._device_interface_metrics.add(
            self._polling_status.device_status_metrics_group)

    return self._device_interface_metrics
def test_panoptes_metrics_group_set(self):
    """Tests basic PanoptesMetricsGroupSet operations"""
    group_set = PanoptesMetricsGroupSet()
    first_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
    duplicate_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)

    # Two equal groups collapse into a single set member.
    group_set.add(first_group)
    group_set.add(duplicate_group)
    assert len(group_set) == 1
    self.assertIn(first_group, group_set.metrics_groups)

    # Removing via the equal duplicate empties the set.
    group_set.remove(duplicate_group)
    assert len(group_set) == 0

    group_set.add(first_group)
    third_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test3', 120)
    third_group.add_metric(PanoptesMetric(u"test3", 0.0, PanoptesMetricType.GAUGE))
    group_set.add(third_group)
    assert len(group_set) == 2

    other_set = PanoptesMetricsGroupSet()
    fourth_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
    fourth_group.add_metric(PanoptesMetric(u"test4", 0.0, PanoptesMetricType.GAUGE))
    other_set.add(fourth_group)
    assert len(other_set) == 1

    # PanoptesMetricsGroupSet.__add__ yields the union of both sets.
    union_set = group_set + other_set
    assert len(union_set) == 3

    # Removing a non-member and adding a non-set both fail loudly.
    with self.assertRaises(AssertionError):
        group_set.remove(self.__panoptes_resource)
    with self.assertRaises(TypeError):
        group_set + first_group

    # PanoptesMetricsGroupSet.__iter__ & next(): every yielded group is a member,
    # and the iterator is exhausted after exactly len(union_set) items.
    seen = 0
    union_iterator = iter(union_set)
    for _ in union_set:
        self.assertIn(next(union_iterator), union_set.metrics_groups)
        seen += 1
    assert len(union_set) == seen
    with self.assertRaises(Exception):
        next(union_iterator)

    # PanoptesMetricsGroupSet.__repr__ — timestamps come from mock_time.
    expected_repr = (u"PanoptesMetricsGroupSet[PanoptesMetricsGroup[resource:"
                     u"plugin|test|site|test|class|test|subclass|test|type|test|id|test|endpoint|test,"
                     u"interval:120,schema_version:0.2,group_type:test,creation_timestamp:{},"
                     u"dimensions:[],metrics:[]],PanoptesMetricsGroup[resource:"
                     u"plugin|test|site|test|class|test|subclass|test|type|test|id|test|endpoint|test,"
                     u"interval:120,schema_version:0.2,group_type:test3,creation_timestamp:{},"
                     u"dimensions:[],metrics:["
                     u"PanoptesMetric[test3|0.0|GAUGE|{}]]]]").format(mock_time.return_value,
                                                                     mock_time.return_value,
                                                                     mock_time.return_value)
    self.assertEqual(repr(group_set), expected_repr)
def testMetricsGroup(self):
    """
    Tests PanoptesMetricsGroup dimension/metric operations, equality, copy,
    JSON flattening and repr.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` (it emits a
    DeprecationWarning and was removed in Python 3.12), so it is replaced with
    the canonical ``assertEqual``.
    """
    now = round(time.time(), METRICS_TIMESTAMP_PRECISION)
    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
    self.assertEqual(metrics_group.group_type, 'test')
    self.assertEqual(metrics_group.interval, 120)
    self.assertEqual(metrics_group.schema_version, '0.2')
    self.assertGreaterEqual(metrics_group.creation_timestamp, now)
    # Freeze time so creation timestamps are deterministic for equality/repr checks.
    with patch('yahoo_panoptes.framework.metrics.time', mock_time):
        metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
        dimension_one = PanoptesMetricDimension('if_alias', 'bar')
        dimension_two = PanoptesMetricDimension('if_alias', 'foo')
        metrics_group.add_dimension(dimension_one)
        # Adding a second dimension with the same name must be rejected.
        with self.assertRaises(KeyError):
            metrics_group.add_dimension(dimension_two)
        # Test basic dimension operations
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertTrue(metrics_group.contains_dimension_by_name('if_alias'))
        self.assertFalse(metrics_group.contains_dimension_by_name('baz'))
        self.assertEqual(metrics_group.get_dimension_by_name('if_alias').value, 'bar')
        metrics_group.delete_dimension_by_name('if_alias')
        self.assertFalse(metrics_group.contains_dimension_by_name('if_alias'))
        self.assertEqual(len(metrics_group.dimensions), 0)
        self.assertEqual(metrics_group.get_dimension_by_name('foo'), None)
        metrics_group.add_dimension(dimension_two)
        dimension_three = PanoptesMetricDimension('if_alias', 'test')
        # upsert replaces an existing dimension with the same name...
        metrics_group.upsert_dimension(dimension_three)
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertEqual(metrics_group.get_dimension_by_name('if_alias').value, 'test')
        # ...and inserts when the name is new.
        dimension_four = PanoptesMetricDimension('if_name', 'eth0')
        metrics_group.upsert_dimension(dimension_four)
        self.assertEqual(len(metrics_group.dimensions), 2)
        # Test basic metric operations
        with self.assertRaises(AssertionError):
            metrics_group.add_metric(None)
        metric = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE)
        metrics_group.add_metric(metric)
        # Duplicate metric names are rejected.
        with self.assertRaises(KeyError):
            metrics_group.add_metric(metric)
        to_json = metrics_group.json
        metrics = PanoptesMetricsGroup.flatten_metrics(json.loads(to_json)['metrics'])
        # assertEquals -> assertEqual (deprecated alias, removed in Python 3.12)
        self.assertEqual(metrics['gauge']['test_metric']['value'], 0)
        metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
        metrics_group_two.add_dimension(dimension_two)
        metrics_group_two.upsert_dimension(dimension_three)
        metrics_group_two.upsert_dimension(dimension_four)
        metrics_group_two.add_metric(metric)
        self.assertEqual(metrics_group, metrics_group_two)
        # Check PanoptesMetricsGroup.__eq__
        panoptes_resource_two = PanoptesResource(resource_site='test2',
                                                 resource_class='test2',
                                                 resource_subclass='test2',
                                                 resource_type='test2',
                                                 resource_id='test2',
                                                 resource_endpoint='test2',
                                                 resource_plugin='test2')
        metrics_group_two = PanoptesMetricsGroup(panoptes_resource_two, 'test', 120)
        metrics_group_three = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
        # Groups on different resources compare unequal.
        with self.assertRaises(AssertionError):
            assert metrics_group_two == metrics_group_three
        metrics_group_three = metrics_group.copy()
        # Comparing against a non-group compares unequal.
        with self.assertRaises(AssertionError):
            assert metrics_group == dimension_one
        assert metrics_group == metrics_group_three
        metrics_group_three.delete_dimension_by_name("if_name")
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three
        metrics_group_three.upsert_dimension(dimension_four)
        assert metrics_group == metrics_group_three
        metric_two = PanoptesMetric('test_metric_2', 1, PanoptesMetricType.GAUGE)
        metrics_group_three.add_metric(metric_two)
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three
        # Test PanoptesMetricsGroup.__repr__
        _METRICS_GROUP_REPR = "{{'metrics_group_interval': 120, " \
                              "'resource': plugin|test|site|test|class|test|subclass|test|type|test|id|test|" \
                              "endpoint|test, 'dimensions': set([{{'dimension_name': 'if_alias', " \
                              "'dimension_value': 'test'}}, " \
                              "{{'dimension_name': 'if_name', 'dimension_value': 'eth0'}}]), " \
                              "'metrics_group_type': 'test', " \
                              "'metrics': set([{{'metric_creation_timestamp': {}, " \
                              "'metric_type': 'gauge', 'metric_name': 'test_metric', 'metric_value': 0}}]), " \
                              "'metrics_group_creation_timestamp': {}, " \
                              "'metrics_group_schema_version': '0.2'}}".format(mock_time.return_value,
                                                                               mock_time.return_value)
        self.assertEqual(repr(metrics_group), _METRICS_GROUP_REPR)
        dimensions_as_dicts = [{'dimension_name': dimension.name,
                                'dimension_value': dimension.value}
                               for dimension in metrics_group.dimensions]
        self.assertEqual(PanoptesMetricsGroup.flatten_dimensions(dimensions_as_dicts),
                         {'if_alias': 'test', 'if_name': 'eth0'})
def _get_environment_metrics(self):
    """
    Collect temperature and power-supply data over SNMP and publish them as
    'environment' metrics groups.

    Each phase (temperature fetch, power fetch, temperature publish, power
    publish) runs in its own try block so one failure does not prevent the
    others from being reported; failures go to the polling status handler.
    """
    try:
        self._get_temperature_metrics()
        self._logger.debug(u'Found Temperature metrics "%s" for %s: %s' %
                           (self._temp_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)
    try:
        self._get_power_metrics()
        self._logger.debug(u'Found Power metrics "%s" for %s: %s' %
                           (self._power_metrics, self._polling_status.device_type, self._device_host))
        # TODO Valuable to monitor cefcFRUPowerOperStatus for status "9: onButFanFail"? -- Yes, but not now
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)
    try:
        if self._temp_metrics:
            # One metrics group per temperature-sensing entity, keyed by entity_name.
            for index in list(self._temp_metrics.keys()):
                environment_metrics_group = PanoptesMetricsGroup(
                    self._device, u'environment', self._execute_frequency)
                environment_metrics_group.add_dimension(
                    PanoptesMetricDimension(u'entity_name',
                                            self._temp_metrics[index][u'entity_name']))
                environment_metrics_group.add_metric(
                    PanoptesMetric(u'temperature_fahrenheit',
                                   self._temp_metrics[index][u'temp_f'],
                                   PanoptesMetricType.GAUGE))
                self._asr_device_metrics.add(environment_metrics_group)
            self._polling_status.handle_success(u'environment')
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)
    # TODO Do we need to report sensor details as well? -- Not yet
    try:
        if self._power_metrics:
            environment_metrics_group = PanoptesMetricsGroup(
                self._device, u'environment', self._execute_frequency)
            # Count modules whose power_on flag is set.
            num_power_units_on = 0
            for index in list(self._power_metrics[u'power_module_map'].keys()):
                if self._power_metrics[u'power_module_map'][index][u'power_on']:
                    num_power_units_on += 1
            environment_metrics_group.add_metric(
                PanoptesMetric(u'power_units_total',
                               self._power_metrics[u'power_units_total'],
                               PanoptesMetricType.GAUGE))
            environment_metrics_group.add_metric(
                PanoptesMetric(u'power_units_on', num_power_units_on,
                               PanoptesMetricType.GAUGE))
            self._asr_device_metrics.add(environment_metrics_group)
            self._polling_status.handle_success(u'environment')
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)
def test_panoptes_metrics_group_set(self):
    """
    Tests basic PanoptesMetricsGroupSet operations.

    Fix: the Python-2-only ``iterator.next()`` method calls are replaced with
    the builtin ``next(iterator)``, which works on both Python 2 and 3 (and is
    already used by the other copy of this test in the file).
    """
    metrics_group_set = PanoptesMetricsGroupSet()
    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
    metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
    # Equal groups dedupe: adding both leaves a single member.
    metrics_group_set.add(metrics_group)
    metrics_group_set.add(metrics_group_two)
    assert len(metrics_group_set) == 1
    self.assertIn(metrics_group, metrics_group_set.metrics_groups)
    # Removing via the equal duplicate empties the set.
    metrics_group_set.remove(metrics_group_two)
    assert len(metrics_group_set) == 0
    metrics_group_set.add(metrics_group)
    metrics_group_three = PanoptesMetricsGroup(self.__panoptes_resource, 'test3', 120)
    metrics_group_three.add_metric(PanoptesMetric("test3", 0.0, PanoptesMetricType.GAUGE))
    metrics_group_set.add(metrics_group_three)
    assert len(metrics_group_set) == 2
    metrics_group_set_two = PanoptesMetricsGroupSet()
    metrics_group_four = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
    metrics_group_four.add_metric(PanoptesMetric("test4", 0.0, PanoptesMetricType.GAUGE))
    metrics_group_set_two.add(metrics_group_four)
    assert len(metrics_group_set_two) == 1
    # Test PanoptesMetricsGroupSet.__add__
    metrics_group_set_union = metrics_group_set + metrics_group_set_two
    assert len(metrics_group_set_union) == 3
    with self.assertRaises(AssertionError):
        metrics_group_set.remove(self.__panoptes_resource)
    with self.assertRaises(TypeError):
        metrics_group_set + metrics_group
    # Test PanoptesMetricsGroupSet.__iter__ & 'next'
    metrics_group_count = 0
    metrics_group_set_union_interator = iter(metrics_group_set_union)
    for _ in metrics_group_set_union:
        # builtin next() instead of the Python-2-only .next() method
        self.assertIn(next(metrics_group_set_union_interator),
                      metrics_group_set_union.metrics_groups)
        metrics_group_count += 1
    assert len(metrics_group_set_union) == metrics_group_count
    # An exhausted iterator must raise (StopIteration).
    with self.assertRaises(Exception):
        next(metrics_group_set_union_interator)
    # Test PanoptesMetricsGroupSet.__repr__
    _METRICS_GROUP_SET_REPR = "set([{{'metrics_group_interval': 120, " \
                              "'resource': plugin|test|site|test|class|test|subclass|test|type|test|id|test|" \
                              "endpoint|test, 'dimensions': set([]), 'metrics_group_type': 'test', " \
                              "'metrics': set([]), 'metrics_group_creation_timestamp': {}, " \
                              "'metrics_group_schema_version': '0.2'}}, {{'metrics_group_interval': 120, " \
                              "'resource': plugin|test|site|test|class|test|subclass|test|type|test|id|test|" \
                              "endpoint|test, 'dimensions': set([]), 'metrics_group_type': 'test3', " \
                              "'metrics': set([{{'metric_creation_timestamp': {}, " \
                              "'metric_type': 'gauge', " \
                              "'metric_name': 'test3', 'metric_value': 0.0}}]), " \
                              "'metrics_group_creation_timestamp': {}, " \
                              "'metrics_group_schema_version': '0.2'}}])".format(mock_time.return_value,
                                                                                 mock_time.return_value,
                                                                                 mock_time.return_value)
    self.assertEqual(repr(metrics_group_set), _METRICS_GROUP_SET_REPR)
def _get_system_cpu_metrics(self):
    """
    Collect control-plane ('ctrl') and crypto/data-plane ('data') CPU
    utilization over SNMP and publish one 'cpu' metrics group per CPU.

    On a partial failure the corresponding bucket is popped from
    self._cpu_metrics so it is not reported, and the error is routed to the
    polling status handler.
    """
    self._cpu_metrics = dict()
    self._cpu_metrics[u'ctrl'] = dict()
    try:
        cpus = self._snmp_connection.bulk_walk(oid=self._get_cpu_interval(),
                                               non_repeaters=0,
                                               max_repetitions=25)
        if len(cpus) == 0:
            raise PanoptesMetricsNullException
        for cpu in cpus:
            # The last int for each cpu is a temporary index we will append to the entPhysicalNamePrefix
            # and cpmCPUTotalPhysicalIndex OIDS to get the cpu name and id values, respectively
            temp_id = int(cpu.index.rsplit(u'.', 1)[-1])  # last object
            cpu_id = self._get_cpu_id(temp_id)
            self._cpu_metrics[u'ctrl'][cpu_id] = dict()
            self._cpu_metrics[u'ctrl'][cpu_id][u'cpu_util'] = int(cpu.value)
            self._cpu_metrics[u'ctrl'][cpu_id][u'cpu_name'] = self._get_cpu_name(cpu_id)  # report name, num as dim
    except Exception as e:
        # Discard the partially-built 'ctrl' bucket so it is not reported.
        self._polling_status.handle_exception(u'cpu', e)
        self._cpu_metrics.pop(u'ctrl')
    self._cpu_metrics[u'data'] = dict()
    try:
        interval = self._get_crypto_cpu_interval()
        crypto_cpu_entry_indices = set([x.split(u'.')[-1] for x in
                                        self._get_entity_indices(ent_physical_class=u'cpu',
                                                                 ent_strings=[u'Crypto Asic'])])
        for index in crypto_cpu_entry_indices:
            self._cpu_metrics[u'data'][index] = dict()
            # todo special def for u'1'/util?
            cpu_util = int(self._snmp_connection.get(oid=cepStatsMeasurement + u'.' + index + u'.' +
                                                     interval + cpuUtil).value)
            self._cpu_metrics[u'data'][index][u'cpu_util'] = cpu_util
            self._cpu_metrics[u'data'][index][u'cpu_name'] = self._get_cpu_name(index)
    except Exception as e:
        # Discard the partially-built 'data' bucket so it is not reported.
        self._polling_status.handle_exception(u'cpu', e)
        self._cpu_metrics.pop(u'data')
    try:
        if len(self._cpu_metrics) > 0:
            # Emit one metrics group per (cpu_type, cpu_id) with utilization as a gauge.
            for cpu_type in self._cpu_metrics:
                for cpu_id in list(self._cpu_metrics[cpu_type].keys()):
                    cpu_metrics_group = PanoptesMetricsGroup(self._device, u'cpu',
                                                             self._execute_frequency)
                    cpu_metrics_group.add_dimension(
                        PanoptesMetricDimension(u'cpu_type', cpu_type))
                    cpu_metrics_group.add_dimension(
                        PanoptesMetricDimension(u'cpu_no', cpu_id))
                    cpu_metrics_group.add_dimension(
                        PanoptesMetricDimension(u'cpu_name',
                                                self._cpu_metrics[cpu_type][cpu_id][u'cpu_name']))
                    cpu_metrics_group.add_metric(
                        PanoptesMetric(u'cpu_utilization',
                                       self._cpu_metrics[cpu_type][cpu_id][u'cpu_util'],
                                       PanoptesMetricType.GAUGE))
                    self._asr_device_metrics.add(cpu_metrics_group)
            self._polling_status.handle_success(u'cpu')
            self._logger.debug(u'Found CPU metrics "%s" for %s: %s' %
                               (self._cpu_metrics, self._polling_status.device_type,
                                self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
def _get_memory_metrics(self):
    """
    Collect DRAM and QFP memory usage over SNMP and publish one 'memory'
    metrics group per memory type.

    On a partial failure the corresponding bucket is popped from
    self._memory_metrics so it is not reported, and the error is routed to the
    polling status handler.
    """
    self._memory_metrics = dict()
    self._memory_metrics[u'dram'] = dict()
    try:
        memory_used = int(self._snmp_connection.get(oid=cempMemPoolHCUsed).value)
        self._memory_metrics[u'dram'][u'memory_used'] = memory_used
        memory_free = int(self._snmp_connection.get(oid=cempMemPoolHCFree).value)
        # Total is reported as used + free.
        self._memory_metrics[u'dram'][u'memory_total'] = memory_used + memory_free
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
        self._memory_metrics.pop(u'dram')
    self._memory_metrics[u'qfp'] = dict()  # TODO Safe to assume only one qfp_entry?
    try:
        qfp_entry_indices = set([x.split(u'.')[-1] for x in
                                 self._get_entity_indices(ent_physical_class=u'cpu',
                                                          ent_strings=[u'qfp', u'QFP'])])
        # NOTE(review): each iteration overwrites the same 'memory_used'/'memory_total'
        # keys, so only the last QFP entry survives — see the TODOs about a single entry.
        for index in qfp_entry_indices:
            qfp_memory_used = int(self._snmp_connection.get(oid=ceqfpMemoryResInUse + u'.' +
                                                            index + u'.' + u'1').value)
            self._memory_metrics[u'qfp'][u'memory_used'] = qfp_memory_used
            qfp_memory_free = int(self._snmp_connection.get(oid=ceqfpMemoryResFree + u'.' +
                                                            index + u'.' + u'1').value)
            self._memory_metrics[u'qfp'][u'memory_total'] = qfp_memory_used + qfp_memory_free
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
        self._memory_metrics.pop(u'qfp')  # TODO Safe to assume only one qfp_entry?
    try:
        if len(self._memory_metrics) > 0:
            for memory_type in self._memory_metrics:
                memory_metrics_group = PanoptesMetricsGroup(self._device, u'memory',
                                                            self._execute_frequency)
                memory_metrics_group.add_dimension(
                    PanoptesMetricDimension(u'memory_type', memory_type))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_used',
                                   self._memory_metrics[memory_type][u'memory_used'],
                                   PanoptesMetricType.GAUGE))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_total',
                                   self._memory_metrics[memory_type][u'memory_total'],
                                   PanoptesMetricType.GAUGE))
                self._asr_device_metrics.add(memory_metrics_group)
            self._polling_status.handle_success(u'memory')
            self._logger.debug(u'Found Memory metrics "%s" for %s: %s' %
                               (self._memory_metrics, self._polling_status.device_type,
                                self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
def get_results(self):
    """
    Poll dot3Stats/ifTable/ifXTable over SNMP, merge the per-interface results
    and emit one PanoptesMetricsGroup per interface.

    Returns:
        PanoptesMetricsGroupSet: interface metrics groups plus the
        polling-status metrics group (always added, success or failure).
    """
    self._polling_status = PanoptesPollingStatus(resource=self.resource,
                                                 execute_frequency=self.execute_frequency,
                                                 logger=self.logger,
                                                 metric_name='interface_polling_status')
    interface_metrics = dict()
    try:
        start_time = time.time()
        # Fetch all three SNMP tables up front; total wall time is logged below.
        self._build_dot3stats_map()
        self._build_if_table_stats_map()
        self._build_ifx_table_stats_map()
        end_time = time.time()
        self._logger.info('SNMP calls for device %s completed in %.2f seconds' % (
            self.host, end_time - start_time))
        interface_metrics.update(self._getdot3stats())
        if_interface_metrics = self._getif_table_stats()
        ifx_interface_metrics = self._getifx_table_stats()
        # https://github.com/PyCQA/pylint/issues/1694
        for i in self.interface_indices:  # pylint: disable=E1133
            if i not in interface_metrics:
                interface_metrics[i] = dict()
            # ifXTable values are applied first; ifTable values overwrite on overlap.
            interface_metrics[i].update(ifx_interface_metrics[i])
            interface_metrics[i].update(if_interface_metrics[i])
        for interface_index in interface_metrics.keys():
            self._interface_metrics_group = PanoptesMetricsGroup(self.resource, 'interface',
                                                                 self.execute_frequency)
            interface = interface_metrics[interface_index]
            for dimension_name, dimension_method in self._DIMENSION_MAP.items():
                self._smart_add_dimension(method=dimension_method,
                                          dimension_name=dimension_name,
                                          index=interface_index
                                          )
            for metric in interface.keys():
                metric_type = _METRIC_TYPE_MAP[metric]
                # Non-numeric values are reported as the _MISSING_METRIC_VALUE sentinel.
                if not isinstance(interface[metric], numbers.Number):
                    self._interface_metrics_group.add_metric(PanoptesMetric(str(metric),
                                                                            _MISSING_METRIC_VALUE,
                                                                            metric_type))
                else:
                    self._interface_metrics_group.add_metric(PanoptesMetric(str(metric),
                                                                            interface[metric],
                                                                            metric_type))
            self._device_interface_metrics.add(self._interface_metrics_group)
        self._polling_status.handle_success('interface')
        self._logger.debug('Found interface metrics: "%s" for device "%s"' % (
            interface_metrics, self.host))
    except Exception as e:
        self._polling_status.handle_exception('interface', e)
    finally:
        # Always report polling status, even when interface polling failed.
        self._device_interface_metrics.add(self._polling_status.device_status_metrics_group)

    return self._device_interface_metrics
def populateMetricsGroupSetWithTimeSeries(self) -> None:
    """
    Build one 'napalm_interface' metrics group per LLDP-neighbor interface and
    add it to self._panoptes_metrics_group_set.

    PanoptesMetricsGroupSet<set>{
        PanoptesMetricsGroup<dict>{
            dimensions<set>: {
                PanoptesMetricDimension(name: str, value: str),
                PanoptesMetricDimension(name: str, value: str),
                ....
            },
            metrics<set>: {
                PanoptesMetric(metric_name: str, metric_value: number, metric_type: {1: 'COUNTER', 0: 'GAUGE'}),
                PanoptesMetric(metric_name: str, metric_value: number, metric_type: {1: 'COUNTER', 0: 'GAUGE'}),
            }
        },
        PanoptesMetricsGroup<dict>{...},
        PanoptesMetricsGroup<dict>{...}
    }

    Signatures:
        PanoptesMetricsGroup(resource: PanoptesResource, group_type: str, interval: int)
            - Timeseries container which is able to hold any number of metrics & dimensions.
        PanoptesMetricDimension(name: string_types, value: string_types)
        PanoptesMetric(metric_name: string_types, metric_value: number, metric_type: {1: 'COUNTER', 0: 'GAUGE'})
            - Note: Panoptes performs rate conversions specified in the .panoptes-plugin file

    The text above shows the structure of how time series data is stored within
    the PanoptesMetricsGroupSet.
    """
    interfaces = self.napalm_device_connection.get_interfaces()
    interface_counters = self.napalm_device_connection.get_interfaces_counters()
    for interface, obj in self.napalm_device_connection.get_lldp_neighbors().items():
        panoptes_metrics_group = PanoptesMetricsGroup(self._device, 'napalm_interface',
                                                      self._execute_frequency)
        panoptes_metrics_group.add_dimension(
            PanoptesMetricDimension('interface_name', interface))
        # Only the first LLDP neighbor entry (obj[0]) per interface is reported;
        # empty hostname/port fall back to placeholder values.
        panoptes_metrics_group.add_dimension(
            PanoptesMetricDimension('neighbor',
                                    obj[0]['hostname'] or 'no_hostname_description'))
        panoptes_metrics_group.add_dimension(
            PanoptesMetricDimension('neighbor_port',
                                    obj[0]['port'] or 'no_port_description'))
        # Speed comes from get_interfaces() when known, otherwise '0'.
        if interface in interfaces:
            panoptes_metrics_group.add_dimension(
                PanoptesMetricDimension('speed', str(interfaces[interface]['speed'])))
        else:
            panoptes_metrics_group.add_dimension(PanoptesMetricDimension('speed', '0'))
        self._logger.debug('INTERFACE_COUNTERS: %s', interface_counters)
        # Avoid key errors on dummy interfaces
        # (interfaces without counters are skipped entirely — their group is never added).
        if interface not in interface_counters:
            continue
        panoptes_metrics_group.add_metric(
            PanoptesMetric('input_rate',
                           interface_counters[interface]['rx_unicast_packets'],
                           PanoptesMetricType.COUNTER))
        panoptes_metrics_group.add_metric(
            PanoptesMetric('output_rate',
                           interface_counters[interface]['tx_unicast_packets'],
                           PanoptesMetricType.COUNTER))
        self._panoptes_metrics_group_set.add(panoptes_metrics_group)