def test_panoptes_metric_dimension(self):
    with self.assertRaises(ValueError):
        PanoptesMetricDimension(u'contain$_invalid_character$', u'bar')
    with self.assertRaises(ValueError):
        PanoptesMetricDimension(u'foo', u'contains_pipe|')

    dimension_one = PanoptesMetricDimension(u'if_alias', u'bar')
    self.assertEqual(dimension_one.json,
                     u'{"dimension_name": "if_alias", "dimension_value": "bar"}')
    self.assertEqual(repr(dimension_one), u'PanoptesMetricDimension[if_alias|bar]')

    metric_one = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                                metric_creation_timestamp=mock_time.return_value)
    with self.assertRaises(AssertionError):
        assert dimension_one == metric_one

    dimension_two = PanoptesMetricDimension(u'if_alias', u'foo')
    with self.assertRaises(AssertionError):
        assert dimension_one == dimension_two

    dimension_three = PanoptesMetricDimension(u'if_alias', u'bar')
    assert dimension_one == dimension_three
def _get_storage_metrics(self):
    self._storage_metrics = dict()

    try:
        host_resource_storage_indices = self._get_host_resource_indices(
            oid_filter=hrStorageType,
            host_resource_strings=[hrStorageFlashMemory, hrStorageVirtualMemory])

        for index in host_resource_storage_indices:
            storage_descriptor = self.host_resources_map[hrStorageDescr + u'.' + index]  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor] = dict()
            allocation_units = int(
                self.host_resources_map[hrStorageAllocationUnits + u'.' + index])  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor][u'storage_used'] = \
                int(self.host_resources_map[hrStorageUsed + u'.' + index]) * allocation_units  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor][u'storage_total'] = \
                int(self.host_resources_map[hrStorageSize + u'.' + index]) * allocation_units  # pylint: disable=E1136
            self._storage_metrics[storage_descriptor][u'storage_type'] = \
                STORAGE_TYPE_REVERSE_MAP[self.host_resources_map[hrStorageType + u'.' + index]]  # pylint: disable=E1136
    except Exception as e:
        self._polling_status.handle_exception(u'storage', e)

    # todo Do we need to pop the stats from self._storage_metrics?
    try:
        if len(self._storage_metrics) > 0:
            for storage_entity in self._storage_metrics:
                storage_metrics_group = PanoptesMetricsGroup(self._device, u'storage', self._execute_frequency)
                storage_metrics_group.add_dimension(
                    PanoptesMetricDimension(u'storage_type',
                                            self._storage_metrics[storage_entity][u'storage_type']))
                storage_metrics_group.add_dimension(PanoptesMetricDimension(u'storage_entity', storage_entity))
                storage_metrics_group.add_metric(
                    PanoptesMetric(u'storage_used',
                                   self._storage_metrics[storage_entity][u'storage_used'],
                                   PanoptesMetricType.GAUGE))
                storage_metrics_group.add_metric(
                    PanoptesMetric(u'storage_total',
                                   self._storage_metrics[storage_entity][u'storage_total'],
                                   PanoptesMetricType.GAUGE))
                self._arista_device_metrics.add(storage_metrics_group)
            self._polling_status.handle_success(u'storage')
            self._logger.debug(u'Found Storage metrics "%s" for %s: %s' %
                               (self._storage_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'storage', e)
def _smart_add_dimension(self, method, dimension_name, index):
    dimension = method(index)
    if dimension is not None and PanoptesValidators.valid_nonempty_string(str(dimension)):
        self._interface_metrics_group.add_dimension(PanoptesMetricDimension(dimension_name, str(dimension)))
    else:
        self._interface_metrics_group.add_dimension(
            PanoptesMetricDimension(dimension_name, _DEFAULT_DIMENSION_VALUE))
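# Usage sketch (hypothetical, not from the plugin source): `method` is any per-index getter,
# for example a bound method that looks up an interface alias. If it returns None or an
# empty string, the dimension is still added, but with the _DEFAULT_DIMENSION_VALUE fallback.
#
#     self._smart_add_dimension(method=self._get_interface_alias,  # hypothetical getter
#                               dimension_name=u'if_alias',
#                               index=interface_index)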
def _get_system_cpu_metrics(self):
    self._cpu_metrics = dict()
    self._cpu_metrics[u'ctrl'] = dict()

    try:
        cpus = self._snmp_connection.bulk_walk(oid=hrProcessorLoad, non_repeaters=0,
                                               max_repetitions=_MAX_REPETITIONS)
        if len(cpus) == 0:
            raise PanoptesMetricsNullException

        for cpu in cpus:
            # The last int for each cpu is a temporary index we will append to hrDeviceDescription
            # to get the name
            temp_id = int(cpu.index.rsplit(u'.', 1)[-1])  # last object
            if temp_id != 1:  # only include individual core info
                self._cpu_metrics[u'ctrl'][temp_id] = dict()
                self._cpu_metrics[u'ctrl'][temp_id][u'cpu_util'] = int(cpu.value)
                self._cpu_metrics[u'ctrl'][temp_id][u'cpu_name'] = self._get_cpu_name(temp_id)
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
        self._cpu_metrics.pop(u'ctrl')

    try:
        if len(self._cpu_metrics) > 0:
            for cpu_type in self._cpu_metrics:
                for cpu_id in list(self._cpu_metrics[cpu_type].keys()):
                    cpu_metrics_group = PanoptesMetricsGroup(self._device, u'cpu', self._execute_frequency)
                    cpu_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_type', cpu_type))
                    cpu_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_no', u'1.' + str(cpu_id)))
                    cpu_metrics_group.add_dimension(
                        PanoptesMetricDimension(u'cpu_name', self._cpu_metrics[cpu_type][cpu_id][u'cpu_name']))
                    cpu_metrics_group.add_metric(
                        PanoptesMetric(u'cpu_utilization', self._cpu_metrics[cpu_type][cpu_id][u'cpu_util'],
                                       PanoptesMetricType.GAUGE))
                    self._arista_device_metrics.add(cpu_metrics_group)
            self._polling_status.handle_success(u'cpu')
            self._logger.debug(u'Found CPU metrics "%s" for %s: %s' %
                               (self._cpu_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
def _get_crypto_metrics(self):
    self._crypto_metrics = dict()

    try:
        crypto_cpu_entry_indices = set([
            x.split(u'.')[-1] for x in self._get_entity_indices(ent_physical_class=u'cpu',
                                                                ent_strings=[u'Crypto Asic'])
        ])
        interval = self._get_crypto_cpu_interval()

        for index in crypto_cpu_entry_indices:
            self._crypto_metrics[index] = dict()
            packets_in = int(
                self._snmp_connection.get(oid=cepStatsMeasurement + u'.' + index + u'.' + interval + pktsIn).value)
            packets_out = int(
                self._snmp_connection.get(oid=cepStatsMeasurement + u'.' + index + u'.' + interval + pktsOut).value)
            self._crypto_metrics[index][u'packets_in'] = packets_in
            self._crypto_metrics[index][u'packets_out'] = packets_out
            self._crypto_metrics[index][u'cpu_name'] = self._get_cpu_name(index)
    except Exception as e:
        self._polling_status.handle_exception(u'crypto', e)

    try:
        if self._crypto_metrics:
            for cpu_id in self._crypto_metrics:
                crypto_metrics_group = PanoptesMetricsGroup(self._device, u'crypto', self._execute_frequency)
                crypto_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_no', cpu_id))
                crypto_metrics_group.add_dimension(
                    PanoptesMetricDimension(u'cpu_name', self._crypto_metrics[cpu_id][u'cpu_name']))
                crypto_metrics_group.add_metric(
                    PanoptesMetric(u'packets_in', self._crypto_metrics[cpu_id][u'packets_in'],
                                   PanoptesMetricType.COUNTER))
                crypto_metrics_group.add_metric(
                    PanoptesMetric(u'packets_out', self._crypto_metrics[cpu_id][u'packets_out'],
                                   PanoptesMetricType.COUNTER))
                self._asr_device_metrics.add(crypto_metrics_group)
            self._polling_status.handle_success(u'crypto')
            self._logger.debug(u'Found crypto metrics "%s" for %s: %s' %
                               (self._crypto_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'crypto', e)
def testMetricsGroup(self):
    now = round(time.time(), METRICS_TIMESTAMP_PRECISION)
    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)

    self.assertEqual(metrics_group.group_type, 'test')
    self.assertEqual(metrics_group.interval, 120)
    self.assertEqual(metrics_group.schema_version, '0.2')
    self.assertGreaterEqual(metrics_group.creation_timestamp, now)

    dimension_one = PanoptesMetricDimension('if_alias', 'bar')
    dimension_two = PanoptesMetricDimension('if_alias', 'foo')

    metrics_group.add_dimension(dimension_one)
    with self.assertRaises(KeyError):
        metrics_group.add_dimension(dimension_two)

    self.assertEqual(len(metrics_group.dimensions), 1)
    self.assertEqual(metrics_group.contains_dimension_by_name('if_alias'), True)
    self.assertEqual(metrics_group.contains_dimension_by_name('baz'), False)

    metrics_group.delete_dimension_by_name('if_alias')
    self.assertEqual(metrics_group.contains_dimension_by_name('if_alias'), False)
    self.assertEqual(len(metrics_group.dimensions), 0)
    self.assertEqual(metrics_group.get_dimension_by_name('foo'), None)

    metrics_group.add_dimension(dimension_two)
    dimension_three = PanoptesMetricDimension('if_alias', 'bar')
    metrics_group.upsert_dimension(dimension_three)
    self.assertEqual(len(metrics_group.dimensions), 1)
    self.assertEqual(metrics_group.get_dimension_by_name('if_alias').value, 'bar')

    dimension_four = PanoptesMetricDimension('if_name', 'eth0')
    metrics_group.upsert_dimension(dimension_four)
    self.assertEqual(len(metrics_group.dimensions), 2)

    with self.assertRaises(AssertionError):
        metrics_group.add_metric(None)

    metric = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE)
    metrics_group.add_metric(metric)

    to_json = metrics_group.json
    metrics = PanoptesMetricsGroup.flatten_metrics(json.loads(to_json)['metrics'])
    self.assertEquals(metrics['gauge']['test_metric']['value'], 0)

    metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
    metrics_group_two.add_dimension(dimension_two)
    metrics_group_two.upsert_dimension(dimension_three)
    metrics_group_two.upsert_dimension(dimension_four)
    metrics_group_two.add_metric(metric)
    self.assertEqual(metrics_group, metrics_group_two)
def test_metrics_group_hash(self):
    now = round(time.time(), METRICS_TIMESTAMP_PRECISION)

    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
    metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)

    dimension = PanoptesMetricDimension(u'if_alias', u'bar')
    metric = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE, metric_creation_timestamp=now)
    metric_diff_timestamp = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                                           metric_creation_timestamp=now + 0.01)

    metrics_group.add_dimension(dimension)
    metrics_group_two.add_dimension(dimension)
    self.assertEqual(metrics_group.__hash__(), metrics_group_two.__hash__())

    metrics_group.add_metric(metric)
    metrics_group_two.add_metric(metric_diff_timestamp)
    self.assertEqual(metrics_group.__hash__(), metrics_group_two.__hash__())
def prepare_panoptes_metrics_group_set(self, file_path=None):
    panoptes_metric_group_set = PanoptesMetricsGroupSet()

    path_to_metrics_file = plugin_results_file if file_path is None else file_path

    with open(path_to_metrics_file) as results_file:
        panoptes_json_data = json.load(results_file)

    for panoptes_data_object in panoptes_json_data:
        resource = panoptes_data_object[u'resource']
        panoptes_resource = PanoptesResource(
            resource_site=resource[u'resource_site'],
            resource_class=resource[u'resource_class'],
            resource_subclass=resource[u'resource_subclass'],
            resource_type=resource[u'resource_type'],
            resource_id=resource[u'resource_id'],
            resource_endpoint=resource[u'resource_endpoint'],
            resource_plugin=resource[u'resource_plugin'],
            resource_creation_timestamp=0)

        panoptes_metric_group = PanoptesMetricsGroup(
            resource=panoptes_resource,
            group_type=panoptes_data_object[u'metrics_group_type'],
            interval=panoptes_data_object[u'metrics_group_interval'])

        for dimension in panoptes_data_object[u'dimensions']:
            panoptes_metric_group.add_dimension(
                PanoptesMetricDimension(name=dimension[u'dimension_name'],
                                        value=dimension[u'dimension_value']))

        for metric in panoptes_data_object[u'metrics']:
            panoptes_metric_group.add_metric(
                PanoptesMetric(
                    metric_name=metric[u'metric_name'],
                    metric_value=metric[u'metric_value'],
                    metric_type=PanoptesMetricType().GAUGE
                    if metric[u'metric_type'] == u'gauge' else PanoptesMetricType().COUNTER,
                    metric_creation_timestamp=metric[u'metric_creation_timestamp']))

        panoptes_metric_group_set.add(panoptes_metric_group)

    return panoptes_metric_group_set
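# Illustrative sketch only: the shape of one entry in the JSON results file that
# prepare_panoptes_metrics_group_set() above reads, reconstructed from the keys it accesses.
# All values below are placeholders, not real fixture data.
example_panoptes_json_data = [
    {
        u'resource': {
            u'resource_site': u'test_site',
            u'resource_class': u'network',
            u'resource_subclass': u'router',
            u'resource_type': u'test_type',
            u'resource_id': u'test_id',
            u'resource_endpoint': u'test_endpoint',
            u'resource_plugin': u'test_plugin'
        },
        u'metrics_group_type': u'interface',
        u'metrics_group_interval': 60,
        u'dimensions': [
            {u'dimension_name': u'if_alias', u'dimension_value': u'bar'}
        ],
        u'metrics': [
            {u'metric_name': u'test_metric', u'metric_value': 0,
             u'metric_type': u'gauge', u'metric_creation_timestamp': 0}
        ]
    }
]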
def test_panoptes_metric(self):
    with self.assertRaises(AssertionError):
        PanoptesMetric(None, 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(ValueError):
        PanoptesMetric(u'1', 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        PanoptesMetric(u'test_metric', None, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        PanoptesMetric(u'test_metric', 0, None)
    with self.assertRaises(AssertionError):
        PanoptesMetric(u'test_metric', True, PanoptesMetricType.GAUGE)

    metric1 = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE,
                             metric_creation_timestamp=mock_time.return_value)
    self.assertEqual(metric1.metric_name, u'test_metric')
    self.assertEqual(metric1.metric_value, 0)
    self.assertEqual(metric1.metric_timestamp, mock_time.return_value)
    self.assertEqual(metric1.metric_type, PanoptesMetricType.GAUGE)
    self.assertEqual(repr(metric1),
                     u"PanoptesMetric[test_metric|0|GAUGE|{}]".format(mock_time.return_value))
    self.assertNotEqual(metric1, None)

    # Check PanoptesMetric.__eq__
    assert metric1 == PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetricDimension(u"test", u"value")
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetric(u'different_name', 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetric(u'test_metric', 1, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetric(u'test_metric', 0, PanoptesMetricType.COUNTER)
def test_panoptes_metric(self):
    with self.assertRaises(AssertionError):
        PanoptesMetric(None, 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(ValueError):
        PanoptesMetric('1', 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        PanoptesMetric('test_metric', None, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        PanoptesMetric('test_metric', 0, None)
    with self.assertRaises(AssertionError):
        PanoptesMetric('test_metric', True, PanoptesMetricType.GAUGE)

    metric1 = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE,
                             metric_creation_timestamp=mock_time.return_value)
    self.assertEqual(metric1.metric_name, 'test_metric')
    self.assertEqual(metric1.metric_value, 0)
    self.assertEqual(metric1.metric_timestamp, mock_time.return_value)
    self.assertEqual(metric1.metric_type, PanoptesMetricType.GAUGE)
    self.assertEqual(
        repr(metric1),
        "{{'metric_creation_timestamp': {}, 'metric_type': 'gauge', 'metric_name': 'test_metric', "
        "'metric_value': 0}}".format(mock_time.return_value))
    self.assertNotEqual(metric1, None)

    # Check PanoptesMetric.__eq__
    assert metric1 == PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetricDimension("test", "value")
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetric('different_name', 0, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetric('test_metric', 1, PanoptesMetricType.GAUGE)
    with self.assertRaises(AssertionError):
        assert metric1 == PanoptesMetric('test_metric', 0, PanoptesMetricType.COUNTER)
def _get_memory_metrics(self):
    self._memory_metrics = dict()
    self._memory_metrics[u'dram'] = dict()

    try:
        allocation_units = int(self.host_resources_map[hrStorageAllocationUnits + u'.1'])  # pylint: disable=E1136
        memory_used = (int(self.host_resources_map[hrStorageUsed + u'.1']) -
                       int(self.host_resources_map[hrStorageUsed + u'.3'])) * allocation_units  # total - cached
        self._memory_metrics[u'dram'][u'memory_used'] = memory_used
        memory_total = int(self.host_resources_map[hrStorageSize + u'.1']) * allocation_units  # pylint: disable=E1136
        self._memory_metrics[u'dram'][u'memory_total'] = memory_total
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
        self._memory_metrics.pop(u'dram')

    try:
        if len(self._memory_metrics) > 0:
            for memory_type in self._memory_metrics:
                memory_metrics_group = PanoptesMetricsGroup(self._device, u'memory', self._execute_frequency)
                memory_metrics_group.add_dimension(PanoptesMetricDimension(u'memory_type', memory_type))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_used', self._memory_metrics[memory_type][u'memory_used'],
                                   PanoptesMetricType.GAUGE))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_total', self._memory_metrics[memory_type][u'memory_total'],
                                   PanoptesMetricType.GAUGE))
                self._arista_device_metrics.add(memory_metrics_group)
            self._polling_status.handle_success(u'memory')
            self._logger.debug(u'Found Memory metrics "%s" for %s: %s' %
                               (self._memory_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
def _get_system_cpu_metrics(self):
    self._cpu_metrics = dict()
    self._cpu_metrics[u'ctrl'] = dict()

    try:
        cpus = self._snmp_connection.bulk_walk(oid=self._get_cpu_interval(), non_repeaters=0, max_repetitions=25)
        if len(cpus) == 0:
            raise PanoptesMetricsNullException

        for cpu in cpus:
            # The last int for each cpu is a temporary index we will append to the entPhysicalNamePrefix
            # and cpmCPUTotalPhysicalIndex OIDS to get the cpu name and id values, respectively
            temp_id = int(cpu.index.rsplit(u'.', 1)[-1])  # last object
            cpu_id = self._get_cpu_id(temp_id)
            self._cpu_metrics[u'ctrl'][cpu_id] = dict()
            self._cpu_metrics[u'ctrl'][cpu_id][u'cpu_util'] = int(cpu.value)
            self._cpu_metrics[u'ctrl'][cpu_id][u'cpu_name'] = self._get_cpu_name(cpu_id)  # report name, num as dim
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
        self._cpu_metrics.pop(u'ctrl')

    self._cpu_metrics[u'data'] = dict()

    try:
        interval = self._get_crypto_cpu_interval()
        crypto_cpu_entry_indices = set([
            x.split(u'.')[-1] for x in self._get_entity_indices(ent_physical_class=u'cpu',
                                                                ent_strings=[u'Crypto Asic'])
        ])

        for index in crypto_cpu_entry_indices:
            self._cpu_metrics[u'data'][index] = dict()
            # todo special def for u'1'/util?
            cpu_util = int(
                self._snmp_connection.get(oid=cepStatsMeasurement + u'.' + index + u'.' + interval + cpuUtil).value)
            self._cpu_metrics[u'data'][index][u'cpu_util'] = cpu_util
            self._cpu_metrics[u'data'][index][u'cpu_name'] = self._get_cpu_name(index)
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
        self._cpu_metrics.pop(u'data')

    try:
        if len(self._cpu_metrics) > 0:
            for cpu_type in self._cpu_metrics:
                for cpu_id in list(self._cpu_metrics[cpu_type].keys()):
                    cpu_metrics_group = PanoptesMetricsGroup(self._device, u'cpu', self._execute_frequency)
                    cpu_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_type', cpu_type))
                    cpu_metrics_group.add_dimension(PanoptesMetricDimension(u'cpu_no', cpu_id))
                    cpu_metrics_group.add_dimension(
                        PanoptesMetricDimension(u'cpu_name', self._cpu_metrics[cpu_type][cpu_id][u'cpu_name']))
                    cpu_metrics_group.add_metric(
                        PanoptesMetric(u'cpu_utilization', self._cpu_metrics[cpu_type][cpu_id][u'cpu_util'],
                                       PanoptesMetricType.GAUGE))
                    self._asr_device_metrics.add(cpu_metrics_group)
            self._polling_status.handle_success(u'cpu')
            self._logger.debug(u'Found CPU metrics "%s" for %s: %s' %
                               (self._cpu_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'cpu', e)
def testMetricsGroup(self):
    now = round(time.time(), METRICS_TIMESTAMP_PRECISION)
    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)

    self.assertEqual(metrics_group.group_type, u'test')
    self.assertEqual(metrics_group.interval, 120)
    self.assertEqual(metrics_group.schema_version, u'0.2')
    self.assertGreaterEqual(metrics_group.creation_timestamp, now)

    with patch(u'yahoo_panoptes.framework.metrics.time', mock_time):
        metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
        dimension_one = PanoptesMetricDimension(u'if_alias', u'bar')
        dimension_two = PanoptesMetricDimension(u'if_alias', u'foo')

        metrics_group.add_dimension(dimension_one)
        with self.assertRaises(KeyError):
            metrics_group.add_dimension(dimension_two)

        # Test basic dimension operations
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertTrue(metrics_group.contains_dimension_by_name(u'if_alias'))
        self.assertFalse(metrics_group.contains_dimension_by_name(u'baz'))
        self.assertEqual(metrics_group.get_dimension_by_name(u'if_alias').value, u'bar')

        metrics_group.delete_dimension_by_name(u'if_alias')
        self.assertFalse(metrics_group.contains_dimension_by_name(u'if_alias'))
        self.assertEqual(len(metrics_group.dimensions), 0)
        self.assertEqual(metrics_group.get_dimension_by_name(u'foo'), None)

        metrics_group.add_dimension(dimension_two)
        dimension_three = PanoptesMetricDimension(u'if_alias', u'test')
        metrics_group.upsert_dimension(dimension_three)
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertEqual(metrics_group.get_dimension_by_name(u'if_alias').value, u'test')

        dimension_four = PanoptesMetricDimension(u'if_name', u'eth0')
        metrics_group.upsert_dimension(dimension_four)
        self.assertEqual(len(metrics_group.dimensions), 2)

        # Test basic metric operations
        with self.assertRaises(AssertionError):
            metrics_group.add_metric(None)

        metric = PanoptesMetric(u'test_metric', 0, PanoptesMetricType.GAUGE)
        metrics_group.add_metric(metric)
        with self.assertRaises(KeyError):
            metrics_group.add_metric(metric)

        to_json = metrics_group.json
        metrics = PanoptesMetricsGroup.flatten_metrics(json.loads(to_json)[u'metrics'])
        self.assertEquals(metrics[u'gauge'][u'test_metric'][u'value'], 0)

        metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
        metrics_group_two.add_dimension(dimension_two)
        metrics_group_two.upsert_dimension(dimension_three)
        metrics_group_two.upsert_dimension(dimension_four)
        metrics_group_two.add_metric(metric)
        self.assertEqual(metrics_group, metrics_group_two)

        # Check PanoptesMetricsGroup.__eq__
        panoptes_resource_two = PanoptesResource(resource_site=u'test2', resource_class=u'test2',
                                                 resource_subclass=u'test2', resource_type=u'test2',
                                                 resource_id=u'test2', resource_endpoint=u'test2',
                                                 resource_plugin=u'test2')
        metrics_group_two = PanoptesMetricsGroup(panoptes_resource_two, u'test', 120)
        metrics_group_three = PanoptesMetricsGroup(self.__panoptes_resource, u'test', 120)
        with self.assertRaises(AssertionError):
            assert metrics_group_two == metrics_group_three

        metrics_group_three = metrics_group.copy()
        with self.assertRaises(AssertionError):
            assert metrics_group == dimension_one
        assert metrics_group == metrics_group_three

        metrics_group_three.delete_dimension_by_name(u"if_name")
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three

        metrics_group_three.upsert_dimension(dimension_four)
        assert metrics_group == metrics_group_three

        metric_two = PanoptesMetric(u'test_metric_2', 1, PanoptesMetricType.GAUGE)
        metrics_group_three.add_metric(metric_two)
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three

        # Test PanoptesMetricsGroup.__repr__
        _METRICS_GROUP_REPR = u'PanoptesMetricsGroup[' \
                              u'resource:plugin|test|site|test|class|test|subclass|test|type|test|id|' \
                              u'test|endpoint|test,' \
                              u'interval:120,schema_version:0.2,group_type:test,creation_timestamp:{},' \
                              u'dimensions:[PanoptesMetricDimension[if_alias|test],' \
                              u'PanoptesMetricDimension[if_name|eth0]],' \
                              u'metrics:[PanoptesMetric[test_metric|0|GAUGE|{}]]]'.format(mock_time.return_value,
                                                                                          mock_time.return_value)
        self.assertEqual(repr(metrics_group), _METRICS_GROUP_REPR)

        dimensions_as_dicts = [{u'dimension_name': dimension.name, u'dimension_value': dimension.value}
                               for dimension in metrics_group.dimensions]
        self.assertEqual(PanoptesMetricsGroup.flatten_dimensions(dimensions_as_dicts),
                         {u'if_alias': u'test', u'if_name': u'eth0'})
def _get_environment_metrics(self):
    try:
        self._get_temperature_metrics()
        self._logger.debug(u'Found Temperature metrics "%s" for %s: %s' %
                           (self._temp_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)

    try:
        self._get_power_metrics()
        self._logger.debug(u'Found Power metrics "%s" for %s: %s' %
                           (self._power_metrics, self._polling_status.device_type, self._device_host))
        # TODO Valuable to monitor cefcFRUPowerOperStatus for status "9: onButFanFail"? -- Yes, but not now
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)

    try:
        if self._temp_metrics:
            for index in list(self._temp_metrics.keys()):
                environment_metrics_group = PanoptesMetricsGroup(self._device, u'environment',
                                                                 self._execute_frequency)
                environment_metrics_group.add_dimension(
                    PanoptesMetricDimension(u'entity_name', self._temp_metrics[index][u'entity_name']))
                environment_metrics_group.add_metric(
                    PanoptesMetric(u'temperature_fahrenheit', self._temp_metrics[index][u'temp_f'],
                                   PanoptesMetricType.GAUGE))
                self._asr_device_metrics.add(environment_metrics_group)
            self._polling_status.handle_success(u'environment')
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)

    # TODO Do we need to report sensor details as well? -- Not yet
    try:
        if self._power_metrics:
            environment_metrics_group = PanoptesMetricsGroup(self._device, u'environment', self._execute_frequency)

            num_power_units_on = 0
            for index in list(self._power_metrics[u'power_module_map'].keys()):
                if self._power_metrics[u'power_module_map'][index][u'power_on']:
                    num_power_units_on += 1

            environment_metrics_group.add_metric(
                PanoptesMetric(u'power_units_total', self._power_metrics[u'power_units_total'],
                               PanoptesMetricType.GAUGE))
            environment_metrics_group.add_metric(
                PanoptesMetric(u'power_units_on', num_power_units_on, PanoptesMetricType.GAUGE))
            self._asr_device_metrics.add(environment_metrics_group)
            self._polling_status.handle_success(u'environment')
    except Exception as e:
        self._polling_status.handle_exception(u'environment', e)
def _get_memory_metrics(self):
    self._memory_metrics = dict()
    self._memory_metrics[u'dram'] = dict()

    try:
        memory_used = int(self._snmp_connection.get(oid=cempMemPoolHCUsed).value)
        self._memory_metrics[u'dram'][u'memory_used'] = memory_used
        memory_free = int(self._snmp_connection.get(oid=cempMemPoolHCFree).value)
        self._memory_metrics[u'dram'][u'memory_total'] = memory_used + memory_free
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
        self._memory_metrics.pop(u'dram')

    self._memory_metrics[u'qfp'] = dict()  # TODO Safe to assume only one qfp_entry?

    try:
        qfp_entry_indices = set([
            x.split(u'.')[-1] for x in self._get_entity_indices(ent_physical_class=u'cpu',
                                                                ent_strings=[u'qfp', u'QFP'])
        ])

        for index in qfp_entry_indices:
            qfp_memory_used = int(
                self._snmp_connection.get(oid=ceqfpMemoryResInUse + u'.' + index + u'.' + u'1').value)
            self._memory_metrics[u'qfp'][u'memory_used'] = qfp_memory_used
            qfp_memory_free = int(
                self._snmp_connection.get(oid=ceqfpMemoryResFree + u'.' + index + u'.' + u'1').value)
            self._memory_metrics[u'qfp'][u'memory_total'] = qfp_memory_used + qfp_memory_free
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
        self._memory_metrics.pop(u'qfp')  # TODO Safe to assume only one qfp_entry?

    try:
        if len(self._memory_metrics) > 0:
            for memory_type in self._memory_metrics:
                memory_metrics_group = PanoptesMetricsGroup(self._device, u'memory', self._execute_frequency)
                memory_metrics_group.add_dimension(PanoptesMetricDimension(u'memory_type', memory_type))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_used', self._memory_metrics[memory_type][u'memory_used'],
                                   PanoptesMetricType.GAUGE))
                memory_metrics_group.add_metric(
                    PanoptesMetric(u'memory_total', self._memory_metrics[memory_type][u'memory_total'],
                                   PanoptesMetricType.GAUGE))
                self._asr_device_metrics.add(memory_metrics_group)
            self._polling_status.handle_success(u'memory')
            self._logger.debug(u'Found Memory metrics "%s" for %s: %s' %
                               (self._memory_metrics, self._polling_status.device_type, self._device_host))
    except Exception as e:
        self._polling_status.handle_exception(u'memory', e)
def populateMetricsGroupSetWithTimeSeries(self) -> None:
    """
    The structure below shows how time series data is stored within the PanoptesMetricsGroupSet:

    PanoptesMetricsGroupSet<set>{
        PanoptesMetricsGroup<dict>{
            dimensions<set>: {
                PanoptesMetricDimension(name: str, value: str),
                PanoptesMetricDimension(name: str, value: str),
                ....
            },
            metrics<set>: {
                PanoptesMetric(metric_name: str, metric_value: number, metric_type: {1: 'COUNTER', 0: 'GAUGE'}),
                PanoptesMetric(metric_name: str, metric_value: number, metric_type: {1: 'COUNTER', 0: 'GAUGE'}),
            }
        },
        PanoptesMetricsGroup<dict>{...},
        PanoptesMetricsGroup<dict>{...}
    }

    Signatures:
        PanoptesMetricsGroup(resource: PanoptesResource, group_type: str, interval: int)
            - Timeseries container which is able to hold any number of metrics & dimensions.
        PanoptesMetricDimension(name: string_types, value: string_types)
        PanoptesMetric(metric_name: string_types, metric_value: number, metric_type: {1: 'COUNTER', 0: 'GAUGE'})
            - Note: Panoptes performs rate conversions specified in the .panoptes-plugin file
    """
    interfaces = self.napalm_device_connection.get_interfaces()
    interface_counters = self.napalm_device_connection.get_interfaces_counters()

    for interface, obj in self.napalm_device_connection.get_lldp_neighbors().items():
        panoptes_metrics_group = PanoptesMetricsGroup(self._device, 'napalm_interface', self._execute_frequency)
        panoptes_metrics_group.add_dimension(PanoptesMetricDimension('interface_name', interface))
        panoptes_metrics_group.add_dimension(
            PanoptesMetricDimension('neighbor', obj[0]['hostname'] or 'no_hostname_description'))
        panoptes_metrics_group.add_dimension(
            PanoptesMetricDimension('neighbor_port', obj[0]['port'] or 'no_port_description'))

        if interface in interfaces:
            panoptes_metrics_group.add_dimension(
                PanoptesMetricDimension('speed', str(interfaces[interface]['speed'])))
        else:
            panoptes_metrics_group.add_dimension(PanoptesMetricDimension('speed', '0'))

        self._logger.debug('INTERFACE_COUNTERS: %s', interface_counters)

        # Avoid key errors on dummy interfaces
        if interface not in interface_counters:
            continue

        panoptes_metrics_group.add_metric(
            PanoptesMetric('input_rate', interface_counters[interface]['rx_unicast_packets'],
                           PanoptesMetricType.COUNTER))
        panoptes_metrics_group.add_metric(
            PanoptesMetric('output_rate', interface_counters[interface]['tx_unicast_packets'],
                           PanoptesMetricType.COUNTER))

        self._panoptes_metrics_group_set.add(panoptes_metrics_group)
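# A minimal, self-contained sketch (assuming the framework import paths used elsewhere in this
# repository) of how the structures described in the docstring above fit together. The resource
# attributes and values here are placeholders for illustration only.
from yahoo_panoptes.framework.metrics import (PanoptesMetric, PanoptesMetricDimension,
                                               PanoptesMetricsGroup, PanoptesMetricsGroupSet,
                                               PanoptesMetricType)
from yahoo_panoptes.framework.resources import PanoptesResource

resource = PanoptesResource(resource_site='test_site', resource_class='network',
                            resource_subclass='router', resource_type='test_type',
                            resource_id='test_id', resource_endpoint='test_endpoint',
                            resource_plugin='test_plugin')

metrics_group_set = PanoptesMetricsGroupSet()
metrics_group = PanoptesMetricsGroup(resource, 'napalm_interface', 60)
metrics_group.add_dimension(PanoptesMetricDimension('interface_name', 'eth0'))
metrics_group.add_metric(PanoptesMetric('input_rate', 0, PanoptesMetricType.COUNTER))
metrics_group_set.add(metrics_group)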
def testMetricsGroup(self):
    now = round(time.time(), METRICS_TIMESTAMP_PRECISION)
    metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)

    self.assertEqual(metrics_group.group_type, 'test')
    self.assertEqual(metrics_group.interval, 120)
    self.assertEqual(metrics_group.schema_version, '0.2')
    self.assertGreaterEqual(metrics_group.creation_timestamp, now)

    with patch('yahoo_panoptes.framework.metrics.time', mock_time):
        metrics_group = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
        dimension_one = PanoptesMetricDimension('if_alias', 'bar')
        dimension_two = PanoptesMetricDimension('if_alias', 'foo')

        metrics_group.add_dimension(dimension_one)
        with self.assertRaises(KeyError):
            metrics_group.add_dimension(dimension_two)

        # Test basic dimension operations
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertTrue(metrics_group.contains_dimension_by_name('if_alias'))
        self.assertFalse(metrics_group.contains_dimension_by_name('baz'))
        self.assertEqual(metrics_group.get_dimension_by_name('if_alias').value, 'bar')

        metrics_group.delete_dimension_by_name('if_alias')
        self.assertFalse(metrics_group.contains_dimension_by_name('if_alias'))
        self.assertEqual(len(metrics_group.dimensions), 0)
        self.assertEqual(metrics_group.get_dimension_by_name('foo'), None)

        metrics_group.add_dimension(dimension_two)
        dimension_three = PanoptesMetricDimension('if_alias', 'test')
        metrics_group.upsert_dimension(dimension_three)
        self.assertEqual(len(metrics_group.dimensions), 1)
        self.assertEqual(metrics_group.get_dimension_by_name('if_alias').value, 'test')

        dimension_four = PanoptesMetricDimension('if_name', 'eth0')
        metrics_group.upsert_dimension(dimension_four)
        self.assertEqual(len(metrics_group.dimensions), 2)

        # Test basic metric operations
        with self.assertRaises(AssertionError):
            metrics_group.add_metric(None)

        metric = PanoptesMetric('test_metric', 0, PanoptesMetricType.GAUGE)
        metrics_group.add_metric(metric)
        with self.assertRaises(KeyError):
            metrics_group.add_metric(metric)

        to_json = metrics_group.json
        metrics = PanoptesMetricsGroup.flatten_metrics(json.loads(to_json)['metrics'])
        self.assertEquals(metrics['gauge']['test_metric']['value'], 0)

        metrics_group_two = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
        metrics_group_two.add_dimension(dimension_two)
        metrics_group_two.upsert_dimension(dimension_three)
        metrics_group_two.upsert_dimension(dimension_four)
        metrics_group_two.add_metric(metric)
        self.assertEqual(metrics_group, metrics_group_two)

        # Check PanoptesMetricsGroup.__eq__
        panoptes_resource_two = PanoptesResource(resource_site='test2', resource_class='test2',
                                                 resource_subclass='test2', resource_type='test2',
                                                 resource_id='test2', resource_endpoint='test2',
                                                 resource_plugin='test2')
        metrics_group_two = PanoptesMetricsGroup(panoptes_resource_two, 'test', 120)
        metrics_group_three = PanoptesMetricsGroup(self.__panoptes_resource, 'test', 120)
        with self.assertRaises(AssertionError):
            assert metrics_group_two == metrics_group_three

        metrics_group_three = metrics_group.copy()
        with self.assertRaises(AssertionError):
            assert metrics_group == dimension_one
        assert metrics_group == metrics_group_three

        metrics_group_three.delete_dimension_by_name("if_name")
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three

        metrics_group_three.upsert_dimension(dimension_four)
        assert metrics_group == metrics_group_three

        metric_two = PanoptesMetric('test_metric_2', 1, PanoptesMetricType.GAUGE)
        metrics_group_three.add_metric(metric_two)
        with self.assertRaises(AssertionError):
            assert metrics_group == metrics_group_three

        # Test PanoptesMetricsGroup.__repr__
        _METRICS_GROUP_REPR = "{{'metrics_group_interval': 120, " \
                              "'resource': plugin|test|site|test|class|test|subclass|test|type|test|id|test|" \
                              "endpoint|test, 'dimensions': set([{{'dimension_name': 'if_alias', " \
                              "'dimension_value': 'test'}}, " \
                              "{{'dimension_name': 'if_name', 'dimension_value': 'eth0'}}]), " \
                              "'metrics_group_type': 'test', " \
                              "'metrics': set([{{'metric_creation_timestamp': {}, " \
                              "'metric_type': 'gauge', 'metric_name': 'test_metric', 'metric_value': 0}}]), " \
                              "'metrics_group_creation_timestamp': {}, " \
                              "'metrics_group_schema_version': '0.2'}}".format(mock_time.return_value,
                                                                               mock_time.return_value)
        self.assertEqual(repr(metrics_group), _METRICS_GROUP_REPR)

        dimensions_as_dicts = [{'dimension_name': dimension.name, 'dimension_value': dimension.value}
                               for dimension in metrics_group.dimensions]
        self.assertEqual(PanoptesMetricsGroup.flatten_dimensions(dimensions_as_dicts),
                         {'if_alias': 'test', 'if_name': 'eth0'})