def get_counters(self, manager):
    """Yield gauge counters describing each tenant's Swift account usage.

    For every (tenant, account) pair from ``iter_accounts`` this emits
    three counters: object count, bytes used, and container count.
    """
    # (counter name, unit, account header carrying the value)
    measurements = (
        ('storage.objects', 'object', 'x-account-object-count'),
        ('storage.objects.size', 'B', 'x-account-bytes-used'),
        ('storage.objects.containers', 'container',
         'x-account-container-count'),
    )
    for tenant, account in self.iter_accounts(manager.keystone):
        for counter_name, unit, header in measurements:
            # timestamp is taken per counter, as in the original flow
            yield counter.Counter(
                name=counter_name,
                type=counter.TYPE_GAUGE,
                volume=int(account[header]),
                unit=unit,
                user_id=None,
                project_id=tenant,
                resource_id=tenant,
                timestamp=timeutils.isotime(),
                resource_metadata=None,
            )
def get_counters(self, manager):
    """Yield per-tenant Swift account gauges: objects, bytes, containers."""
    def _account_counter(counter_name, header, unit, tenant, account):
        # One gauge counter keyed to the tenant, valued from an
        # x-account-* header of the Swift HEAD response.
        return counter.Counter(
            name=counter_name,
            type=counter.TYPE_GAUGE,
            volume=int(account[header]),
            unit=unit,
            user_id=None,
            project_id=tenant,
            resource_id=tenant,
            timestamp=timeutils.isotime(),
            resource_metadata=None,
        )

    for tenant, account in self.iter_accounts(manager.keystone):
        yield _account_counter("storage.objects",
                               "x-account-object-count",
                               "object", tenant, account)
        yield _account_counter("storage.objects.size",
                               "x-account-bytes-used",
                               "B", tenant, account)
        yield _account_counter("storage.objects.containers",
                               "x-account-container-count",
                               "container", tenant, account)
def get(key):
    """Fake token-store lookup used by tests.

    Returns a JSON-encoded ``(access_info, expiry)`` pair for the two
    known valid tokens, and ``None`` (implicitly) for any other key.
    """
    known_tokens = {
        "tokens/%s" % VALID_TOKEN: (VALID_TOKEN, {
            'id': 'user_id1',
            'name': 'user_name1',
            'tenantId': '123i2910',
            'tenantName': 'mytenant',
            'roles': [{'name': 'admin'}],
        }),
        "tokens/%s" % VALID_TOKEN2: (VALID_TOKEN2, {
            'id': 'user_id2',
            'name': 'user-good',
            'tenantId': 'project-good',
            'tenantName': 'goodies',
            'roles': [{'name': 'Member'}],
        }),
    }
    if key in known_tokens:
        token_id, user = known_tokens[key]
        # Fabricated tokens expire five minutes from "now".
        dt = timeutils.utcnow() + datetime.timedelta(minutes=5)
        return json.dumps(({'access': {
            'token': {'id': token_id},
            'user': user,
        }}, timeutils.isotime(dt)))
def make_sample_from_host(host_url, name, type, unit, volume,
                          project_id=None, user_id=None, res_metadata=None):
    """Build a ``hardware.*`` Sample for the host at *host_url*.

    :param host_url: parsed URL; its ``hostname`` becomes the resource id
    :param name: meter suffix, prefixed with ``hardware.``
    :param res_metadata: optional namedtuple; its fields are merged into
        the sample metadata together with host metadata
    """
    resource_metadata = dict()
    if res_metadata is not None:
        # namedtuple -> plain dict of field name/value pairs
        metadata = copy.copy(res_metadata)
        resource_metadata = dict(zip(metadata._fields, metadata))
    resource_metadata.update(get_metadata_from_host(host_url))
    return sample.Sample(
        name='hardware.' + name,
        type=type,
        unit=unit,
        volume=volume,
        # BUG FIX: user_id and project_id were swapped in the Sample
        # constructor (user_id got project_id and vice versa).
        user_id=user_id,
        project_id=project_id,
        resource_id=host_url.hostname,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
        source='hardware',
    )
def get_samples(self, manager, cache, resources=None):
    """Yield a 'services.vm.error' gauge for each VM found in error state.

    :param resources: unused; kept for pollster-interface compatibility.
        Fixed: was a mutable default argument (``resources=[]``).
    """
    for vm in self._iter_vms(manager, cache):
        yield sample.Sample(name='services.vm.error',
                            type=sample.TYPE_GAUGE,
                            unit='error',
                            volume=1,
                            project_id=None,
                            user_id=None,
                            resource_id=vm.id,
                            timestamp=timeutils.isotime(),
                            resource_metadata={})
def get_counters(self, manager, context):
    """Yield one 'image' existence gauge per known Glance image."""
    for img in self.iter_images():
        # volume is always 1: this counter records that the image exists
        yield counter.Counter(
            name='image',
            type=counter.TYPE_GAUGE,
            volume=1,
            user_id=None,
            project_id=img.owner,
            resource_id=img.id,
            timestamp=timeutils.isotime(),
            resource_metadata=self.extract_image_metadata(img),
        )
def make_counter_from_instance(instance, name, type, unit, volume):
    """Build a Counter bound to *instance*'s user, tenant and id."""
    fields = dict(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.tenant_id,
        resource_id=instance.id,
        timestamp=timeutils.isotime(),
        resource_metadata=compute_instance.get_metadata_from_object(instance),
    )
    return counter.Counter(**fields)
def make_mem_counter(instance, name, type, unit, volume):
    """Build a memory Counter for *instance* with empty resource metadata."""
    attrs = dict(
        user_id=instance.user_id,
        project_id=instance.tenant_id,
        resource_id=instance.id,
    )
    return counter.Counter(name=name,
                           type=type,
                           unit=unit,
                           volume=volume,
                           timestamp=timeutils.isotime(),
                           resource_metadata={},
                           **attrs)
def get_counters(self, manager, cache):
    """Yield an 'image.size' gauge (bytes) for every Glance image."""
    for img in self._iter_images(manager.keystone, cache):
        yield counter.Counter(
            name='image.size',
            type=counter.TYPE_GAUGE,
            unit='B',
            volume=img.size,
            user_id=None,
            project_id=img.owner,
            resource_id=img.id,
            timestamp=timeutils.isotime(),
            resource_metadata=self.extract_image_metadata(img),
        )
def get_samples(self, manager, cache, resources=None):
    """Yield per-tenant 'storage.objects.containers' gauges."""
    accounts = self._iter_accounts(manager.keystone, cache)
    for tenant, account in accounts:
        # container count comes straight from the Swift account headers
        container_count = int(account["x-account-container-count"])
        yield sample.Sample(
            name="storage.objects.containers",
            type=sample.TYPE_GAUGE,
            unit="container",
            volume=container_count,
            user_id=None,
            project_id=tenant,
            resource_id=tenant,
            timestamp=timeutils.isotime(),
            resource_metadata=None,
        )
def get_samples(self, manager, cache, resources=None):
    """Yield one 'image' existence gauge per Glance image."""
    for img in self._iter_images(manager.keystone, cache):
        yield sample.Sample(
            name='image',
            type=sample.TYPE_GAUGE,
            unit='image',
            volume=1,  # existence marker, always 1
            user_id=None,
            project_id=img.owner,
            resource_id=img.id,
            timestamp=timeutils.isotime(),
            resource_metadata=self.extract_image_metadata(img),
        )
def get_samples(self, manager, cache, resources=None):
    """Yield a 'services.vm.error' gauge for each VM in error state.

    :param resources: unused; kept for pollster-interface compatibility.
        Fixed: was a mutable default argument (``resources=[]``).
    """
    for vm in self._iter_vms(manager, cache):
        yield sample.Sample(
            name='services.vm.error',
            type=sample.TYPE_GAUGE,
            unit='error',
            volume=1,
            project_id=None,
            user_id=None,
            resource_id=vm.id,
            timestamp=timeutils.isotime(),
            resource_metadata={}
        )
def get_samples(self, manager, cache):
    """Yield per-tenant 'storage.objects' object-count gauges."""
    for tenant, account in self._iter_accounts(manager.keystone, cache):
        object_count = int(account['x-account-object-count'])
        yield sample.Sample(
            name='storage.objects',
            type=sample.TYPE_GAUGE,
            unit='object',
            volume=object_count,
            user_id=None,
            project_id=tenant,
            resource_id=tenant,
            timestamp=timeutils.isotime(),
            resource_metadata=None,
        )
def get_samples(self, manager, cache, resources=None):
    """Yield per-tenant 'storage.objects' object-count gauges.

    :param resources: unused; kept for pollster-interface compatibility.
        Fixed: was a mutable default argument (``resources=[]``).
    """
    for tenant, account in self._iter_accounts(manager.keystone, cache):
        yield sample.Sample(
            name='storage.objects',
            type=sample.TYPE_GAUGE,
            volume=int(account['x-account-object-count']),
            unit='object',
            user_id=None,
            project_id=tenant,
            resource_id=tenant,
            timestamp=timeutils.isotime(),
            resource_metadata=None,
        )
def get_samples(self, manager, cache, resources=None):
    """Yield an 'image.size' gauge (bytes) per Glance image.

    :param resources: unused; kept for pollster-interface compatibility.
        Fixed: was a mutable default argument (``resources=[]``).
    """
    for image in self._iter_images(manager.keystone, cache):
        yield sample.Sample(
            name='image.size',
            type=sample.TYPE_GAUGE,
            unit='B',
            volume=image.size,
            user_id=None,
            project_id=image.owner,
            resource_id=image.id,
            timestamp=timeutils.isotime(),
            resource_metadata=self.extract_image_metadata(image),
        )
def make_counter_from_instance(instance, name, type, volume):
    """Build a Counter for *instance* (legacy API: source/duration fields)."""
    # metadata is extracted from the DB object representation of the instance
    metadata = compute_instance.get_metadata_from_dbobject(instance)
    return counter.Counter(
        source='?',  # source is unknown at this layer
        name=name,
        type=type,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.project_id,
        resource_id=instance.uuid,
        timestamp=timeutils.isotime(),
        duration=None,
        resource_metadata=metadata,
    )
def make_counter_from_instance(instance, name, type, unit, volume,
                               additional_metadata=None):
    """Build a Sample for *instance* (name kept for caller compatibility;
    note it returns a ``sample.Sample``, not a Counter).

    :param additional_metadata: optional dict merged over the instance
        metadata.  Fixed: was a mutable default argument (``{}``).
    """
    resource_metadata = _get_metadata_from_object(instance)
    if additional_metadata:
        resource_metadata.update(additional_metadata)
    return sample.Sample(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.tenant_id,
        resource_id=instance.id,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
    )
def make_vnic_counter(instance, name, type, volume, vnic_data):
    """Build a vNIC Counter; the vNIC's ``fref`` is the resource id."""
    # shallow copy so the caller's vnic_data dict is not mutated
    metadata = copy.copy(vnic_data)
    metadata['instance_id'] = instance.id
    return counter.Counter(source='?',
                           name=name,
                           type=type,
                           volume=volume,
                           user_id=instance.user_id,
                           project_id=instance.project_id,
                           resource_id=vnic_data['fref'],
                           timestamp=timeutils.isotime(),
                           duration=None,
                           resource_metadata=metadata)
def make_vnic_counter(instance, name, type, volume, vnic_data):
    """Build a vNIC Counter tagged with the owning instance's uuid."""
    # copy first: the caller's vnic_data must stay untouched
    metadata = copy.copy(vnic_data)
    metadata['instance_id'] = instance.uuid
    fields = dict(
        name=name,
        type=type,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.project_id,
        resource_id=vnic_data['fref'],
        timestamp=timeutils.isotime(),
        resource_metadata=metadata,
    )
    return counter.Counter(**fields)
def get_counters(self, manager, context):
    """Yield one absolute 'image' counter per Glance image (legacy API)."""
    for img in self.iter_images():
        # images here are plain dicts, hence subscript access
        yield Counter(
            source='?',
            name='image',
            type='absolute',
            volume=1,
            user_id=None,
            project_id=img['owner'],
            resource_id=img['id'],
            timestamp=timeutils.isotime(),
            duration=None,
            resource_metadata=self.extract_image_metadata(img),
        )
def get_samples(self, manager, cache, resources=None):
    """Yield a 'storage.containers.objects' gauge for every container
    of every project; resource id is ``<project>/<container-name>``."""
    for project, account in self._iter_accounts(manager.keystone, cache):
        # account is a (headers, containers) pair; index 1 lists containers
        for container in account[1]:
            resource = "%s/%s" % (project, container["name"])
            yield sample.Sample(
                name="storage.containers.objects",
                type=sample.TYPE_GAUGE,
                unit="object",
                volume=int(container["count"]),
                user_id=None,
                project_id=project,
                resource_id=resource,
                timestamp=timeutils.isotime(),
                resource_metadata=None,
            )
def get_samples(self, manager, cache, resources=None):
    """Yield a 'services.compute_host.down' gauge for every nova-compute
    service currently marked down.

    :param resources: unused; kept for pollster-interface compatibility.

    Fixes: removed a leftover Python-2-only debug ``print`` statement
    (a syntax error under Python 3) and replaced the mutable default
    argument (``resources=[]``) with ``None``.
    """
    for service in self._iter_services(cache, 'nova-compute', down=True):
        yield sample.Sample(name='services.compute_host.down',
                            type=sample.TYPE_GAUGE,
                            unit='down',
                            volume=1,
                            project_id=None,
                            user_id=None,
                            resource_id=service.host,
                            timestamp=timeutils.isotime(),
                            resource_metadata={
                                'host': service.host,
                                'status': service.status,
                            })
def make_sample_from_pool(pool, name, type, unit, volume,
                          resource_metadata=None):
    """Build a Sample for a load-balancer pool dict."""
    if resource_metadata is None:
        resource_metadata = {}
    return sample.Sample(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=None,
        project_id=pool['tenant_id'],
        resource_id=pool['id'],
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
    )
def get_samples(self, manager, cache, resources=None):
    """Yield a 'storage.containers.objects.size' gauge (bytes) for every
    container of every project.

    :param resources: unused; kept for pollster-interface compatibility.
        Fixed: was a mutable default argument (``resources=[]``).
    """
    for project, account in self._iter_accounts(manager.keystone, cache):
        containers_info = account[1]
        for container in containers_info:
            yield sample.Sample(
                name='storage.containers.objects.size',
                type=sample.TYPE_GAUGE,
                volume=int(container['bytes']),
                unit='B',
                user_id=None,
                project_id=project,
                resource_id=project + '/' + container['name'],
                timestamp=timeutils.isotime(),
                resource_metadata=None,
            )
def get_counters(self, manager):
    """Yield 'image' and 'image.size' gauges for each Glance image."""
    # (counter name, unit, volume extractor)
    specs = (
        ('image', 'image', lambda img: 1),
        ('image.size', 'B', lambda img: img.size),
    )
    for img in self.iter_images(manager.keystone):
        for counter_name, unit, get_volume in specs:
            yield counter.Counter(
                name=counter_name,
                type=counter.TYPE_GAUGE,
                unit=unit,
                volume=get_volume(img),
                user_id=None,
                project_id=img.owner,
                resource_id=img.id,
                timestamp=timeutils.isotime(),
                resource_metadata=self.extract_image_metadata(img),
            )
def make_sample_from_instance(instance, name, type, unit, volume,
                              additional_metadata=None):
    """Build a Sample for *instance*, merging optional extra metadata.

    :param additional_metadata: optional dict merged over the instance
        metadata.  Fixed: was a mutable default argument (``{}``).
    """
    resource_metadata = _get_metadata_from_object(instance)
    if additional_metadata:
        resource_metadata.update(additional_metadata)
    return sample.Sample(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.tenant_id,
        resource_id=instance.id,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
    )
def make_sample_from_host(host_url, name, type, unit, volume,
                          project_id=None, user_id=None, res_metadata=None):
    """Build a ``hardware.*`` Sample for the host at *host_url*.

    :param host_url: parsed URL; its ``hostname`` becomes the resource id
    :param res_metadata: optional metadata merged via make_resource_metadata
    """
    resource_metadata = make_resource_metadata(res_metadata, host_url)
    return sample.Sample(
        name="hardware." + name,
        type=type,
        unit=unit,
        volume=volume,
        # BUG FIX: user_id and project_id were swapped in the Sample
        # constructor (user_id got project_id and vice versa).
        user_id=user_id,
        project_id=project_id,
        resource_id=host_url.hostname,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
        source="hardware",
    )
def make_sample_from_host(host_url, name, type, unit, volume,
                          project_id=None, user_id=None, res_metadata=None):
    """Build a ``hardware.*`` Sample for the host at *host_url*.

    :param host_url: parsed URL; its ``hostname`` becomes the resource id
    :param res_metadata: optional metadata merged via make_resource_metadata
    """
    resource_metadata = make_resource_metadata(res_metadata, host_url)
    return sample.Sample(
        name='hardware.' + name,
        type=type,
        unit=unit,
        volume=volume,
        # BUG FIX: user_id and project_id were swapped in the Sample
        # constructor (user_id got project_id and vice versa).
        user_id=user_id,
        project_id=project_id,
        resource_id=host_url.hostname,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
        source='hardware',
    )
def make_counter_from_host(host, name, type, unit, volume,
                           res_metadata=None):
    """Build a Counter for a hardware host, merging optional namedtuple
    metadata with the host's own metadata."""
    resource_metadata = {}
    if res_metadata is not None:
        # namedtuple -> plain dict of field name/value pairs
        meta = copy.copy(res_metadata)
        resource_metadata = dict(zip(meta._fields, meta))
    resource_metadata.update(hardware_host.get_metadata_from_object(host))
    return counter.Counter(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=None,
        project_id=None,
        resource_id=host.id,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
    )
def get_samples(self, manager, cache, resources=None):
    """Yield per-tenant 'storage.objects' gauges with the special
    '$Policies$' container's objects subtracted from the total.

    :param resources: unused; kept for pollster-interface compatibility.
        Fixed: was a mutable default argument (``resources=[]``).
    """
    for tenant, account in self._iter_accounts(manager.keystone, cache):
        account_data = account[0]
        container_info = account[1]
        for container in container_info:
            if container['name'] == '$Policies$':
                # NOTE(review): this writes the adjusted value back into
                # the (possibly shared, cached) account headers dict, so
                # other pollsters using the same cache see the adjusted
                # count too — confirm this side effect is intended.
                account_data['x-account-object-count'] = str(
                    int(account_data['x-account-object-count'])
                    - int(container['count']))
        yield sample.Sample(
            name='storage.objects',
            type=sample.TYPE_GAUGE,
            volume=int(account_data['x-account-object-count']),
            unit='object',
            user_id=None,
            project_id=tenant,
            resource_id=tenant,
            timestamp=timeutils.isotime(),
            resource_metadata=None,
        )
def make_vnic_counter(instance, name, type, unit, volume, vnic_data):
    """Build a vNIC Counter; metadata carries the owning instance id and
    flavor id alongside the vNIC's own fields."""
    # vnic_data is a namedtuple: convert its fields to a plain dict
    vnic_copy = copy.copy(vnic_data)
    resource_metadata = dict(zip(vnic_copy._fields, vnic_copy))
    resource_metadata['instance_id'] = instance.id
    resource_metadata['instance_type'] = (
        instance.flavor['id'] if instance.flavor else None)
    return counter.Counter(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.tenant_id,
        resource_id=vnic_data.fref,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata
    )
def get_samples(self, manager, cache, resources=None):
    """Yield a 'storage.containers.objects' gauge per container, skipping
    the special '$Policies$' bookkeeping container.

    :param resources: unused; kept for pollster-interface compatibility.
        Fixed: was a mutable default argument (``resources=[]``).
    """
    for project, account in self._iter_accounts(manager.keystone, cache):
        containers_info = account[1]
        for container in containers_info:
            # Fixed: lazy %-args so the message is only formatted when
            # INFO is enabled; also corrected the 'polster' typo.
            LOG.info('object pollster %s', container)
            if container['name'] == "$Policies$":
                continue
            yield sample.Sample(
                name='storage.containers.objects',
                type=sample.TYPE_GAUGE,
                volume=int(container['count']),
                unit='object',
                user_id=None,
                project_id=project,
                resource_id=project + '/' + container['name'],
                timestamp=timeutils.isotime(),
                resource_metadata=None,
            )
def make_vnic_counter(instance, name, type, unit, volume, vnic_data):
    """Build a vNIC Counter; metadata carries the owning instance id and
    flavor id alongside the vNIC's own fields.

    Fixed: a trailing comma after the conditional expression made
    ``instance_type`` a one-element tuple instead of the flavor id.
    """
    metadata = copy.copy(vnic_data)
    resource_metadata = dict(zip(metadata._fields, metadata))
    resource_metadata['instance_id'] = instance.id
    resource_metadata['instance_type'] = (
        instance.flavor['id'] if instance.flavor else None)
    return counter.Counter(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.tenant_id,
        resource_id=vnic_data.fref,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata
    )
def make_vnic_sample(instance, name, type, unit, volume, vnic_data):
    """Build a vNIC Sample.

    The resource id is the vNIC's ``fref`` when available; otherwise a
    synthetic ``<instance-name>-<instance-id>-<vnic-name>`` id is used.
    """
    vnic_copy = copy.copy(vnic_data)
    resource_metadata = dict(zip(vnic_copy._fields, vnic_copy))
    resource_metadata['instance_id'] = instance.id
    resource_metadata['instance_type'] = (
        instance.flavor['id'] if instance.flavor else None)
    if vnic_data.fref is None:
        instance_name = util.instance_name(instance)
        rid = "%s-%s-%s" % (instance_name, instance.id, vnic_data.name)
    else:
        rid = vnic_data.fref
    return sample.Sample(name=name,
                         type=type,
                         unit=unit,
                         volume=volume,
                         user_id=instance.user_id,
                         project_id=instance.tenant_id,
                         resource_id=rid,
                         timestamp=timeutils.isotime(),
                         resource_metadata=resource_metadata)
def make_vnic_sample(instance, name, type, unit, volume, vnic_data):
    """Build a vNIC Sample, falling back to a synthetic resource id
    when the vNIC has no ``fref``."""
    meta = copy.copy(vnic_data)
    resource_metadata = dict(zip(meta._fields, meta))
    resource_metadata["instance_id"] = instance.id
    resource_metadata["instance_type"] = (
        instance.flavor["id"] if instance.flavor else None)
    rid = vnic_data.fref
    if rid is None:
        # synthesize "<instance-name>-<instance-id>-<vnic-name>"
        rid = "%s-%s-%s" % (util.instance_name(instance),
                            instance.id,
                            vnic_data.name)
    return sample.Sample(
        name=name,
        type=type,
        unit=unit,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.tenant_id,
        resource_id=rid,
        timestamp=timeutils.isotime(),
        resource_metadata=resource_metadata,
    )
def _isotime(timestamp):
    """Return *timestamp* as ISO 8601 text with the trailing TZ
    specifier dropped.

    Fixed: replaced the Python-2-only ``unicode`` builtin with
    ``six.text_type``, matching the other ``_isotime`` variant in this
    codebase and working on both Python 2 and 3.
    """
    # drop TZ specifier
    return six.text_type(timeutils.isotime(timestamp))[:-1]
def _make_timestamps(self, count):
    """Return *count* ISO-8601 timestamps, one second apart, starting now."""
    base = timeutils.utcnow()
    one_second = datetime.timedelta(seconds=1)
    return [timeutils.isotime(base + one_second * offset)
            for offset in range(count)]
def _prepare_cache(endpoint, params, cache):
    """Fetch OpenDaylight statistics/topology data for every configured
    container and memoize the result in *cache*.

    :param endpoint: OpenDaylight NorthBound REST API endpoint URL
    :param params: query parameters; may carry 'container_name' plus
        'auth'/'user'/'password' credentials (each a one-element list)
    :param cache: per-poll dict; result stored under
        'network.statistics.opendaylight'
    :returns: dict mapping container name -> collected container data
    """
    # Return the memoized result if another pollster already fetched it.
    if "network.statistics.opendaylight" in cache:
        return cache["network.statistics.opendaylight"]
    data = {}
    container_names = params.get("container_name", ["default"])
    # Credentials arrive as one-element lists (parsed query string).
    odl_params = {}
    if "auth" in params:
        odl_params["auth"] = params["auth"][0]
    if "user" in params:
        odl_params["user"] = params["user"][0]
    if "password" in params:
        odl_params["password"] = params["password"][0]
    cs = client.Client(endpoint, odl_params)
    for container_name in container_names:
        try:
            container_data = {}
            # get flow statistics
            container_data["flow"] = cs.statistics.get_flow_statistics(
                container_name)
            # get port statistics
            container_data["port"] = cs.statistics.get_port_statistics(
                container_name)
            # get table statistics
            container_data["table"] = cs.statistics.get_table_statistics(
                container_name)
            # get topology
            container_data["topology"] = cs.topology.get_topology(
                container_name)
            # get switch informations
            container_data["switch"] = cs.switch_manager.get_nodes(
                container_name)
            # get and optimize user links
            # e.g.
            # before:
            #   "OF|2@OF|00:00:00:00:00:00:00:02"
            # after:
            #   {
            #       'port': {
            #           'type': 'OF',
            #           'id': '2'},
            #       'node': {
            #           'type': 'OF',
            #           'id': '00:00:00:00:00:00:00:02'
            #       }
            #   }
            user_links_raw = cs.topology.get_user_links(container_name)
            user_links = []
            container_data["user_links"] = user_links
            for user_link_row in user_links_raw["userLinks"]:
                user_link = {}
                for k, v in six.iteritems(user_link_row):
                    if k == "dstNodeConnector" or k == "srcNodeConnector":
                        # "<type>|<id>@<type>|<id>" -> structured dicts
                        port_raw, node_raw = v.split("@")
                        port = {}
                        port["type"], port["id"] = port_raw.split("|")
                        node = {}
                        node["type"], node["id"] = node_raw.split("|")
                        v = {"port": port, "node": node}
                    user_link[k] = v
                user_links.append(user_link)
            # get link status to hosts
            container_data["active_hosts"] = (
                cs.host_tracker.get_active_hosts(container_name))
            container_data["inactive_hosts"] = (
                cs.host_tracker.get_inactive_hosts(container_name))
            container_data["timestamp"] = timeutils.isotime()
            data[container_name] = container_data
        except Exception:
            # Best effort per container: log and continue with the rest.
            LOG.exception(_("Request failed to connect to OpenDaylight"
                            " with NorthBound REST API"))
    cache["network.statistics.opendaylight"] = data
    return data
def _prepare_cache(endpoint, params, cache):
    """Fetch OpenDaylight statistics/topology data for every configured
    container and memoize the result in *cache*.

    :param endpoint: OpenDaylight NorthBound REST API endpoint URL
    :param params: query parameters; may carry 'container_name' plus
        'auth'/'user'/'password' credentials (each a one-element list)
    :param cache: per-poll dict; result stored under
        'network.statistics.opendaylight'
    :returns: dict mapping container name -> collected container data
    """
    # Return the memoized result if another pollster already fetched it.
    if 'network.statistics.opendaylight' in cache:
        return cache['network.statistics.opendaylight']
    data = {}
    container_names = params.get('container_name', ['default'])
    # Credentials arrive as one-element lists (parsed query string).
    odl_params = {}
    if 'auth' in params:
        odl_params['auth'] = params['auth'][0]
    if 'user' in params:
        odl_params['user'] = params['user'][0]
    if 'password' in params:
        odl_params['password'] = params['password'][0]
    cs = client.Client(endpoint, odl_params)
    for container_name in container_names:
        try:
            container_data = {}
            # get flow statistics
            container_data['flow'] = cs.statistics.get_flow_statistics(
                container_name)
            # get port statistics
            container_data['port'] = cs.statistics.get_port_statistics(
                container_name)
            # get table statistics
            container_data['table'] = cs.statistics.get_table_statistics(
                container_name)
            # get topology
            container_data['topology'] = cs.topology.get_topology(
                container_name)
            # get switch informations
            container_data['switch'] = cs.switch_manager.get_nodes(
                container_name)
            # get and optimize user links
            # e.g.
            # before:
            #   "OF|2@OF|00:00:00:00:00:00:00:02"
            # after:
            #   {
            #       'port': {
            #           'type': 'OF',
            #           'id': '2'},
            #       'node': {
            #           'type': 'OF',
            #           'id': '00:00:00:00:00:00:00:02'
            #       }
            #   }
            user_links_raw = cs.topology.get_user_links(container_name)
            user_links = []
            container_data['user_links'] = user_links
            for user_link_row in user_links_raw['userLinks']:
                user_link = {}
                for k, v in six.iteritems(user_link_row):
                    if (k == "dstNodeConnector" or
                            k == "srcNodeConnector"):
                        # "<type>|<id>@<type>|<id>" -> structured dicts
                        port_raw, node_raw = v.split('@')
                        port = {}
                        port['type'], port['id'] = port_raw.split('|')
                        node = {}
                        node['type'], node['id'] = node_raw.split('|')
                        v = {'port': port, 'node': node}
                    user_link[k] = v
                user_links.append(user_link)
            # get link status to hosts
            container_data['active_hosts'] = cs.host_tracker.\
                get_active_hosts(container_name)
            container_data['inactive_hosts'] = cs.host_tracker.\
                get_inactive_hosts(container_name)
            container_data['timestamp'] = timeutils.isotime()
            data[container_name] = container_data
        except Exception:
            # Best effort per container: log and continue with the rest.
            LOG.exception(_('Request failed to connect to OpenDaylight'
                            ' with NorthBound REST API'))
    cache['network.statistics.opendaylight'] = data
    return data
def _isotime(timestamp):
    """Return *timestamp* formatted as ISO 8601 text without the
    trailing timezone specifier."""
    iso_text = six.text_type(timeutils.isotime(timestamp))
    # isotime() ends with a TZ designator; callers here want it removed
    return iso_text[:-1]
def _make_timestamps(self, count):
    """Return a list of *count* ISO-8601 timestamps taken "now".

    Note: each entry is produced by a separate ``isotime()`` call, so
    entries may be identical or differ by clock resolution.
    """
    return [timeutils.isotime() for _ in range(count)]
def _prepare_cache(endpoint, params, cache):
    """Fetch OpenDaylight statistics/topology data for every configured
    container and memoize the result in *cache*.

    :param endpoint: OpenDaylight NorthBound REST API endpoint URL
    :param params: query parameters; may carry 'container_name' plus
        'auth'/'user'/'password' credentials (each a one-element list)
    :param cache: per-poll dict; result stored under
        'network.statistics.opendaylight'
    :returns: dict mapping container name -> collected container data
    """
    # Return the memoized result if another pollster already fetched it.
    if 'network.statistics.opendaylight' in cache:
        return cache['network.statistics.opendaylight']
    data = {}
    container_names = params.get('container_name', ['default'])
    # Credentials arrive as one-element lists (parsed query string).
    odl_params = {}
    if 'auth' in params:
        odl_params['auth'] = params['auth'][0]
    if 'user' in params:
        odl_params['user'] = params['user'][0]
    if 'password' in params:
        odl_params['password'] = params['password'][0]
    cs = client.Client(endpoint, odl_params)
    for container_name in container_names:
        try:
            container_data = {}
            # get flow statistics
            container_data['flow'] = cs.statistics.get_flow_statistics(
                container_name)
            # get port statistics
            container_data['port'] = cs.statistics.get_port_statistics(
                container_name)
            # get table statistics
            container_data['table'] = cs.statistics.get_table_statistics(
                container_name)
            # get topology
            container_data['topology'] = cs.topology.get_topology(
                container_name)
            # get switch informations
            container_data['switch'] = cs.switch_manager.get_nodes(
                container_name)
            # get and optimize user links
            # e.g.
            # before:
            #   "OF|2@OF|00:00:00:00:00:00:00:02"
            # after:
            #   {
            #       'port': {
            #           'type': 'OF',
            #           'id': '2'},
            #       'node': {
            #           'type': 'OF',
            #           'id': '00:00:00:00:00:00:00:02'
            #       }
            #   }
            user_links_raw = cs.topology.get_user_links(container_name)
            user_links = []
            container_data['user_links'] = user_links
            for user_link_row in user_links_raw['userLinks']:
                user_link = {}
                for k, v in six.iteritems(user_link_row):
                    if (k == "dstNodeConnector" or
                            k == "srcNodeConnector"):
                        # "<type>|<id>@<type>|<id>" -> structured dicts
                        port_raw, node_raw = v.split('@')
                        port = {}
                        port['type'], port['id'] = port_raw.split('|')
                        node = {}
                        node['type'], node['id'] = node_raw.split('|')
                        v = {'port': port, 'node': node}
                    user_link[k] = v
                user_links.append(user_link)
            # get link status to hosts
            container_data['active_hosts'] = (
                cs.host_tracker.get_active_hosts(container_name))
            container_data['inactive_hosts'] = (
                cs.host_tracker.get_inactive_hosts(container_name))
            container_data['timestamp'] = timeutils.isotime()
            data[container_name] = container_data
        except Exception:
            # Best effort per container: log and continue with the rest.
            LOG.exception(_('Request failed to connect to OpenDaylight'
                            ' with NorthBound REST API'))
    cache['network.statistics.opendaylight'] = data
    return data
def _make_timestamps(self, count):
    """Return *count* ISO-8601 timestamps spaced one second apart,
    starting at the current UTC time."""
    stamps = []
    step = datetime.timedelta(seconds=1)
    current = timeutils.utcnow()
    for _ in range(count):
        stamps.append(timeutils.isotime(current))
        current += step
    return stamps