def take_action(self, parsed_args):
    """Benchmark pushing batches of random measures to a metric.

    Sends ``count`` measures in batches of ``batch``, evenly spread over
    the [timestamp_start, timestamp_end] range, then reports the pool's
    timing statistics (optionally waiting until the server has processed
    the backlog when ``--wait`` is given).

    :raises ValueError: if the end timestamp is not after the start, or
        the time range holds fewer seconds than points requested.
    """
    pool = BenchmarkPool(parsed_args.workers)
    LOG.info("Sending measures")
    if parsed_args.timestamp_end <= parsed_args.timestamp_start:
        raise ValueError("End timestamp must be after start timestamp")
    # If batch size is bigger than the number of measures to send, we
    # reduce it to make sure we send something.
    if parsed_args.batch > parsed_args.count:
        parsed_args.batch = parsed_args.count
    # NOTE(review): strftime("%s") (epoch seconds) is a glibc extension,
    # not portable to all platforms — confirm supported targets.
    start = int(parsed_args.timestamp_start.strftime("%s"))
    end = int(parsed_args.timestamp_end.strftime("%s"))
    count = parsed_args.batch
    if (end - start) < count:
        raise ValueError("The specified time range is not large enough "
                         "for the number of points")
    random_values = (random.randint(-2**32, 2**32)
                     for _ in six.moves.range(count))
    # One batch of measures; the same batch is re-sent `times` times.
    measures = [{"timestamp": ts, "value": v}
                for ts, v in six.moves.zip(
                    six.moves.range(start, end, (end - start) // count),
                    random_values)]
    times = parsed_args.count // parsed_args.batch
    futures = pool.map_job(functools.partial(
        utils.get_client(self).metric.add_measures,
        parsed_args.metric), itertools.repeat(measures, times),
        resource_id=parsed_args.resource_id)
    _, runtime, stats = pool.wait_job("push", futures)
    stats['measures per request'] = parsed_args.batch
    # NOTE(review): assumes stats['push speed'] looks like "NN.NN push/s";
    # [:-7] strips the trailing " push/s" suffix — confirm wait_job format.
    stats['measures push speed'] = (
        "%.2f push/s"
        % (parsed_args.batch * float(stats['push speed'][:-7])))
    if parsed_args.wait:
        sw = StopWatch()
        # Poll the server status until the measure backlog is drained.
        while True:
            status = utils.get_client(self).status.get()
            remaining = int(status['storage']['summary']['measures'])
            if remaining == 0:
                stats['extra wait to process measures'] = (
                    "%s seconds" % sw.elapsed())
                break
            else:
                LOG.info("Remaining measures to be processed: %d"
                         % remaining)
                time.sleep(1)
    return self.dict2columns(stats)
def take_action(self, parsed_args):
    """Display aggregated measures, timestamps rendered in the user TZ.

    Without ``--groupby`` the aggregated measures are returned directly;
    with it, one row per measure is emitted, prefixed by a label built
    from the group's key/value pairs.
    """
    metrics = parsed_args.metric
    if parsed_args.query:
        if len(parsed_args.metric) != 1:
            raise ValueError("One metric is required if query is provided")
        # With a query, the API expects a single metric name rather than
        # a list of metric IDs.
        metrics = parsed_args.metric[0]
    measures = utils.get_client(self).metric.aggregation(
        metrics=metrics,
        query=parsed_args.query,
        aggregation=parsed_args.aggregation,
        reaggregation=parsed_args.reaggregation,
        start=parsed_args.start,
        stop=parsed_args.stop,
        granularity=parsed_args.granularity,
        needed_overlap=parsed_args.needed_overlap,
        resource_type=parsed_args.resource_type,
        groupby=parsed_args.groupby,
        refresh=parsed_args.refresh,
        resample=parsed_args.resample,
        fill=parsed_args.fill)
    if parsed_args.groupby:
        ms = []
        for g in measures:
            # Stable, human-readable group label, e.g. "project_id: xyz".
            group_name = ", ".join("%s: %s" % (k, g['group'][k])
                                   for k in sorted(g['group']))
            for m in g['measures']:
                i = [group_name]
                # Format one measure at a time and take the single row.
                i.extend(self.format_measures_with_tz(parsed_args, [m])[0])
                ms.append(i)
        return ('group', ) + self.COLS, ms
    return self.COLS, self.format_measures_with_tz(parsed_args, measures)
def take_action(self, parsed_args):
    """Create a resource from the CLI arguments and display it."""
    body = self._resource_from_args(parsed_args)
    created = utils.get_client(self).resource.create(
        resource_type=parsed_args.resource_type,
        resource=body)
    # Table output gets the metrics mapping flattened for readability.
    if parsed_args.formatter == 'table':
        normalize_metrics(created)
    return self.dict2columns(created)
def take_action(self, parsed_args):
    """List metrics, flattening archive-policy details into each row."""
    metric_list = utils.get_client(self).metric.list(
        **utils.get_pagination_options(parsed_args))
    for entry in metric_list:
        utils.format_archive_policy(entry["archive_policy"])
        utils.format_move_dict_to_root(entry, "archive_policy")
    return utils.list2cols(self.COLS, metric_list)
def take_action(self, parsed_args):
    """Show one metric with its archive policy and resource flattened."""
    client = utils.get_client(self)
    found = client.metric.get(metric=parsed_args.metric,
                              resource_id=parsed_args.resource_id)
    utils.format_archive_policy(found["archive_policy"])
    utils.format_move_dict_to_root(found, "archive_policy")
    utils.format_resource_for_metric(found)
    return self.dict2columns(found)
def take_action(self, parsed_args):
    """Fetch and display a single resource."""
    client = utils.get_client(self)
    resource = client.resource.get(
        resource_type=parsed_args.resource_type,
        resource_id=parsed_args.resource_id)
    # Flatten the metrics mapping only for human-readable table output.
    if parsed_args.formatter == 'table':
        normalize_metrics(resource)
    return self.dict2columns(resource)
def take_action(self, parsed_args):
    """Display aggregated measures across one or several metrics."""
    metrics = parsed_args.metric
    if parsed_args.query:
        if len(parsed_args.metric) != 1:
            raise ValueError("One metric is required if query is provided")
        # A query targets a metric name, not a list of metric IDs.
        metrics = parsed_args.metric[0]
    measures = utils.get_client(self).metric.aggregation(
        metrics=metrics,
        query=parsed_args.query,
        aggregation=parsed_args.aggregation,
        reaggregation=parsed_args.reaggregation,
        start=parsed_args.start,
        stop=parsed_args.stop,
        granularity=parsed_args.granularity,
        needed_overlap=parsed_args.needed_overlap,
        resource_type=parsed_args.resource_type,
        groupby=parsed_args.groupby,
        refresh=parsed_args.refresh,
        resample=parsed_args.resample,
        fill=parsed_args.fill)
    if not parsed_args.groupby:
        return self.COLS, measures
    # One row per measure, prefixed by a label built from the group keys.
    rows = []
    for grouped in measures:
        label = ", ".join("%s: %s" % (key, grouped['group'][key])
                          for key in sorted(grouped['group']))
        for measure in grouped['measures']:
            rows.append([label] + list(measure))
    return ('group',) + self.COLS, rows
def take_action(self, parsed_args):
    """List resources of a type, omitting the verbose metrics column."""
    found = utils.get_client(self).resource.list(
        resource_type=parsed_args.resource_type,
        **utils.get_pagination_options(parsed_args))
    # Do not dump metrics because it makes the list way too long
    for resource in found:
        del resource['metrics']
    return self._list2cols(found)
def take_action(self, parsed_args):
    """Create a new resource type, with optional attribute definitions."""
    body = {'name': parsed_args.name}
    if parsed_args.attribute:
        body['attributes'] = dict(parsed_args.attribute)
    created = utils.get_client(self).resource_type.create(
        resource_type=body)
    utils.format_resource_type(created)
    return self.dict2columns(created)
def take_action(self, parsed_args):
    """Benchmark fetching the given metric(s) ``count`` times each."""
    pool = BenchmarkPool(parsed_args.workers)
    LOG.info("Getting metrics")
    futures = pool.map_job(
        utils.get_client(self).metric.get,
        parsed_args.metric * parsed_args.count,
        resource_id=parsed_args.resource_id)
    # Only the timing statistics are reported, not the fetched metrics.
    _result, _runtime, stats = pool.wait_job("show", futures)
    return self.dict2columns(stats)
def take_action(self, parsed_args):
    """Show the revision history of a resource."""
    history = utils.get_client(self).resource.history(
        resource_type=parsed_args.resource_type,
        resource_id=parsed_args.resource_id,
        **utils.get_pagination_options(parsed_args))
    if parsed_args.formatter != 'table':
        return self._list2cols(history)
    # Table output: flatten the metrics dict of every revision.
    return self._list2cols([normalize_metrics(rev) for rev in history])
def take_action(self, parsed_args):
    """Update an archive policy's definition."""
    body = utils.dict_from_parsed_args(parsed_args, ['definition'])
    updated = utils.get_client(self).archive_policy.update(
        name=parsed_args.name, archive_policy=body)
    # Pretty-print the definition only for human-readable table output.
    if parsed_args.formatter == 'table':
        utils.format_archive_policy(updated)
    return self.dict2columns(updated)
def take_action(self, parsed_args):
    """Show resource history, deriving columns from the first entry."""
    history = utils.get_client(self).resource.history(
        resource_type=parsed_args.resource_type,
        resource_id=parsed_args.resource_id,
        **utils.get_pagination_options(parsed_args))
    # Fall back to the static column list when there is no history.
    cols = history[0].keys() if history else self.COLS
    if parsed_args.formatter == 'table':
        return utils.list2cols(cols, map(normalize_metrics, history))
    return utils.list2cols(cols, history)
def take_action(self, parsed_args):
    """Report the backlog of measures waiting to be processed."""
    summary = utils.get_client(self).status.get()['storage']['summary']
    report = {
        "storage/total number of measures to process":
            summary['measures'],
        "storage/number of metric having measures to process":
            summary['metrics'],
    }
    return self.dict2columns(report)
def take_action(self, parsed_args):
    """Create an archive policy from the CLI arguments."""
    body = utils.dict_from_parsed_args(
        parsed_args,
        ['name', 'back_window', 'aggregation_methods', 'definition'])
    created = utils.get_client(self).archive_policy.create(
        archive_policy=body)
    if parsed_args.formatter == 'table':
        utils.format_archive_policy(created)
    return self.dict2columns(created)
def take_action(self, parsed_args):
    """Show a metric in compact form.

    Collapses the nested archive policy to its name and drops the
    creator identifiers so the output stays short.
    """
    found = utils.get_client(self).metric.get(
        metric=parsed_args.metric,
        resource_id=parsed_args.resource_id)
    found['archive_policy/name'] = found["archive_policy"]["name"]
    for key in ('archive_policy', 'created_by_user_id',
                'created_by_project_id'):
        del found[key]
    utils.format_resource_for_metric(found)
    return self.dict2columns(found)
def _resource_from_args(self, parsed_args, update=False):
    """Build the resource dict to send to the API from CLI arguments.

    :param parsed_args: parsed namespace; reads resource_type,
        resource_id, attribute, add_metric, create_metric and (when
        updating) delete_metric.
    :param update: when True, omit the id and merge metric changes
        into the metrics already attached to the existing resource.
    :raises exceptions.MetricNotFound: when a metric to delete does
        not exist on the resource.
    """
    # Get the resource type to set the correct type
    rt_attrs = utils.get_client(self).resource_type.get(
        name=parsed_args.resource_type)['attributes']
    resource = {}
    if not update:
        resource['id'] = parsed_args.resource_id
    if parsed_args.attribute:
        for attr in parsed_args.attribute:
            # Each attribute is given as "name:value".
            attr, __, value = attr.partition(":")
            attr_type = rt_attrs.get(attr, {}).get('type')
            # Cast the raw string to the type declared by the
            # resource type.
            if attr_type == "number":
                value = float(value)
            elif attr_type == "bool":
                value = bool(distutils.util.strtobool(value))
            resource[attr] = value
    if (parsed_args.add_metric
            or parsed_args.create_metric
            or (update and parsed_args.delete_metric)):
        if update:
            # Start from the server-side metric mapping so deletions
            # and additions are merged with what already exists.
            r = utils.get_client(self).resource.get(
                parsed_args.resource_type,
                parsed_args.resource_id)
            default = r['metrics']
            for metric_name in parsed_args.delete_metric:
                try:
                    del default[metric_name]
                except KeyError:
                    raise exceptions.MetricNotFound(
                        message="Metric name %s not found" % metric_name)
        else:
            default = {}
        resource['metrics'] = default
        for metric in parsed_args.add_metric:
            # "name:metric_id" associates an existing metric.
            name, _, value = metric.partition(":")
            resource['metrics'][name] = value
        for metric in parsed_args.create_metric:
            # "name[:archive_policy]" creates a new metric.
            name, _, value = metric.partition(":")
            # Fixed: the original tested `value is ""`, comparing string
            # identity instead of equality — unreliable (depends on
            # interning) and a SyntaxWarning on modern Python. Use
            # truthiness, matching the sibling implementation.
            if value:
                resource['metrics'][name] = {'archive_policy_name': value}
            else:
                resource['metrics'][name] = {}
    return resource
def _resource_from_args(self, parsed_args, update=False):
    """Build the resource dict to send to the API from CLI arguments.

    :param parsed_args: parsed namespace; reads resource_type,
        resource_id, attribute, add_metric, create_metric and (when
        updating) delete_metric.
    :param update: when True, omit the id and merge metric changes
        into the metrics already attached to the existing resource.
    :raises exceptions.MetricNotFound: when a metric to delete does
        not exist on the resource.
    """
    # Get the resource type to set the correct type
    rt_attrs = utils.get_client(self).resource_type.get(
        name=parsed_args.resource_type)['attributes']
    resource = {}
    if not update:
        resource['id'] = parsed_args.resource_id
    if parsed_args.attribute:
        for attr in parsed_args.attribute:
            # Each attribute is given as "name:value".
            attr, __, value = attr.partition(":")
            attr_type = rt_attrs.get(attr, {}).get('type')
            # Cast the raw string to the type declared by the
            # resource type.
            if attr_type == "number":
                value = float(value)
            elif attr_type == "bool":
                value = bool(distutils.util.strtobool(value))
            resource[attr] = value
    if (parsed_args.add_metric
            or parsed_args.create_metric
            or (update and parsed_args.delete_metric)):
        if update:
            # Start from the server-side metric mapping so deletions
            # and additions are merged with what already exists.
            r = utils.get_client(self).resource.get(
                parsed_args.resource_type,
                parsed_args.resource_id)
            default = r['metrics']
            for metric_name in parsed_args.delete_metric:
                try:
                    del default[metric_name]
                except KeyError:
                    raise exceptions.MetricNotFound(
                        message="Metric name %s not found" % metric_name)
        else:
            default = {}
        resource['metrics'] = default
        for metric in parsed_args.add_metric:
            # "name:metric_id" associates an existing metric.
            name, _, value = metric.partition(":")
            resource['metrics'][name] = value
        for metric in parsed_args.create_metric:
            # "name[:archive_policy]" creates a new metric; an empty
            # archive policy lets the server pick one via its rules.
            name, _, value = metric.partition(":")
            if value:
                resource['metrics'][name] = {'archive_policy_name': value}
            else:
                resource['metrics'][name] = {}
    return resource
def _take_action(self, metric, parsed_args):
    """Create a metric, applying optional name/unit overrides first."""
    if parsed_args.name:
        metric['name'] = parsed_args.name
    if parsed_args.unit:
        metric['unit'] = parsed_args.unit
    created = utils.get_client(self).metric.create(metric)
    # Flatten archive policy and resource details for display.
    utils.format_archive_policy(created["archive_policy"])
    utils.format_move_dict_to_root(created, "archive_policy")
    utils.format_resource_for_metric(created)
    return self.dict2columns(created)
def take_action(self, parsed_args):
    """Benchmark metric creation (and deletion, unless ``--keep``).

    Creates ``count`` metrics concurrently, then — unless the user asked
    to keep them — deletes them with a fresh pool and merges the delete
    statistics into the create statistics before display.
    """
    pool = BenchmarkPool(parsed_args.workers)
    LOG.info("Creating metrics")
    # NOTE(review): _create_new is a private client helper — presumably
    # it skips the name-based convenience path; confirm against the
    # client library.
    futures = pool.submit_job(
        parsed_args.count,
        utils.get_client(self).metric._create_new,
        archive_policy_name=parsed_args.archive_policy_name,
        resource_id=parsed_args.resource_id)
    created_metrics, runtime, stats = pool.wait_job("create", futures)
    if not parsed_args.keep:
        LOG.info("Deleting metrics")
        pool = BenchmarkPool(parsed_args.workers)
        futures = pool.map_job(utils.get_client(self).metric.delete,
                               [m['id'] for m in created_metrics])
        _, runtime, dstats = pool.wait_job("delete", futures)
        # Merge deletion timings into the report.
        stats.update(dstats)
    return self.dict2columns(stats)
def take_action(self, parsed_args):
    """Show measures of a metric, timestamps rendered in the user TZ."""
    client = utils.get_client(self)
    fetched = client.metric.get_measures(
        metric=parsed_args.metric,
        resource_id=parsed_args.resource_id,
        aggregation=parsed_args.aggregation,
        start=parsed_args.start,
        stop=parsed_args.stop,
        granularity=parsed_args.granularity,
        refresh=parsed_args.refresh,
        resample=parsed_args.resample)
    return self.COLS, self.format_measures_with_tz(parsed_args, fetched)
def take_action(self, parsed_args):
    """Show the raw measures of a metric."""
    client = utils.get_client(self)
    fetched = client.metric.get_measures(
        metric=parsed_args.metric,
        resource_id=parsed_args.resource_id,
        aggregation=parsed_args.aggregation,
        start=parsed_args.start,
        stop=parsed_args.stop,
        granularity=parsed_args.granularity,
        refresh=parsed_args.refresh,
        resample=parsed_args.resample)
    return self.COLS, fetched
def take_action(self, parsed_args):
    """Show measures with ISO-8601 formatted timestamps."""
    fetched = utils.get_client(self).metric.get_measures(
        metric=parsed_args.metric,
        resource_id=parsed_args.resource_id,
        aggregation=parsed_args.aggregation,
        start=parsed_args.start,
        stop=parsed_args.stop,
        granularity=parsed_args.granularity,
        refresh=parsed_args.refresh,
        resample=parsed_args.resample)
    # Convert datetime.datetime into string
    rows = [(timestamp.isoformat(), granularity, value)
            for timestamp, granularity, value in fetched]
    return self.COLS, rows
def take_action(self, parsed_args):
    """Benchmark fetching the same metric's measures ``count`` times.

    Reports the pool timing statistics plus the number of measures each
    request returned.
    """
    pool = BenchmarkPool(parsed_args.workers)
    LOG.info("Getting measures")
    futures = pool.submit_job(parsed_args.count,
                              utils.get_client(self).metric.get_measures,
                              metric=parsed_args.metric,
                              resource_id=parsed_args.resource_id,
                              aggregation=parsed_args.aggregation,
                              start=parsed_args.start,
                              stop=parsed_args.stop)
    result, runtime, stats = pool.wait_job("show", futures)
    # Every request queries the same range, so the first response is
    # representative of the per-request payload size.
    stats['measures per request'] = len(result[0])
    return self.dict2columns(stats)
def take_action(self, parsed_args):
    """Create a metric and display a compact view of it."""
    created = utils.get_client(self).metric._create_new(
        archive_policy_name=parsed_args.archive_policy_name,
        name=parsed_args.name,
        resource_id=parsed_args.resource_id,
        unit=parsed_args.unit,
    )
    utils.format_resource_for_metric(created)
    # Collapse the archive policy to its name when the server sent one
    # back, and drop the creator identifiers to keep the output short.
    if 'archive_policy' in created:
        created['archive_policy/name'] = created["archive_policy"]["name"]
        del created['archive_policy']
    del created['created_by_user_id']
    del created['created_by_project_id']
    return self.dict2columns(created)
def take_action(self, parsed_args):
    """Display server status, including metricd details when present."""
    status = utils.get_client(self).status.get()
    summary = status['storage']['summary']
    report = {
        "storage/total number of measures to process":
            summary['measures'],
        "storage/number of metric having measures to process":
            summary['metrics'],
    }
    # Older servers may not expose the metricd section.
    if 'metricd' in status:
        report["metricd/processors"] = status['metricd']['processors']
    return self.dict2columns(report)
def take_action(self, parsed_args):
    """Update a resource type by adding and/or removing attributes."""
    patch = []
    if parsed_args.attribute:
        for attr_name, attr_def in parsed_args.attribute:
            patch.append({'op': 'add',
                          'path': '/attributes/%s' % attr_name,
                          'value': attr_def})
    if parsed_args.remove_attribute:
        for attr_name in parsed_args.remove_attribute:
            patch.append({'op': 'remove',
                          'path': '/attributes/%s' % attr_name})
    updated = utils.get_client(self).resource_type.update(
        parsed_args.name, patch)
    utils.format_resource_type(updated)
    return self.dict2columns(updated)
def take_action(self, parsed_args):
    """Apply attribute add/remove JSON-patch operations to a type."""
    ops = []
    if parsed_args.attribute:
        ops.extend({'op': 'add',
                    'path': '/attributes/%s' % name,
                    'value': attrs}
                   for name, attrs in parsed_args.attribute)
    if parsed_args.remove_attribute:
        ops.extend({'op': 'remove', 'path': '/attributes/%s' % name}
                   for name in parsed_args.remove_attribute)
    updated = utils.get_client(self).resource_type.update(
        parsed_args.name, ops)
    utils.format_resource_type(updated)
    return self.dict2columns(updated)
def take_action(self, parsed_args):
    """Fetch aggregates computed by the dynamic aggregates API.

    With both ``--search`` and ``--groupby``, one row per measure is
    emitted, prefixed by a label built from the group's key/value pairs;
    otherwise the flattened measures are returned directly.
    """
    aggregates = utils.get_client(self).aggregates.fetch(
        operations=parsed_args.operations,
        resource_type=parsed_args.resource_type,
        search=parsed_args.search,
        start=parsed_args.start,
        stop=parsed_args.stop,
        granularity=parsed_args.granularity,
        needed_overlap=parsed_args.needed_overlap,
        groupby=parsed_args.groupby,
    )
    if parsed_args.search and parsed_args.groupby:
        ms = []
        for g in aggregates:
            # Stable, human-readable group label with sorted keys,
            # e.g. "project_id: xyz".
            group_name = ", ".join("%s: %s" % (k, g['group'][k])
                                   for k in sorted(g['group']))
            # NOTE(review): grouped responses nest the measures one
            # level deeper ("measures"->"measures") — confirm against
            # the aggregates API payload.
            for row in self.flatten_measures(g["measures"]["measures"]):
                ms.append((group_name, ) + row)
        return ('group', ) + self.COLS, ms
    return self.COLS, list(self.flatten_measures(aggregates["measures"]))
def _resource_from_args(self, parsed_args, update=False):
    """Build the resource dict to send to the API from CLI arguments.

    Unlike the typed variant, attribute values are kept as raw strings
    (no resource-type lookup is done here).

    :param parsed_args: parsed namespace; reads resource_id, attribute,
        add_metric, create_metric and (when updating) delete_metric.
    :param update: when True, omit the id and merge metric changes
        into the metrics already attached to the existing resource.
    :raises exceptions.MetricNotFound: when a metric to delete does
        not exist on the resource.
    """
    resource = {}
    if not update:
        resource['id'] = parsed_args.resource_id
    if parsed_args.attribute:
        for attr in parsed_args.attribute:
            # Each attribute is given as "name:value".
            attr, __, value = attr.partition(":")
            resource[attr] = value
    if (parsed_args.add_metric
            or parsed_args.create_metric
            or (update and parsed_args.delete_metric)):
        if update:
            # Start from the server-side metric mapping so deletions
            # and additions are merged with what already exists.
            r = utils.get_client(self).resource.get(
                parsed_args.resource_type,
                parsed_args.resource_id)
            default = r['metrics']
            for metric_name in parsed_args.delete_metric:
                try:
                    del default[metric_name]
                except KeyError:
                    raise exceptions.MetricNotFound(
                        message="Metric name %s not found" % metric_name)
        else:
            default = {}
        resource['metrics'] = default
        for metric in parsed_args.add_metric:
            # "name:metric_id" associates an existing metric.
            name, _, value = metric.partition(":")
            resource['metrics'][name] = value
        for metric in parsed_args.create_metric:
            # "name[:archive_policy]" creates a new metric.
            name, _, value = metric.partition(":")
            # Fixed: the original tested `value is ""`, comparing string
            # identity instead of equality — unreliable (depends on
            # interning) and a SyntaxWarning on modern Python. Use
            # truthiness, matching the sibling implementation.
            if value:
                resource['metrics'][name] = {'archive_policy_name': value}
            else:
                resource['metrics'][name] = {}
    return resource
def take_action(self, parsed_args):
    """Show an archive policy."""
    policy = utils.get_client(self).archive_policy.get(
        name=parsed_args.name)
    if parsed_args.formatter == 'table':
        # Pretty-print the definition for human-readable output.
        utils.format_archive_policy(policy)
    return self.dict2columns(policy)
def take_action(self, parsed_args):
    """List all archive policies."""
    policies = utils.get_client(self).archive_policy.list()
    if parsed_args.formatter == 'table':
        # Pretty-print each definition for human-readable output.
        for policy in policies:
            utils.format_archive_policy(policy)
    return utils.list2cols(self.COLS, policies)
def take_action(self, parsed_args):
    """Delete the named archive policy rule."""
    client = utils.get_client(self)
    client.archive_policy_rule.delete(parsed_args.name)
def take_action(self, parsed_args):
    """List resource types with their attributes rendered flat."""
    types = utils.get_client(self).resource_type.list()
    for rt in types:
        rt['attributes'] = utils.format_dict_dict(rt['attributes'])
    return utils.list2cols(self.COLS, types)
def take_action(self, parsed_args):
    """Delete a resource by ID."""
    client = utils.get_client(self)
    client.resource.delete(parsed_args.resource_id)
def take_action(self, parsed_args):
    """Delete every resource matching the query; show the result."""
    outcome = utils.get_client(self).resource.batch_delete(
        resource_type=parsed_args.resource_type,
        query=parsed_args.query)
    return self.dict2columns(outcome)
def take_action(self, parsed_args):
    """Delete each metric given on the command line."""
    for metric_ref in parsed_args.metric:
        utils.get_client(self).metric.delete(
            metric=metric_ref,
            resource_id=parsed_args.resource_id)
def take_action(self, parsed_args):
    """Show a single resource type."""
    rt = utils.get_client(self).resource_type.get(name=parsed_args.name)
    utils.format_resource_type(rt)
    return self.dict2columns(rt)
def take_action(self, parsed_args):
    """List all archive policy rules."""
    rules = utils.get_client(self).archive_policy_rule.list()
    return utils.list2cols(self.COLS, rules)
def take_action(self, parsed_args):
    """Show a single archive policy rule."""
    rule = utils.get_client(self).archive_policy_rule.get(
        name=parsed_args.name)
    return self.dict2columns(rule)
def take_action(self, parsed_args):
    """Show an archive policy by name."""
    policy = utils.get_client(self).archive_policy.get(
        name=parsed_args.name)
    # Pretty-print the definition only for table output.
    if parsed_args.formatter == 'table':
        utils.format_archive_policy(policy)
    return self.dict2columns(policy)
def take_action(self, parsed_args):
    """Delete an archive policy by name."""
    client = utils.get_client(self)
    client.archive_policy.delete(name=parsed_args.name)
def take_action(self, parsed_args):
    """Push the measures given on the command line to one metric."""
    client = utils.get_client(self)
    client.metric.add_measures(
        metric=parsed_args.metric,
        resource_id=parsed_args.resource_id,
        measures=parsed_args.measure,
    )
def take_action(self, parsed_args):
    """Batch-push measures for several metrics read from a JSON file."""
    with parsed_args.file as source:
        payload = json.load(source)
        utils.get_client(self).metric.batch_metrics_measures(payload)
def take_action(self, parsed_args):
    """Batch-push measures for many resources' metrics from JSON."""
    with parsed_args.file as source:
        payload = json.load(source)
        utils.get_client(self).metric.batch_resources_metrics_measures(
            payload, create_metrics=parsed_args.create_metrics)
def take_action(self, parsed_args):
    """Delete a resource type by name."""
    client = utils.get_client(self)
    client.resource_type.delete(parsed_args.name)
def take_action(self, parsed_args):
    """Create an archive policy rule from the CLI arguments."""
    body = utils.dict_from_parsed_args(
        parsed_args, ["name", "metric_pattern", "archive_policy_name"])
    created = utils.get_client(self).archive_policy_rule.create(body)
    return self.dict2columns(created)