class TestNoneCache(unittest.TestCase):
    """Tests for bg_metadata_cache.NoneCache.

    NoneCache is a pass-through cache: it claims to know every metric and
    delegates every lookup straight to the accessor.
    """

    TEST_METRIC_NAME = "foo.bar"
    TEST_METRIC = bg_metric.make_metric_with_defaults(
        TEST_METRIC_NAME, bg_metric.MetricMetadata.create()
    )

    def setUp(self):
        # A mock accessor lets us observe exactly which calls the cache makes.
        accessor = Mock()
        accessor.TYPE = 'mock'
        self._accessor = accessor
        self._cache = bg_metadata_cache.NoneCache(accessor, None)

    def test_has_metric_should_always_be_true(self):
        # The null cache optimistically reports every metric as present.
        self.assertTrue(self._cache.has_metric("foo.bar"))

    def test_has_metric_should_not_use_the_accessor(self):
        self._cache.has_metric("foo.bar")
        self.assert_empty(self._accessor.method_calls)

    def test_get_metric_should_always_use_the_accessor(self):
        # get_metric must be forwarded verbatim to the backing accessor.
        self._accessor.get_metric.return_value = self.TEST_METRIC
        self._cache.get_metric(self.TEST_METRIC_NAME)
        self._accessor.get_metric.assert_called_once()
        self._accessor.get_metric.assert_called_with(self.TEST_METRIC_NAME)

    def assert_empty(self, iterable):
        """Assert that *iterable* has no elements."""
        self.assertEqual(0, len(iterable))
def make_metric_with_defaults(name, metadata=None, **kwargs):
    """Create a bg_accessor.Metric with specified metadata.

    Args:
        name: metric name.
        metadata: optional MetricMetadata; mutually exclusive with **kwargs.
        **kwargs: MetricMetadata.create() keyword arguments; a string
            "retention" value is parsed with Retention.from_string().

    Returns:
        A Metric built by bg_metric.make_metric_with_defaults().
    """
    raw_retention = kwargs.get("retention")
    if isinstance(raw_retention, str):
        # Accept the textual retention form and parse it up front so that
        # MetricMetadata.create() receives a real Retention object.
        kwargs["retention"] = bg_metric.Retention.from_string(raw_retention)

    if not metadata:
        metadata = bg_metric.MetricMetadata.create(**kwargs)
    else:
        # Passing both a ready-made metadata object and creation kwargs is a
        # programming error; the two forms are mutually exclusive.
        assert isinstance(metadata, bg_metric.MetricMetadata)
        assert not kwargs

    return bg_metric.make_metric_with_defaults(name, metadata)
def _document_to_metric(self, document):
    """Build a Metric from an Elasticsearch document.

    The document's ``config`` sub-object holds the metadata as strings and
    its timestamp fields are parsed back into datetimes.
    """
    metadata = bg_metric.MetricMetadata.from_string_dict(
        document.config.to_dict()
    )
    # TODO: Have a look at dsl doc to avoid parsing strings to dates
    # https://github.com/elastic/elasticsearch-dsl-py/blob/master/docs/persistence.rst
    parse = ttls.str_to_datetime
    return bg_metric.make_metric_with_defaults(
        document.name,
        metadata,
        created_on=parse(document.created_on),
        updated_on=parse(document.updated_on),
        read_on=parse(document.read_on),
    )
def _get_metric_tuples(accessor, src, dst, recursive, src_retention, dst_retention, dry_run=True):
    """Yield (src_metric, dst_metric) pairs for copying *src* to *dst*.

    Lists metrics matching *src* (recursively with the ``.**`` glob when
    *recursive*), optionally overrides the source/destination retentions,
    and makes sure each destination metric exists with the requested
    retention before yielding the pair.  With *dry_run* no metric is
    created or updated on the accessor.

    Yields nothing (returns early) when the listing raises TooManyMetrics.
    """
    pattern = "%s.**" % src if recursive else src
    try:
        src_metrics = list_metrics(accessor, pattern)
    except TooManyMetrics as e:
        # Refuse to copy an unexpectedly huge subtree.
        log.error("%s; copy aborted" % e)
        return
    # Prepare retention override: parse the textual retention specs once,
    # outside the loop.
    if src_retention:
        src_retention = bg_metric.Retention.from_string(src_retention)
    if dst_retention:
        dst_retention = bg_metric.Retention.from_string(dst_retention)
    for src_metric in src_metrics:
        # Snapshot the original metadata: the aggregator/xfilesfactor used
        # for the destination come from it even if src retention is swapped.
        src_metadata = src_metric.metadata
        if src_retention and src_metric.metadata.retention != src_retention:
            src_metric.metadata = bg_metric.MetricMetadata.create(
                src_metadata.aggregator,
                src_retention,
                src_metadata.carbon_xfilesfactor)
        # Only the first occurrence of the src prefix is rewritten.
        dst_metric_name = src_metric.name.replace(src, dst, 1)
        dst_metric = accessor.get_metric(dst_metric_name)
        if dst_metric is None:
            log.debug("Metric '%s' was not found and will be created" % dst_metric_name)
            # NOTE(review): when no dst_retention override is given this
            # passes None as the retention — presumably MetricMetadata.create
            # applies a default; confirm in bg_metric.
            dst_metadata = bg_metric.MetricMetadata.create(
                src_metadata.aggregator,
                dst_retention,
                src_metadata.carbon_xfilesfactor)
            dst_metric = bg_metric.make_metric_with_defaults(
                dst_metric_name, dst_metadata)
            if not dry_run:
                accessor.create_metric(dst_metric)
        elif dst_retention and dst_metric.metadata.retention != dst_retention:
            # Existing destination with the wrong retention: rebuild its
            # metadata keeping its own aggregator and xfilesfactor.
            log.debug(
                "Metric '%s' was found without '%s' retention and will be updated"
                % (dst_metric_name, dst_retention.as_string))
            dst_metadata = dst_metric.metadata
            dst_metric.metadata = bg_metric.MetricMetadata.create(
                dst_metadata.aggregator,
                dst_retention,
                dst_metadata.carbon_xfilesfactor)
            if not dry_run:
                accessor.update_metric(dst_metric_name, dst_metric.metadata)
        yield (src_metric, dst_metric)
def _document_to_metric(self, document):
    """Turn an Elasticsearch search hit into a Metric.

    Metadata fields are stored as strings in the document's ``config``
    object; timestamp fields are converted back to datetime objects.
    """
    config_dict = document.config.to_dict()
    metadata = bg_metric.MetricMetadata.from_string_dict(config_dict)
    # TODO: Have a look at dsl doc to avoid parsing strings to dates
    # https://github.com/elastic/elasticsearch-dsl-py/blob/master/docs/persistence.rst
    timestamps = {
        "created_on": ttls.str_to_datetime(document.created_on),
        "updated_on": ttls.str_to_datetime(document.updated_on),
        "read_on": ttls.str_to_datetime(document.read_on),
    }
    return bg_metric.make_metric_with_defaults(document.name, metadata, **timestamps)
def _get_metric_tuples(
    accessor, src, dst, recursive, src_retention, dst_retention, dry_run=True
):
    """Generate (src_metric, dst_metric) pairs to drive a metric copy.

    For every metric under *src* (the whole subtree when *recursive*), the
    corresponding destination metric is fetched, created when missing, or
    updated when its retention differs from *dst_retention*.  *dry_run*
    suppresses the create/update calls on the accessor.

    Returns early (yielding nothing) if listing raises TooManyMetrics.
    """
    pattern = "%s.**" % src if recursive else src
    try:
        src_metrics = list_metrics(accessor, pattern)
    except TooManyMetrics as e:
        # Too many metrics to copy safely — abort instead of looping.
        log.error("%s; copy aborted" % e)
        return
    # Prepare retention override: textual specs are parsed once up front.
    if src_retention:
        src_retention = bg_metric.Retention.from_string(src_retention)
    if dst_retention:
        dst_retention = bg_metric.Retention.from_string(dst_retention)
    for src_metric in src_metrics:
        # Keep a handle on the pre-override metadata: its aggregator and
        # xfilesfactor seed the destination metadata below.
        src_metadata = src_metric.metadata
        if src_retention and src_metric.metadata.retention != src_retention:
            src_metric.metadata = bg_metric.MetricMetadata.create(
                src_metadata.aggregator,
                src_retention,
                src_metadata.carbon_xfilesfactor)
        # Rewrite only the first occurrence of the source prefix.
        dst_metric_name = src_metric.name.replace(src, dst, 1)
        dst_metric = accessor.get_metric(dst_metric_name)
        if dst_metric is None:
            log.debug(
                "Metric '%s' was not found and will be created" % dst_metric_name
            )
            # NOTE(review): dst_retention may be None here when no override
            # was requested — presumably MetricMetadata.create falls back to
            # a default retention; confirm in bg_metric.
            dst_metadata = bg_metric.MetricMetadata.create(
                src_metadata.aggregator,
                dst_retention,
                src_metadata.carbon_xfilesfactor)
            dst_metric = bg_metric.make_metric_with_defaults(dst_metric_name, dst_metadata)
            if not dry_run:
                accessor.create_metric(dst_metric)
        elif dst_retention and dst_metric.metadata.retention != dst_retention:
            # The destination exists but with a different retention: rebuild
            # its metadata, preserving its own aggregator/xfilesfactor.
            log.debug(
                "Metric '%s' was found without '%s' retention and will be updated"
                % (dst_metric_name, dst_retention.as_string)
            )
            dst_metadata = dst_metric.metadata
            dst_metric.metadata = bg_metric.MetricMetadata.create(
                dst_metadata.aggregator,
                dst_retention,
                dst_metadata.carbon_xfilesfactor)
            if not dry_run:
                accessor.update_metric(dst_metric_name, dst_metric.metadata)
        yield (src_metric, dst_metric)
def run(self, accessor, opts):
    """Run the command.

    Inserts ``opts.count`` identical points into ``opts.metric``, creating
    the metric first (from opts.aggregator/retention/x_files_factor) when
    it does not exist yet.
    """
    accessor.connect()

    metric = accessor.get_metric(opts.metric)
    if not metric:
        print("Metric '%s' was not found and will be created" % opts.metric)
        aggregator = bg_metric.Aggregator.from_config_name(opts.aggregator)
        retention = bg_metric.Retention.from_string(opts.retention)
        metadata = bg_metric.MetricMetadata.create(
            aggregator=aggregator,
            retention=retention,
            carbon_xfilesfactor=opts.x_files_factor,
        )
        metric = bg_metric.make_metric_with_defaults(opts.metric, metadata)
        accessor.create_metric(metric)

    # All points share the same timestamp and value.
    timestamp = int(time.mktime(opts.timestamp.timetuple()))
    points = [(timestamp, float(opts.value))] * opts.count
    accessor.insert_points_async(metric, points)
def update_metric(self, name, updated_metadata):
    """See bg_accessor.Accessor.

    Replaces the metric's metadata by re-indexing a fresh document that
    keeps created_on/read_on and bumps updated_on.

    Raises:
        InvalidArgumentError: if no metric named *name* exists.
    """
    super(_ElasticSearchAccessor, self).update_metric(name, updated_metadata)
    tracing.add_attr_to_trace("metric.name", name)

    name = bg_metric.sanitize_metric_name(name)
    existing = self.get_metric(name)
    if existing is None:
        raise InvalidArgumentError("Unknown metric '%s'" % name)

    # NOTE(review): datetime.now() is naive (no tzinfo) — presumably the
    # rest of the accessor also stores naive local times; confirm.
    replacement = bg_metric.make_metric_with_defaults(
        name,
        updated_metadata,
        created_on=existing.created_on,
        updated_on=datetime.datetime.now(),
        read_on=existing.read_on,
    )
    self.create_metric(replacement)
def run(self, accessor, opts):
    """Run the command.

    Writes ``opts.count`` points (same timestamp, same value) into the
    metric named by ``opts.metric``; the metric is created on the fly when
    the accessor does not know it.
    """
    accessor.connect()

    metric = accessor.get_metric(opts.metric)
    if not metric:
        print("Metric '%s' was not found and will be created" % opts.metric)
        metadata = bg_metric.MetricMetadata.create(
            aggregator=bg_metric.Aggregator.from_config_name(opts.aggregator),
            retention=bg_metric.Retention.from_string(opts.retention),
            carbon_xfilesfactor=opts.x_files_factor,
        )
        metric = bg_metric.make_metric_with_defaults(opts.metric, metadata)
        accessor.create_metric(metric)

    # Build the point list once: opts.count copies of (timestamp, value).
    ts = int(time.mktime(opts.timestamp.timetuple()))
    value = float(opts.value)
    accessor.insert_points_async(metric, [(ts, value)] * opts.count)
def import_whisper(self, path):
    """Import one Whisper file: create its metric and insert its points.

    Args:
        path: filesystem path of the .wsp file to import.

    Returns:
        The number of points inserted (0 when the file's metadata could
        not be read, or when point import is disabled by --no-data).
    """
    if not self._accessor.is_connected:
        self._accessor.connect()

    name = metric_name_from_wsp(self._opts.root_directory, self._opts.prefix, path)
    metadata = self._read_metadata(name, path)
    # BUG FIX: the emptiness check must run before touching metadata.  The
    # original code called metadata.as_string_dict() in the debug log first,
    # which raised AttributeError instead of returning 0 whenever
    # _read_metadata() returned None.
    if not metadata:
        return 0
    log.debug("%s: %s" % (name, metadata.as_string_dict()))

    metric = bg_metric.make_metric_with_defaults(name, metadata)
    if not self._opts.no_metadata:
        self._accessor.create_metric(metric)

    ret = 0
    if not self._opts.no_data:
        points = self._read_points(path)
        self._accessor.insert_downsampled_points(metric, points)
        ret = len(points)
    return ret