def test_GlobMetricResult_be_built_from_a_metric(self):
    """from_value() should wrap a Metric and keep it reachable via .value."""
    wrapped = bg_metric.Metric(
        name="foo.bar",
        id="0",
        metadata=bg_metric.MetricMetadata.create(),
    )
    result = bg_glob.GlobMetricResult.from_value(wrapped)
    self.assertEqual(result.value, wrapped)
def test_GlobMetricResult_built_from_a_metric_should_discover_its_name(
        self):
    """from_value() should expose the wrapped Metric's name as .name."""
    expected_name = "foo.bar"
    wrapped = bg_metric.Metric(
        name=expected_name,
        id="0",
        metadata=bg_metric.MetricMetadata.create(),
    )
    result = bg_glob.GlobMetricResult.from_value(wrapped)
    self.assertEqual(result.name, expected_name)
def setUp(self):
    """Set up a Downsampler, aggregating with the sum and average function."""
    # Two stages: CAPACITY points at PRECISION, then at PRECISION**2.
    retention = bg_metric.Retention.from_string(
        "%d*%ds:%d*%ds" % (
            self.CAPACITY,
            self.PRECISION,
            self.CAPACITY,
            self.PRECISION ** 2,
        )
    )
    self.stage_0 = retention.stages[0]
    self.stage_1 = retention.stages[1]

    def build_metric(name, aggregator):
        # Each metric gets a fresh uuid and its own metadata sharing
        # the retention policy built above.
        metadata = bg_metric.MetricMetadata.create(
            aggregator=aggregator, retention=retention
        )
        return bg_metric.Metric(name, uuid.uuid4(), metadata)

    self.metric_sum = build_metric(
        self.METRIC_NAME_SUM, bg_metric.Aggregator.total)
    self.metric_avg = build_metric(
        self.METRIC_NAME_AVG, bg_metric.Aggregator.average)
    self.ds = bg_ds.Downsampler(self.CAPACITY)
def _cache_get(self, metric_name):
    """Return a Metric from the cache, None if no such metric.

    Args:
      metric_name: name of the metric to look up.

    Returns:
      A (metric, found) tuple:
        - (None, True): negative cache entry, the metric is known absent.
        - (None, False): cache miss, or a corrupt entry that was evicted.
        - (Metric, True): cache hit.
    """
    encoded_metric_name = self._encode(metric_name)

    def _evict():
        # Drop a corrupt or stale entry so it gets rebuilt on next lookup.
        with self.__env.begin(self.__metric_to_metadata_db, write=True) as txn:
            txn.delete(key=encoded_metric_name)

    with self.__env.begin(self.__metric_to_metadata_db, write=False) as txn:
        payload = txn.get(encoded_metric_name)

    if payload == self._EMPTY:
        # Negative entry: we cached the fact that the metric does not exist.
        return None, True

    if payload is not None:
        payload = self._decode(payload)

    if not payload:
        # cache miss
        return None, False

    # found something in the cache
    split = self.__split_payload(payload)

    if split is None:
        # invalid string => evict from cache
        _evict()
        return None, False

    # valid value => get id and metadata string
    # TODO: optimization: id is a UUID (known length)
    id_str, metadata_str, timestamp = split
    try:
        # Named metric_id (not `id`) to avoid shadowing the builtin.
        metric_id = uuid.UUID(id_str)
    except Exception as e:
        # Best-effort: any malformed id means the entry is unusable.
        logging.debug(str(e))
        _evict()
        return None, False

    # if the timestamp expired evict it in order to force
    # its recreation for the next time; the stale value is still served now.
    if self.__expired_timestamp(timestamp):
        _evict()

    metadata = self.metadata_from_str(metadata_str)
    return bg_metric.Metric(metric_name, metric_id, metadata), True
def test_document_from_metric_should_build_a_document_from_a_metric(self):
    """document_from_metric should expose path parts, config and dates."""
    components = ("foo", "bar", "baz")
    metric_name = ".".join(components)
    metric_id = uuid.uuid5(
        uuid.UUID("{00000000-1111-2222-3333-444444444444}"), metric_name)
    aggregator = Aggregator.maximum
    retention_str = "42*1s:43*60s"
    retention = Retention.from_string(retention_str)
    carbon_xfilesfactor = 0.5
    metadata = MetricMetadata.create(
        aggregator, retention, carbon_xfilesfactor)
    metric = bg_metric.Metric(
        metric_name,
        metric_id,
        metadata,
        created_on=datetime.datetime(2017, 1, 1),
        updated_on=datetime.datetime(2018, 2, 2),
    )

    document = bg_elasticsearch.document_from_metric(metric)

    # Path components are flattened into depth + p0..pN fields.
    self.__check_document_value(document, "depth", 2)
    self.__check_document_value(document, "uuid", metric_id)
    for index, component in enumerate(components):
        self.__check_document_value(document, "p%d" % index, component)

    # Metadata is nested under "config".
    self.assertTrue("config" in document)
    document_config = document["config"]
    self.__check_document_value(
        document_config, "aggregator", aggregator.name)
    self.__check_document_value(
        document_config, "retention", retention_str)
    self.__check_document_value(
        document_config, "carbon_xfilesfactor", "%f" % carbon_xfilesfactor)

    # Creation/update dates are carried over verbatim as datetimes.
    for date_field in ("created_on", "updated_on"):
        self.assertTrue(date_field in document)
        self.assertTrue(
            isinstance(document[date_field], datetime.datetime))
        self.assertEqual(getattr(metric, date_field), document[date_field])

    # A freshly built document has never been read.
    self.assertTrue("read_on" in document)
    self.assertEqual(document["read_on"], None)
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from mock import Mock from biggraphite import metric as bg_metric from biggraphite.drivers import hybrid DEFAULT_METRIC_NAME = "foo.bar" DEFAULT_METADATA = bg_metric.MetricMetadata() DEFAULT_METRIC = bg_metric.Metric(DEFAULT_METRIC_NAME, "id", DEFAULT_METADATA) DEFAULT_GLOB = "foo.bar.**" class TestHybridAccessor(unittest.TestCase): def setUp(self): self._metadata_accessor = Mock() self._metadata_accessor.TYPE = 'mock' self._data_accessor = Mock() self._data_accessor.TYPE = 'mock' self._accessor = hybrid.HybridAccessor("test_hybrid", self._metadata_accessor, self._data_accessor) def test_connect_should_be_called_on_both_accessors(self):