def test_insert_retrieve_versioning(self):
    """Insert a versioned sample and verify the stored version metadata round-trips."""
    expected_status = 'pyapi.status'
    now = datetime.now()
    series = Series(ENTITY, VERSION_METRIC)
    expected_value = random.randint(0, VALUE - 1)
    # Back-date the sample slightly so it falls inside the query window.
    series.add_samples(
        Sample(value=expected_value,
               time=now - timedelta(seconds=2),
               version={'status': expected_status}))
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=VERSION_METRIC),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=now - timedelta(hours=1), end_date=now),
        versioning_filter=VersioningFilter(versioned=True))
    inserted = self.service.insert(series)
    time.sleep(WAIT_TIME)
    # Exactly one series is expected back; unpack it directly.
    series, = self.service.query(query)
    last_sample = series.data[-1]
    self.assertTrue(inserted)
    self.assertEqual(last_sample.v, expected_value)
    self.assertIsNotNone(last_sample.version)
    self.assertEqual(last_sample.version['status'], expected_status)
def test_insert_retrieve(self):
    """Insert a message and verify the queried copy matches field-by-field."""
    insert_date = datetime.now()
    msg = Message(TYPE, SOURCE, ENTITY, insert_date, SEVERITY, TAGS, MESSAGE)
    self.ms.insert(msg)
    time.sleep(2)
    query = MessageQuery(
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=insert_date, end_date=datetime.now()))
    result = self.ms.query(query)
    print(result)
    self.assertIsNotNone(result)
    self.assertGreater(len(result), 0)
    retrieved = result[0]
    self.assertIsInstance(retrieved, Message)
    # In the future may be replaced with:
    # self.assertItemsEqual(msg.__dict__.items(), retrieved.__dict__.items())
    self.assertEqual(msg.type, retrieved.type)
    self.assertEqual(msg.source, retrieved.source)
    self.assertEqual(msg.entity, retrieved.entity)
    # Uncomment when JodaTime will be replaced
    # self.assertEqual(msg.date, retrieved.date)
    self.assertEqual(msg.severity, retrieved.severity)
    self.assertEqual(msg.tags, retrieved.tags)
    self.assertEqual(msg.message, retrieved.message)
    self.assertEqual(msg.persist, retrieved.persist)
def test_aggregate_series(self):
    """Insert two samples and verify MIN/MAX aggregation over them.

    Fix: the locals were named ``max`` and ``min``, shadowing the Python
    builtins; renamed to ``max_value`` / ``min_value``.
    """
    val = random.randint(0, VALUE - 1)
    insert_series_sample(self.service, val, val + 1)
    time.sleep(WAIT_TIME)
    now = datetime.now()
    sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
    ef = EntityFilter(entity=ENTITY)
    df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
    # Two aggregators over 10-second periods -> exactly two result series.
    aggr = Aggregate(period={'count': 10, 'unit': TimeUnit.SECOND},
                     types=[AggregateType.MAX, AggregateType.MIN])
    tf = TransformationFilter(aggregate=aggr)
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df,
                        transformation_filter=tf)
    series = self.service.query(query)
    self.assertEqual(len(series), 2)
    # The order of the two aggregation results is not guaranteed.
    if series[0].aggregate['type'] == 'MAX':
        max_value = series[0].get_last_value()
        min_value = series[1].get_last_value()
    else:
        min_value = series[0].get_last_value()
        max_value = series[1].get_last_value()
    self.assertGreaterEqual(max_value, min_value)
def test_insert_retrieve(self):
    """Insert a property record and verify the queried copy matches."""
    insert_date = datetime.now()
    prop = Property(TYPE, ENTITY, TAGS, KEY, insert_date)
    self.ps.insert(prop)
    time.sleep(2)
    query = PropertiesQuery(
        type=TYPE,
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=insert_date, end_date=datetime.now()))
    result = self.ps.query(query)
    print(result)
    self.assertIsNotNone(result)
    self.assertGreater(len(result), 0)
    retrieved = result[0]
    # In the future may be replaced with:
    # self.assertItemsEqual(prop.__dict__.items(), retrieved.__dict__.items())
    self.assertIsInstance(retrieved, Property)
    self.assertEqual(prop.type, retrieved.type)
    self.assertEqual(prop.entity, retrieved.entity)
    # Uncomment when JodaTime will be replaced
    # self.assertEqual(prop.date, retrieved.date)
    self.assertEqual(prop.key, retrieved.key)
    self.assertEqual(prop.tags, retrieved.tags)
def test_value_filter(self):
    """Check value filter: NaN (None) and value=2 samples pass; 3 is filtered out.

    Fix: ``assertEqual(None, ...)`` replaced with the idiomatic
    ``assertIsNone(...)``.
    """
    insert_series_sample(self.service, None, 2, 3)
    time.sleep(WAIT_TIME)
    sf = SeriesFilter(metric=METRIC, tags={TAG: TAG_VALUE}, exact_match=True)
    ef = EntityFilter(entity=ENTITY)
    df = DateFilter(start_date='now - 1 * MINUTE', end_date="now")
    vf = ValueFilter('Float.isNaN(value) OR value=2')
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df,
                        value_filter=vf)
    series = self.service.query(query)
    self.assertIsNotNone(series)
    self.assertEqual(1, len(series))
    s = series[0]
    self.assertIsInstance(s, Series)
    # Only the NaN sample and the value=2 sample should survive the filter.
    self.assertEqual(2, len(s.data))
    self.assertIsNone(s.get_first_value())
    self.assertEqual(2, s.get_last_value())
def test_forecast(self):
    """Query FORECAST-type series and verify the returned series type."""
    end = datetime.now()
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]},
                                   type=models.SeriesType.FORECAST),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=end - timedelta(hours=1), end_date=end))
    series = self.svc.query(query)
    self.assertEqual(series[0].type, models.SeriesType.FORECAST)
def test_query_dataframe(self):
    """Query properties as a DataFrame and verify its shape and cell values."""
    query = PropertiesQuery(
        type=TYPE,
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=DATE, end_date=datetime.now()))
    result = self.service.query_dataframe(query)
    self.assertIsNotNone(result)
    # One row, five columns expected for the single inserted property.
    self.assertEqual((1, 5), result.shape)
    row = result.loc[0]
    self.assertTrue(isinstance(row['date'], datetime))
    self.assertEqual(KEY_VALUE, row[KEY_NAME])
    self.assertEqual(TYPE, row['type'])
    self.assertEqual(TAG_VALUE, row[TAG])
    self.assertEqual(ENTITY, row['entity'])
def test_group(self):
    """Group with COUNT over 1-second periods; one inserted sample counts as 1."""
    time.sleep(1)
    insert_series_sample(self.svc, VALUE - 1)
    time.sleep(WAIT_TIME)
    now = datetime.now()
    grouping = Group(type=AggregateType.COUNT,
                     period={'count': 1, 'unit': TimeUnit.SECOND})
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=now - timedelta(hours=1), end_date=now),
        transformation_filter=TransformationFilter(group=grouping))
    series = self.svc.query(query)
    self.assertEqual(series[0].get_last_value(), 1)
def test_rate(self):
    """Rate transformation: the difference between two consecutive samples."""
    first, second = 5, 3
    insert_series_sample(self.svc, first, second)
    time.sleep(WAIT_TIME + 2)
    now = datetime.now()
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=now - timedelta(hours=1), end_date=now),
        # counter=False: plain difference, negative deltas allowed.
        transformation_filter=TransformationFilter(rate=Rate(counter=False)))
    series = self.svc.query(query)
    self.assertEqual(int(series[0].get_last_value()), second - first)
def test_query_dataframe(self):
    """Query messages matching an expression as a DataFrame; verify shape and cells."""
    expression = "message LIKE '* DataFrame'"
    query = MessageQuery(
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(interval=INTERVAL, end_date=datetime.now()),
        expression=expression)
    result = self.service.query_dataframe(query)
    self.assertIsNotNone(result)
    # One row, seven columns expected for the single matching message.
    self.assertEqual((1, 7), result.shape)
    row = result.loc[0]
    self.assertTrue(isinstance(row['date'], datetime))
    self.assertEqual(MESSAGE, row['message'])
    self.assertEqual(TYPE, row['type'])
    self.assertEqual(SOURCE, row['source'])
    self.assertEqual(ENTITY, row['entity'])
    self.assertEqual(SEVERITY, row['severity'])
    self.assertEqual(TAG_VALUE, row[TAG])
def test_insert_retrieve_series(self):
    """Insert a random sample and verify it comes back as the series' last value."""
    expected = random.randint(0, VALUE - 1)
    insert_series_sample(self.service, expected)
    time.sleep(WAIT_TIME + 2)
    now = datetime.now()
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=now - timedelta(hours=1), end_date=now))
    series = self.service.query(query)
    self.assertIsNotNone(series)
    self.assertGreater(len(series), 0)
    s = series[0]
    self.assertIsInstance(s, Series)
    self.assertGreater(len(s.data), 0)
    self.assertEqual(s.get_last_value(), expected)
connection = connect_url('https://atsd_hostname:8443', 'username', 'password') # Set query entity = "axibase.com" type = 'web' source = 'access.log' # Specify date interval interval = {"count": 15, "unit": "MINUTE"} endDate = "NOW" message_service = MessageService(connection) # Query the messages and save response to DataFrame ef = EntityFilter(entity=entity) df = DateFilter(interval=interval, end_date=endDate) query = MessageQuery(entity_filter=ef, date_filter=df, type=type, source=source) messages = message_service.query_dataframe(query, columns=[ 'date', 'entity', 'geoip_city', 'geoip_country_code', 'geoip_region_name' ]) print(messages) # date entity geoip_city geoip_country_code geoip_region_name # 0 2018-07-26T17:56:39.303Z example.org Kazan RU Tatarstan
# NOTE(review): fragment — the matching `if` branch, `args`, `entities_service`,
# `log` and `svc` are defined outside this view.
else:
    metric_expression = "name like '%s'" % args.metric_filter
    title = '%s, metric filter: %s' % (title, args.metric_filter)
message = [title]
now = datetime.now()
# Metrics for the entity that received data within the last `last_hours` hours.
metrics = entities_service.metrics(
    args.entity,
    expression=metric_expression,
    min_insert_date=now - timedelta(seconds=args.last_hours * 3600),
    use_entity_insert_time=True)
log('Processing: ')
for metric in metrics:
    sf = SeriesFilter(metric=metric.name)
    ef = EntityFilter(entity=args.entity)
    # Window starts at midnight `data_interval` days ago and ends now.
    df = DateFilter(start_date=datetime(now.year, now.month, now.day)
                    - timedelta(days=args.data_interval),
                    end_date='now')
    tf = TransformationFilter()
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
    if args.aggregate_period > 0:
        # Median aggregation over fixed-length minute periods.
        tf.set_aggregate(
            Aggregate(period={'count': args.aggregate_period, 'unit': TimeUnit.MINUTE},
                      types=[AggregateType.MEDIAN]))
    if args.interpolate_period > 0:
        tf.set_interpolate(
            Interpolate(period={'count': args.interpolate_period, 'unit': TimeUnit.MINUTE},
                        function=InterpolateFunction.LINEAR))
    query.set_transformation_filter(tf)
    series_list = svc.query(query)
    for series in series_list:
        # NOTE(review): loop body continues beyond this view.
# NOTE(review): fragment — `metric`, `connection`, `dry_run`, `dst_entity`,
# `start_date`, `tag_expression`, `no_data` and `source_entity` are defined
# outside this view.
if metric_service.get(metric) is None:
    logging.warning("'" + metric + "' metric does not exist")
    exit(1)
series_service = SeriesService(connection)


def insert_or_warning(series_to_insert):
    """Insert the series unless dry-run mode is enabled."""
    if not dry_run:
        series_service.insert(series_to_insert)
    else:
        logging.warning("Dry run enabled, series are not inserted.")


dst_entity_filter = EntityFilter(dst_entity)
dst_date_filter = DateFilter(start_date, 'now')
series_filter = SeriesFilter(metric, tag_expression=tag_expression)
# One earliest non-NaN sample is enough to prove destination data exists.
limit_control = ControlFilter(limit=1, direction="ASC")
sample_filter = SampleFilter("!Double.isNaN(value)")
dst_series_query = SeriesQuery(series_filter, dst_entity_filter, dst_date_filter,
                               control_filter=limit_control,
                               sample_filter=sample_filter)
dst_series = series_service.query(dst_series_query)
if no_data(dst_series):
    logging.warning("No destination series found for '%s' entity, '%s' metric" % (dst_entity, metric))
    exit(1)


def err(tags, time=None, entity=source_entity):
    # Build a diagnostic message for a missing source series.
    error_msg = "No series found for '" + entity + "' entity, '" + metric + "' metric and '" + str(tags) + "'"
    if time is not None:
        # NOTE(review): function body continues beyond this view.
# Fixture constants for the alerts-service tests.
ENTITY = 'pyapi.alerts_service.entity'
METRIC = 'test_alert_metric_1'
RULE = 'test_rule_1'
ID = 1
ACKNOWLEDGED = False
VALUE = 1
MESSAGE = ''
TAGS = {}
TEXT_VALUE = '1'
SEVERITY = 'WARNING'
REPEAT_COUNT = 0
OPEN_VALUE = 1
INTERVAL = {"count": 2, "unit": "MINUTE"}
START_DATE = datetime.now()
# Shared filters reused by the tests below.
ef = EntityFilter(entity=ENTITY)
df = DateFilter(start_date=START_DATE, interval=INTERVAL)


class TestAlertsService(ServiceTestBase):

    def setUp(self):
        """
        Insert series to open the alert.
        """
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(VALUE, datetime.now()))
        self._series_service = SeriesService(self.connection)
        self._series_service.insert(series)
        time.sleep(self.wait_time)

    # NOTE(review): the following method is truncated in this view.
    def test_fields_match(self):
        """
# Initialize services svc = SeriesService(connection) metric_service = MetricsService(connection) metric_expression = "enabled AND persistent AND retentionDays > 0" metric_list = metric_service.list(expression=metric_expression) series_count = 0 # ATSD expired data removal schedule frequency, default is one day grace_interval_days = 1 t = PrettyTable(['Metric', 'Entity', 'Tags', 'Retention Days', 'Threshold', 'Presented Sample Date']) for metric in metric_list: # calculate datetime before which there is data threshold = datetime.now() - timedelta(days=metric.retention_days + grace_interval_days) # query series with current metric and all entities from the beginning up to threshold # enough to get at least one value, limit set to 1 sf = SeriesFilter(metric=metric.name) ef = EntityFilter(entity='*') df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=threshold) cf = ControlFilter(limit=1) query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, control_filter=cf) series_list = svc.query(query) for sl in series_list: if len(sl.data) > 0: series_count += 1 t.add_row([sl.metric, sl.entity, sl.tags, metric.retention_days, threshold, to_iso(sl.data[0].get_date())]) print(t) print("\nSeries count with broken retention date is %d." % series_count)
# set series metric = 'm-to-delete' entity = 'e-to-delete' tags = {'tag_key_1': 'tag_value_1', 'tag_key_2': 'tag_value_2'} # specify date interval startDate = "2018-10-01T00:00:00Z" endDate = "2018-10-02T00:00:00Z" series_service = SeriesService(connection) # query the series to be deleted, use exactMatch to exclude not specified tags sf = SeriesFilter(metric=metric, tags=tags, exact_match=True) ef = EntityFilter(entity=entity) df = DateFilter(start_date=startDate, end_date=endDate) query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df) series_list = series_service.query(query) if len(series_list) > 1: raise Exception('There are multiple series meet the requirements') series = series_list[0] # check data existence if len(series.data) == 0: print('No data in required interval') else: # replace value of samples with nan for sample in series.data: print("- Deleting %s, %s " % (sample.get_date(), sample.v))
# set the name of entity_group and tag expression entity_group_name = 'docker-containers' property_type = 'docker.container.config.env' tag_expression = 'env.*' eg_service = EntityGroupsService(connection) properties_service = PropertiesService(connection) entities_list = eg_service.get_entities(entity_group_name, tags=tag_expression) # exclude entities that have no required tags entities = [entity for entity in entities_list if entity.tags] entities_service = EntitiesService(connection) # prepare property query ef = EntityFilter('entity') df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now()) property_query = PropertiesQuery(entity_filter=ef, date_filter=df, type=property_type) print('entity_name,entity_label,tags') for entity in entities: pretty_tags = print_tags(entity.tags) for key in entity.tags: entity.tags[key] = '' # set actual entity and execute property query property_query.set_entity_filter(EntityFilter(entity.name)) properties_list = properties_service.query(property_query) # set entity tags from property tags if properties_list:
# NOTE(review): fragment — `line`, `fp` and `svc` are defined outside this view.
# Reads CSV lines describing forecast queries and executes them one by one.
while line:
    # skip commented lines
    if line.startswith('#'):
        line = fp.readline()
        continue
    # CSV fields: metric, entity, interval (count-unit), end date, forecast name, comments
    metric_name, entity_name, interval, end_date, forecast_name, comments = line.split(
        ',')
    count, unit = interval.split('-')
    sf = SeriesFilter(metric=metric_name)
    ef = EntityFilter(entity=entity_name)
    df = DateFilter(end_date=end_date, interval={
        'count': count,
        'unit': unit
    })
    ff = None
    if forecast_name:
        sf.set_type('FORECAST')
        # '-' requests the default forecast, i.e. no named forecast filter.
        if forecast_name != '-':
            ff = ForecastFilter(forecast_name=forecast_name)
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df,
                        forecast_filter=ff)
    series = svc.query(query)
    # NOTE(review): loop body continues beyond this view.