def test_insert_retrieve_versioning(self):
    """Insert a versioned sample and verify the stored version fields.

    Inserts one sample carrying a 'status' version field, queries the
    series back with versioning enabled, and checks that both the value
    and the version metadata survived the round trip.
    """
    test_status = 'pyapi.status'
    now = datetime.now()
    val = random.randint(0, VALUE - 1)
    # Single versioned sample, stamped 2 seconds in the past.
    versioned_sample = Sample(value=val,
                              time=now - timedelta(seconds=2),
                              version={'status': test_status})
    series = Series(ENTITY, VERSION_METRIC)
    series.add_samples(versioned_sample)
    # Query the last hour with version data requested.
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=VERSION_METRIC),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=now - timedelta(hours=1),
                               end_date=now),
        versioning_filter=VersioningFilter(versioned=True))
    successful = self.service.insert(series)
    time.sleep(WAIT_TIME)  # give the server time to commit the insert
    series, = self.service.query(query)
    last_sample = series.data[-1]
    self.assertTrue(successful)
    self.assertEqual(last_sample.v, val)
    self.assertIsNotNone(last_sample.version)
    self.assertEqual(last_sample.version['status'], test_status)
def test_aggregate_series(self):
    """Query MAX/MIN aggregates over a two-sample series.

    Inserts two consecutive values, requests both MAX and MIN aggregates
    over 10-second periods, and verifies that exactly two series come
    back and that the maximum is not smaller than the minimum.
    """
    val = random.randint(0, VALUE - 1)
    insert_series_sample(self.service, val, val + 1)
    time.sleep(WAIT_TIME)
    now = datetime.now()
    sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
    ef = EntityFilter(entity=ENTITY)
    df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
    aggr = Aggregate(period={'count': 10, 'unit': TimeUnit.SECOND},
                     types=[AggregateType.MAX, AggregateType.MIN])
    tf = TransformationFilter(aggregate=aggr)
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df,
                        transformation_filter=tf)
    series = self.service.query(query)
    # One series per requested aggregate type.
    self.assertEqual(len(series), 2)
    # The server does not guarantee response order, so dispatch on the
    # reported aggregate type. Renamed from max/min to avoid shadowing
    # the builtins of the same name.
    if series[0].aggregate['type'] == 'MAX':
        max_val = series[0].get_last_value()
        min_val = series[1].get_last_value()
    else:
        min_val = series[0].get_last_value()
        max_val = series[1].get_last_value()
    self.assertGreaterEqual(max_val, min_val)
def test_value_filter(self):
    """Check value filter."""
    insert_series_sample(self.service, None, 2, 3)
    time.sleep(WAIT_TIME)
    # Keep only NaN samples and samples equal to 2; exact_match pins the
    # tag set so series with extra tags are excluded.
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC,
                                   tags={TAG: TAG_VALUE},
                                   exact_match=True),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date='now - 1 * MINUTE', end_date="now"),
        value_filter=ValueFilter('Float.isNaN(value) OR value=2'))
    series = self.service.query(query)
    self.assertIsNotNone(series)
    self.assertEqual(1, len(series))
    result = series[0]
    self.assertIsInstance(result, Series)
    # The value 3 must have been filtered out: NaN first, then 2.
    self.assertEqual(2, len(result.data))
    self.assertEqual(None, result.get_first_value())
    self.assertEqual(2, result.get_last_value())
def test_forecast(self):
    """Query FORECAST-type series and verify the returned series type."""
    end = datetime.now()
    forecast_filter = SeriesFilter(metric=METRIC,
                                   tags={TAG: [TAG_VALUE]},
                                   type=models.SeriesType.FORECAST)
    query = SeriesQuery(
        series_filter=forecast_filter,
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=end - timedelta(hours=1),
                               end_date=end))
    series = self.svc.query(query)
    self.assertEqual(series[0].type, models.SeriesType.FORECAST)
def test_group(self):
    """Group by COUNT over 1-second periods; one sample yields count 1."""
    time.sleep(1)
    insert_series_sample(self.svc, VALUE - 1)
    time.sleep(WAIT_TIME)
    end = datetime.now()
    grouping = Group(type=AggregateType.COUNT,
                     period={'count': 1, 'unit': TimeUnit.SECOND})
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=end - timedelta(hours=1),
                               end_date=end),
        transformation_filter=TransformationFilter(group=grouping))
    series = self.svc.query(query)
    self.assertEqual(series[0].get_last_value(), 1)
def test_rate(self):
    """Insert two consecutive values and verify the non-counter rate."""
    first, second = 5, 3
    insert_series_sample(self.svc, first, second)
    time.sleep(WAIT_TIME + 2)
    end = datetime.now()
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=end - timedelta(hours=1),
                               end_date=end),
        transformation_filter=TransformationFilter(rate=Rate(counter=False)))
    series = self.svc.query(query)
    # With counter=False the rate is the signed difference between
    # consecutive values: 3 - 5 = -2.
    self.assertEqual(int(series[0].get_last_value()), second - first)
def test_insert_retrieve_series(self):
    """Insert a random value and read it back as the latest sample."""
    val = random.randint(0, VALUE - 1)
    insert_series_sample(self.service, val)
    time.sleep(WAIT_TIME + 2)
    end = datetime.now()
    query = SeriesQuery(
        series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
        entity_filter=EntityFilter(entity=ENTITY),
        date_filter=DateFilter(start_date=end - timedelta(hours=1),
                               end_date=end))
    series = self.service.query(query)
    self.assertIsNotNone(series)
    self.assertGreater(len(series), 0)
    first_series = series[0]
    self.assertIsInstance(first_series, Series)
    self.assertGreater(len(first_series.data), 0)
    self.assertEqual(first_series.get_last_value(), val)
connection = connect_url('https://atsd_hostname:8443', 'username', 'password') # disable deleting inappropriate values deleteValues = False # specify metric name metric_name = "ca.daily.reservoir_storage_af" svc = SeriesService(connection) metrics_service = MetricsService(connection) # query series with current metric and all entities sf = SeriesFilter(metric=metric_name) ef = EntityFilter(entity='*') df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now()) query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df) series = svc.query(query) if deleteValues: print('Inappropriate values to be deleted.\n') else: print('Leave as is inappropriate values.\n') print('metric,entity,tags,data') for s in series: # filter non-positive values s.data = [ sample for sample in s.data if sample.v is not None and sample.v <= 0 ] if len(s.data) > 0:
# Specify date interval startDate = "2018-10-01T00:00:00Z" endDate = "2018-10-02T00:00:00Z" # Exclude samples with NaN values (NaN represents deleted values) expr = '!Float.isNaN(value)' series_service = SeriesService(connection) # Query the series to be deleted, use exactMatch to exclude not specified tags sf = SeriesFilter(metric=metric, tags=tags, exact_match=True) ef = EntityFilter(entity=entity) df = DateFilter(start_date=startDate, end_date=endDate) vf = SampleFilter(expr) query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, sample_filter=vf) series_list = series_service.query(query) if len(series_list) > 1: raise Exception('There are multiple series meet the requirements') series = series_list[0] # Check data existence if len(series.data) == 0: print('No data in required interval') else: # Replace value of samples with NaN for sample in series.data: print("- Deleting %s, %s " % (sample.get_date(), sample.v))
# Build a report over recently-inserted metrics of one entity.
# NOTE(review): `title`, `args`, `entities_service`, `svc`, `log`,
# `print_tags` and `metric_expression` come from earlier, unseen parts
# of the script; `message` is presumably assembled further below.
message = [title]
now = datetime.now()
# Only metrics with inserts within the last `args.last_hours` hours.
metrics = entities_service.metrics(
    args.entity,
    expression=metric_expression,
    min_insert_date=now - timedelta(seconds=args.last_hours * 3600),
    use_entity_insert_time=True)
log('Processing: ')
for metric in metrics:
    sf = SeriesFilter(metric=metric.name)
    ef = EntityFilter(entity=args.entity)
    # Start at midnight `args.data_interval` days ago.
    df = DateFilter(
        start_date=datetime(now.year, now.month, now.day) - timedelta(days=args.data_interval),
        end_date='now')
    tf = TransformationFilter()
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
    # Optional transformations, driven by command-line arguments.
    if args.aggregate_period > 0:
        tf.set_aggregate(
            Aggregate(period={'count': args.aggregate_period, 'unit': TimeUnit.MINUTE},
                      types=[AggregateType.MEDIAN]))
    if args.interpolate_period > 0:
        tf.set_interpolate(
            Interpolate(period={'count': args.interpolate_period, 'unit': TimeUnit.MINUTE},
                        function=InterpolateFunction.LINEAR))
    query.set_transformation_filter(tf)
    series_list = svc.query(query)
    for series in series_list:
        metric_id = '- %s %s' % (series.metric, print_tags(series.tags))
        log('\t' + metric_id)
        # NOTE(review): loop body appears to continue past this excerpt.
# Initialize services svc = SeriesService(connection) metric_service = MetricsService(connection) metric_expression = "enabled AND persistent AND retentionDays > 0" metric_list = metric_service.list(expression=metric_expression) series_count = 0 # ATSD expired data removal schedule frequency, default is one day grace_interval_days = 1 t = PrettyTable(['Metric', 'Entity', 'Tags', 'Retention Days', 'Threshold', 'Presented Sample Date']) for metric in metric_list: # calculate datetime before which there is data threshold = datetime.now() - timedelta(days=metric.retention_days + grace_interval_days) # query series with current metric and all entities from the beginning up to threshold # enough to get at least one value, limit set to 1 sf = SeriesFilter(metric=metric.name) ef = EntityFilter(entity='*') df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=threshold) cf = ControlFilter(limit=1) query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, control_filter=cf) series_list = svc.query(query) for sl in series_list: if len(sl.data) > 0: series_count += 1 t.add_row([sl.metric, sl.entity, sl.tags, metric.retention_days, threshold, to_iso(sl.data[0].get_date())]) print(t) print("\nSeries count with broken retention date is %d." % series_count)
# set metric name metric_name = "ca.daily.reservoir_storage_af" # print header print( 'entity,entityLabel,seriesTags,firstValueDate,firstValue,lastValueDate,lastValue' ) # query series with current metric for all entities with meta information in ascending order to get first value sf = SeriesFilter(metric=metric_name) ef = EntityFilter(entity='*') df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now()) cf = ControlFilter(limit=1, add_meta=True, direction="ASC") query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, control_filter=cf) series_list_asc = svc.query(query) # change filter to get last value and query series, descending order set by default query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, control_filter=ControlFilter(limit=1)) series_list_desc = svc.query(query) for series_asc in series_list_asc: if len(series_asc.data) > 0: # get corresponding descending series and remove series from desc list index_series_desc = -1
# Setup for a series-copy script: verify the destination series exists
# before copying from the source entity.
# NOTE(review): `connection`, `dry_run`, `dst_entity`, `start_date`,
# `metric`, `tag_expression`, `no_data` and `source_entity` are defined
# earlier in the script, outside this excerpt.
series_service = SeriesService(connection)


def insert_or_warning(series_to_insert):
    # Guarded insert: in dry-run mode only log instead of writing.
    if not dry_run:
        series_service.insert(series_to_insert)
    else:
        logging.warning("Dry run enabled, series are not inserted.")


dst_entity_filter = EntityFilter(dst_entity)
dst_date_filter = DateFilter(start_date, 'now')
series_filter = SeriesFilter(metric, tag_expression=tag_expression)
# One earliest non-NaN sample is enough to prove the destination exists.
limit_control = ControlFilter(limit=1, direction="ASC")
sample_filter = SampleFilter("!Double.isNaN(value)")
dst_series_query = SeriesQuery(series_filter, dst_entity_filter, dst_date_filter,
                               control_filter=limit_control,
                               sample_filter=sample_filter)
dst_series = series_service.query(dst_series_query)
if no_data(dst_series):
    logging.warning("No destination series found for '%s' entity, '%s' metric"
                    % (dst_entity, metric))
    exit(1)


def err(tags, time=None, entity=source_entity):
    # Build a "no series found" message for logging.
    # NOTE(review): parameter `time` shadows the `time` module, and the
    # default `entity=source_entity` is captured once at definition time
    # — both intentional-looking but worth confirming.
    error_msg = "No series found for '" + entity + "' entity, '" + metric + "' metric and '" + str(tags) + "'"
    if time is not None:
        error_msg += " before " + str(time)
    return error_msg


source_entity_filter = EntityFilter(source_entity)
# add rate and group to the transformation filter if specified instead of query if query != 'query': for attr in query.split('+'): if attr == 'rate': tf.set_rate(Rate(period={'count': 3, 'unit': TimeUnit.MINUTE})) elif attr == 'group': tf.set_group( Group(type=AggregateType.SUM, truncate=True, order=0, interpolate={ 'type': InterpolateType.LINEAR, 'extend': True }, period={ 'count': 4, 'unit': TimeUnit.MINUTE })) query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, transformation_filter=tf) actual_series = svc.query(query) if actual_series != expected_series: print(filename) with open(filename, 'w') as fp: serialized = [series.to_dictionary() for series in actual_series] fp.write('%s\n' % serialized)
# NOTE(review): this chunk starts mid-statement — "',')" is the tail of a
# split call (likely line.split(',')) whose beginning is outside the
# excerpt, and the whole block appears to sit inside a file-reading loop
# (`line = fp.readline()` at the end). Indentation below is reconstructed.
',')
count, unit = interval.split('-')
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity=entity_name)
# Interval query ending at end_date, spanning `count` `unit`s back.
df = DateFilter(end_date=end_date, interval={
    'count': count,
    'unit': unit
})
ff = None
if forecast_name:
    # NOTE(review): nesting reconstructed — '-' appears to mean "default
    # forecast" (FORECAST type without a named forecast filter).
    sf.set_type('FORECAST')
    if forecast_name != '-':
        ff = ForecastFilter(forecast_name=forecast_name)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df,
                    forecast_filter=ff)
series = svc.query(query)
if series:
    if not series[0].data:
        print('No data for: %s' % line)
else:
    print('Empty response for %s' % line)
line = fp.readline()