Code example #1
    def test_aggregate_series(self):
        # Insert two consecutive sample values, then query them back aggregated
        # into MAX and MIN series over 10-second periods.
        val = random.randint(0, VALUE - 1)

        insert_series_sample(self.service, val, val + 1)
        time.sleep(WAIT_TIME)

        now = datetime.now()
        sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        aggr = Aggregate(period={'count': 10, 'unit': TimeUnit.SECOND},
                         types=[AggregateType.MAX, AggregateType.MIN])
        tf = TransformationFilter(aggregate=aggr)
        query = SeriesQuery(series_filter=sf,
                            entity_filter=ef,
                            date_filter=df,
                            transformation_filter=tf)

        series = self.service.query(query)
        # One series is returned per requested aggregation type (MAX and MIN).
        self.assertEqual(len(series), 2)

        # Match series by aggregation type since the response order is not guaranteed.
        if series[0].aggregate['type'] == 'MAX':
            max_value = series[0].get_last_value()
            min_value = series[1].get_last_value()
        else:
            min_value = series[0].get_last_value()
            max_value = series[1].get_last_value()

        self.assertGreaterEqual(max_value, min_value)
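
The test above comes from a larger test module: the constants (METRIC, ENTITY, TAG, TAG_VALUE, VALUE, WAIT_TIME), the insert_series_sample helper, and self.service are defined elsewhere in that module. A minimal sketch of the setup the snippet assumes, with placeholder host, credentials, and constant values:

import random
import time
from datetime import datetime, timedelta

import atsd_client
from atsd_client.models import (Aggregate, AggregateType, DateFilter, EntityFilter,
                                SeriesFilter, SeriesQuery, TimeUnit, TransformationFilter)
from atsd_client.services import SeriesService

# Placeholder constants; the real test module defines its own values.
METRIC = 'pyapi.aggregate_test_metric'
ENTITY = 'pyapi.aggregate_test_entity'
TAG, TAG_VALUE = 'test_tag', 'test_value'
VALUE = 1000      # upper bound for the random sample value
WAIT_TIME = 2     # seconds to wait until inserted samples become queryable

# Placeholder connection; the instance is assigned to self.service on the test class.
connection = atsd_client.connect_url('https://atsd_hostname:8443', 'user', 'password')
service = SeriesService(connection)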
Code example #2
# Discover metrics with data inserted for the entity within the last args.last_hours hours.
metrics = entities_service.metrics(args.entity, expression=metric_expression,
                                   min_insert_date=now - timedelta(seconds=args.last_hours * 3600),
                                   use_entity_insert_time=True)
log('Processing: ')
for metric in metrics:
    sf = SeriesFilter(metric=metric.name)
    ef = EntityFilter(entity=args.entity)
    # query from midnight, args.data_interval days back, until the current time
    df = DateFilter(start_date=datetime(now.year, now.month, now.day) - timedelta(days=args.data_interval),
                    end_date='now')
    tf = TransformationFilter()
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)

    if args.aggregate_period > 0:
        tf.set_aggregate(Aggregate(period={'count': args.aggregate_period, 'unit': TimeUnit.MINUTE},
                                   types=[AggregateType.MEDIAN]))

    if args.interpolate_period > 0:
        tf.set_interpolate(Interpolate(period={'count': args.interpolate_period, 'unit': TimeUnit.MINUTE},
                                       function=InterpolateFunction.LINEAR))

    query.set_transformation_filter(tf)

    series_list = svc.query(query)
    for series in series_list:
        metric_id = '- %s %s' % (series.metric, print_tags(series.tags))
        log('\t' + metric_id)
        # skip series with too few samples (also excludes empty series for specific tags)
        if len(series.data) > 2:
            # map Unix seconds (sample.t is in milliseconds) to sample values
            ts = {int(sample.t / 1000): sample.v for sample in series.data}
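
The loop above is taken from a script whose connection, services, helper functions, and command-line arguments are prepared earlier in the file. A rough sketch of that setup, keeping the argument names used in the snippet and substituting placeholders for everything else:

import argparse
from datetime import datetime, timedelta

import atsd_client
from atsd_client.models import (Aggregate, AggregateType, DateFilter, EntityFilter,
                                Interpolate, InterpolateFunction, SeriesFilter,
                                SeriesQuery, TimeUnit, TransformationFilter)
from atsd_client.services import EntitiesService, SeriesService

parser = argparse.ArgumentParser()
parser.add_argument('--entity', required=True)
parser.add_argument('--last_hours', type=float, default=24)       # look-back window for metric discovery, in hours
parser.add_argument('--data_interval', type=int, default=30)      # days of data to query per metric
parser.add_argument('--aggregate_period', type=int, default=0)    # minutes; 0 disables aggregation
parser.add_argument('--interpolate_period', type=int, default=0)  # minutes; 0 disables interpolation
args = parser.parse_args()

# Placeholder connection and metric filter expression.
connection = atsd_client.connect_url('https://atsd_hostname:8443', 'user', 'password')
entities_service = EntitiesService(connection)
svc = SeriesService(connection)
metric_expression = "name LIKE '*'"
now = datetime.now()

def log(message):
    # stand-in for the script's own logging helper
    print(message)

def print_tags(tags):
    # stand-in formatter used when printing a series identifier
    return ', '.join('%s=%s' % (k, v) for k, v in sorted((tags or {}).items()))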
Code example #3
File: migration.py Project: Revel109/atsd-api-python
    with open(filename) as fp:
        # each line of the file is a dict literal describing a series
        for line in fp:
            series = Series.from_dict(ast.literal_eval(line))
            expected_series.extend(series)

    # prepare series query
    sf = SeriesFilter(metric=metric_name, tags=tags, exact_match=exact_match)
    ef = EntityFilter(entity=entity_name)
    df = DateFilter(start_date=start_date, end_date=end_date)
    aggregate = Aggregate(period={'count': 7, 'unit': TimeUnit.MINUTE},
                          threshold={'min': 10, 'max': 90},
                          types=aggregate_types,
                          order=1)
    tf = TransformationFilter(aggregate=aggregate)

    # add rate and/or group transformations when requested via the query argument
    # (a value such as 'rate+group' rather than the plain 'query' default)
    if query != 'query':
        for attr in query.split('+'):
            if attr == 'rate':
                tf.set_rate(Rate(period={'count': 3, 'unit': TimeUnit.MINUTE}))
            elif attr == 'group':
                tf.set_group(
                    Group(type=AggregateType.SUM,
                          truncate=True,