    def test_aggregate_series(self):
        val = random.randint(0, VALUE - 1)

        insert_series_sample(self.service, val, val + 1)
        time.sleep(WAIT_TIME)

        now = datetime.now()
        sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        aggr = Aggregate(period={'count': 10, 'unit': TimeUnit.SECOND},
                         types=[AggregateType.MAX, AggregateType.MIN])
        tf = TransformationFilter(aggregate=aggr)
        query = SeriesQuery(series_filter=sf,
                            entity_filter=ef,
                            date_filter=df,
                            transformation_filter=tf)

        series = self.service.query(query)
        self.assertEqual(len(series), 2)

        if series[0].aggregate['type'] == 'MAX':
            max_val = series[0].get_last_value()
            min_val = series[1].get_last_value()
        else:
            min_val = series[0].get_last_value()
            max_val = series[1].get_last_value()

        self.assertGreaterEqual(max_val, min_val)

    def test_value_filter(self):
        """
        Check value filter.
        """
        insert_series_sample(self.service, None, 2, 3)
        time.sleep(WAIT_TIME)

        sf = SeriesFilter(metric=METRIC,
                          tags={TAG: TAG_VALUE},
                          exact_match=True)
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date='now - 1 * MINUTE', end_date='now')
        vf = ValueFilter('Float.isNaN(value) OR value=2')
        query = SeriesQuery(series_filter=sf,
                            entity_filter=ef,
                            date_filter=df,
                            value_filter=vf)
        series = self.service.query(query)
        # print(series)
        self.assertIsNotNone(series)
        self.assertEqual(1, len(series))
        s = series[0]
        self.assertIsInstance(s, Series)
        self.assertEqual(2, len(s.data))
        self.assertEqual(None, s.get_first_value())
        self.assertEqual(2, s.get_last_value())

    def test_insert_retrieve_versioning(self):
        test_status = 'pyapi.status'
        now = datetime.now()

        series = Series(ENTITY, VERSION_METRIC)
        val = random.randint(0, VALUE - 1)
        series.add_samples(
            Sample(value=val,
                   time=now - timedelta(seconds=2),
                   version={'status': test_status}))

        sf = SeriesFilter(metric=VERSION_METRIC)
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        vf = VersioningFilter(versioned=True)
        query = SeriesQuery(series_filter=sf,
                            entity_filter=ef,
                            date_filter=df,
                            versioning_filter=vf)

        successful = self.service.insert(series)
        time.sleep(WAIT_TIME)
        series, = self.service.query(query)
        # print(series)
        last_sample = series.data[-1]

        self.assertTrue(successful)
        self.assertEqual(last_sample.v, val)
        self.assertIsNotNone(last_sample.version)
        self.assertEqual(last_sample.version['status'], test_status)

    def test_forecast(self):
        now = datetime.now()
        sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}, type=models.SeriesType.FORECAST)
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)

        series = self.svc.query(query)
        self.assertEqual(series[0].type, models.SeriesType.FORECAST)

    def test_group(self):
        time.sleep(1)
        insert_series_sample(self.svc, VALUE - 1)
        time.sleep(WAIT_TIME)

        now = datetime.now()
        sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        tf = TransformationFilter(group=Group(type=AggregateType.COUNT, period={'count': 1, 'unit': TimeUnit.SECOND}))
        query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, transformation_filter=tf)

        series = self.svc.query(query)
        self.assertEqual(series[0].get_last_value(), 1)

    def test_rate(self):
        v1 = 5
        v2 = 3
        insert_series_sample(self.svc, v1, v2)
        time.sleep(WAIT_TIME + 2)

        now = datetime.now()
        sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        tf = TransformationFilter(rate=Rate(counter=False))
        query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, transformation_filter=tf)

        series = self.svc.query(query)
        self.assertEqual(int(series[0].get_last_value()), v2 - v1)

    def test_insert_retrieve_series(self):
        val = random.randint(0, VALUE - 1)

        insert_series_sample(self.service, val)
        time.sleep(WAIT_TIME + 2)

        now = datetime.now()
        sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)

        series = self.service.query(query)
        # for s in series:
        #     print(s)

        self.assertIsNotNone(series)
        self.assertGreater(len(series), 0)

        s = series[0]
        self.assertIsInstance(s, Series)

        self.assertGreater(len(s.data), 0)
        self.assertEqual(s.get_last_value(), val)
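
The tests above rely on an insert_series_sample helper that this excerpt does not include. A minimal sketch of such a helper, assuming the METRIC, ENTITY, TAG, and TAG_VALUE constants used throughout (the original helper may differ):

def insert_series_sample(data_service, *values):
    # build a series for the test metric/entity/tag and add one sample per
    # value, spaced one second apart so per-second periods see distinct samples
    series = Series(ENTITY, METRIC, tags={TAG: TAG_VALUE})
    now = datetime.now()
    for offset, value in enumerate(values):
        series.add_samples(
            Sample(value=value, time=now - timedelta(seconds=len(values) - offset)))
    return data_service.insert(series)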
# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set to True to delete the inappropriate values instead of only reporting them
deleteValues = False

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"

svc = SeriesService(connection)
metrics_service = MetricsService(connection)

# query series with this metric for all entities
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity='*')
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series = svc.query(query)

if deleteValues:
    print('Inappropriate values will be deleted.\n')
else:
    print('Inappropriate values will be left as is.\n')

print('metric,entity,tags,data')
for s in series:
    # keep only the inappropriate (non-positive) values
    s.data = [
        sample for sample in s.data if sample.v is not None and sample.v <= 0
    ]
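
    # the excerpt ends here; a plausible continuation (assumption), following
    # the CSV header printed above and the NaN-insert deletion pattern used in
    # the later examples:
    if s.data:
        print('%s,%s,%s,%s' % (s.metric, s.entity, s.tags, s.data))
        if deleteValues:
            # inserting NaN at the same timestamps deletes the stored values
            for sample in s.data:
                sample.v = float('nan')
            svc.insert(s)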
Example #9
# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set series
metric = 'm-to-delete'
entity = 'e-to-delete'
tags = {'tag_key_1': 'tag_value_1', 'tag_key_2': 'tag_value_2'}

# specify date interval
startDate = "2018-10-01T00:00:00Z"
endDate = "2018-10-02T00:00:00Z"

series_service = SeriesService(connection)

# query the series to be deleted; exact_match excludes series with tags other than those specified
sf = SeriesFilter(metric=metric, tags=tags, exact_match=True)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date=startDate, end_date=endDate)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series_list = series_service.query(query)

if len(series_list) > 1:
    raise Exception('Multiple series match the specified filters')

series = series_list[0]

# check data existence
if len(series.data) == 0:
    print('No data in required interval')
else:
    # replace the value of each sample with NaN and re-insert the series;
    # inserting NaN at an existing timestamp deletes the stored value in ATSD
    for sample in series.data:
        sample.v = float('nan')
    series_service.insert(series)
Example #10
# Series filter
metric = args.metric
entity = args.entity

# Date interval
startDate = args.start
endDate = args.end

# Exclude samples with NaN values (NaN represents deleted values)
expr = '!Float.isNaN(value)'

series_service = SeriesService(connection)

# Query the series to be deleted; exact_match=False includes series with any tags
sf = SeriesFilter(metric=metric, exact_match=False)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date=startDate, end_date=endDate)
vf = SampleFilter(expr)
query = SeriesQuery(series_filter=sf,
                    entity_filter=ef,
                    date_filter=df,
                    sample_filter=vf)
series_list = series_service.query(query)

if len(series_list) == 0:
    print("No series found")
else:
    for series in series_list:
        # Skip empty series, then replace each sample value with NaN and
        # re-insert the series to delete the stored values
        if len(series.data) == 0:
            continue
        for sample in series.data:
            sample.v = float('nan')
        series_service.insert(series)
Example #11
    logging.warning("'" + metric + "' metric does not exist")
    exit(1)

series_service = SeriesService(connection)


def insert_or_warning(series_to_insert):
    if not dry_run:
        series_service.insert(series_to_insert)
    else:
        logging.warning("Dry run enabled, series are not inserted.")


dst_entity_filter = EntityFilter(dst_entity)
dst_date_filter = DateFilter(start_date, 'now')
series_filter = SeriesFilter(metric, tag_expression=tag_expression)
limit_control = ControlFilter(limit=1, direction="ASC")
sample_filter = SampleFilter("!Double.isNaN(value)")
dst_series_query = SeriesQuery(series_filter, dst_entity_filter, dst_date_filter,
                               control_filter=limit_control, sample_filter=sample_filter)
dst_series = series_service.query(dst_series_query)

if no_data(dst_series):
    logging.warning("No destination series found for '%s' entity, '%s' metric" % (dst_entity, metric))
    exit(1)


def err(tags, time=None, entity=source_entity):
    error_msg = "No series found for '" + entity + "' entity, '" + metric + "' metric and '" + str(tags) + "'"
    if time is not None:
        error_msg += " before " + str(time)
    # the excerpt cuts off here; presumably the message is logged and the
    # script exits, matching the warning/exit pattern above (assumption)
    logging.error(error_msg)
    exit(1)
Example #12
    # use all aggregation types when none are specified (the opening lines of
    # this check are cut from the excerpt; reconstruction, assumption)
    if not aggregate_types:
        aggregate_types = [
            key for key in dir(AggregateType) if not key.startswith('_')
        ]
    else:
        aggregate_types = aggregate_types.split('+')

    # read the expected series saved from a previous query
    expected_series = []
    with open(filename) as fp:
        line = fp.readline()
        while line:
            series = Series.from_dict(ast.literal_eval(line))
            expected_series.extend(series)
            line = fp.readline()

    # prepare series query
    sf = SeriesFilter(metric=metric_name, tags=tags, exact_match=exact_match)
    ef = EntityFilter(entity=entity_name)
    df = DateFilter(start_date=start_date, end_date=end_date)
    aggregate = Aggregate(period={'count': 7, 'unit': TimeUnit.MINUTE},
                          threshold={'min': 10, 'max': 90},
                          types=aggregate_types,
                          order=1)
    tf = TransformationFilter(aggregate=aggregate)

    # add rate and group to the transformation filter, if specified
Example #14
filename = 'data-availability.csv'

with open(filename) as fp:
    line = fp.readline()
    while line:

        # skip commented lines
        if line.startswith('#'):
            line = fp.readline()
            continue

        metric_name, entity_name, interval, end_date, forecast_name, comments = line.split(',')
        count, unit = interval.split('-')
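
        # Each row is assumed to look like (inferred from the split calls above):
        #   metric,entity,interval,end_date,forecast_name,comments
        # where interval is "<count>-<unit>", e.g. 1-HOUR, and a forecast_name
        # of '-' selects the default forecast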

        sf = SeriesFilter(metric=metric_name)
        ef = EntityFilter(entity=entity_name)
        df = DateFilter(end_date=end_date, interval={'count': count, 'unit': unit})
        ff = None

        if forecast_name:
            sf.set_type('FORECAST')
            if forecast_name != '-':
                ff = ForecastFilter(forecast_name=forecast_name)

        query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, forecast_filter=ff)

        series = svc.query(query)
        if series:
            if not series[0].data:
                print('No data for: %s' % line)

        # read the next line; without this the loop would never terminate
        line = fp.readline()

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')
svc = SeriesService(connection)
metrics_service = MetricsService(connection)

# specify source and destination metric names
metric_src = 'metric_src'
metric_dst = 'metric_dst'

# copy series for all entities; a specific entity name can be set instead
entity = '*'

# query series with the source metric and the selected entities
sf = SeriesFilter(metric=metric_src)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date='1970-01-01T00:00:00Z', end_date=datetime.now())
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series = svc.query(query)

# copy metric meta information
metric = metrics_service.get(metric_src)
metric.name = metric_dst
metrics_service.create_or_replace(metric)

for s in series:
    s.metric = metric_dst
    s.aggregate = None
    svc.insert(s)
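
# Optional sanity check (a sketch, not part of the original script): query the
# destination metric and report how many series were copied.
check_query = SeriesQuery(series_filter=SeriesFilter(metric=metric_dst),
                          entity_filter=EntityFilter(entity=entity),
                          date_filter=DateFilter(start_date='1970-01-01T00:00:00Z',
                                                 end_date=datetime.now()))
print('Copied %d series to %s' % (len(svc.query(check_query)), metric_dst))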