def test_insert_retrieve(self):
        DATE = datetime.now()
        prop = Property(TYPE, ENTITY, TAGS, KEY, DATE)
        self.ps.insert(prop)

        time.sleep(2)

        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=DATE, end_date=datetime.now())
        query = PropertiesQuery(type=TYPE, entity_filter=ef, date_filter=df)
        result = self.ps.query(query)

        print(result)

        self.assertIsNotNone(result)
        self.assertGreater(len(result), 0)
        p = result[0]
        """
        In the future may be replaced with:
        self.assertItemsEqual(prop.__dict__.items(), p.__dict__.items())
        """
        self.assertIsInstance(p, Property)
        self.assertEqual(prop.type, p.type)
        self.assertEqual(prop.entity, p.entity)
        # Uncomment when JodaTime will be replaced
        # self.assertEqual(prop.date, p.date)
        self.assertEqual(prop.key, p.key)
        self.assertEqual(prop.tags, p.tags)
    def test_insert_retrieve(self):
        """Insert a message and read it back, comparing field by field."""
        sent_at = datetime.now()
        sent = Message(TYPE, SOURCE, ENTITY, sent_at, SEVERITY, TAGS, MESSAGE)
        self.ms.insert(sent)

        # Allow the record to become visible to queries.
        time.sleep(2)

        query = MessageQuery(
            entity_filter=EntityFilter(entity=ENTITY),
            date_filter=DateFilter(start_date=sent_at, end_date=datetime.now()))
        result = self.ms.query(query)

        print(result)

        self.assertIsNotNone(result)
        self.assertGreater(len(result), 0)

        received = result[0]
        self.assertIsInstance(received, Message)
        """
        In the future may be replaced with:
        self.assertItemsEqual(msg.__dict__.items(), m.__dict__.items())
        """
        self.assertEqual(sent.type, received.type)
        self.assertEqual(sent.source, received.source)
        self.assertEqual(sent.entity, received.entity)
        # Uncomment when JodaTime will be replaced
        # self.assertEqual(msg.date, m.date)
        self.assertEqual(sent.severity, received.severity)
        self.assertEqual(sent.tags, received.tags)
        self.assertEqual(sent.message, received.message)
        self.assertEqual(sent.persist, received.persist)
    def test_insert_retrieve_versioning(self):
        """Insert a versioned sample and verify the version metadata round-trips."""
        test_status = 'pyapi.status'
        now = datetime.now()

        expected_value = random.randint(0, VALUE - 1)
        series = Series(ENTITY, VERSION_METRIC)
        series.add_samples(
            Sample(value=expected_value,
                   time=now - timedelta(seconds=2),
                   version={'status': test_status}))

        # Build the query up-front; versioned=True asks the server to
        # return version metadata alongside each sample.
        query = SeriesQuery(
            series_filter=SeriesFilter(metric=VERSION_METRIC),
            entity_filter=EntityFilter(entity=ENTITY),
            date_filter=DateFilter(start_date=now - timedelta(hours=1),
                                   end_date=now),
            versioning_filter=VersioningFilter(versioned=True))

        successful = self.service.insert(series)
        time.sleep(WAIT_TIME)
        series, = self.service.query(query)
        last_sample = series.data[-1]

        self.assertTrue(successful)
        self.assertEqual(last_sample.v, expected_value)
        self.assertIsNotNone(last_sample.version)
        self.assertEqual(last_sample.version['status'], test_status)
    def test_aggregate_series(self):
        """
        Insert two samples, query MAX and MIN aggregates over a 10-second
        period, and check the maximum is not below the minimum.
        """
        val = random.randint(0, VALUE - 1)

        insert_series_sample(self.service, val, val + 1)
        time.sleep(WAIT_TIME)

        now = datetime.now()
        sf = SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]})
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        aggr = Aggregate(period={'count': 10, 'unit': TimeUnit.SECOND},
                         types=[AggregateType.MAX, AggregateType.MIN])
        tf = TransformationFilter(aggregate=aggr)
        query = SeriesQuery(series_filter=sf,
                            entity_filter=ef,
                            date_filter=df,
                            transformation_filter=tf)

        series = self.service.query(query)
        # One series is returned per requested aggregate type.
        self.assertEqual(len(series), 2)

        # The order of the returned series is not guaranteed, so detect
        # which one carries the MAX aggregate. Renamed locals from
        # max/min to avoid shadowing the builtins.
        if series[0].aggregate['type'] == 'MAX':
            max_value = series[0].get_last_value()
            min_value = series[1].get_last_value()
        else:
            min_value = series[0].get_last_value()
            max_value = series[1].get_last_value()

        self.assertGreaterEqual(max_value, min_value)
    def test_value_filter(self):
        """
        Check value filter.

        Inserts NaN, 2 and 3; the filter expression must keep only the
        NaN and the 2.
        """
        insert_series_sample(self.service, None, 2, 3)
        time.sleep(WAIT_TIME)

        query = SeriesQuery(
            series_filter=SeriesFilter(metric=METRIC,
                                       tags={TAG: TAG_VALUE},
                                       exact_match=True),
            entity_filter=EntityFilter(entity=ENTITY),
            date_filter=DateFilter(start_date='now - 1 * MINUTE',
                                   end_date="now"),
            value_filter=ValueFilter('Float.isNaN(value) OR value=2'))
        series = self.service.query(query)

        self.assertIsNotNone(series)
        self.assertEqual(1, len(series))

        matched = series[0]
        self.assertIsInstance(matched, Series)
        self.assertEqual(2, len(matched.data))
        self.assertEqual(None, matched.get_first_value())
        self.assertEqual(2, matched.get_last_value())
    def test_forecast(self):
        """Query FORECAST-typed series and verify the type is echoed back."""
        now = datetime.now()
        query = SeriesQuery(
            series_filter=SeriesFilter(metric=METRIC,
                                       tags={TAG: [TAG_VALUE]},
                                       type=models.SeriesType.FORECAST),
            entity_filter=EntityFilter(entity=ENTITY),
            date_filter=DateFilter(start_date=now - timedelta(hours=1),
                                   end_date=now))

        series = self.svc.query(query)
        self.assertEqual(series[0].type, models.SeriesType.FORECAST)
# --- Example #7 (scraped-snippet separator; original vote count: 0) ---
 def test_query_dataframe(self):
     ef = EntityFilter(entity=ENTITY)
     df = DateFilter(start_date=DATE, end_date=datetime.now())
     query = PropertiesQuery(type=TYPE, entity_filter=ef, date_filter=df)
     result = self.service.query_dataframe(query)
     # print(result)
     self.assertIsNotNone(result)
     self.assertEqual((1, 5), result.shape)
     self.assertTrue(isinstance(result.loc[0, 'date'], datetime))
     self.assertEqual(KEY_VALUE, result.loc[0, KEY_NAME])
     self.assertEqual(TYPE, result.loc[0, 'type'])
     self.assertEqual(TAG_VALUE, result.loc[0, TAG])
     self.assertEqual(ENTITY, result.loc[0, 'entity'])
    def test_group(self):
        """Group samples with COUNT over 1-second periods; expect a count of 1."""
        time.sleep(1)
        insert_series_sample(self.svc, VALUE - 1)
        time.sleep(WAIT_TIME)

        now = datetime.now()
        grouping = Group(type=AggregateType.COUNT,
                         period={'count': 1, 'unit': TimeUnit.SECOND})
        query = SeriesQuery(
            series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
            entity_filter=EntityFilter(entity=ENTITY),
            date_filter=DateFilter(start_date=now - timedelta(hours=1),
                                   end_date=now),
            transformation_filter=TransformationFilter(group=grouping))

        series = self.svc.query(query)
        # A single sample fell into its 1-second period, so COUNT is 1.
        self.assertEqual(series[0].get_last_value(), 1)
    def test_rate(self):
        """Apply a non-counter rate transform and verify the last delta."""
        first_value = 5
        second_value = 3
        insert_series_sample(self.svc, first_value, second_value)
        time.sleep(WAIT_TIME + 2)

        now = datetime.now()
        query = SeriesQuery(
            series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
            entity_filter=EntityFilter(entity=ENTITY),
            date_filter=DateFilter(start_date=now - timedelta(hours=1),
                                   end_date=now),
            transformation_filter=TransformationFilter(rate=Rate(counter=False)))

        series = self.svc.query(query)
        # counter=False keeps the sign of the difference: 3 - 5 == -2.
        self.assertEqual(int(series[0].get_last_value()), second_value - first_value)
 def test_query_dataframe(self):
     exp = "message LIKE '* DataFrame'"
     ef = EntityFilter(entity=ENTITY)
     df = DateFilter(interval=INTERVAL, end_date=datetime.now())
     query = MessageQuery(entity_filter=ef, date_filter=df, expression=exp)
     result = self.service.query_dataframe(query)
     # print(result)
     self.assertIsNotNone(result)
     self.assertEqual((1, 7), result.shape)
     self.assertTrue(isinstance(result.loc[0, 'date'], datetime))
     self.assertEqual(MESSAGE, result.loc[0, 'message'])
     self.assertEqual(TYPE, result.loc[0, 'type'])
     self.assertEqual(SOURCE, result.loc[0, 'source'])
     self.assertEqual(ENTITY, result.loc[0, 'entity'])
     self.assertEqual(SEVERITY, result.loc[0, 'severity'])
     self.assertEqual(TAG_VALUE, result.loc[0, TAG])
    def test_insert_retrieve_series(self):
        """Insert one sample and verify it comes back as the last value."""
        expected = random.randint(0, VALUE - 1)

        insert_series_sample(self.service, expected)
        time.sleep(WAIT_TIME + 2)

        now = datetime.now()
        query = SeriesQuery(
            series_filter=SeriesFilter(metric=METRIC, tags={TAG: [TAG_VALUE]}),
            entity_filter=EntityFilter(entity=ENTITY),
            date_filter=DateFilter(start_date=now - timedelta(hours=1),
                                   end_date=now))

        series = self.service.query(query)

        self.assertIsNotNone(series)
        self.assertGreater(len(series), 0)

        retrieved = series[0]
        self.assertIsInstance(retrieved, Series)
        self.assertGreater(len(retrieved.data), 0)
        self.assertEqual(retrieved.get_last_value(), expected)
# --- Example #12 (scraped-snippet separator; original vote count: 0) ---
# Example script: locate a single series by metric/entity/tags and blank
# out its values in a date interval.
# NOTE(review): this snippet is truncated — the final loop has no body here.
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set series
metric = 'm-to-delete'
entity = 'e-to-delete'
tags = {'tag_key_1': 'tag_value_1', 'tag_key_2': 'tag_value_2'}

# specify date interval
startDate = "2018-10-01T00:00:00Z"
endDate = "2018-10-02T00:00:00Z"

series_service = SeriesService(connection)

# query the series to be deleted, use exactMatch to exclude not specified tags
sf = SeriesFilter(metric=metric, tags=tags, exact_match=True)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date=startDate, end_date=endDate)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series_list = series_service.query(query)

# refuse to proceed when the filter is ambiguous
if len(series_list) > 1:
    raise Exception('There are multiple series meet the requirements')

series = series_list[0]

# check data existence
if len(series.data) == 0:
    print('No data in required interval')
else:
    # replace value of samples with nan
    # NOTE(review): loop body missing in this chunk — truncated here.
    for sample in series.data:
# --- Example #13 (scraped-snippet separator; original vote count: 0) ---
# NOTE(review): fragment begins mid-conditional — the matching `if` header
# (presumably testing whether args.metric_filter is set) is outside this view.
    metric_expression = None
else:
    # Build a server-side metric name filter from the CLI argument.
    metric_expression = "name like '%s'" % args.metric_filter
    title = '%s, metric filter: %s' % (title, args.metric_filter)

message = [title]

now = datetime.now()

# Fetch metrics for the entity that received data within the lookback window.
metrics = entities_service.metrics(args.entity, expression=metric_expression,
                                   min_insert_date=now - timedelta(seconds=args.last_hours * 3600),
                                   use_entity_insert_time=True)
log('Processing: ')
for metric in metrics:
    # One series query per metric, covering args.data_interval days
    # from local midnight up to 'now'.
    sf = SeriesFilter(metric=metric.name)
    ef = EntityFilter(entity=args.entity)
    df = DateFilter(start_date=datetime(now.year, now.month, now.day) - timedelta(days=args.data_interval),
                    end_date='now')
    tf = TransformationFilter()
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)

    # Optional MEDIAN aggregation, enabled by a positive CLI period.
    if args.aggregate_period > 0:
        tf.set_aggregate(
            Aggregate(period={'count': args.aggregate_period, 'unit': TimeUnit.MINUTE}, types=[AggregateType.MEDIAN]))

    # Optional LINEAR interpolation, enabled by a positive CLI period.
    if args.interpolate_period > 0:
        tf.set_interpolate(Interpolate(period={'count': args.interpolate_period, 'unit': TimeUnit.MINUTE},
                                       function=InterpolateFunction.LINEAR))

    query.set_transformation_filter(tf)
# Fixture constants shared by the alerts-service tests below.
ENTITY = 'pyapi.alerts_service.entity'
METRIC = 'test_alert_metric_1'
RULE = 'test_rule_1'
ID = 1
ACKNOWLEDGED = False
VALUE = 1  # sample value inserted to open the alert
MESSAGE = ''
TAGS = {}
TEXT_VALUE = '1'
SEVERITY = 'WARNING'
REPEAT_COUNT = 0
OPEN_VALUE = 1
INTERVAL = {"count": 2, "unit": "MINUTE"}
START_DATE = datetime.now()  # captured once, at module import time
# Module-level filters reused across the test methods.
ef = EntityFilter(entity=ENTITY)
df = DateFilter(start_date=START_DATE, interval=INTERVAL)


class TestAlertsService(ServiceTestBase):
    # NOTE(review): class is truncated in this chunk — test_fields_match
    # has no visible body below.
    def setUp(self):
        """
        Insert series to open the alert.

        Writes a single sample for ENTITY/METRIC before each test and
        waits self.wait_time so the alert rule has time to fire.
        """
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(VALUE, datetime.now()))
        self._series_service = SeriesService(self.connection)
        self._series_service.insert(series)
        time.sleep(self.wait_time)

    def test_fields_match(self):
# --- Example #15 (scraped-snippet separator; original vote count: 0) ---
# Example script fragment: validate the metric, then look up the first
# destination sample before copying series data between entities.
# Abort early when the metric does not exist on the server.
if metric_service.get(metric) is None:
    logging.warning("'" + metric + "' metric does not exist")
    exit(1)

series_service = SeriesService(connection)


def insert_or_warning(series_to_insert):
    """Insert the series unless dry_run is set; then only log a warning."""
    if not dry_run:
        series_service.insert(series_to_insert)
    else:
        logging.warning("Dry run enabled, series are not inserted.")


# Query the earliest (limit=1, ASC) non-NaN sample for the destination
# entity — used to establish where the destination data begins.
dst_entity_filter = EntityFilter(dst_entity)
dst_date_filter = DateFilter(start_date, 'now')
series_filter = SeriesFilter(metric, tag_expression=tag_expression)
limit_control = ControlFilter(limit=1, direction="ASC")
sample_filter = SampleFilter("!Double.isNaN(value)")
dst_series_query = SeriesQuery(series_filter, dst_entity_filter, dst_date_filter,
                               control_filter=limit_control, sample_filter=sample_filter)
dst_series = series_service.query(dst_series_query)

if no_data(dst_series):
    logging.warning("No destination series found for '%s' entity, '%s' metric" % (dst_entity, metric))
    exit(1)


def err(tags, time=None, entity=source_entity):
    """Build a 'no series found' message. NOTE(review): truncated here."""
    error_msg = "No series found for '" + entity + "' entity, '" + metric + "' metric and '" + str(tags) + "'"
# Example script: report (and optionally delete) non-positive values of a
# reservoir-storage metric across all entities.
# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# disable deleting inappropriate values
deleteValues = False

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"

svc = SeriesService(connection)
metrics_service = MetricsService(connection)

# query series with current metric and all entities
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity='*')
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series = svc.query(query)

if deleteValues:
    print('Inappropriate values to be deleted.\n')
else:
    print('Leave as is inappropriate values.\n')

# CSV header for the report printed below.
print('metric,entity,tags,data')
for s in series:
    # keep only non-positive samples — these are the "inappropriate"
    # values being reported (the comprehension retains v <= 0, it does
    # not discard them)
    s.data = [
        sample for sample in s.data if sample.v is not None and sample.v <= 0
    ]
# Example script fragment: repopulate entity tags from docker container
# env properties for every entity in a group.
# set the name of entity_group and tag expression
entity_group_name = 'docker-containers'
property_type = 'docker.container.config.env'
tag_expression = 'env.*'

eg_service = EntityGroupsService(connection)
properties_service = PropertiesService(connection)
entities_list = eg_service.get_entities(entity_group_name, tags=tag_expression)
# exclude entities that have no required tags
entities = [entity for entity in entities_list if entity.tags]

entities_service = EntitiesService(connection)

# prepare property query (placeholder entity filter — replaced per entity
# inside the loop below)
ef = EntityFilter('entity')
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
property_query = PropertiesQuery(entity_filter=ef,
                                 date_filter=df,
                                 type=property_type)

# CSV header for the report printed below.
print('entity_name,entity_label,tags')
for entity in entities:
    pretty_tags = print_tags(entity.tags)
    # blank out existing tag values before refreshing them from properties
    for key in entity.tags:
        entity.tags[key] = ''

    # set actual entity and execute property query
    property_query.set_entity_filter(EntityFilter(entity.name))
    properties_list = properties_service.query(property_query)
    # set entity tags from property tags
    # NOTE(review): snippet truncated here — the tag-assignment code is
    # outside this view.
# --- Example #18 (scraped-snippet separator; original vote count: 0) ---
# Example script fragment: replicate historical series data forward by
# incrementing the year of each sample.
# NOTE(review): snippet is truncated — the final Sample(...) call is cut off.
# Initialize services
svc = SeriesService(connection)

# specify years to increase
year_count = 5
# specify metric and entity names
metric_name = 'sml.power-consumed'
entity_name = '*'
# specify date filter
start_date = '2018-01-01T00:00:00Z'
end_date = '2019-01-01T00:00:01Z'

# prepare series_query and execute it
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity=entity_name)
df = DateFilter(start_date=start_date, end_date=end_date)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series_list = svc.query(query)

for series in series_list:
    # update timestamps: one shifted copy of every sample per year offset
    updated_data = []
    for sample in series.data:
        current_date = sample.get_date()
        for i in range(1, year_count + 1):
            # try/except presumably guards date.replace (e.g. Feb 29 in a
            # non-leap target year) — handler is outside this view.
            try:
                # increment year
                new_date = current_date.replace(year=current_date.year + i)
                updated_data.append(
                    Sample(value=sample.v,