# This fragment continues the script shown in Example #9 below (connection, eg_service,
# entity_group_name and tag_expression) and assumes EntityFilter, DateFilter,
# PropertiesQuery, datetime and six are imported.
entities_list = eg_service.get_entities(entity_group_name, tags=tag_expression)
# exclude entities that have no required tags
entities = [entity for entity in entities_list if entity.tags]

entities_service = EntitiesService(connection)
# the snippet queries properties below, so the properties service is needed as well
# (assumes: from atsd_client.services import PropertiesService)
properties_service = PropertiesService(connection)

# prepare property query
ef = EntityFilter('entity')
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
property_query = PropertiesQuery(entity_filter=ef,
                                 date_filter=df,
                                 type=property_type)

print('entity_name,entity_label,tags')
for entity in entities:
    pretty_tags = print_tags(entity.tags)
    for key in entity.tags:
        entity.tags[key] = ''

    # set actual entity and execute property query
    property_query.set_entity_filter(EntityFilter(entity.name))
    properties_list = properties_service.query(property_query)
    # set entity tags from property tags
    if properties_list:
        property_tags = properties_list[0].tags
        for key, value in six.iteritems(property_tags):
            entity.tags['env.%s' % key] = value

    print('%s,%s,%s' % (entity.name, print_str(entity.label), pretty_tags))
    # Uncomment next line to delete tags
    # entities_service.update(entity)
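Every snippet on this page formats tag dictionaries with print_tags and nullable strings with print_str from atsd_client.utils. If you need to adapt the output format, here is a minimal sketch of equivalent helpers; the library's exact separator and ordering may differ:

def print_tags(tags):
    # join a tag dictionary into sorted 'key=value' pairs (assumed equivalent of atsd_client.utils.print_tags)
    return ';'.join('%s=%s' % (key, tags[key]) for key in sorted(tags)) if tags else ''


def print_str(value):
    # render None as an empty string (assumed equivalent of atsd_client.utils.print_str)
    return value if value is not None else ''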
Example #2

# Imports assumed by this snippet
from datetime import datetime, timedelta
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set grace_interval to one day
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(minutes=grace_interval_minutes)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query entities whose names start with '06'
entities = entities_service.list(expression="name LIKE '06*'")

print('metric,entity,tags,last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity)
    for metric in metrics:
        # query all series for each metric and entity
        series = metrics_service.series(metric,
                                        entity,
                                        min_insert_date=min_insert_date,
                                        max_insert_date=max_insert_date)
        for s in series:
            print("%s,%s,%s,%s" %
                  (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
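The same stale-series report can be written to a file instead of stdout. A sketch reusing the calls above; the 'stale_series.csv' name is arbitrary:

with open('stale_series.csv', 'w') as f:
    print('metric,entity,tags,last_insert_date', file=f)
    for entity in entities:
        for metric in entities_service.metrics(entity):
            # series silent for longer than the grace interval
            for s in metrics_service.series(metric, entity,
                                            min_insert_date=min_insert_date,
                                            max_insert_date=max_insert_date):
                print('%s,%s,%s,%s' % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date), file=f)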
Example #3
    # This fragment runs inside a larger script: sf, ef, df, svc, args, now, message and log are defined earlier.
    tf = TransformationFilter()
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)

    if args.aggregate_period > 0:
        tf.set_aggregate(
            Aggregate(period={'count': args.aggregate_period, 'unit': TimeUnit.MINUTE}, types=[AggregateType.MEDIAN]))

    if args.interpolate_period > 0:
        tf.set_interpolate(Interpolate(period={'count': args.interpolate_period, 'unit': TimeUnit.MINUTE},
                                       function=InterpolateFunction.LINEAR))

    query.set_transformation_filter(tf)

    series_list = svc.query(query)
    for series in series_list:
        metric_id = '- %s %s' % (series.metric, print_tags(series.tags))
        log('\t' + metric_id)
        # exclude empty series for specific tags
        if len(series.data) > 2:
            ts = {int(sample.t / 1000): sample.v for sample in series.data}

            detector = AnomalyDetector(ts, score_threshold=args.min_score)

            anomalies = []
            for anomaly in detector.get_anomalies():
                if time.mktime(now.timetuple()) - args.last_hours * 3600 <= anomaly.exact_timestamp:
                    anomalies.append(anomaly)

            if anomalies:
                message.append(metric_id)
                for anomaly in anomalies:
                    # snippet truncated in the source; a minimal assumed body that records each anomaly
                    message.append('\t%s (score %s)' % (anomaly.exact_timestamp, anomaly.anomaly_score))
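The fragment assumes svc and the sf/ef/df filters were built earlier, and AnomalyDetector most likely comes from the luminol library. A plausible setup sketch using the same classes seen in the other examples; the metric and entity names are placeholders:

from atsd_client import connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter
from atsd_client.services import SeriesService
from luminol.anomaly_detector import AnomalyDetector  # assumed source of AnomalyDetector

connection = connect_url('https://atsd_hostname:8443', 'username', 'password')
svc = SeriesService(connection)
sf = SeriesFilter(metric='cpu_busy')        # placeholder metric
ef = EntityFilter(entity='nurswgvml007')    # placeholder entity
df = DateFilter(start_date='2018-05-01T00:00:00Z', end_date='now')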
Example #5
# This fragment assumes sf, ef, df, svc and an ascending-order query were built earlier in the script.
series_list_asc = svc.query(query)

# change the filter to get the last value and query the series; descending order is set by default
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, control_filter=ControlFilter(limit=1))
series_list_desc = svc.query(query)

for series_asc in series_list_asc:
    if len(series_asc.data) > 0:

        # get the corresponding descending series and remove it from the desc list
        index_series_desc = -1
        for idx, sd in enumerate(series_list_desc):
            if sd.entity == series_asc.entity and sd.metric == series_asc.metric and sd.tags == series_asc.tags:
                index_series_desc = idx
                break
        if index_series_desc < 0:
            # no matching descending series; skip to avoid popping the wrong element
            continue
        series_desc = series_list_desc.pop(index_series_desc)

        # get label from meta information
        label = series_asc.meta['entity'].label if series_asc.meta['entity'].label is not None else ''
        # get first and last samples in series to output
        print('%s,%s,%s,%s,%s,%s,%s' %
              (series_asc.entity, label, print_tags(series_asc.tags),
               series_asc.get_first_value_date(), series_asc.get_first_value(),
               series_desc.get_first_value_date(), series_desc.get_first_value()))

# print remaining series that are not in ascending list
for series_desc in series_list_desc:
    label = series_desc.meta['entity'].label if series_desc.meta['entity'].label is not None else ''
    print('%s,%s,%s,%s,%s,%s,%s' %
          (series_desc.entity, label, print_tags(series_desc.tags), '', '',
           series_desc.get_first_value_date(), series_desc.get_first_value()))
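The nested scan that pairs ascending and descending series is quadratic; for large result sets a dictionary keyed by (entity, metric, tags) is simpler and faster. A sketch over the same series_list_asc and series_list_desc:

def series_key(s):
    # tag dictionaries are unhashable, so freeze them into a sorted tuple
    return (s.entity, s.metric, tuple(sorted(s.tags.items())))

desc_by_key = {series_key(s): s for s in series_list_desc}
for series_asc in series_list_asc:
    # pop the matching descending series, or None when there is no match
    series_desc = desc_by_key.pop(series_key(series_asc), None)
    if series_desc is None or len(series_asc.data) == 0:
        continue
    label = series_asc.meta['entity'].label or ''
    print('%s,%s,%s,%s,%s,%s,%s' %
          (series_asc.entity, label, print_tags(series_asc.tags),
           series_asc.get_first_value_date(), series_asc.get_first_value(),
           series_desc.get_first_value_date(), series_desc.get_first_value()))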
Example #6

'''
Export messages from ATSD into a CSV file using the specified start_date, end_date, type, source and entity.
'''

# Imports assumed by this snippet
from atsd_client import connect_url
from atsd_client.models import EntityFilter, DateFilter, MessageQuery
from atsd_client.services import MessageService
from atsd_client.utils import print_tags
# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set export parameters
start_date = "2018-05-01T00:00:00Z"
end_date = "now"
type = "logger"
source = "com.axibase.tsd.service.search.searchserviceimpl"
entity = "7eacbc4723ed"

message_service = MessageService(connection)

ef = EntityFilter(entity=entity)
df = DateFilter(start_date=start_date, end_date=end_date)
query = MessageQuery(entity_filter=ef,
                     date_filter=df,
                     type=type,
                     source=source)

messages = message_service.query(query)

with open('export.csv', 'w') as f:
    print('date, entity, type, source, severity, tags, message', file=f)
    for message in messages:
        # make message body single line
        msg = message.message.replace("\n", r"\n").replace("\t", r"\t")
        print('%s, %s, %s, %s, %s, %s, %s' %
              (message.date, message.entity, message.type, message.source,
               message.severity, print_tags(message.tags), msg),
              file=f)
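The manual '%s, %s, ...' formatting produces a broken CSV if any field contains a comma or a quote; the csv module from the standard library handles quoting for you. A sketch of the same export:

import csv

with open('export.csv', 'w', newline='') as f:  # newline='' is required by the csv module on Python 3
    writer = csv.writer(f)
    writer.writerow(['date', 'entity', 'type', 'source', 'severity', 'tags', 'message'])
    for message in messages:
        writer.writerow([message.date, message.entity, message.type, message.source,
                         message.severity, print_tags(message.tags),
                         message.message.replace('\n', r'\n').replace('\t', r'\t')])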
Example #8

# This fragment assumes connection, metric, entity, startDate, endDate and the
# atsd_client utils module (imported as utils) are defined earlier in the script.
# Exclude samples with NaN values (NaN represents deleted values)
expr = '!Float.isNaN(value)'

series_service = SeriesService(connection)

# Query the series to be deleted, use exactMatch=False to include all tags
sf = SeriesFilter(metric=metric, exact_match=False)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date=startDate, end_date=endDate)
vf = SampleFilter(expr)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, sample_filter=vf)
series_list = series_service.query(query)

if len(series_list) == 0:
    print("No series are found")
else:
    for series in series_list:
        # Replace value of samples with NaN
        if len(series.data) == 0:
            print("Skip series with no data in the interval ", series.metric, series.entity, series.tags)
            continue
        print("Deleting %s values for %s, %s, %s" % (
            len(series.data), series.metric, series.entity, utils.print_tags(series.tags)))
        for sample in series.data:
            print("- Deleting %s, %s " % (sample.get_date(), sample.v))
            sample.v = None
        series.aggregate = None
        # Uncomment next line to delete
        # series_service.insert(series)
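Because inserting NaN samples irreversibly deletes the stored values, it is safer to gate the destructive call behind an explicit flag than to comment it in and out. A sketch; the --force flag is an illustration, not part of the original script:

import argparse

parser = argparse.ArgumentParser(description='Delete series samples by inserting NaN values.')
parser.add_argument('--force', action='store_true', help='actually perform the deletion')
args = parser.parse_args()

for series in series_list:
    # ... prepare the NaN samples as above ...
    if args.force:
        series_service.insert(series)
    else:
        print('Dry run; re-run with --force to delete %s values.' % len(series.data))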
Example #9

# Imports assumed by this snippet
from atsd_client import connect_url
from atsd_client.services import EntityGroupsService, EntitiesService
from atsd_client.utils import print_tags, print_str

'''
Delete entity tags with names starting with the specified prefix from entities that belong to the specified entity group.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set the name of the entity group and the prefix for tag keys
entity_group_name = 'docker-images'
tag_expression = 'env.*'

# Initialize services
eg_service = EntityGroupsService(connection)
entities_service = EntitiesService(connection)
entities_list = eg_service.get_entities(entity_group_name, tags=tag_expression)
# exclude entities that have no required tags
entities = [entity for entity in entities_list if entity.tags]

print('entity_name,entity_label,tags')
for entity in entities:
    pretty_tags = print_tags(entity.tags)
    for key in entity.tags:
        entity.tags[key] = ''
    print('%s,%s,%s' % (entity.name, print_str(entity.label), pretty_tags))
    # Uncomment next line to delete tags
    # entities_service.update(entity)
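get_entities(tags=tag_expression) is expected to return only the tags matching the expression, but if entities are loaded another way, filter the keys explicitly before blanking them. A defensive sketch:

prefix = 'env.'
for entity in entities:
    # blank only the tags whose names start with the prefix
    for key in list(entity.tags):
        if key.startswith(prefix):
            entity.tags[key] = ''
    # entities_service.update(entity)  # uncomment to apply the deletion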
Example #10

'''
Locate high-cardinality series that have more tags than the specified cardinality.
'''

# Imports assumed by this snippet
from atsd_client import connect_url
from atsd_client.services import MetricsService
from atsd_client.utils import print_tags

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
metrics_service = MetricsService(connection)

# query all metrics that have a last_insert_date, i.e. metrics with at least one stored series
metrics = metrics_service.list(min_insert_date="1970-01-01T00:00:00.000Z")

# set cardinality
cardinality = 8
series_count = 0

print('metric,entity,tags,last_insert_date')
for metric in metrics:
    # query series list for each metric
    series_list = metrics_service.series(metric)
    for s in series_list:
        # check tags cardinality for each series in list
        if len(s.tags) > cardinality:
            series_count += 1
            print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))

print("Number of series that have more than %d tags combinations is %d " % (cardinality, series_count))