Delete series for all metrics for the specified entity with names starting with the specified prefix.
'''

# Establish a connection to the target ATSD instance.
# connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Target entity and a wildcard expression selecting metrics by name prefix.
entity = "entity"
metric_expr = "name LIKE 'me*'"

# Service wrappers for the entity-metadata and series endpoints.
entities_service = EntitiesService(connection)
series_service = SeriesService(connection)

# Fetch every metric of the entity that matches the expression.
metrics = entities_service.metrics(entity=entity, expression=metric_expr)

if metrics:
    # Build one delete query per matching metric.
    for metric in metrics:
        query = SeriesDeleteQuery(entity=entity, metric=metric.name, exact_match=False)
        print("deleting ", entity, metric.name)
        # Uncomment next line to delete series
        # response = series_service.delete(query)
        # print(response)
else:
    print("No metrics are found for entity " + entity)
# Beispiel #2 (Example #2)
# 0
from prettytable import PrettyTable

from atsd_client import connect, connect_url
from atsd_client.services import MetricsService, EntitiesService

# Establish a connection to the target ATSD instance.
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

entity = 'my-entity'

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# Fetch up to five metrics of the entity, requesting the listed tags.
metrics = entities_service.metrics(entity, tags='frequency,seasonal_adjustment,observation_start,observation_end,category,parent_category', limit=5)
t = PrettyTable(['Top Category', 'Category', 'Name', 'Label', 'Frequency', 'Adjustment', 'Observation Start', 'Observation End'])

# Add one table row per metric: selected tag values plus the name and label fields.
for metric in metrics:
    tags = metric.tags
    t.add_row([tags['category'], tags['parent_category'], metric.name, metric.label,
               tags['frequency'], tags['seasonal_adjustment'],
               tags['observation_start'], tags['observation_end']])

# Order rows alphabetically by the metric name column.
t.sortby = "Name"

# Print metrics as ASCII table
#print(t)

# Render the table as HTML with a caption row.
print(t.get_html_string(title="Available Metrics"))
# Locate series that stopped receiving data: for every entity whose name starts
# with '06', compare each series' last_insert_date against the most recent
# insert across sibling series of the same metric.

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# Grace interval: a series is reported only if it lags the freshest sibling
# series by more than this many hours.
grace_interval_hours = 1

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# Query all entities that have a last_insert_date, i.e. entities with series data.
entities = entities_service.list(expression="name LIKE '06*'", min_insert_date="1970-01-01T00:00:00.000Z")

print('metric,entity,tags,last_insert_date')
for entity in entities:
    # Query all metrics for each entity.
    metrics = entities_service.metrics(entity.name)
    for m in metrics:
        # Query the series list for this metric/entity pair.
        series_list = metrics_service.series(m.name, entity.name)
        # A series can only lag its siblings when there is more than one series.
        if len(series_list) > 1:
            # Lower limit to compare against: the freshest last_insert_date in
            # the list minus the grace interval.
            lower_limit_date = max(s.last_insert_date for s in series_list) - timedelta(hours=grace_interval_hours)
            for s in series_list:
                # Report series whose latest insert fell behind the limit.
                if s.last_insert_date < lower_limit_date:
                    print("%s,%s,%s,%s" % (s.metric, s.entity, s.tags, s.last_insert_date))
'''
Locate metrics of the specified agents (entities) that stopped collecting data:
report each metric whose last_insert_date lags the entity's own last_insert_date
by more than the grace interval.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Grace interval in hours: a metric is reported only when it lags by more than this.
grace_interval_hours = 1

# List of the agents (entities) to check.
agents = ['nurswgvml007', 'nurswgvml010']

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

for agent in agents:
    # Query agent meta information.
    entity = entities_service.get(agent)
    if entity is None:
        print('Agent %s not found' % agent)
        continue
    date = entity.last_insert_date
    if date is None:
        # The agent exists but has never inserted any data; nothing to compare.
        print('Agent %s has no inserted data' % agent)
        continue
    # Query all metrics collected by the agent.
    metrics = entities_service.metrics(entity, use_entity_insert_time=True)
    for metric in metrics:
        # Report metrics whose latest insert fell behind the entity's most recent one.
        if date - metric.last_insert_date > timedelta(hours=grace_interval_hours):
            print("%s, %s" % (metric.name, agent))
# Beispiel #5 (Example #5)
# 0
# Compose a title line describing the run parameters.
# NOTE(review): 'args' (an argparse-style namespace), 'entities_service', 'log'
# and the query/filter classes are defined elsewhere in the file — TODO confirm.
title = '\nentity: %s, last hours: %s, minimal score: %s, aggregate period: %s min, interpolate period: %s min, ' \
        'data interval: %s days' % (
    args.entity, args.last_hours, args.min_score, args.aggregate_period, args.interpolate_period, args.data_interval)

# Optional metric name filter, translated into an ATSD 'like' expression.
if args.metric_filter is None:
    metric_expression = None
else:
    metric_expression = "name like '%s'" % args.metric_filter
    title = '%s, metric filter: %s' % (title, args.metric_filter)

# Accumulates output lines, starting with the title.
message = [title]

now = datetime.now()

# Metrics of the entity with inserts during the last 'last_hours' hours.
metrics = entities_service.metrics(args.entity, expression=metric_expression,
                                   min_insert_date=now - timedelta(seconds=args.last_hours * 3600),
                                   use_entity_insert_time=True)
log('Processing: ')
for metric in metrics:
    # Build a series query for this metric over the last 'data_interval' days
    # (start at local midnight minus the interval, end at server 'now').
    sf = SeriesFilter(metric=metric.name)
    ef = EntityFilter(entity=args.entity)
    df = DateFilter(start_date=datetime(now.year, now.month, now.day) - timedelta(days=args.data_interval),
                    end_date='now')
    tf = TransformationFilter()
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)

    # Optional aggregation: MEDIAN over fixed periods of 'aggregate_period' minutes.
    if args.aggregate_period > 0:
        tf.set_aggregate(
            Aggregate(period={'count': args.aggregate_period, 'unit': TimeUnit.MINUTE}, types=[AggregateType.MEDIAN]))

    # NOTE(review): this chunk is truncated here — the body of the following 'if' is missing.
    if args.interpolate_period > 0:
'''
Delete series for all metrics for the specified entity with names starting with the specified prefix.
'''

# Open a connection to the ATSD instance.
# connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Entity under cleanup and the metric-name prefix expression.
entity = "entity"
metric_expr = "name LIKE 'me*'"

# Metadata and series services.
entities_service = EntitiesService(connection)
series_service = SeriesService(connection)

# Resolve the metrics of the entity that match the prefix expression.
metrics = entities_service.metrics(entity=entity, expression=metric_expr)

if not metrics:
    print("No metrics are found for entity " + entity)
else:
    # Prepare a delete query for every matched metric.
    for m in metrics:
        query = SeriesDeleteQuery(entity=entity,
                                  metric=m.name,
                                  exact_match=False)
        print("deleting ", entity, m.name)
        # Uncomment next line to delete series
        # response = series_service.delete(query)
        # print(response)
'''
Locate series that have no data during the actual time interval (grace_interval) using an expression filter for entity.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Set grace_interval to one day: series with no inserts during the last day are reported.
grace_interval_minutes = 24 * 60
# Lower boundary: any series with at least one insert since epoch qualifies.
min_insert_date = "1970-01-01T00:00:00.000Z"
# Upper boundary for the allowed last_insert_date values: now minus the grace
# interval, so only series that stopped before the grace window are returned.
max_insert_date = datetime.now() - timedelta(minutes=grace_interval_minutes)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# Query entities whose name starts with '06'.
entities = entities_service.list(expression="name LIKE '06*'")

print('metric, entity, tags, last_insert_date')
for entity in entities:
    # Query all metrics for each entity (pass names, consistent with the other examples).
    metrics = entities_service.metrics(entity.name)
    for metric in metrics:
        # Query series for each metric/entity pair whose last insert falls inside the boundaries.
        series = metrics_service.series(metric.name, entity.name, min_insert_date=min_insert_date, max_insert_date=max_insert_date)
        for s in series:
            print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
# Compose a title line describing the run parameters.
# NOTE(review): 'args' (an argparse-style namespace), 'entities_service', 'log'
# and the query/filter classes are defined elsewhere in the file — TODO confirm.
title = '\nentity: %s, last hours: %s, minimal score: %s, aggregate period: %s min, interpolate period: %s min, ' \
        'data interval: %s days' % (
    args.entity, args.last_hours, args.min_score, args.aggregate_period, args.interpolate_period, args.data_interval)

# Optional metric name filter, translated into an ATSD 'like' expression.
if args.metric_filter is None:
    metric_expression = None
else:
    metric_expression = "name like '%s'" % args.metric_filter
    title = '%s, metric filter: %s' % (title, args.metric_filter)

# Accumulates output lines, starting with the title.
message = [title]

now = datetime.now()

# Metrics of the entity with inserts during the last 'last_hours' hours.
metrics = entities_service.metrics(args.entity, expression=metric_expression,
                                   min_insert_date=now - timedelta(seconds=args.last_hours * 3600),
                                   use_entity_insert_time=True)
log('Processing: ')
for metric in metrics:
    # Build a series query for this metric over the last 'data_interval' days
    # (start at local midnight minus the interval, end at server 'now').
    sf = SeriesFilter(metric=metric.name)
    ef = EntityFilter(entity=args.entity)
    df = DateFilter(start_date=datetime(now.year, now.month, now.day) - timedelta(days=args.data_interval),
                    end_date='now')
    tf = TransformationFilter()
    query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)

    # Optional aggregation: MEDIAN over fixed periods of 'aggregate_period' minutes.
    if args.aggregate_period > 0:
        tf.set_aggregate(
            Aggregate(period={'count': args.aggregate_period, 'unit': TimeUnit.MINUTE}, types=[AggregateType.MEDIAN]))

    # NOTE(review): the chunk ends here — the body of the following 'if' is outside this view.
    if args.interpolate_period > 0: