Code Example #1
from atsd_client import connect, connect_url
from atsd_client.services import MetricsService, EntitiesService

'''
Locate lagging series among all series that share the same metric and entity and differ only in tags.
A series is considered lagging if its last_insert_date falls behind the most recent one by more than the grace interval.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set grace interval in hours
grace_interval_hours = 1

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query all entities that have last_insert_date, i.e. series
entities = entities_service.list(expression="name LIKE '06*'", min_insert_date="1970-01-01T00:00:00.000Z")

print('metric,entity,tags,last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity.name)
    for m in metrics:
        # query series list for each metric and entity
        series_list = metrics_service.series(m.name, entity.name)
        # for each list with more than 1 series
        if len(series_list) > 1:
            # set lower limit to maximum last insert date over series list minus one hour
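            # The snippet is cut off here; the lines below are a hedged sketch of the
            # remaining step, assuming each series object exposes a datetime-valued
            # last_insert_date attribute (as elsewhere in these examples).
            max_insert = max(s.last_insert_date for s in series_list)
            for s in series_list:
                # report series lagging behind the freshest series by more than the grace interval
                if (max_insert - s.last_insert_date).total_seconds() > grace_interval_hours * 3600:
                    # s.tags is printed as a plain dict; atsd_client.utils.print_tags could be used instead
                    print("%s,%s,%s,%s" % (s.metric, s.entity, s.tags, s.last_insert_date))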
Code Example #2
from datetime import datetime, timedelta
from prettytable import PrettyTable

from atsd_client import connect, connect_url
from atsd_client.services import MetricsService, EntitiesService

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

entity = 'my-entity'

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query all metrics for entity
metrics = entities_service.metrics(entity, tags='frequency,seasonal_adjustment,observation_start,observation_end,category,parent_category', limit=5)
t = PrettyTable(['Top Category', 'Category', 'Name', 'Label', 'Frequency', 'Adjustment', 'Observation Start', 'Observation End'])

# iterate over metrics and add their fields/tags as rows into a PrettyTable
for metric in metrics:
    t.add_row([metric.tags['category'], metric.tags['parent_category'], metric.name, metric.label,
               metric.tags['frequency'], metric.tags['seasonal_adjustment'],
               metric.tags['observation_start'], metric.tags['observation_end']])

# Sort metrics by name
t.sortby = "Name"

# Print metrics as ASCII table
#print(t)

# Print metrics as HTML table with header
print(t.get_html_string(title="Available Metrics"))
Code Example #3
from atsd_client import connect_url
from atsd_client.services import MetricsService, EntitiesService

'''
Locate lagging series among all series that share the same metric and entity and differ only in tags.
A series is considered lagging if its last_insert_date falls behind the most recent one by more than the grace interval.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set grace interval in hours
grace_interval_hours = 1

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query all entities that have last_insert_date, i.e. series
entities = entities_service.list(expression="name LIKE '06*'", min_insert_date="1970-01-01T00:00:00.000Z")

print('metric,entity,tags,last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity.name)
    for m in metrics:
        # query series list for each metric and entity
        series_list = metrics_service.series(m.name, entity.name)
        # for each list with more than 1 series
        if len(series_list) > 1:
            # calculate maximum of all last_insert_date's in list and subtract 1 hour
Code Example #4
import argparse

from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService, MessageService, SeriesService

parser = argparse.ArgumentParser(description='Anomaly detection using luminol package.')
parser.add_argument('--last_hours', '-lh', type=float, help='interested number of hours', default=24)
parser.add_argument('--min_score', '-ms', type=float, help='score threshold', default=0)
parser.add_argument('--entity', '-e', type=str, help='entity to monitor', default='060190011')
parser.add_argument('--metric_filter', '-mf', type=str, help='filter for metric names')
parser.add_argument('--aggregate_period', '-ap', type=int, help='aggregate period', default=0)
parser.add_argument('--interpolate_period', '-ip', type=int, help='interpolate period', default=60)
parser.add_argument('--data_interval', '-di', type=int, help='requested data to analyze', default=24)
parser.add_argument('--verbose', '-v', action="count", help="enable series processing logging")
args = parser.parse_args()

time_format = '%d-%m-%Y %H:%M:%S'

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')
entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)
message_service = MessageService(connection)
svc = SeriesService(connection)

title = '\nentity: %s, last hours: %s, minimal score: %s, aggregate period: %s min, interpolate period: %s min, ' \
        'data interval: %s days' % (
    args.entity, args.last_hours, args.min_score, args.aggregate_period, args.interpolate_period, args.data_interval)

if args.metric_filter is None:
    metric_expression = None
else:
    metric_expression = "name like '%s'" % args.metric_filter
    title = '%s, metric filter: %s' % (title, args.metric_filter)

message = [title]
Code Example #5
from datetime import timedelta

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Locate metrics that have no data during the grace interval before the entity last_insert_date.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set grace interval in hours
grace_interval_hours = 1

# list of the agents (entities)
agents = ['nurswgvml007', 'nurswgvml010']

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

for agent in agents:
    # query agent meta information
    entity = entities_service.get(agent)
    if entity is None:
        print('Agent %s not found' % agent)
        continue
    date = entity.last_insert_date
    # query all metrics collected by the agent
    metrics = entities_service.metrics(entity, use_entity_insert_time=True)
    for metric in metrics:
        # check actual data existence
        if date - metric.last_insert_date > timedelta(seconds=grace_interval_hours * 3600):
            print("%s, %s" % (metric.name, agent))
Code Example #6
from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, SeriesService
from atsd_client.models import SeriesDeleteQuery

'''
Delete series for all metrics for the specified entity with names starting with the specified prefix.
'''

# Connect to ATSD server
# connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Set query
entity = "entity"
metric_expr = "name LIKE 'me*'"

# Initialize services
entities_service = EntitiesService(connection)
series_service = SeriesService(connection)

# Query all metrics for entity
metrics = entities_service.metrics(entity=entity, expression=metric_expr)

if not metrics:
    print("No metrics are found for entity " + entity)
else:
    # Delete series for each metric
    for metric in metrics:
        query = SeriesDeleteQuery(entity=entity, metric=metric.name, exact_match=False)
        print("deleting ", entity, metric.name)
        # Uncomment next line to delete series
        # response = series_service.delete(query)
        # print(response)

Code Example #7

from datetime import datetime, timedelta

from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

'''
Locate series that have no data during the actual time interval (grace_interval) using an expression filter for entity.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set grace_interval to one day
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes * 60)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query entities whose names start with 06
entities = entities_service.list(expression="name LIKE '06*'")

print('metric, entity, tags, last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity)
    for metric in metrics:
        # query all series for each metric and entity
        series = metrics_service.series(metric,
                                        entity,
                                        min_insert_date=min_insert_date,
                                        max_insert_date=max_insert_date)
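        # The snippet is truncated here; the printing loop below mirrors the fuller
        # variant of this script shown in Code Example #11.
        for s in series:
            print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))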
Code Example #8
import pprint

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Delete related entities (containers, images, network, volumes) that are linked to the given Docker host via an entity tag.
Delete Docker host entities.
'''

# Uncomment the next two lines to set custom local timezone
# os.environ['TZ'] = 'Europe/London'
# time.tzset()

tags_printer = pprint.PrettyPrinter(indent=4)

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
entity_service = EntitiesService(connection)
metric_service = MetricsService(connection)

# select all entities that collect this metric
# this metric is collected by docker hosts
docker_hosts = metric_service.series('docker.cpu.sum.usage.total.percent')

print("Docker hosts found: " + str(len(docker_hosts)))

for docker_host_series in docker_hosts:
    print("--------------")

    # get minutes since last insert
    elapsed_minutes = docker_host_series.get_elapsed_minutes()

    entity_filter = "lower(tags.docker-host) = lower('" + docker_host_series.entity + "')"
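    # The snippet ends here. As a hedged sketch (not the original continuation),
    # elapsed_minutes could gate the cleanup as described in Code Example #18, and
    # the filter above could list the related entities; deletion stays commented out.
    if elapsed_minutes > 7 * 24 * 60:
        related_entities = entity_service.list(expression=entity_filter)
        print("Related entities found: " + str(len(related_entities)))
        for related in related_entities:
            print("  " + related.name)
            # Uncomment the next line to delete the related entity
            # entity_service.delete(related)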
Code Example #9
from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService
from atsd_client.utils import print_str

'''
Locate a collection of entities that have no last_insert_date.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
entities_service = EntitiesService(connection)
# query entities without last_insert_date
entity_list = entities_service.list(max_insert_date="1970-01-01T00:00:00.000Z")

print('entity_name,entity_label')
for entity in entity_list:
    print('%s,%s' % (entity.name, print_str(entity.label)))

print("\nEntities count without last insert date is %d." % (len(entity_list)))
Code Example #10
from datetime import datetime, timedelta

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService
from atsd_client.utils import print_str
'''
Locate entities (matching the expression) that have no data during the actual time interval (grace_interval).
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set grace_interval to one day
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes * 60)

entities_service = EntitiesService(connection)
entities = entities_service.list(expression="name like 'nur*'",
                                 min_insert_date=min_insert_date,
                                 max_insert_date=max_insert_date)

print('entity_name,entity_label')
for entity in entities:
    print('%s,%s' % (entity.name, print_str(entity.label)))

Code Example #11

from datetime import datetime, timedelta

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

'''
Locate series that have no data during the actual time interval (grace_interval) using an expression filter for entity.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set grace_interval to one day
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes * 60)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query entities whose names start with 06
entities = entities_service.list(expression="name LIKE '06*'")

print('metric, entity, tags, last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity)
    for metric in metrics:
        # query all series for each metric and entity
        series = metrics_service.series(metric, entity, min_insert_date=min_insert_date, max_insert_date=max_insert_date)
        for s in series:
            print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
Code Example #12
from datetime import datetime, timedelta

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

'''
Locate series that have no data during the actual time interval (grace_interval) using a specific entity.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set entity and grace_interval to one day
entity = 'nurswgvml007'
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes * 60)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query all metrics for entity
metrics = entities_service.metrics(entity)

print('metric,entity,tags,last_insert_date')
for metric in metrics:
    # query all series for each metric and entity
    series = metrics_service.series(metric, entity, min_insert_date=min_insert_date, max_insert_date=max_insert_date)
    for s in series:
        print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
Code Example #13
#!/usr/bin/env python3

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService
'''
Delete entities that a) have no tags, b) have names 64 characters long, and c) last inserted data before 2018-Aug-01.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# Initialize services
entity_service = EntitiesService(connection)
entity_expression = "name.length() == 64 && tags.size() == 0"
entity_limit = 1000
entity_max_insert_date = '2018-08-01T00:00:00Z'
entity_list = entity_service.list(expression=entity_expression,
                                  max_insert_date=entity_max_insert_date,
                                  limit=entity_limit)

entity_count = len(entity_list)
print("Found " + str(entity_count) + " entities. Limit= " + str(entity_limit))

for idx, entity in enumerate(entity_list):
    print("- Found  " + entity.name + " : " + str(idx + 1) + "/" +
          str(entity_count) + " : inserted= " + str(entity.last_insert_date) +
          " : created= " + str(entity.created_date))
    # Uncomment next lines to delete and print delete operation status
    #res = entity_service.delete(entity)
    #print("- Delete " + entity.name + " : " + str(idx + 1) + "/" + str(entity_count) + " : " + str(res))
Code Example #14
from datetime import datetime, timedelta

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService
from atsd_client.utils import print_str

'''
Locate entities (matching the expression) that have no data during the actual time interval (grace_interval).
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set grace_interval to one day
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes * 60)

entities_service = EntitiesService(connection)
entities = entities_service.list(expression="name like 'nur*'", min_insert_date=min_insert_date,
                                 max_insert_date=max_insert_date)

print('entity_name,entity_label')
for entity in entities:
    print('%s,%s' % (entity.name, print_str(entity.label)))
Code Example #15
#!/usr/bin/python3

from atsd_client import connect
from atsd_client.services import PortalsService, EntitiesService

'''
Queries all entities that are docker hosts, i.e. tags['docker-type'] = 'host'. 
For each host, exports a template portal by name: "Docker Host Breakdown".
'''

# Connect to ATSD server
connection = connect('connection.properties')

# Initialize services
entity_service = EntitiesService(connection)
ps = PortalsService(connection)

entity_limit = 10

# Define expression to retrieve docker hosts
entity_expression = "tags.docker-type='host'"

# Retrieve entities
entities = entity_service.list(expression=entity_expression, limit=entity_limit)

for ent in entities:
    ps.get_portal(name="Docker Host Breakdown", entity=ent.name)
Code Example #16
from atsd_client import connect_url
from atsd_client.services import EntitiesService
from atsd_client.utils import print_str

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

entities_service = EntitiesService(connection)
# query all entities created after specified date
entity_list = entities_service.list(
    expression="createdDate > '2018-05-16T00:00:00Z' AND tags.status != 'deleted'")

print('entity_name,entity_label')
for entity in entity_list:
    print('%s,%s' % (entity.name, print_str(entity.label)))
Code Example #17
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService, SeriesService

# configuration parameters: begin

STAGE_1_DURATION_MIN, STAGE_1_DURATION_MAX = 4, 8
STAGE_2_DURATION_MIN, STAGE_2_DURATION_MAX = 5, 10
STAGE_3_DURATION_MIN, STAGE_3_DURATION_MAX = 16, 24

# value caching: enabled when INTERVAL_MINUTES > 0
THRESHOLD_PERCENT = 1
INTERVAL_MINUTES = 10

SAVE_AS_COMMANDS = True

connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# configuration parameters: end

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)
svc = SeriesService(connection)


def positive_spline(diff_value, start_value, t, l, x):
    return diff_value * (((x - t).total_seconds() / l.total_seconds()) ** 3) + start_value


def positive_inv_spline(diff_value, start_value, t, l, x):
    return diff_value * (1 + (((x - t).total_seconds() / l.total_seconds()) - 1) ** 3) + start_value


def negative_spline(diff_value, end_value, t, l, x):
    return diff_value * ((1 - ((x - t).total_seconds() / l.total_seconds())) ** 3) + end_value
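

# Hedged guess at the `linear` helper referenced in Code Example #25 but missing from
# these snippets: interpolate linearly from start_value to start_value + diff_value
# over the stage duration l, matching the signature of the spline helpers above.
def linear(diff_value, start_value, t, l, x):
    return diff_value * ((x - t).total_seconds() / l.total_seconds()) + start_value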
Code Example #18
import pprint

from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Locate a collection of entities (docker hosts in this case) that have not inserted data for more than 7 days.
Delete related entities (docker containers, images, network, volumes).
Delete docker host entities.
'''

# Uncomment the next two lines to set custom local timezone
# os.environ['TZ'] = 'Europe/London'
# time.tzset()

tags_printer = pprint.PrettyPrinter(indent=4)

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

entity_service = EntitiesService(connection)
metric_service = MetricsService(connection)

# select all entities that collect this metric
# this metric is collected by docker hosts
docker_hosts = metric_service.series('docker.cpu.sum.usage.total.percent')

print("Docker hosts found: " + str(len(docker_hosts)))

for docker_host_series in docker_hosts:
    print("--------------")

    # get minutes since last insert
    elapsed_minutes = docker_host_series.get_elapsed_minutes()

    entity_filter = "lower(tags.docker-host) = lower('" + docker_host_series.entity + "')"
Code Example #19
import sys
from datetime import timedelta

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Find series with last_insert_date more than n hours behind the metric last_insert_date.
Print the entities of those series.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"
# set lower boundary for lastInsertDate
min_insert_date = "2018-05-01T00:00:00.000Z"
# set grace interval in hours for 14 days
grace_interval_hours = 24 * 14

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query required metric meta data
metric = metrics_service.get(metric_name)
if metric is None:
    print('No metric with name %s' % metric_name)
    sys.exit()
elif metric.last_insert_date is None:
    print('No data for metric name %s' % metric_name)
    sys.exit()

# calculate the upper boundary for the allowed last_insert_date values excluding grace interval
max_insert_date = metric.last_insert_date - timedelta(
    seconds=grace_interval_hours * 3600)
Code Example #20
from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService
from atsd_client.utils import print_str

'''
Delete specific entity tags by name from entities that match an expression.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
entities_service = EntitiesService(connection)

# set an expression to query entities with non-empty 'category' and 'subcategory' tags
expression = "tags.category != '' AND tags.subcategory != ''"
# list the tags to be deleted
tags_to_delete = ['original_price', 'last_price', 'last_average_price', 'last_amount']

entities_list = entities_service.list(expression=expression, tags=tags_to_delete)

print('entity_name,entity_label')
for entity in entities_list:
    need_update = False
    actual_tags = entity.tags
    for key in tags_to_delete:
        if key in actual_tags:
            actual_tags[key] = ''
            # mark entity to be updated
            need_update = True
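    # The snippet is cut off here; as a hedged sketch, the blanked tags could now be
    # saved back (assuming EntitiesService.update() is available, as in other
    # entity-management examples); the call is left commented out.
    if need_update:
        print('%s,%s' % (entity.name, print_str(entity.label)))
        # Uncomment the next line to apply the tag deletion
        # entities_service.update(entity)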
Code Example #21
import sys
from datetime import timedelta

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Find series with last_insert_date more than n hours behind the metric last_insert_date.
Print the entities of those series.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"
# set lower boundary for lastInsertDate
min_insert_date = "2018-05-01T00:00:00.000Z"
# set grace interval in hours for 14 days
grace_interval_hours = 24 * 14

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query required metric meta data
metric = metrics_service.get(metric_name)
if metric is None:
    print('No metric with name %s' % metric_name)
    sys.exit()
elif metric.last_insert_date is None:
    print('No data for metric name %s' % metric_name)
    sys.exit()

# calculate the upper boundary for the allowed last_insert_date values excluding grace interval
max_insert_date = metric.last_insert_date - timedelta(seconds=grace_interval_hours * 3600)

# query series list for the metric
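# Hedged sketch of the remaining steps (not the original continuation): query the
# series for the metric within the allowed boundaries and print the entities whose
# series lag behind the metric last_insert_date by more than the grace interval.
series_list = metrics_service.series(metric_name,
                                     min_insert_date=min_insert_date,
                                     max_insert_date=max_insert_date)
lagging_entities = sorted({s.entity for s in series_list})
for entity_name in lagging_entities:
    print(entity_name)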
Code Example #22
from datetime import datetime, timedelta

from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

'''
Locate series that have no data during the actual time interval (grace_interval) using a specific entity.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set entity and grace_interval to one day
entity = 'nurswgvml007'
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes * 60)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query all metrics for entity
metrics = entities_service.metrics(entity)

print('metric,entity,tags,last_insert_date')
for metric in metrics:
    # query all series for each metric and entity
    series = metrics_service.series(metric,
                                    entity,
                                    min_insert_date=min_insert_date,
                                    max_insert_date=max_insert_date)
    for s in series:
        print("%s,%s,%s,%s" %
              (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
Code Example #23
from atsd_client import connect_url
from atsd_client.services import EntitiesService
from atsd_client.utils import print_str

'''
Locate a collection of entities that have no last_insert_date.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

entities_service = EntitiesService(connection)
# query entities without last_insert_date
entity_list = entities_service.list(max_insert_date="1970-01-01T00:00:00.000Z")

print('entity_name,entity_label')
for entity in entity_list:
    print('%s,%s' % (entity.name, print_str(entity.label)))

print("\nEntities count without last insert date is %d." % (len(entity_list)))
Code Example #24
from atsd_client import connect_url
from atsd_client.services import EntitiesService
from atsd_client.utils import print_str
'''
Delete specific entity tags by name from entities that match an expression.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

entities_service = EntitiesService(connection)

# set an expression to query entities with non-empty 'category' and 'subcategory' tags
expression = "tags.category != '' AND tags.subcategory != ''"
# list the tags to be deleted
tags_to_delete = [
    'original_price', 'last_price', 'last_average_price', 'last_amount'
]

entities_list = entities_service.list(expression=expression,
                                      tags=tags_to_delete)

print('entity_name,entity_label')
for entity in entities_list:
    need_update = False
    actual_tags = entity.tags
    for key in tags_to_delete:
        if key in actual_tags:
            actual_tags[key] = ''
            # mark entity to be updated
            need_update = True
Code Example #25
    def put_spline(self, t0, t1, metric, spline_builder):
        self.splines.append([t0, t1, metric, partial(spline_builder, t0, t1 - t0)])

    def get_spline(self, metric, t):
        for [s, e, m, f] in self.splines:
            if s <= t < e and m == metric:
                return f
        return None


# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
entities_service = EntitiesService(connection)
svc = SeriesService(connection)

procedures = [['Heating', 10], ['Mixing', 5], ['Brewing', 40], ['Cooling', 15], ['Inactive', 5]]

metrics = [['axi.temperature', {
    'Heating': partial(positive_spline, 55, 30),
    'Mixing': partial(positive_inv_spline, 5, 85),
    'Brewing': partial(negative_spline, 10, 80),
    'Cooling': partial(negative_spline, 30, 50),
    'Inactive': partial(linear, -20, 50),
}], ['axi.pressure', {
    'Heating': partial(positive_spline, 12, 10),
    'Mixing': partial(linear, 0, 22),
    'Brewing': partial(positive_spline, 3, 22),
    'Cooling': partial(negative_spline, 15, 10),
Code Example #26
source_entity = args.src_entity
dst_entity = args.dst_entity
metric = args.metric_name
tag_expression = None
if args.tag_expression is not None:
    tag_expression = args.tag_expression
start_date = args.start_datetime
dry_run = False
if args.dry_run is not None:
    dry_run = True
batch_size = int(args.batch_size)


connection = connect('/path/to/connection.properties')

entity_service = EntitiesService(connection)

if entity_service.get(source_entity) is None:
    logging.warning("'" + source_entity + "' entity does not exist")
    exit(1)

if entity_service.get(dst_entity) is None:
    logging.warning("'" + dst_entity + "' entity does not exist")
    exit(1)

metric_service = MetricsService(connection)

if metric_service.get(metric) is None:
    logging.warning("'" + metric + "' metric does not exist")
    exit(1)
Code Example #27
from datetime import datetime

from atsd_client import connect_url
from atsd_client.models import EntityFilter, DateFilter, PropertiesQuery
from atsd_client.services import EntitiesService, EntityGroupsService, PropertiesService
from atsd_client.utils import print_tags

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set the name of entity_group and tag expression
entity_group_name = 'docker-containers'
property_type = 'docker.container.config.env'
tag_expression = 'env.*'

eg_service = EntityGroupsService(connection)
properties_service = PropertiesService(connection)
entities_list = eg_service.get_entities(entity_group_name, tags=tag_expression)
# exclude entities that have no required tags
entities = [entity for entity in entities_list if entity.tags]

entities_service = EntitiesService(connection)

# prepare property query
ef = EntityFilter('entity')
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
property_query = PropertiesQuery(entity_filter=ef,
                                 date_filter=df,
                                 type=property_type)

print('entity_name,entity_label,tags')
for entity in entities:
    pretty_tags = print_tags(entity.tags)
    for key in entity.tags:
        entity.tags[key] = ''

    # set actual entity and execute property query
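    # Hedged sketch (not the original continuation): rebuild the property query for
    # the current entity, fetch its properties, and print the row announced by the
    # header above; the prepared property_query is recreated per entity here.
    query = PropertiesQuery(entity_filter=EntityFilter(entity.name),
                            date_filter=df,
                            type=property_type)
    properties = properties_service.query(query)
    print('%s,%s,%s' % (entity.name, entity.label, pretty_tags))
    for prop in properties:
        print('    %s' % prop.tags)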
Code Example #28
#!/usr/bin/env python3

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService

'''
Delete entities that a) have no tags, b) have names 64 characters long, and c) last inserted data before 2018-Aug-01.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# Initialize services
entity_service = EntitiesService(connection)
entity_expression = "name.length() == 64 && tags.size() == 0"
entity_limit = 1000
entity_max_insert_date = '2018-08-01T00:00:00Z'
entity_list = entity_service.list(expression=entity_expression, max_insert_date=entity_max_insert_date, limit=entity_limit)

entity_count = len(entity_list)
print("Found " + str(entity_count) + " entities. Limit= " + str(entity_limit))

for idx, entity in enumerate(entity_list):
    print("- Found  " + entity.name + " : " + str(idx + 1) + "/" + str(entity_count) + " : inserted= " + str(entity.last_insert_date) + " : created= " + str(entity.created_date))
    # Uncomment next lines to delete and print delete operation status
    #res = entity_service.delete(entity)
    #print("- Delete " + entity.name + " : " + str(idx + 1) + "/" + str(entity_count) + " : " + str(res))
Code Example #29
from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, SeriesService
from atsd_client.models import SeriesDeleteQuery
'''
Delete series for all metrics for the specified entity with names starting with the specified prefix.
'''

# Connect to ATSD server
# connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Set query
entity = "entity"
metric_expr = "name LIKE 'me*'"

# Initialize services
entities_service = EntitiesService(connection)
series_service = SeriesService(connection)

# Query all metrics for entity
metrics = entities_service.metrics(entity=entity, expression=metric_expr)

if not metrics:
    print("No metrics are found for entity " + entity)
else:
    # Delete series for each metric
    for metric in metrics:
        query = SeriesDeleteQuery(entity=entity,
                                  metric=metric.name,
                                  exact_match=False)
        print("deleting ", entity, metric.name)
        # Uncomment next line to delete series
        # response = series_service.delete(query)
        # print(response)
Code Example #30
import argparse

from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, MetricsService, MessageService, SeriesService

parser = argparse.ArgumentParser(description='Anomaly detection using luminol package.')
parser.add_argument('--last_hours', '-lh', type=float, help='interested number of hours', default=24)
parser.add_argument('--min_score', '-ms', type=float, help='score threshold', default=0)
parser.add_argument('--entity', '-e', type=str, help='entity to monitor', default='060190011')
parser.add_argument('--metric_filter', '-mf', type=str, help='filter for metric names')
parser.add_argument('--aggregate_period', '-ap', type=int, help='aggregate period', default=0)
parser.add_argument('--interpolate_period', '-ip', type=int, help='interpolate period', default=60)
parser.add_argument('--data_interval', '-di', type=int, help='requested data to analyze', default=24)
parser.add_argument('--verbose', '-v', action="count", help="enable series processing logging")
args = parser.parse_args()

time_format = '%d-%m-%Y %H:%M:%S'

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)
message_service = MessageService(connection)
svc = SeriesService(connection)

title = '\nentity: %s, last hours: %s, minimal score: %s, aggregate period: %s min, interpolate period: %s min, ' \
        'data interval: %s days' % (
    args.entity, args.last_hours, args.min_score, args.aggregate_period, args.interpolate_period, args.data_interval)

if args.metric_filter is None:
    metric_expression = None
else:
    metric_expression = "name like '%s'" % args.metric_filter
    title = '%s, metric filter: %s' % (title, args.metric_filter)

message = [title]