Example #1
 def get_schema(self, get_stats=False):
     connection = atsd_client.connect_url(
         self.url,
         self.configuration.get('username'),
         self.configuration.get('password'),
         verify=self.configuration.get('trust_certificate', False),
         timeout=self.configuration.get('timeout', 600))
     metrics = MetricsService(connection)
     ml = metrics.list(
         expression=self.configuration.get('expression', None),
         minInsertDate=self.configuration.get('min_insert_date', None),
         limit=self.configuration.get('limit', 5000))
     metrics_list = [i.name.encode('utf-8') for i in ml]
     metrics_list.append('atsd_series')
     schema = {}
     default_columns = [
         'entity', 'datetime', 'time', 'metric', 'value', 'text', 'tags',
         'entity.tags', 'metric.tags'
     ]
     for table_name in metrics_list:
         schema[table_name] = {
             'name': "'{}'".format(table_name),
             'columns': default_columns
         }
    values = list(schema.values())
    return values
Example #2
 def get_schema(self, get_stats=False):
     connection = atsd_client.connect_url(
         self.url,
         self.configuration.get("username"),
         self.configuration.get("password"),
         verify=self.configuration.get("trust_certificate", False),
         timeout=self.configuration.get("timeout", 600),
     )
     metrics = MetricsService(connection)
     ml = metrics.list(
         expression=self.configuration.get("expression", None),
         minInsertDate=self.configuration.get("min_insert_date", None),
         limit=self.configuration.get("limit", 5000),
     )
     metrics_list = [i.name.encode("utf-8") for i in ml]
     metrics_list.append("atsd_series")
     schema = {}
     default_columns = [
         "entity",
         "datetime",
         "time",
         "metric",
         "value",
         "text",
         "tags",
         "entity.tags",
         "metric.tags",
     ]
     for table_name in metrics_list:
         schema[table_name] = {
             "name": "'{}'".format(table_name),
             "columns": default_columns,
         }
     values = list(schema.values())
     return values
Example #3
 def get_schema(self, get_stats=False):
     connection = atsd_client.connect_url(self.url,
                                          self.configuration.get('username'),
                                          self.configuration.get('password'),
                                          verify=self.configuration.get('trust_certificate', False),
                                          timeout=self.configuration.get('timeout', 600))
     metrics = MetricsService(connection)
     ml = metrics.list(expression=self.configuration.get('expression', None),
                       minInsertDate=self.configuration.get('min_insert_date', None),
                       limit=self.configuration.get('limit', 5000))
     metrics_list = [i.name.encode('utf-8') for i in ml]
     metrics_list.append('atsd_series')
     schema = {}
     default_columns = ['entity', 'datetime', 'time', 'metric', 'value', 'text',
                        'tags', 'entity.tags', 'metric.tags']
     for table_name in metrics_list:
         schema[table_name] = {'name': "'{}'".format(table_name),
                               'columns': default_columns}
    values = list(schema.values())
    return values
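For reference, a minimal standalone sketch of the schema-building step shared by the three variants above, with the ATSD call stubbed out; the metric names are illustrative assumptions:

metrics_list = ['cpu_busy', 'disk_used', 'atsd_series']  # stand-in for the metrics.list() result
default_columns = ['entity', 'datetime', 'time', 'metric', 'value', 'text',
                   'tags', 'entity.tags', 'metric.tags']
schema = {}
for table_name in metrics_list:
    schema[table_name] = {'name': "'{}'".format(table_name), 'columns': default_columns}
print(list(schema.values())[0])
# {'name': "'cpu_busy'", 'columns': ['entity', 'datetime', 'time', ...]}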
Example #4
from datetime import datetime, timedelta
from prettytable import PrettyTable
from atsd_client import connect, connect_url
from atsd_client.models import SeriesQuery, SeriesFilter, EntityFilter, DateFilter, ControlFilter, to_iso
from atsd_client.services import MetricsService, SeriesService

'''
Find series with data older than `now - (metric.retention_days + grace_interval_days)`.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)
metric_service = MetricsService(connection)

metric_expression = "enabled AND persistent AND retentionDays > 0"
metric_list = metric_service.list(expression=metric_expression)
series_count = 0
# ATSD expired data removal schedule frequency, default is one day
grace_interval_days = 1

t = PrettyTable(['Metric', 'Entity', 'Tags', 'Retention Days', 'Threshold', 'Presented Sample Date'])
for metric in metric_list:
    # calculate the threshold date; data older than this should already be expired
    threshold = datetime.now() - timedelta(days=metric.retention_days + grace_interval_days)

    # query series with current metric and all entities from the beginning up to threshold
    # enough to get at least one value, limit set to 1
    sf = SeriesFilter(metric=metric.name)
Example #5
from atsd_client import connect_url
from atsd_client.services import MetricsService
'''
Locate a collection of metrics that have no last_insert_date.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

metric_service = MetricsService(connection)
# query metrics without last_insert_date
metric_list = metric_service.list(max_insert_date="1970-01-01T00:00:00.000Z")
metrics_count = 0

print('metric_name')
for metric in metric_list:
    if metric.enabled and metric.persistent \
            and metric.retention_days == 0 and metric.series_retention_days == 0:
        metrics_count += 1
        print(metric.name)

print("\nMetrics count without last insert date is %d." % metrics_count)
Example #6
from datetime import timedelta
from atsd_client import connect_url
from atsd_client.services import MetricsService, EntitiesService

'''
Locate lagging series among all series that differ only in tags (same metric and entity) within the grace interval.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set grace interval in hours
grace_interval_hours = 1

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query entities with names starting with '06' that have a last_insert_date, i.e. series
entities = entities_service.list(expression="name LIKE '06*'", min_insert_date="1970-01-01T00:00:00.000Z")

print('metric,entity,tags,last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity.name)
    for m in metrics:
        # query series list for each metric and entity
        series_list = metrics_service.series(m.name, entity.name)
        # for each list with more than 1 series
        if len(series_list) > 1:
            # set the lower limit to the maximum last_insert_date over the series list minus the grace interval
            lower_limit_date = max(s.last_insert_date for s in series_list) - timedelta(hours=grace_interval_hours)
Example #7
from atsd_client import connect, connect_url
from atsd_client.services import MetricsService

'''
Locate a collection of metrics that have been created after the specified date.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
metrics_service = MetricsService(connection)
# query all metrics created after specified_date
metric_list = metrics_service.list(expression="createdDate > '2018-05-16T00:00:00Z'")

print('metric_name')
for metric in metric_list:
    print(metric.name)
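The createdDate literal above is hard-coded. If the cutoff should track the current date, the expression can be built from a datetime; a sketch, where the 30-day window is an arbitrary assumption:

from datetime import datetime, timedelta

since = datetime.utcnow() - timedelta(days=30)
expression = "createdDate > '%s'" % since.strftime('%Y-%m-%dT%H:%M:%SZ')
metric_list = metrics_service.list(expression=expression)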

Example #8
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService, SeriesService

STAGE_1_DURATION_MIN, STAGE_1_DURATION_MAX = 4, 8
STAGE_2_DURATION_MIN, STAGE_2_DURATION_MAX = 5, 10
STAGE_3_DURATION_MIN, STAGE_3_DURATION_MAX = 16, 24

# value caching: enabled when INTERVAL_MINUTES > 0
THRESHOLD_PERCENT = 1
INTERVAL_MINUTES = 10

SAVE_AS_COMMANDS = True

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# configuration parameters: end

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)
svc = SeriesService(connection)


def positive_spline(diff_value, start_value, t, l, x):
    return diff_value * (((x - t).total_seconds() / l.total_seconds()) ** 3) + start_value


def positive_inv_spline(diff_value, start_value, t, l, x):
    return diff_value * (1 + (((x - t).total_seconds() / l.total_seconds()) - 1) ** 3) + start_value


def negative_spline(diff_value, end_value, t, l, x):
    return diff_value * ((1 - ((x - t).total_seconds() / l.total_seconds())) ** 3) + end_value
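The splines are cubic easing curves over the interval [t, t + l]. A quick boundary check illustrates the intent; a standalone sketch with illustrative values:

from datetime import datetime, timedelta

t = datetime(2018, 1, 1)
l = timedelta(hours=8)
# positive_spline starts at start_value and rises by diff_value over the interval
assert positive_spline(10.0, 5.0, t, l, t) == 5.0
assert positive_spline(10.0, 5.0, t, l, t + l) == 15.0
# negative_spline falls from end_value + diff_value down to end_value
assert negative_spline(10.0, 5.0, t, l, t + l) == 5.0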

Example #9
import pprint
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Locate a collection of entities (Docker hosts in this case) that have not inserted data for more than 7 days.
Delete related entities (Docker containers, images, networks, volumes).
Delete Docker host entities.
'''

# Uncomment the next two lines to set custom local timezone
# os.environ['TZ'] = 'Europe/London'
# time.tzset()

tags_printer = pprint.PrettyPrinter(indent=4)

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

entity_service = EntitiesService(connection)
metric_service = MetricsService(connection)

# select all entities that collect this metric
# this metric is collected by docker hosts
docker_hosts = metric_service.series('docker.cpu.sum.usage.total.percent')

print("Docker hosts found: " + str(len(docker_hosts)))

for docker_host_series in docker_hosts:
    print("--------------")

    # get minutes since last insert
    elapsed_minutes = docker_host_series.get_elapsed_minutes()

    entity_filter = "lower(tags.docker-host) = lower('" + docker_host_series.entity + "')"
    # find related entities whose docker-host tag equals this Docker host
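    # A hedged sketch of how the truncated cleanup likely continues: treat hosts
    # silent for more than 7 days as stale (the threshold comes from the docstring)
    # and look up their related entities; variable names below are assumptions.
    if elapsed_minutes > 7 * 24 * 60:
        related_entities = entity_service.list(expression=entity_filter)
        print("Stale Docker host: %s, related entities: %d"
              % (docker_host_series.entity, len(related_entities)))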
Example #10
from atsd_client import connect, connect_url
from atsd_client.services import MetricsService
from atsd_client.utils import print_tags

'''
Locate high-cardinality series, i.e. series with more tags than the specified cardinality.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
metrics_service = MetricsService(connection)

# query all metrics that have last_insert_date, i.e. series
metrics = metrics_service.list(min_insert_date="1970-01-01T00:00:00.000Z")

# set cardinality
cardinality = 8
series_count = 0

print('metric,entity,tags,last_insert_date')
for metric in metrics:
    # query series list for each metric
    series_list = metrics_service.series(metric)
    for s in series_list:
        # check tags cardinality for each series in list
        if len(s.tags) > cardinality:
            series_count += 1
            print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
Example #11
import pprint
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Delete Docker host entities.
'''

# Uncomment the next two lines to set custom local timezone
# os.environ['TZ'] = 'Europe/London'
# time.tzset()

tags_printer = pprint.PrettyPrinter(indent=4)

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
entity_service = EntitiesService(connection)
metric_service = MetricsService(connection)

# select all entities that collect this metric
# this metric is collected by docker hosts
docker_hosts = metric_service.series('docker.cpu.sum.usage.total.percent')

print("Docker hosts found: " + str(len(docker_hosts)))

for docker_host_series in docker_hosts:
    print("--------------")

    # get minutes since last insert
    elapsed_minutes = docker_host_series.get_elapsed_minutes()

    entity_filter = "lower(tags.docker-host) = lower('" + docker_host_series.entity + "')"
    # find related entities whose docker-host tag equals this Docker host
Example #12
from datetime import datetime, timedelta
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

'''
Locate series that have no data during the recent time interval (grace_interval), using an expression filter for entity.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set grace_interval to one day
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes * 60)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query entities whose names start with '06'
entities = entities_service.list(expression="name LIKE '06*'")

print('metric, entity, tags, last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity)
    for metric in metrics:
        # query all series for each metric and entity
        series = metrics_service.series(metric, entity, min_insert_date=min_insert_date, max_insert_date=max_insert_date)
        for s in series:
            print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
Example #13
from dateutil.relativedelta import relativedelta
from atsd_client import connect, connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery
from atsd_client.services import MetricsService, SeriesService

'''
Retrieve metrics with the 'frequency' tag and load the series for each metric.
Print values that violate the frequency (ignoring gaps that are a multiple of the frequency period).
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)
metric_service = MetricsService(connection)
# query metrics with a non-empty frequency tag; include all metric tags in the response
metric_list = metric_service.list(expression='tags.frequency != ""', tags='*')


def resolve_frequency(frequency):
    """
    Transform metric frequency tag into relativedelta instance
    """

    if frequency in ['Daily (D)', 'Daily', 'Daily, 7-Day', 'Daily, Close']:
        return relativedelta(days=1)
    elif frequency in ['Weekly', 'Weekly, As of Monday', 'Weekly, As of Wednesday', 'Weekly, As of Thursday',
                       'Weekly, Ending Monday', 'Weekly, Ending Wednesday', 'Weekly, Ending Thursday',
                       'Weekly, Ending Friday', 'Weekly, Ending Saturday', ]:
        return relativedelta(weeks=1)
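Labels for monthly, quarterly, and annual frequencies presumably follow in the omitted branches. The helper composes directly with dateutil arithmetic; a quick standalone check:

from datetime import datetime

d = datetime(2018, 5, 1)
assert d + resolve_frequency('Daily') == datetime(2018, 5, 2)
assert d + resolve_frequency('Weekly') == datetime(2018, 5, 8)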
Example #14
import sys
from datetime import timedelta
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService

'''
Print the entities of series that have not collected data during the grace interval.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"
# set lower boundary for lastInsertDate
min_insert_date = "2018-05-01T00:00:00.000Z"
# set grace interval to 14 days, expressed in hours
grace_interval_hours = 24 * 14

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query the required metric metadata
metric = metrics_service.get(metric_name)
if metric is None:
    print('No metric with name %s' % metric_name)
    sys.exit()
elif metric.last_insert_date is None:
    print('No data for metric name %s' % metric_name)
    sys.exit()

# calculate the upper boundary for the allowed last_insert_date values excluding grace interval
max_insert_date = metric.last_insert_date - timedelta(
    seconds=grace_interval_hours * 3600)

# query series list for the metric
series_list = metrics_service.series(metric, min_insert_date=min_insert_date, max_insert_date=max_insert_date)
Example #15
import logging
from atsd_client import connect
from atsd_client.services import EntitiesService, MetricsService, SeriesService
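The snippet begins after its argument-parsing preamble, which is not shown. A plausible reconstruction with argparse; every flag name here is an assumption:

import argparse

parser = argparse.ArgumentParser(description='Copy series samples from one entity to another.')
parser.add_argument('--source-entity', required=True)  # assumed flag names
parser.add_argument('--dst-entity', required=True)
parser.add_argument('--metric', required=True)
parser.add_argument('--batch-size', default=1000)
parser.add_argument('--dry-run', action='store_true')
args = parser.parse_args()
source_entity, dst_entity = args.source_entity, args.dst_entity
metric, dry_run = args.metric, args.dry_run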
batch_size = int(args.batch_size)


connection = connect('/path/to/connection.properties')

entity_service = EntitiesService(connection)

if entity_service.get(source_entity) is None:
    logging.warning("'" + source_entity + "' entity does not exist")
    exit(1)

if entity_service.get(dst_entity) is None:
    logging.warning("'" + dst_entity + "' entity does not exist")
    exit(1)

metric_service = MetricsService(connection)

if metric_service.get(metric) is None:
    logging.warning("'" + metric + "' metric does not exist")
    exit(1)

series_service = SeriesService(connection)


def insert_or_warning(series_to_insert):
    if not dry_run:
        series_service.insert(series_to_insert)
    else:
        logging.warning("Dry run enabled, series are not inserted.")

Example #16
from datetime import datetime, timedelta
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

'''
Locate series that have no data during the recent time interval (grace_interval), using an expression filter for entity.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set grace_interval to one day
grace_interval_minutes = 24 * 60
# query entities with last_insert_date
min_insert_date = "1970-01-01T00:00:00.000Z"
# calculate the upper boundary for the allowed last_insert_date values excluding grace_interval
max_insert_date = datetime.now() - timedelta(seconds=grace_interval_minutes *
                                             60)

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query entities whose names start with '06'
entities = entities_service.list(expression="name LIKE '06*'")

print('metric, entity, tags, last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity)
    for metric in metrics:
        # query all series for each metric and entity
        series = metrics_service.series(metric,
                                        entity,
                                        min_insert_date=min_insert_date,
                                        max_insert_date=max_insert_date)
        for s in series:
            print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))
Example #17
from datetime import datetime
from dateutil.relativedelta import relativedelta

from atsd_client import connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery
from atsd_client.services import MetricsService, SeriesService
'''
Retrieve metrics with the 'frequency' tag and load the series for each metric.
Print values that violate the frequency (ignoring gaps that are a multiple of the frequency period).
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

svc = SeriesService(connection)
metric_service = MetricsService(connection)
# query metrics with a non-empty frequency tag; include all metric tags in the response
metric_list = metric_service.list(expression='tags.frequency != ""', tags='*')


def resolve_frequency(frequency):
    """
    Transform metric frequency tag into relativedelta instance
    """

    if frequency in ['Daily (D)', 'Daily', 'Daily, 7-Day', 'Daily, Close']:
        return relativedelta(days=1)
    elif frequency in [
            'Weekly',
            'Weekly, As of Monday',
            'Weekly, As of Wednesday',
Example #18
import sys
from datetime import timedelta
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService
from atsd_client.utils import print_tags

'''
Print the entities of series that have not collected data during the grace interval.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"
# set lower boundary for lastInsertDate
min_insert_date = "2018-05-01T00:00:00.000Z"
# set grace interval to 14 days, expressed in hours
grace_interval_hours = 24 * 14

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query the required metric metadata
metric = metrics_service.get(metric_name)
if metric is None:
    print('No metric with name %s' % metric_name)
    sys.exit()
elif metric.last_insert_date is None:
    print('No data for metric name %s' % metric_name)
    sys.exit()

# calculate the upper boundary for the allowed last_insert_date values excluding grace interval
max_insert_date = metric.last_insert_date - timedelta(seconds=grace_interval_hours * 3600)

# query series list for the metric
series_list = metrics_service.series(metric, min_insert_date=min_insert_date, max_insert_date=max_insert_date)
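From here the script presumably reports the lagging series; a minimal sketch of that final step, with the output format borrowed from the similar examples above:

print('metric,entity,tags,last_insert_date')
for s in series_list:
    print("%s,%s,%s,%s" % (s.metric, s.entity, print_tags(s.tags), s.last_insert_date))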
Example #19
from atsd_client import connect, connect_url
from atsd_client.services import MetricsService

'''
Locate a collection of metrics that have no last_insert_date.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
metric_service = MetricsService(connection)
# query metrics without last_insert_date
metric_list = metric_service.list(max_insert_date="1970-01-01T00:00:00.000Z")
metrics_count = 0

print('metric_name')
for metric in metric_list:
    if metric.enabled and metric.persistent \
            and metric.retention_days == 0 and metric.series_retention_days == 0:
        metrics_count += 1
        print(metric.name)

print("\nMetrics count without last insert date is %d." % metrics_count)
Example #20
from datetime import datetime
from atsd_client import connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery
from atsd_client.services import SeriesService, MetricsService
from atsd_client.utils import print_tags

'''
Load all series values that are non-positive for the specified metric.
Optionally, if the deleteValues parameter is set, replace these values with NaN.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# disable deleting inappropriate values
deleteValues = False

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"

svc = SeriesService(connection)
metrics_service = MetricsService(connection)

# query series with current metric and all entities
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity='*')
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series = svc.query(query)

if deleteValues:
    print('Inappropriate values will be deleted.\n')
else:
    print('Inappropriate values will be left as is.\n')

print('metric,entity,tags,data')
for s in series:
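    # A hedged sketch of the truncated loop body: collect each series'
    # non-positive samples and report them; the Sample attributes v and
    # get_date() are assumptions based on the client's data model.
    non_positive = [sample for sample in s.data if sample.v is not None and sample.v <= 0]
    for sample in non_positive:
        print("%s,%s,%s,%s: %s" % (s.metric, s.entity, print_tags(s.tags), sample.get_date(), sample.v))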
Example #21
from atsd_client import connect_url
from atsd_client.services import EntitiesService, MetricsService, SeriesService
STAGE_1_DURATION_MIN, STAGE_1_DURATION_MAX = 4, 8
STAGE_2_DURATION_MIN, STAGE_2_DURATION_MAX = 5, 10
STAGE_3_DURATION_MIN, STAGE_3_DURATION_MAX = 16, 24

# value caching: enabled when INTERVAL_MINUTES > 0
THRESHOLD_PERCENT = 1
INTERVAL_MINUTES = 10

SAVE_AS_COMMANDS = True

connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# configuration parameters: end

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)
svc = SeriesService(connection)


def positive_spline(diff_value, start_value, t, l, x):
    return diff_value * (((x - t).total_seconds() / l.total_seconds()) ** 3) + start_value


def positive_inv_spline(diff_value, start_value, t, l, x):
    return diff_value * (1 + (((x - t).total_seconds() / l.total_seconds()) - 1) ** 3) + start_value


def negative_spline(diff_value, end_value, t, l, x):
    return diff_value * ((1 - ((x - t).total_seconds() / l.total_seconds())) ** 3) + end_value

Example #22
from datetime import timedelta
from atsd_client import connect_url
from atsd_client.services import MetricsService, EntitiesService

'''
Locate lagging series among all series that differ only in tags (same metric and entity) within the grace interval.
'''

# Connect to an ATSD server
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set grace interval in hours
grace_interval_hours = 1

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query entities with names starting with '06' that have a last_insert_date, i.e. series
entities = entities_service.list(expression="name LIKE '06*'", min_insert_date="1970-01-01T00:00:00.000Z")

print('metric,entity,tags,last_insert_date')
for entity in entities:
    # query all metrics for each entity
    metrics = entities_service.metrics(entity.name)
    for m in metrics:
        # query series list for each metric and entity
        series_list = metrics_service.series(m.name, entity.name)
        # for each list with more than 1 series
        if len(series_list) > 1:
            # take the maximum of all last_insert_dates in the list and subtract the grace interval;
            # this is the lower limit date to compare against
            lower_limit_date = max(s.last_insert_date for s in series_list) - timedelta(hours=grace_interval_hours)
Example #23
from datetime import datetime
from atsd_client import connect, connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery
from atsd_client.services import SeriesService, MetricsService

'''
Copy data from one metric to the new one.
'''

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)
metrics_service = MetricsService(connection)

# specify source and destination metric names
metric_src = 'metric_src'
metric_dst = 'metric_dst'

# copy series with all entities, specific entity name can be set instead
entity = '*'

# query series with required metric and all entities
sf = SeriesFilter(metric=metric_src)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date='1970-01-01T00:00:00Z', end_date=datetime.now())
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series = svc.query(query)
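The example ends right after the query; the remaining step is presumably to retarget the fetched series and write them back. A hedged sketch, assuming Series objects can be re-pointed through their metric attribute and inserted with SeriesService.insert:

for s in series:
    s.metric = metric_dst
svc.insert(*series)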