示例#1
0
 def setUp(self):
     """
     Create the series service and insert a single test sample.
     """
     self._series_service = SeriesService(self.connection)
     test_series = Series(ENTITY, METRIC)
     test_series.add_samples(Sample(VALUE, datetime.now()))
     self._series_service.insert(test_series)
     # Give the server time to index the inserted data.
     time.sleep(self.wait_time)
 def setUp(self):
     """
     Insert a fresh sample for the test series before each test.
     """
     sample = Sample(VALUE, datetime.now())
     inserted = Series(ENTITY, METRIC)
     inserted.add_samples(sample)
     self._series_service = SeriesService(self.connection)
     self._series_service.insert(inserted)
     # Allow the inserted sample to become queryable before tests run.
     time.sleep(self.wait_time)
示例#3
0
class TestSQLService(ServiceTestBase):
    def setUp(self):
        """Insert one sample so the SQL query below has data to return."""
        self._series_service = SeriesService(self.connection)
        inserted = Series(ENTITY, METRIC)
        inserted.add_samples(Sample(VALUE, datetime.now()))
        self._series_service.insert(inserted)
        # give the server time to make the sample queryable
        time.sleep(self.wait_time)

    def test_query(self):
        """Run the SQL query and validate the result's shape and cells."""
        result = self.service.query(QUERY)
        self.assertIsNotNone(result)
        # a single inserted sample yields one row with seven columns
        self.assertEqual((1, 7), result.shape)
        self.assertIsInstance(result.at[0, 'datetime'], str)
        self.assertEqual(METRIC, result.at[0, 'metric'])
        self.assertEqual(ENTITY, result.at[0, 'entity'])
        self.assertEqual(VALUE, result.at[0, 'value'])
class TestSQLService(ServiceTestBase):

    def setUp(self):
        """Prepare test data: insert a single sample into the test series."""
        sample = Sample(VALUE, datetime.now())
        series = Series(ENTITY, METRIC)
        series.add_samples(sample)
        self._series_service = SeriesService(self.connection)
        self._series_service.insert(series)
        # wait until the inserted data is indexed
        time.sleep(self.wait_time)

    def test_query(self):
        """Check that the query returns exactly the inserted sample."""
        result = self.service.query(QUERY)
        self.assertIsNotNone(result)
        self.assertEqual((1, 7), result.shape)
        # the datetime column is returned as a string, not a datetime object
        self.assertTrue(isinstance(result.at[0, 'datetime'], str))
        for column, expected in (('metric', METRIC),
                                 ('entity', ENTITY),
                                 ('value', VALUE)):
            self.assertEqual(expected, result.at[0, column])
示例#5
0
def main(filename,
         atsd_url,
         username,
         password,
         stat_as_tag,
         entity_name=None,
         timestamp=None):
    """
    Read JMH benchmark results from a JSON file and insert them into ATSD.

    For each benchmark entry, one series is inserted for the average score
    plus one series per reported score percentile.

    :param filename: path to the JMH JSON results file
    :param atsd_url: ATSD base URL
    :param username: ATSD user name
    :param password: ATSD password
    :param stat_as_tag: if True, keep one metric per mode/unit and put the
        statistic name into the 'stat' tag; otherwise append the statistic
        name to the metric name
    :param entity_name: entity to insert under; defaults to the second-to-last
        segment of the benchmark name
    :param timestamp: sample time; defaults to datetime.now()
    """
    if timestamp is None:
        timestamp = datetime.now()
    conn = connect_url(atsd_url, username, password)
    series_service = SeriesService(conn)

    with open(filename) as f:
        entries = json.load(f)

    for entry in entries:
        benchmark_name_split = entry['benchmark'].split('.')
        entity = benchmark_name_split[
            -2] if entity_name is None else entity_name
        metric_prefix = 'jmh.' + entry['mode'] + '.' + entry[
            'primaryMetric']['scoreUnit']

        def insert_stat(stat, score):
            # BUG FIX: build a fresh tags dict for every insert. The original
            # code reused and mutated one dict after handing it to Series, so
            # if Series keeps the reference (rather than copying), previously
            # built series would see the later 'stat' values.
            tags = {'method': benchmark_name_split[-1]}
            if stat_as_tag:
                metric = metric_prefix
                tags['stat'] = stat
            else:
                metric = metric_prefix + '.' + stat
            series_service.insert(
                Series(entity, metric, [Sample(score, timestamp)], tags))

        # average score
        insert_stat('avg', entry['primaryMetric']['score'])

        # percentiles; JMH reports '0.0' and '100.0' which map to min/max
        for key, value in entry['primaryMetric']['scorePercentiles'].items():
            if key == '0.0':
                stat = 'min'
            elif key == '100.0':
                stat = 'max'
            else:
                stat = key
            insert_stat(stat, value)
示例#6
0
from datetime import datetime

from atsd_client import connect, connect_url
from atsd_client.utils import print_tags
from atsd_client.models import SeriesQuery, SeriesFilter, EntityFilter, DateFilter, ControlFilter
from atsd_client.services import MetricsService, SeriesService
'''
Retrieve series for a given metric, for each series fetch first and last value with corresponding dates.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)
metric_service = MetricsService(connection)

# set metric name
metric_name = "ca.daily.reservoir_storage_af"

# print header
print(
    'entity,entityLabel,seriesTags,firstValueDate,firstValue,lastValueDate,lastValue'
)

# query series with current metric for all entities with meta information in ascending order to get first value
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity='*')
# NOTE(review): datetime.now() is timezone-naive -- confirm the server treats it as intended
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
# limit=1 with ASC direction: return only the earliest sample of each series
cf = ControlFilter(limit=1, add_meta=True, direction="ASC")
示例#7
0
# Stage duration bounds -- presumably minutes; TODO confirm against the generator code
STAGE_2_DURATION_MIN, STAGE_2_DURATION_MAX = 5, 10
STAGE_3_DURATION_MIN, STAGE_3_DURATION_MAX = 16, 24

# value caching: enabled when INTERVAL_MINUTES > 0
THRESHOLD_PERCENT = 1
INTERVAL_MINUTES = 10

# NOTE(review): presumably switches output to command format instead of API inserts -- confirm
SAVE_AS_COMMANDS = True

# NOTE(review): placeholder URL/credentials -- replace before running
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# configuration parameters: end

# Initialize ATSD services used below
entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)
svc = SeriesService(connection)

def positive_spline(diff_value, start_value, t, l, x):
    """Cubic ease-in curve.

    Rises from ``start_value`` at time ``t`` to ``start_value + diff_value``
    at ``t + l``; ``t``/``x`` are datetimes, ``l`` is a timedelta.
    """
    progress = (x - t).total_seconds() / l.total_seconds()
    return diff_value * progress ** 3 + start_value


def positive_inv_spline(diff_value, start_value, t, l, x):
    """Cubic ease-out curve.

    Rises from ``start_value`` at time ``t`` to ``start_value + diff_value``
    at ``t + l``, decelerating toward the end of the interval.
    """
    progress = (x - t).total_seconds() / l.total_seconds()
    eased = 1 + (progress - 1) ** 3
    return diff_value * eased + start_value


def negative_spline(diff_value, end_value, t, l, x):
    """Cubic decay curve.

    Falls from ``end_value + diff_value`` at time ``t`` down to ``end_value``
    at ``t + l``; ``t``/``x`` are datetimes, ``l`` is a timedelta.
    """
    remaining = 1 - (x - t).total_seconds() / l.total_seconds()
    return diff_value * remaining ** 3 + end_value


def linear(diff_value, start_value, t, l, x):
from datetime import datetime

from atsd_client import connect, connect_url
from atsd_client.models import SeriesQuery, SeriesFilter, EntityFilter, DateFilter
from atsd_client.services import MetricsService, SeriesService

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# set minimum window size to make decision
min_window_size = 5
# 10 days interval in milliseconds
interval_ms = 10 * 24 * 60 * 60 * 1000

# Initialize services
svc = SeriesService(connection)
metric_service = MetricsService(connection)

# set metric and entity
metric = 'ca.daily.reservoir_storage_af'
entity = 'ca.oro'

print('Metric: %s\nEntity: %s\n' % (metric, entity))

# prepare query to retrieve data
# exact_match=True excludes series that carry tags beyond those specified
sf = SeriesFilter(metric=metric, exact_match=True)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())

query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
# trailing-comma unpacking: exactly one series is expected, raises ValueError otherwise
series, = svc.query(query)
'''
Load all series values that are non-positive for the specified metric.
Optionally, if the deleteValues parameter is set, replace these values with NaN.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# disable deleting inappropriate values
deleteValues = False

# specify metric name
metric_name = "ca.daily.reservoir_storage_af"

# Initialize services
svc = SeriesService(connection)
metrics_service = MetricsService(connection)

# query series with current metric and all entities
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity='*')
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series = svc.query(query)

# announce the chosen mode before processing
if deleteValues:
    print('Inappropriate values to be deleted.\n')
else:
    print('Leave as is inappropriate values.\n')

# CSV header for the per-series report printed below
print('metric,entity,tags,data')
示例#10
0
Delete data for a given series with tags for the specified date interval.
'''

# Connect to an ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')

# set series
metric = 'm-to-delete'
entity = 'e-to-delete'
tags = {'tag_key_1': 'tag_value_1', 'tag_key_2': 'tag_value_2'}

# specify date interval
startDate = "2018-10-01T00:00:00Z"
endDate = "2018-10-02T00:00:00Z"

series_service = SeriesService(connection)

# query the series to be deleted, use exactMatch to exclude not specified tags
sf = SeriesFilter(metric=metric, tags=tags, exact_match=True)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date=startDate, end_date=endDate)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series_list = series_service.query(query)

# refuse to continue on an ambiguous match
if len(series_list) > 1:
    raise Exception('There are multiple series meet the requirements')

series = series_list[0]

# check data existence
if len(series.data) == 0:
# connection = connect('/path/to/connection.properties')
# NOTE(review): placeholder URL/credentials -- replace before running
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Series filter
metric = 'm-to-delete'
entity = 'e-to-delete'
tags = {'tag_key_1': 'tag_value_1', 'tag_key_2': 'tag_value_2'}

# Specify date interval
startDate = "2018-10-01T00:00:00Z"
endDate = "2018-10-02T00:00:00Z"

# Exclude samples with NaN values (NaN represents deleted values)
expr = '!Float.isNaN(value)'

series_service = SeriesService(connection)

# Query the series to be deleted, use exactMatch to exclude not specified tags
sf = SeriesFilter(metric=metric, tags=tags, exact_match=True)
ef = EntityFilter(entity=entity)
df = DateFilter(start_date=startDate, end_date=endDate)
vf = SampleFilter(expr)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, sample_filter=vf)
series_list = series_service.query(query)

# refuse to continue on an ambiguous match
if len(series_list) > 1:
    raise Exception('There are multiple series meet the requirements')

series = series_list[0]

# Check data existence
示例#12
0
from atsd_client import connect, connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery, Sample
from atsd_client.services import SeriesService

'''
Load data for one year and insert the data with multiple year shift.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)

# specify years to increase
year_count = 5
# specify metric and entity names ('*' matches all entities)
metric_name = 'sml.power-consumed'
entity_name = '*'
# specify date filter -- one year of data plus one second
start_date = '2018-01-01T00:00:00Z'
end_date = '2019-01-01T00:00:01Z'

# prepare series_query and execute it
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity=entity_name)
df = DateFilter(start_date=start_date, end_date=end_date)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series_list = svc.query(query)
示例#13
0
from atsd_client import connect, connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery, Sample
from atsd_client.services import SeriesService
'''
Load data for one year and insert the data with multiple year shift.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)

# specify years to increase
year_count = 5
# specify metric and entity names ('*' matches all entities)
metric_name = 'sml.power-consumed'
entity_name = '*'
# specify date filter -- one year of data plus one second
start_date = '2018-01-01T00:00:00Z'
end_date = '2019-01-01T00:00:01Z'

# prepare series_query and execute it
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity=entity_name)
df = DateFilter(start_date=start_date, end_date=end_date)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series_list = svc.query(query)

for series in series_list:
示例#14
0
query-cpu_busy-nurswgvml007-all-DETAIL.json
query-cpu_busy-nurswgvml007-all-PERCENTILE_90+DELTA.json
query-disk_used-nurswgvml006-all-all.json
query-disk_used-nurswgvml006-all-DETAIL.json
query-disk_used-nurswgvml006-all-MIN+MAX.json
query-disk_used-nurswgvml006-all-MIN.json
query-disk_used-nurswgvml006-all-WTAVG.json
query-log_event_counter-nurswgvml007-command=com.axibase.tsd.Server;level=INFO;logger=com.axibase.tsd.service.config.ServerPropertiesReader-all.json
rate-cpu_busy-nurswgvml007-all-DETAIL.json
group-disk_used-nurswgvml006-all-DETAIL.json
rate+group-collectd.cpu.busy-nurswghbs001-all-MIN.json
'''

# Connect to an ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
connection = connect_url('https://atsd_hostname:8443', 'user', 'password')
svc = SeriesService(connection)

# set start_date and end_date
start_date = '2018-05-07T07:00:00Z'
end_date = '2018-05-08T08:00:00Z'

# list all json files from the current directory
files = [
    f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.json')
]

for filename in files:
    # parse filename to get series information
    # expected filename shape: <query>-<metric>-<entity>-<tags>-<aggregates>.json
    query, metric_name, entity_name, tags, aggregate_types = splitext(
        filename)[0].split('-')
from atsd_client import connect, connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery, ForecastFilter
from atsd_client.services import SeriesService

'''
Monitor data availability. The inputs stored in a data-availability.csv table.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)

filename = 'data-availability.csv'

# read the CSV line by line instead of loading it whole
with open(filename) as fp:
    line = fp.readline()
    while line:

        # skip commented lines
        if line.startswith('#'):
            line = fp.readline()
            continue

        # expected columns: metric,entity,interval,end_date,forecast_name,comments
        metric_name, entity_name, interval, end_date, forecast_name, comments = line.split(',')
        # interval is encoded as "<count>-<unit>" -- presumably e.g. "1-DAY"; confirm with the CSV
        count, unit = interval.split('-')

        sf = SeriesFilter(metric=metric_name)
        ef = EntityFilter(entity=entity_name)
示例#16
0
import matplotlib.pyplot as plt

from TDB.hw import forecast

# В файле connection.properties нужно указать хост, имя и пароль от ATSD
connection = connect();
# Указываем интересующие нас метрику и сущность, для которых будет строиться прогноз
sf = SeriesFilter(metric="direct.queries")
ef = EntityFilter(entity="duckduckgo")
# Выбираем начальную дату для запроса
start_date = "2018-11-03T02:59:00Z"
# Инициализируем временной фильтр, конечная дата = начальная дата + 3 месяца
df = DateFilter(interval={"count": 3, "unit": "MONTH"}, start_date=start_date)
# Формируем series-запрос
query_data = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
svc = SeriesService(connection)
# Загружаем указанный ряд
series, = svc.query(query_data)
"""
Визуализируем исторические и спрогнозированные данные
"""
print(series)
start = len(series.values())
end = start + 25
plt.subplot(111)
plt.plot(series.values(), label="History", marker=".")
plt.plot(range(start, end), forecast(series.values(), 7, 25)[-25:], label="Forecast", marker=".")
plt.legend(bbox_to_anchor=(1, 0.14), loc=1, borderaxespad=0.1)
plt.grid(True)
plt.show()
from atsd_client import connect, connect_url
from atsd_client.utils import print_tags
from atsd_client.models import SeriesQuery, SeriesFilter, EntityFilter, DateFilter, ControlFilter
from atsd_client.services import MetricsService, SeriesService

'''
Retrieve series for a given metric, for each series fetch first and last value with corresponding dates.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)
metric_service = MetricsService(connection)

# set metric name
metric_name = "ca.daily.reservoir_storage_af"

# print header
print('entity,entityLabel,seriesTags,firstValueDate,firstValue,lastValueDate,lastValue')

# query series with current metric for all entities with meta information in ascending order to get first value
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity='*')
# NOTE(review): datetime.now() is timezone-naive -- confirm the server treats it as intended
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
# limit=1 with ASC direction: return only the earliest sample of each series
cf = ControlFilter(limit=1, add_meta=True, direction="ASC")
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df, control_filter=cf)
series_list_asc = svc.query(query)
示例#18
0
# Guard: the source entity must exist
if entity_service.get(source_entity) is None:
    logging.warning("'" + source_entity + "' entity does not exist")
    exit(1)

# Guard: the destination entity must exist as well
if entity_service.get(dst_entity) is None:
    logging.warning("'" + dst_entity + "' entity does not exist")
    exit(1)

metric_service = MetricsService(connection)

# Guard: the metric must exist
if metric_service.get(metric) is None:
    logging.warning("'" + metric + "' metric does not exist")
    exit(1)

series_service = SeriesService(connection)


def insert_or_warning(series_to_insert):
    """Insert the given series unless dry-run mode is active."""
    if dry_run:
        logging.warning("Dry run enabled, series are not inserted.")
    else:
        series_service.insert(series_to_insert)


# Filters for querying the destination entity's existing data
dst_entity_filter = EntityFilter(dst_entity)
# from start_date up to the current time
dst_date_filter = DateFilter(start_date, 'now')
series_filter = SeriesFilter(metric, tag_expression=tag_expression)
# limit=1 with ASC direction: fetch only the earliest matching sample
limit_control = ControlFilter(limit=1, direction="ASC")
# skip NaN placeholders (NaN represents deleted values)
sample_filter = SampleFilter("!Double.isNaN(value)")
dst_series_query = SeriesQuery(series_filter, dst_entity_filter, dst_date_filter,
from prettytable import PrettyTable

from atsd_client import connect, connect_url
from atsd_client.models import SeriesQuery, SeriesFilter, EntityFilter, DateFilter, ControlFilter, to_iso
from atsd_client.services import MetricsService, SeriesService

'''
Find series with data older than `now - (metric.retention_days + grace_interval_days)`.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)
metric_service = MetricsService(connection)

# only metrics that are enabled, persistent and have a retention limit
metric_expression = "enabled AND persistent AND retentionDays > 0"
metric_list = metric_service.list(expression=metric_expression)
series_count = 0
# ATSD expired data removal schedule frequency, default is one day
grace_interval_days = 1

# NOTE(review): datetime/timedelta are not imported in this fragment -- presumably imported above
t = PrettyTable(['Metric', 'Entity', 'Tags', 'Retention Days', 'Threshold', 'Presented Sample Date'])
for metric in metric_list:
    # calculate datetime before which there is data
    threshold = datetime.now() - timedelta(days=metric.retention_days + grace_interval_days)

    # query series with current metric and all entities from the beginning up to threshold
    # enough to get at least one value, limit set to 1
示例#20
0
from atsd_client import connect_url
from atsd_client.services import EntitiesService, SeriesService
from atsd_client.models import SeriesDeleteQuery

'''
Delete series for all metrics for the specified entity with names starting with the specified prefix.
'''

# Connect to an ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
conn = connect_url('https://atsd_hostname:8443', 'user', 'password')

# Set query
entity = "entity"
# metric name prefix filter
metric_expr = "name LIKE 'me*'"

entities_service = EntitiesService(conn)
series_service = SeriesService(conn)

# Query all metrics for entity
metrics = entities_service.metrics(entity=entity, expression=metric_expr)

if not metrics:
    print("No metrics are found for entity " + entity)
else:
    # Delete series for each metric
    for metric in metrics:
        query = SeriesDeleteQuery(entity=entity, metric=metric.name, exact_match=False)
        # Uncomment next line to delete series
        # deletion is intentionally disabled by default as a safety measure
        #response = series_service.delete(query)
        #print(response)
示例#21
0
        self.splines.append([t0, t1, metric, partial(spline_builder, t0, t1 - t0)])

    def get_spline(self, metric, t):
        """Return the spline function covering time *t* for *metric*, or None."""
        for start, end, name, func in self.splines:
            # half-open interval: start is inclusive, end is exclusive
            if start <= t < end and name == metric:
                return func
        return None


# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
entities_service = EntitiesService(connection)
svc = SeriesService(connection)

# [procedure name, duration] pairs -- duration presumably in minutes; confirm with the generator below
procedures = [['Heating', 10], ['Mixing', 5], ['Brewing', 40], ['Cooling', 15], ['Inactive', 5]]

metrics = [['axi.temperature', {
    'Heating': partial(positive_spline, 55, 30),
    'Mixing': partial(positive_inv_spline, 5, 85),
    'Brewing': partial(negative_spline, 10, 80),
    'Cooling': partial(negative_spline, 30, 50),
    'Inactive': partial(linear, -20, 50),
}], ['axi.pressure', {
    'Heating': partial(positive_spline, 12, 10),
    'Mixing': partial(linear, 0, 22),
    'Brewing': partial(positive_spline, 3, 22),
    'Cooling': partial(negative_spline, 15, 10),
    'Inactive': partial(linear, 0, 10),
示例#22
0
query-disk_used-nurswgvml006-all-DETAIL.json
query-disk_used-nurswgvml006-all-MIN+MAX.json
query-disk_used-nurswgvml006-all-MIN.json
query-disk_used-nurswgvml006-all-WTAVG.json
query-log_event_counter-nurswgvml007-command=com.axibase.tsd.Server;level=INFO;logger=com.axibase.tsd.service.config.ServerPropertiesReader-all.json
rate-cpu_busy-nurswgvml007-all-DETAIL.json
group-disk_used-nurswgvml006-all-DETAIL.json
rate+group-collectd.cpu.busy-nurswghbs001-all-MIN.json
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)

# set start_date and end_date
start_date = '2018-05-07T07:00:00Z'
end_date = '2018-05-08T08:00:00Z'

# list all json files from the current directory
files = [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.json')]

for filename in files:
    # parse filename to get series information
    # expected filename shape: <query>-<metric>-<entity>-<tags>-<aggregates>.json
    query, metric_name, entity_name, tags, aggregate_types = splitext(filename)[0].split('-')

    # prepare tags
    # 'all' in the filename means "any tags"; otherwise match the parsed tags exactly
    exact_match = tags != 'all'
    if exact_match:
from atsd_client.models import SeriesDeleteQuery
'''
Delete series for all metrics for the specified entity with names starting with the specified prefix.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
# connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Set query
entity = "entity"
# metric name prefix filter
metric_expr = "name LIKE 'me*'"

# Initialize services
entities_service = EntitiesService(connection)
series_service = SeriesService(connection)

# Query all metrics for entity
metrics = entities_service.metrics(entity=entity, expression=metric_expr)

if not metrics:
    print("No metrics are found for entity " + entity)
else:
    # Delete series for each metric
    for metric in metrics:
        query = SeriesDeleteQuery(entity=entity,
                                  metric=metric.name,
                                  exact_match=False)
        print("deleting ", entity, metric.name)
        # Uncomment next line to delete series
        # deletion is intentionally disabled by default as a safety measure
        # response = series_service.delete(query)
from datetime import datetime
from atsd_client import connect, connect_url
from atsd_client.models import SeriesFilter, EntityFilter, DateFilter, SeriesQuery
from atsd_client.services import SeriesService, MetricsService

'''
Copy data from one metric to the new one.
'''

# Connect to ATSD server
# NOTE(review): placeholder URL/credentials -- replace before running
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Initialize services
svc = SeriesService(connection)
metrics_service = MetricsService(connection)

# specify source and destination metric names
metric_src = 'metric_src'
metric_dst = 'metric_dst'

# copy series with all entities, specific entity name can be set instead
entity = '*'

# query series with required metric and all entities
# NOTE(review): the filter below uses the '*' literal rather than the `entity` variable above
sf = SeriesFilter(metric=metric_src)
ef = EntityFilter(entity='*')
df = DateFilter(start_date='1970-01-01T00:00:00Z', end_date=datetime.now())
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series = svc.query(query)