Example #1
    def run_query(self, query, user):
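        # Execute the SQL query against ATSD and return (json_data, error);
        # on interrupt, the query is cancelled server-side using query_id.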
        connection = atsd_client.connect_url(self.url,
                                             self.configuration.get('username'),
                                             self.configuration.get('password'),
                                             verify=self.configuration.get('trust_certificate', False),
                                             timeout=self.configuration.get('timeout', 600))
        sql = SQLService(connection)
        query_id = str(uuid.uuid4())

        try:
            logger.debug("SQL running query: %s", query)
            data = sql.query_with_params(query, {'outputFormat': 'csv', 'metadataFormat': 'EMBED',
                                                 'queryId': query_id})

            columns, rows = generate_rows_and_columns(data)

            data = {'columns': columns, 'rows': rows}
            json_data = json_dumps(data)
            error = None

        except SQLException as e:
            json_data = None
            error = e.content
        except (KeyboardInterrupt, InterruptException):
            sql.cancel_query(query_id)
            error = "Query cancelled by user."
            json_data = None

        return json_data, error
Example #2
    def run_query(self, query, user):
        connection = atsd_client.connect_url(
            self.url,
            self.configuration.get("username"),
            self.configuration.get("password"),
            verify=self.configuration.get("trust_certificate", False),
            timeout=self.configuration.get("timeout", 600),
        )
        sql = SQLService(connection)
        query_id = str(uuid.uuid4())

        try:
            logger.debug("SQL running query: %s", query)
            data = sql.query_with_params(
                query,
                {"outputFormat": "csv", "metadataFormat": "EMBED", "queryId": query_id},
            )

            columns, rows = generate_rows_and_columns(data)

            data = {"columns": columns, "rows": rows}
            json_data = json_dumps(data)
            error = None

        except SQLException as e:
            json_data = None
            error = e.content
        except (KeyboardInterrupt, InterruptException):
            sql.cancel_query(query_id)
            error = "Query cancelled by user."
            json_data = None

        return json_data, error
Example #3
 def get_schema(self, get_stats=False):
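     # Build the schema: one table per ATSD metric (plus the built-in atsd_series
     # table), each exposing the default ATSD SQL columns listed below.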
     connection = atsd_client.connect_url(
         self.url,
         self.configuration.get("username"),
         self.configuration.get("password"),
         verify=self.configuration.get("trust_certificate", False),
         timeout=self.configuration.get("timeout", 600),
     )
     metrics = MetricsService(connection)
     ml = metrics.list(
         expression=self.configuration.get("expression", None),
         minInsertDate=self.configuration.get("min_insert_date", None),
         limit=self.configuration.get("limit", 5000),
     )
     metrics_list = [i.name.encode("utf-8") for i in ml]
     metrics_list.append("atsd_series")
     schema = {}
     default_columns = [
         "entity",
         "datetime",
         "time",
         "metric",
         "value",
         "text",
         "tags",
         "entity.tags",
         "metric.tags",
     ]
     for table_name in metrics_list:
         schema[table_name] = {
             "name": "'{}'".format(table_name),
             "columns": default_columns,
         }
     values = list(schema.values())
     return values
Example #4
    def run_query(self, query, user):
        connection = atsd_client.connect_url(
            self.url,
            self.configuration.get('username'),
            self.configuration.get('password'),
            verify=self.configuration.get('trust_certificate', False),
            timeout=self.configuration.get('timeout', 600))
        sql = SQLService(connection)
        query_id = str(uuid.uuid4())

        try:
            logger.debug("SQL running query: %s", query)
            data = sql.query_with_params(
                query, {
                    'outputFormat': 'csv',
                    'metadataFormat': 'EMBED',
                    'queryId': query_id
                })

            columns, rows = generate_rows_and_columns(data)

            data = {'columns': columns, 'rows': rows}
            json_data = json_dumps(data)
            error = None

        except SQLException as e:
            json_data = None
            error = e.content
        except (KeyboardInterrupt, InterruptException):
            sql.cancel_query(query_id)
            error = "Query cancelled by user."
            json_data = None

        return json_data, error
Example #5
 def get_schema(self, get_stats=False):
     connection = atsd_client.connect_url(
         self.url,
         self.configuration.get('username'),
         self.configuration.get('password'),
         verify=self.configuration.get('trust_certificate', False),
         timeout=self.configuration.get('timeout', 600))
     metrics = MetricsService(connection)
     ml = metrics.list(
         expression=self.configuration.get('expression', None),
         minInsertDate=self.configuration.get('min_insert_date', None),
         limit=self.configuration.get('limit', 5000))
     metrics_list = [i.name.encode('utf-8') for i in ml]
     metrics_list.append('atsd_series')
     schema = {}
     default_columns = [
         'entity', 'datetime', 'time', 'metric', 'value', 'text', 'tags',
         'entity.tags', 'metric.tags'
     ]
     for table_name in metrics_list:
         schema[table_name] = {
             'name': "'{}'".format(table_name),
             'columns': default_columns
         }
     values = schema.values()
     return values
Example #6
def main(filename,
         atsd_url,
         username,
         password,
         stat_as_tag,
         entity_name=None,
         timestamp=None):
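    # Read JMH results from the JSON file and insert the primary score and its
    # score percentiles into ATSD, either as separate metrics ('<prefix>.<stat>')
    # or as a single metric with a 'stat' tag, depending on stat_as_tag.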
    if timestamp is None:
        timestamp = datetime.now()
    conn = connect_url(atsd_url, username, password)
    series_service = SeriesService(conn)
    with open(filename) as f:
        entries = json.load(f)

        for entry in entries:
            benchmark_name_split = entry['benchmark'].split('.')
            entity = benchmark_name_split[-2] if entity_name is None else entity_name
            metric_prefix = 'jmh.' + entry['mode'] + '.' + entry['primaryMetric']['scoreUnit']
            tags = {'method': benchmark_name_split[-1]}
            if not stat_as_tag:
                metric = metric_prefix + '.avg'
            else:
                metric = metric_prefix
                tags['stat'] = 'avg'

            series_service.insert(
                Series(entity, metric,
                       [Sample(entry['primaryMetric']['score'], timestamp)],
                       tags))
            for key, value in entry['primaryMetric']['scorePercentiles'].items():
                if key == '0.0':
                    stat = 'min'
                elif key == '100.0':
                    stat = 'max'
                else:
                    stat = key
                if not stat_as_tag:
                    metric = metric_prefix + '.' + stat
                else:
                    metric = metric_prefix
                    tags['stat'] = stat
                series_service.insert(
                    Series(entity, metric, [Sample(value, timestamp)], tags))
Example #7
 def get_schema(self, get_stats=False):
     connection = atsd_client.connect_url(self.url,
                                          self.configuration.get('username'),
                                          self.configuration.get('password'),
                                          verify=self.configuration.get('trust_certificate', False),
                                          timeout=self.configuration.get('timeout', 600))
     metrics = MetricsService(connection)
     ml = metrics.list(expression=self.configuration.get('expression', None),
                       minInsertDate=self.configuration.get('min_insert_date', None),
                       limit=self.configuration.get('limit', 5000))
     metrics_list = [i.name.encode('utf-8') for i in ml]
     metrics_list.append('atsd_series')
     schema = {}
     default_columns = ['entity', 'datetime', 'time', 'metric', 'value', 'text',
                        'tags', 'entity.tags', 'metric.tags']
     for table_name in metrics_list:
         schema[table_name] = {'name': "'{}'".format(table_name),
                               'columns': default_columns}
     values = schema.values()
     return values
Example #8
from atsd_client import connect, connect_url
from atsd_client.services import EntitiesService, SeriesService
from atsd_client.models import SeriesDeleteQuery
'''
Delete series for all metrics for the specified entity with names starting with the specified prefix.
'''

# Connect to ATSD server
# connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

# Set query
entity = "entity"
metric_expr = "name LIKE 'me*'"

# Initialize services
entities_service = EntitiesService(connection)
series_service = SeriesService(connection)

# Query all metrics for entity
metrics = entities_service.metrics(entity=entity, expression=metric_expr)

if not metrics:
    print("No metrics are found for entity " + entity)
else:
    # Delete series for each metric
    for metric in metrics:
        query = SeriesDeleteQuery(entity=entity,
                                  metric=metric.name,
                                  exact_match=False)
        print("deleting ", entity, metric.name)
Example #9
from datetime import datetime, timedelta
from prettytable import PrettyTable

from atsd_client import connect, connect_url
from atsd_client.services import MetricsService, EntitiesService

# Connect to ATSD server
#connection = connect('/path/to/connection.properties')
connection = connect_url('https://atsd_hostname:8443', 'username', 'password')

entity = 'my-entity'

entities_service = EntitiesService(connection)
metrics_service = MetricsService(connection)

# query all metrics for entity
metrics = entities_service.metrics(
    entity,
    tags='frequency,seasonal_adjustment,observation_start,observation_end,category,parent_category',
    limit=5)
t = PrettyTable(['Top Category', 'Category', 'Name', 'Label', 'Frequency',
                 'Adjustment', 'Observation Start', 'Observation End'])

# iterate over metrics and add their fields/tags as rows into a PrettyTable
for metric in metrics:
    t.add_row([metric.tags['category'], metric.tags['parent_category'], metric.name,
               metric.label, metric.tags['frequency'], metric.tags['seasonal_adjustment'],
               metric.tags['observation_start'], metric.tags['observation_end']])

# Sort metrics by name
t.sortby = "Name"

# Print metrics as ASCII table
#print(t)

# Print metrics as HTML table with header
print(t.get_html_string(title="Available Metrics"))
Example #10
def get_connection():
    conn = atsd_client.connect_url('https://localhost:8443', 'axibase', 'axibase')
    return conn
Example #11
 def setUpClass(cls):
     cls.connection = atsd_client.connect_url('https://localhost:8443',
                                              'axibase', 'axibase')
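     # Derive the service class under test from the test class name
     # (strip the leading 'Test', e.g. TestSeriesService -> SeriesService).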
     service_class = eval(cls.__name__[4:])
     cls.service = service_class(cls.connection)
     cls.wait_time = 1
Example #12
from atsd_client import connect_url
from atsd_client.services import SQLService

conn = connect_url('https://atsd_hostname:8443', 'user', 'passwd')

# Single-line SQL query
# query = 'SELECT datetime, time, entity, value FROM jvm_memory_free LIMIT 3';

# Multi-line SQL query, use triple quotes (single or double)
query = """
SELECT datetime, time, entity, value
  FROM "jvm_memory_free"
ORDER BY datetime DESC
  LIMIT 3
"""

svc = SQLService(conn)
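# query() returns the result set as a pandas DataFrame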
df = svc.query(query)

print(df)
Example #13
## Run this program to reproduce the results of the first query from the article 'SQL Queries and Data Visualization with Python and ATSD'.

## Establish a connection to the Trends database, the ATSD instance where the data for this article is stored.

from atsd_client import connect_url
conn = connect_url('https://trends.axibase.com:8443/', 'username', 'password')

## Import the Series Service. All services available in the ATSD Python client are listed at: https://github.com/axibase/atsd-api-python#services.

from atsd_client.services import *
svc = SeriesService(conn)

## Access FRED data in ATSD.

from atsd_client.models import *

from datetime import datetime

sf = SeriesFilter(metric="AD01RC1Q027SBEA")
ef = EntityFilter(entity="fred.stlouisfed.org")
df = DateFilter(start_date="1970-01-01T00:00:00Z", end_date=datetime.now())
query_data = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
result = svc.query(query_data)

## Import SQL Service.

from atsd_client.services import *

sql = SQLService(conn)

## Define SQL Query.
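
## The article's first query is not reproduced in this snippet; the lines below are
## only an illustrative sketch that runs a query against the same FRED metric used above.

query = """
SELECT datetime, value
  FROM "AD01RC1Q027SBEA"
ORDER BY datetime DESC
  LIMIT 10
"""
df = sql.query(query)
print(df)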