Example #1
    def client(self):
        verify = os.getenv('APIALCHEMY_APPD_SSL_VERIFY',
                           'true').lower() == 'true'

        if not verify:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        return AppDynamicsClient(**self._conn_params, verify=verify)
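
A minimal sketch of the environment-variable toggle used above, shown outside the class: any value other than 'true' (case-insensitive) in APIALCHEMY_APPD_SSL_VERIFY turns TLS verification off.

# Sketch only: the same parsing logic as the client() property above.
import os

os.environ['APIALCHEMY_APPD_SSL_VERIFY'] = 'False'
verify = os.getenv('APIALCHEMY_APPD_SSL_VERIFY', 'true').lower() == 'true'
print(verify)  # False, so verification would be disabled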
Example #2
    def _execute(self):
        """
        Execute this command
        """

        # connect to the wf proxy
        self.proxy = WavefrontMetricsWriter(self.config.writer_host,
                                            self.config.writer_port,
                                            self.config.is_dry_run)
        self.proxy.start()

        # connect to appd
        self.appd_client = AppDynamicsClient(self.config.api_url,
                                             self.config.api_username,
                                             self.config.api_password,
                                             self.config.api_account,
                                             self.config.api_debug)

        # construct start time for when to get metrics starting from
        if self.config.start_time:
            start = self.config.start_time
        else:
            start = ((datetime.datetime.utcnow() -
                      datetime.timedelta(seconds=60.0)).replace(
                          microsecond=0, tzinfo=dateutil.tz.tzutc()))
        if self.config.end_time:
            end = self.config.end_time
        else:
            end = None

        if start is not None:
            if end is None:
                end = (datetime.datetime.utcnow().replace(
                    microsecond=0, tzinfo=dateutil.tz.tzutc()))

            if (end - start).total_seconds() < self.config.min_delay:
                self.logger.info('Not running since %s - %s < 60', str(end),
                                 str(start))
                return

        start = start.replace(microsecond=0, tzinfo=dateutil.tz.tzutc())
        self.logger.info('Running %s - %s', str(start), str(end))

        for app in self.appd_client.get_applications():
            if str(app.id) not in self.config.application_ids:
                print 'skipping %s (%s)' % (app.name, str(app.id))
                continue
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            # get a list of metrics available
            # TODO: cache this like New Relic plugin
            self.logger.info('[%s] Getting metric tree', app.name)
            paths = self.get_metric_paths(app, self.config.recurse_metric_tree)
            if not paths:
                self.logger.warn('[%s] no metrics found', app.name)
                return

            # if the time is more than 10 minutes, AppD will make sample size
            # larger than a minute.  so, we'll grab the data in chunks
            # (10m at a time)
            curr_start = start
            if not end:
                end = (datetime.datetime.utcnow().replace(
                    microsecond=0, tzinfo=dateutil.tz.tzutc()))
            curr_end = end
            curr_diff = curr_end - curr_start
            while (curr_diff.total_seconds() > 0
                   and not utils.CANCEL_WORKERS_EVENT.is_set()):
                if (curr_diff.total_seconds() > 600
                        or curr_diff.total_seconds() < 60):
                    curr_end = curr_start + datetime.timedelta(minutes=10)

                self._process_metrics(paths, app, curr_start, curr_end)

                # save "last run time" and update curr_* variables
                self.config.set_last_run_time(curr_end)
                curr_start = curr_end
                curr_end = end
                curr_diff = curr_end - curr_start
                if (curr_diff.total_seconds() > 600
                        and not utils.CANCEL_WORKERS_EVENT.is_set()):
                    time.sleep(30)
Sample script to print a list of all external dependencies for an application.
This would include databases, message queues, web sites, etc.
Output format: ``app_name,tier_name,node_name,host_name``
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'
__version__ = '0.4.5'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

app_name = 'E-Commerce_Demo'
tier_name = 'ECommerce-Server'
metric_path = 'Overall Application Performance|' + tier_name + '|External Calls'

app_id = -1
for app in c.get_applications():
    if app.name == app_name:
        app_id = app.id

deps = c.get_metric_tree(app_id, metric_path)
for dep in deps:
    if dep.type == 'folder':
        print(dep.name)
Example #4
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

import random
import string
import requests

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

apps = c.get_applications()


def get_random_string(length):
    letters = string.ascii_lowercase
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str


if len(apps) > 0:
    first_name = get_random_string(5)
    second_name = get_random_string(5)
    email = first_name + '.' + second_name + '@email.com'
    user = c.create_user_v1(first_name, first_name, email, 'password', 'INTERNAL')
class AppDMetricRetrieverCommand(command.Command):
    """
    Command object for retrieving AppD metrics via the REST API.
    """
    global_points_counter = 0

    def __init__(self, **kwargs):
        super(AppDMetricRetrieverCommand, self).__init__(**kwargs)
        self.description = 'AppDynamics Metric Retriever'
        self.appd_client = None
        self.config = None
        self.proxy = None

    #pylint: disable=too-many-arguments
    #pylint: disable=bare-except
    def send_metric(self, name, value, host, timestamp, tags=None,
                    value_translator=None):
        """
        Sends the metric to writer.

        Arguments:
        name - the metric name
        value - the numeric value
        host - the source/host
        timestamp - the timestamp (epoch seconds) or datetime object
        tags - dictionary of tags
        value_translator - function pointer to function that will translate
            value from current form to something else
        """

        if not isinstance(timestamp, numbers.Number):
            parsed_date = datetime.datetime.strptime(timestamp,
                                                     '%Y-%m-%dT%H:%M:%S+00:00')
            parsed_date = parsed_date.replace(tzinfo=dateutil.tz.tzutc())
            timestamp = utils.unix_time_seconds(parsed_date)

        if value_translator:
            value = value_translator(name, value)
            if value is None:
                return

        attempts = 0
        while attempts < 5 and not utils.CANCEL_WORKERS_EVENT.is_set():
            try:
                self.proxy.transmit_metric(self.config.namespace + '.' +
                                           utils.sanitize_name(name),
                                           value, int(timestamp), host, tags)
                break
            except:
                attempts = attempts + 1
                self.logger.warning('Failed to transmit metric %s: %s',
                                    name, str(sys.exc_info()))
                if not utils.CANCEL_WORKERS_EVENT.is_set():
                    time.sleep(1)

    #pylint: disable=no-self-use
    def get_help_text(self):
        """
        Help text for this command.
        """

        return "Pull metrics from AppDynamics"

    def _initialize(self, arg):
        """
        Parses the arguments passed into this command.

        Arguments:
        arg - the argparse parser object returned from argparser
        """

        self.config = AppDPluginConfiguration(arg.config_file_path)
        self.config.validate()
        try:
            logging.config.fileConfig(arg.config_file_path)
        except ConfigParser.NoSectionError:
            pass
        self.logger = logging.getLogger()

    #pylint: disable=too-many-branches
    def _execute(self):
        """
        Execute this command
        """

        # connect to the wf proxy
        self.proxy = WavefrontMetricsWriter(self.config.writer_host,
                                            self.config.writer_port,
                                            self.config.is_dry_run)
        try:
            self.proxy.start()
        except:
            print("Error connecting to Wavefront proxy :", sys.exc_info()[0])
            raise

        # connect to appd
        try:
            self.appd_client = AppDynamicsClient(self.config.api_url,
                                                 self.config.api_username,
                                                 self.config.api_password,
                                                 self.config.api_account,
                                                 self.config.api_debug)
        except:
            print("Error connecting to AppDynamics :", sys.exc_info()[0])
            raise

        # construct start time for when to get metrics starting from
        if self.config.start_time:
            start = self.config.start_time
        else:
            start = ((datetime.datetime.utcnow() -
                      datetime.timedelta(seconds=60.0))
                     .replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
        if self.config.end_time:
            end = self.config.end_time
        else:
            end = None

        if start is not None:
            if end is None:
                end = (datetime.datetime.utcnow()
                       .replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))

            if (end - start).total_seconds() < self.config.min_delay:
                self.logger.info('Not running since %s - %s < 60',
                                 str(end), str(start))
                return

        start = start.replace(microsecond=0, tzinfo=dateutil.tz.tzutc())
        self.logger.info('Running %s - %s', str(start), str(end))

        print 'Fetching Applications from the AppDynamics Controller... '
        for app in self.appd_client.get_applications():
            if str(app.id) not in self.config.application_ids:
                print 'skipping %s (%s)' % (app.name, str(app.id))
                continue
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            # get a list of metrics available
            # TODO: cache this like New Relic plugin
            self.logger.info('[%s] Getting metric tree', app.name)
            paths = self.get_metric_paths(app, self.config.recurse_metric_tree)
            if not paths:
                self.logger.warn('[%s] no metrics found', app.name)
                return

            # if the time is more than 10 minutes, AppD will make sample size
            # larger than a minute.  so, we'll grab the data in chunks
            # (10m at a time)
            curr_start = start
            if not end:
                end = (datetime.datetime.utcnow()
                       .replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
            curr_end = end
            curr_diff = curr_end - curr_start
            while (curr_diff.total_seconds() > 0 and
                   not utils.CANCEL_WORKERS_EVENT.is_set()):
                if (curr_diff.total_seconds() > 600 or
                        curr_diff.total_seconds() < 60):
                    curr_end = curr_start + datetime.timedelta(minutes=10)

                self._process_metrics(paths, app, curr_start, curr_end)

                # save "last run time" and update curr_* variables
                self.config.set_last_run_time(curr_end)
                curr_start = curr_end
                curr_end = end
                curr_diff = curr_end - curr_start
                if (curr_diff.total_seconds() > 600 and
                        not utils.CANCEL_WORKERS_EVENT.is_set()):
                    time.sleep(30)

    def get_metric_paths(self, app, recurse):
        """
        Calls the get_metric_tree() api for the given app and returns
        all paths that are not in the black list (or are in black but included
        in white list)
        Arguments:
        app - the application object
        See:
        _get_metric_paths()
        """

        metric_tree = self.appd_client.get_metric_tree(app.id, None, recurse)
        paths = []
        self._get_metric_paths(paths, app, metric_tree)
        return paths

    def _get_metric_paths(self, _rtn_paths, app, metric_tree):
        """
        Gets a list of paths to retrieve from get_metrics()
        Arguments:
        _rtn_paths: out argument to return the list of paths (for recursion)
        app: the application object
        metric_tree: the response from get_metric_tree()
        """

        for node in metric_tree:
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            if node.type == 'folder' and node._children:
                self._get_metric_paths(_rtn_paths, app, node._children)
                continue

            # black list ...
            keep = True
            for pattern in self.config.fields_blacklist_regex_compiled:
                keep = not pattern.match(node.path)

            # white list ...
            if not keep:
                for pattern in self.config.fields_whitelist_regex_compiled:
                    keep = pattern.match(node.path)

            if keep:
                if node.type == 'folder':
                    _rtn_paths.append(node.path + '|*')
                else:
                    _rtn_paths.append(node.path)

    def _process_metrics(self, paths, app, start, end):
        """
        Processes metrics returned from a get_metrics() api call.
        Arguments:
        paths - list of paths returned from get_metric_paths()
        app - the application object
        start - the start datetime object
        end - the end datetime object
        """
        metric_counter = 0

        for path in paths:
            #print 'Number of paths %s ' % Counter(paths)
            print 'Processing metrics under path %s ' % (path)

            if utils.CANCEL_WORKERS_EVENT.is_set():
                break
            self.logger.info('[%s] Getting \'%s\' metrics for %s - %s',
                             app.name, path, start, end)
            #make sure the * wildcards are the correct numbers and match up below
            if 'Business' in path and 'Business Transaction Performance|Business Transactions|*|*|*' not in path:
                path = 'Business Transaction Performance|Business Transactions|*|*|*'  # the last 3 components of the metric path. This should be 'tier_name|bt_name|metric_name'.
                if self.config.retrieve_BT_node_data:
                    if 'Business Transaction Performance|Business Transactions|*|*|*|*|*' not in paths:
                        print 'adding tier_name|bt_name|individual_nodes|node_name|metric_name to business transaction'
                        paths.append('Business Transaction Performance|Business Transactions|*|*|*|*|*')  # This should be 'tier_name|bt_name|individual_nodes|node_name|metric_name'

            if "Backends" in path:
                path = 'Backends|*|*' # the last two components of the metric path. This should be 'backend_name|metric_name'

            if 'End User Experience|*' in path:
                path = 'End User Experience|*|*'
                if self.config.retrieve_EUM_AJAX_data:
                    if 'End User Experience|AJAX Requests|*|*' not in paths:
                        paths.append('End User Experience|AJAX Requests|*|*')

            if "Errors" in path:
                path = 'Errors|*|*|*' # tier level error stats
                if self.config.retrieve_error_node_data:
                    if 'Errors|*|*|*|*|*' not in paths:
                        paths.append('Errors|*|*|*|*|*') # individual node level error stats

            if 'Application Infrastructure Performance' in path:
                if self.config.retrieve_Application_Infrastructure_Performance_node_data:
                    if 'Application Infrastructure Performance|*|*|*|JVM|*|*' not in paths:
                        paths.append('Application Infrastructure Performance|*|*|*|JVM|*|*')  # Application Infrastructure Performance|abtest-consumer|Individual Nodes|16f60b849273|JVM|Garbage Collection|GC Time Spent Per Min (ms)
            try:
                metrics = self.appd_client.get_metrics(
                    path, app.id, 'BETWEEN_TIMES', None,
                    long(utils.unix_time_seconds(start) * 1000),
                    long(utils.unix_time_seconds(end) * 1000),
                    False)

            except:
                    print("Unexpected error:", sys.exc_info()[0])
                    continue

            for metric in metrics:
                if utils.CANCEL_WORKERS_EVENT.is_set():
                    break
                #if not (metric.values ):
                    # print 'No value for metric - %s ' % metric.path

                for value in metric.values:
                    if "|/" in metric.path:
                        metric.path = str(metric.path).replace("|/",".")
                    metric_counter += 1
                    self.send_metric(app.name + '|' + metric.path,
                                     value.current,
                                     'appd',  # the source name
                                     long(value.start_time_ms / 1000),
                                     None,  # tags
                                     self.config.get_value_to_send)
        self.global_points_counter += metric_counter

        self.logger.info('Number of AppDynamics points processed in this run [%s]', metric_counter)
        self.logger.info('Total points processed since beginning %s ', self.global_points_counter)
Output format: ``app_name,tier_name,node_name,host_name``
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient


__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'
__version__ = '0.4.5'


args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

app_name = 'E-Commerce_Demo'
tier_name = 'ECommerce-Server'
metric_path = 'Overall Application Performance|' + tier_name + '|External Calls'

app_id = -1
for app in c.get_applications():
    if app.name == app_name:
        app_id = app.id

deps = c.get_metric_tree(app_id, metric_path)
for dep in deps:
    if dep.type == 'folder':
        print(dep.name)
# The report will generate data for the last 2-hour period before the current hour of the current day.
# It needs to be run for every 2 hours using cron. Prefer to run it at even hours so that it runs 0,2,4,..,22 hours
end_time = datetime.now().replace(minute=0, second=0, microsecond=0)
end_epoch = int(mktime(end_time.timetuple())) * 1000
start_time = end_time - timedelta(hours=2)
start_epoch = int(mktime(start_time.timetuple())) * 1000
# Pulls data from Monday to Sunday into a single file for each backend.
#week_monday = start_time - timedelta(days=start_time.weekday())
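
# For reference, a cron entry that runs this report every two hours on the even
# hour might look like the following (the script path is a placeholder):
#   0 */2 * * * /usr/bin/python /path/to/backend_report.py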

# Credentials for the Client connection
username = "******"
password = "******"
baseurl = "https://test.saas.appdynamics.com"
account = "test"

c = AppDynamicsClient(account=account, base_url=baseurl, username=username, password=password)


#Adding it hard coded for backends to be pulled from this tier
apps = ['BRE-DES']
metrics = ['Calls per Minute', 'Number of Slow Calls', 'Number of Very Slow Calls', 'Stall Count', 'Average Response Time (ms)']
metric_path = 'Overall Application Performance|*'
for app in apps:
    export_header = 'datetime'
    md_list = c.get_metrics(metric_path, app, time_range_type='BETWEEN_TIMES', end_time=end_epoch,
                            start_time=start_epoch, rollup=False)
    freq = freq_to_mins(md_list[0])
    mv = [''] * duration_length(start_time, end_time, freq)
    for md in md_list:
        if len(md.values) > 0:
            # Get the last two components of the metric path. This should be 'backend_name|metric_name'.
Example #8
__author__ = 'Todd Radel <*****@*****.**>'

import itertools

from datetime import datetime
from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient


def incr(d, name, amt=1):
    d[name] = d.get(name, 0) + amt
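
# Quick sanity check of the incr() helper above (illustration only): the key is
# created on first use and accumulated on later calls.
_counts = {}
incr(_counts, 'nodes')
incr(_counts, 'nodes', 2)
assert _counts == {'nodes': 3}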


args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

nodes = []
for app in c.get_applications():
    for node in c.get_nodes(app.id):
        # node_type = node.type
        # print node.id, node.machine_id, node.machine_name, node.type
        if node.has_machine_agent or node.has_app_agent:
            if node.has_app_agent:
                if 'PHP' in node.type:
                    node.group_type = 'PHP App Agent'
                elif 'IIS' in node.type:
                    node.group_type = '.NET App Agent'
                else:
                    node.group_type = 'Java App Agent'
            else:
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to export and re-import policies for the specified controller and app.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.export_policies(5)
print(resp)
resp = c.import_policies(5, resp)
print(resp)
#! /usr/bin/env python
# -*- coding: utf-8 -*-

"""
Sample script to print a list of roles from the specified controller and app.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'


args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

apps = c.get_applications()
if len(apps) > 0:
    allRoles = c.get_roles()
    print('All Roles -- \n%s' % allRoles)
    print('Role using get_role_by_id -- \n%s' % c.get_role_by_id(allRoles[1].id))
    print('Role using get_role_by_name -- \n%s' % c.get_role_by_name(allRoles[1].name))
else:
    print('Application not found!')
from datetime import datetime

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient
from appd.time import from_ts, to_ts


__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'
__version__ = '0.4.3'


# Parse the command line arguments and initialize the client
#
args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

# Get the application list and find "MyApp"
#
apps = c.get_applications()
gn_prod = [x for x in apps if x.name == "MyApp"][0]

# Calculate start and end times for the report: 24h period ending with midnight last night
#
today = to_ts(datetime.now().date())
yesterday = today - (86400 * 1000)

# Get the list of nodes so we can look up by node_id
#
all_nodes = c.get_nodes(gn_prod.id)
nodes_by_id = dict(list(zip([x.id for x in all_nodes], all_nodes)))
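
A small check of the millisecond arithmetic above, assuming to_ts() returns epoch milliseconds and from_ts() converts them back to a datetime (both from appd.time, as imported in the script):

# Illustration only: one day is 86400 * 1000 milliseconds.
from datetime import date
from appd.time import from_ts, to_ts

midnight = to_ts(date(2015, 6, 2))
print(from_ts(midnight - 86400 * 1000))  # midnight of the previous day, 2015-06-01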
#! /usr/bin/env python
# -*- coding: utf-8 -*-

"""
Sample script to export and import an entry point type for the specified controller and app.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'


args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

resp = c.export_entry_point_type(5, 'servlet', 'custom', 'java', 'asdf')
print(resp)
resp = c.import_entry_point_type(5, 'servlet', 'custom', resp, 'java', 'asdf')
print(resp)
Example #13
class AppDMetricRetrieverCommand(command.Command):
    """
    Command object for retrieving AppD metrics via the REST API.
    """
    global_points_counter = 0

    def __init__(self, **kwargs):
        super(AppDMetricRetrieverCommand, self).__init__(**kwargs)
        self.description = 'AppDynamics Metric Retriever'
        self.appd_client = None
        self.config = None
        self.proxy = None

    #pylint: disable=too-many-arguments
    #pylint: disable=bare-except
    def send_metric(self,
                    name,
                    value,
                    host,
                    timestamp,
                    tags=None,
                    value_translator=None):
        """
        Sends the metric to writer.

        Arguments:
        name - the metric name
        value - the numeric value
        host - the source/host
        timestamp - the timestamp (epoch seconds) or datetime object
        tags - dictionary of tags
        value_translator - function pointer to function that will translate
            value from current form to something else
        """

        if not isinstance(timestamp, numbers.Number):
            parsed_date = datetime.datetime.strptime(
                timestamp, '%Y-%m-%dT%H:%M:%S+00:00')
            parsed_date = parsed_date.replace(tzinfo=dateutil.tz.tzutc())
            timestamp = utils.unix_time_seconds(parsed_date)

        if value_translator:
            value = value_translator(name, value)
            if value is None:
                return

        attempts = 0
        while attempts < 5 and not utils.CANCEL_WORKERS_EVENT.is_set():
            try:
                self.proxy.transmit_metric(
                    self.config.namespace + '.' + utils.sanitize_name(name),
                    value, int(timestamp), host, tags)
                break
            except:
                attempts = attempts + 1
                self.logger.warning('Failed to transmit metric %s: %s', name,
                                    str(sys.exc_info()))
                if not utils.CANCEL_WORKERS_EVENT.is_set():
                    time.sleep(1)

    #pylint: disable=no-self-use
    def get_help_text(self):
        """
        Help text for this command.
        """

        return "Pull metrics from AppDynamics"

    def _initialize(self, arg):
        """
        Parses the arguments passed into this command.

        Arguments:
        arg - the argparse parser object returned from argparser
        """

        self.config = AppDPluginConfiguration(arg.config_file_path)
        self.config.validate()
        try:
            logging.config.fileConfig(arg.config_file_path)
        except ConfigParser.NoSectionError:
            pass
        self.logger = logging.getLogger()

    #pylint: disable=too-many-branches
    def _execute(self):
        """
        Execute this command
        """

        # connect to the wf proxy
        self.proxy = WavefrontMetricsWriter(self.config.writer_host,
                                            self.config.writer_port,
                                            self.config.is_dry_run)
        try:
            self.proxy.start()
        except:
            print("Error connecting to Wavefront proxy :", sys.exc_info()[0])
            raise

        # connect to appd
        try:
            self.appd_client = AppDynamicsClient(self.config.api_url,
                                                 self.config.api_username,
                                                 self.config.api_password,
                                                 self.config.api_account,
                                                 self.config.api_debug)
        except:
            print("Error connecting to AppDynamics :", sys.exc_info()[0])
            raise

        # construct start time for when to get metrics starting from
        if self.config.start_time:
            start = self.config.start_time
        else:
            start = ((datetime.datetime.utcnow() -
                      datetime.timedelta(seconds=60.0)).replace(
                          microsecond=0, tzinfo=dateutil.tz.tzutc()))
        if self.config.end_time:
            end = self.config.end_time
        else:
            end = None

        if start is not None:
            if end is None:
                end = (datetime.datetime.utcnow().replace(
                    microsecond=0, tzinfo=dateutil.tz.tzutc()))

            if (end - start).total_seconds() < self.config.min_delay:
                self.logger.info('Not running since %s - %s < 60', str(end),
                                 str(start))
                return

        start = start.replace(microsecond=0, tzinfo=dateutil.tz.tzutc())
        self.logger.info('Running %s - %s', str(start), str(end))

        print 'Fetching Applications from the AppDynamics Controller... '
        for app in self.appd_client.get_applications():
            if str(app.id) not in self.config.application_ids:
                print 'skipping %s (%s)' % (app.name, str(app.id))
                continue
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            # get a list of metrics available
            # TODO: cache this like New Relic plugin
            self.logger.info('[%s] Getting metric tree', app.name)
            paths = self.get_metric_paths(app, self.config.recurse_metric_tree)
            if not paths:
                self.logger.warn('[%s] no metrics found', app.name)
                return

            # if the time is more than 10 minutes, AppD will make sample size
            # larger than a minute.  so, we'll grab the data in chunks
            # (10m at a time)
            curr_start = start
            if not end:
                end = (datetime.datetime.utcnow().replace(
                    microsecond=0, tzinfo=dateutil.tz.tzutc()))
            curr_end = end
            curr_diff = curr_end - curr_start
            while (curr_diff.total_seconds() > 0
                   and not utils.CANCEL_WORKERS_EVENT.is_set()):
                if (curr_diff.total_seconds() > 600
                        or curr_diff.total_seconds() < 60):
                    curr_end = curr_start + datetime.timedelta(minutes=10)

                self._process_metrics(paths, app, curr_start, curr_end)

                # save "last run time" and update curr_* variables
                self.config.set_last_run_time(curr_end)
                curr_start = curr_end
                curr_end = end
                curr_diff = curr_end - curr_start
                if (curr_diff.total_seconds() > 600
                        and not utils.CANCEL_WORKERS_EVENT.is_set()):
                    time.sleep(30)

    def get_metric_paths(self, app, recurse):
        """
        Calls the get_metric_tree() api for the given app and returns
        all paths that are not in the black list (or are in black but included
        in white list)
        Arguments:
        app - the application object
        See:
        _get_metric_paths()
        """

        metric_tree = self.appd_client.get_metric_tree(app.id, None, recurse)
        paths = []
        self._get_metric_paths(paths, app, metric_tree)
        return paths

    def _get_metric_paths(self, _rtn_paths, app, metric_tree):
        """
        Gets a list of paths to retrieve from get_metrics()
        Arguments:
        _rtn_paths: out argument to return the list of paths (for recursion)
        app: the application object
        metric_tree: the response from get_metric_tree()
        """

        for node in metric_tree:
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            if node.type == 'folder' and node._children:
                self._get_metric_paths(_rtn_paths, app, node._children)
                continue

            # black list ...
            keep = True
            for pattern in self.config.fields_blacklist_regex_compiled:
                keep = not pattern.match(node.path)

            # white list ...
            if not keep:
                for pattern in self.config.fields_whitelist_regex_compiled:
                    keep = pattern.match(node.path)

            if keep:
                if node.type == 'folder':
                    _rtn_paths.append(node.path + '|*')
                else:
                    _rtn_paths.append(node.path)

    def _process_metrics(self, paths, app, start, end):
        """
        Processes metrics returned from a get_metrics() api call.
        Arguments:
        paths - list of paths returned from get_metric_paths()
        app - the application object
        start - the start datetime object
        end - the end datetime object
        """
        metric_counter = 0

        for path in paths:
            #print 'Number of paths %s ' % Counter(paths)
            print 'Processing metrics under path %s ' % (path)

            if utils.CANCEL_WORKERS_EVENT.is_set():
                break
            self.logger.info('[%s] Getting \'%s\' metrics for %s - %s',
                             app.name, path, start, end)
            #make sure the * wildcards are the correct numbers and match up below
            if 'Business' in path and 'Business Transaction Performance|Business Transactions|*|*|*' not in path:
                path = 'Business Transaction Performance|Business Transactions|*|*|*'  #the last 3 components of the metric path. This should be 'tier_name|bt_name|metric_name'.
                if self.config.retrieve_BT_node_data:
                    if 'Business Transaction Performance|Business Transactions|*|*|*|*|*' not in paths:
                        print 'adding tier_name|bt_name|individual_nodes|node_name|metric_name to business transaction'
                        paths.append(
                            'Business Transaction Performance|Business Transactions|*|*|*|*|*'
                        )  # This should be 'tier_name|bt_name|individual_nodes|node_name|metric_name'

            if "Backends" in path:
                path = 'Backends|*|*'  # the last two components of the metric path. This should be 'backend_name|metric_name'

            if 'End User Experience|*' in path:
                path = 'End User Experience|*|*'
                if self.config.retrieve_EUM_AJAX_data:
                    if 'End User Experience|AJAX Requests|*|*' not in paths:
                        paths.append('End User Experience|AJAX Requests|*|*')

            if "Errors" in path:
                path = 'Errors|*|*|*'  # tier level error stats
                if self.config.retrieve_error_node_data:
                    if 'Errors|*|*|*|*|*' not in paths:
                        paths.append('Errors|*|*|*|*|*'
                                     )  # individual node level error stats

            if 'Application Infrastructure Performance' in path:
                if self.config.retrieve_Application_Infrastructure_Performance_node_data:
                    if 'Application Infrastructure Performance|*|*|*|JVM|*|*' not in paths:
                        paths.append(
                            'Application Infrastructure Performance|*|*|*|JVM|*|*'
                        )  #Application Infrastructure Performance|abtest-consumer|Individual Nodes|16f60b849273|JVM|Garbage Collection|GC Time Spent Per Min (ms)
            try:
                metrics = self.appd_client.get_metrics(
                    path, app.id, 'BETWEEN_TIMES', None,
                    long(utils.unix_time_seconds(start) * 1000),
                    long(utils.unix_time_seconds(end) * 1000), False)

            except:
                print("Unexpected error:", sys.exc_info()[0])
                continue

            for metric in metrics:
                if utils.CANCEL_WORKERS_EVENT.is_set():
                    break
                #if not (metric.values ):
                # print 'No value for metric - %s ' % metric.path

                for value in metric.values:
                    if "|/" in metric.path:
                        metric.path = str(metric.path).replace("|/", ".")
                    metric_counter += 1
                    self.send_metric(
                        app.name + '|' + metric.path,
                        value.current,
                        'appd',  # the source name
                        long(value.start_time_ms / 1000),
                        None,  # tags
                        self.config.get_value_to_send)
        self.global_points_counter += metric_counter

        self.logger.info(
            'Number of AppDynamics points processed in this run [%s]',
            metric_counter)
        self.logger.info('Total points processed since beginning %s ',
                         self.global_points_counter)
# Helper functions

def now_rfc3339():
    return datetime.now(tzlocal.get_localzone()).isoformat('T')


def freq_to_mins(md):
    FREQ_MAP = {'ONE_MIN': 1, 'TEN_MIN': 10, 'SIXTY_MIN': 60}
    return FREQ_MAP[md.frequency]
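
# Sanity check of the helpers above (illustration only): freq_to_mins() maps the
# controller's rollup frequency to minutes, and now_rfc3339() returns the local
# time as an RFC 3339 / ISO 8601 string.
class _FakeMetricData(object):
    frequency = 'TEN_MIN'

assert freq_to_mins(_FakeMetricData()) == 10
print(now_rfc3339())  # e.g. '2015-06-01T12:00:00.123456-07:00'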


# Parse command line arguments and create AD client:

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)


# Get the list of configured apps, and get backend metrics for each one:

rows = defaultdict(dict)
for app in c.get_applications():
    for md in c.get_metrics('Backends|*|*', app.id, time_range_type='BEFORE_TIME', end_time=end_epoch,
                            duration_in_mins=time_in_mins, rollup=False):

        # Get the last two components of the metric path. This should be 'backend_name|metric_name'.
        backend_name, metric_name = md.path.split('|')[-2:]

        if 'Discovered backend call' in backend_name:
            backend_name = backend_name[26:]
            metric_sum = sum([x.value for x in md.values])
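
# Illustration of the path handling above, using a made-up metric path: the last
# two components are 'backend_name|metric_name', and slicing off the first 26
# characters removes the 'Discovered backend call - ' prefix.
_path = 'Backends|Discovered backend call - mysql-db:3306|Calls per Minute'
_backend_name, _metric_name = _path.split('|')[-2:]
assert _backend_name[26:] == 'mysql-db:3306'
assert _metric_name == 'Calls per Minute'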
class AppDMetricRetrieverCommand(command.Command):
    """
    Command object for retrieving AppD metrics via the REST API.
    """

    def __init__(self, **kwargs):
        super(AppDMetricRetrieverCommand, self).__init__(**kwargs)
        self.description = 'AppDynamics Metric Retriever'
        self.appd_client = None
        self.config = None
        self.proxy = None

    #pylint: disable=too-many-arguments
    #pylint: disable=bare-except
    def send_metric(self, name, value, host, timestamp, tags=None,
                    value_translator=None):
        """
        Sends the metric to writer.

        Arguments:
        name - the metric name
        value - the numeric value
        host - the source/host
        timestamp - the timestamp (epoch seconds) or datetime object
        tags - dictionary of tags
        value_translator - function pointer to function that will translate
            value from current form to something else
        """

        if not isinstance(timestamp, numbers.Number):
            parsed_date = datetime.datetime.strptime(timestamp,
                                                     '%Y-%m-%dT%H:%M:%S+00:00')
            parsed_date = parsed_date.replace(tzinfo=dateutil.tz.tzutc())
            timestamp = utils.unix_time_seconds(parsed_date)

        if value_translator:
            value = value_translator(name, value)
            if value is None:
                return

        attempts = 0
        while attempts < 5 and not utils.CANCEL_WORKERS_EVENT.is_set():
            try:
                self.proxy.transmit_metric(self.config.namespace + '.' +
                                           utils.sanitize_name(name),
                                           value, int(timestamp), host, tags)
                break
            except:
                attempts = attempts + 1
                self.logger.warning('Failed to transmit metric %s: %s',
                                    name, str(sys.exc_info()))
                if not utils.CANCEL_WORKERS_EVENT.is_set():
                    time.sleep(1)

    #pylint: disable=no-self-use
    def get_help_text(self):
        """
        Help text for this command.
        """

        return "Pull metrics from AppDynamics"

    def _initialize(self, arg):
        """
        Parses the arguments passed into this command.

        Arguments:
        arg - the argparse parser object returned from argparser
        """

        self.config = AppDPluginConfiguration(arg.config_file_path)
        self.config.validate()
        try:
            logging.config.fileConfig(arg.config_file_path)
        except ConfigParser.NoSectionError:
            pass
        self.logger = logging.getLogger()

    #pylint: disable=too-many-branches
    def _execute(self):
        """
        Execute this command
        """

        # connect to the wf proxy
        self.proxy = WavefrontMetricsWriter(self.config.writer_host,
                                            self.config.writer_port,
                                            self.config.is_dry_run)
        self.proxy.start()

        # connect to appd
        self.appd_client = AppDynamicsClient(self.config.api_url,
                                             self.config.api_username,
                                             self.config.api_password,
                                             self.config.api_account,
                                             self.config.api_debug)

        # construct start time for when to get metrics starting from
        if self.config.start_time:
            start = self.config.start_time
        else:
            start = ((datetime.datetime.utcnow() -
                      datetime.timedelta(seconds=60.0))
                     .replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
        if self.config.end_time:
            end = self.config.end_time
        else:
            end = None

        if start is not None:
            if end is None:
                end = (datetime.datetime.utcnow()
                       .replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))

            if (end - start).total_seconds() < self.config.min_delay:
                self.logger.info('Not running since %s - %s < 60',
                                 str(end), str(start))
                return

        start = start.replace(microsecond=0, tzinfo=dateutil.tz.tzutc())
        self.logger.info('Running %s - %s', str(start), str(end))

        for app in self.appd_client.get_applications():
            if str(app.id) not in self.config.application_ids:
                print 'skipping %s (%s)' % (app.name, str(app.id))
                continue
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            # get a list of metrics available
            # TODO: cache this like New Relic plugin
            self.logger.info('[%s] Getting metric tree', app.name)
            paths = self.get_metric_paths(app, self.config.recurse_metric_tree)
            if not paths:
                self.logger.warn('[%s] no metrics found', app.name)
                return

            # if the time is more than 10 minutes, AppD will make sample size
            # larger than a minute.  so, we'll grab the data in chunks
            # (10m at a time)
            curr_start = start
            if not end:
                end = (datetime.datetime.utcnow()
                       .replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
            curr_end = end
            curr_diff = curr_end - curr_start
            while (curr_diff.total_seconds() > 0 and
                   not utils.CANCEL_WORKERS_EVENT.is_set()):
                if (curr_diff.total_seconds() > 600 or
                        curr_diff.total_seconds() < 60):
                    curr_end = curr_start + datetime.timedelta(minutes=10)

                self._process_metrics(paths, app, curr_start, curr_end)

                # save "last run time" and update curr_* variables
                self.config.set_last_run_time(curr_end)
                curr_start = curr_end
                curr_end = end
                curr_diff = curr_end - curr_start
                if (curr_diff.total_seconds() > 600 and
                        not utils.CANCEL_WORKERS_EVENT.is_set()):
                    time.sleep(30)

    def get_metric_paths(self, app, recurse):
        """
        Calls the get_metric_tree() api for the given app and returns
        all paths that are not in the black list (or are in black but included
        in white list)
        Arguments:
        app - the application object
        See:
        _get_metric_paths()
        """

        metric_tree = self.appd_client.get_metric_tree(app.id, None, recurse)
        paths = []
        self._get_metric_paths(paths, app, metric_tree)
        return paths

    def _get_metric_paths(self, _rtn_paths, app, metric_tree):
        """
        Gets a list of paths to retrieve from get_metrics()
        Arguments:
        _rtn_paths: out argument to return the list of paths (for recursion)
        app: the application object
        metric_tree: the response from get_metric_tree()
        """

        for node in metric_tree:
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            if node.type == 'folder' and node._children:
                self._get_metric_paths(_rtn_paths, app, node._children)
                continue

            # black list ...
            keep = True
            for pattern in self.config.fields_blacklist_regex_compiled:
                keep = not pattern.match(node.path)

            # white list ...
            if not keep:
                for pattern in self.config.fields_whitelist_regex_compiled:
                    keep = pattern.match(node.path)

            if keep:
                if node.type == 'folder':
                    _rtn_paths.append(node.path + '|*')
                else:
                    _rtn_paths.append(node.path)

    def _process_metrics(self, paths, app, start, end):
        """
        Processes metrics returned from a get_metrics() api call.
        Arguments:
        paths - list of paths returned from get_metric_paths()
        app - the application object
        start - the start datetime object
        end - the end datetime object
        """

        for path in paths:
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break
            self.logger.info('[%s] Getting \'%s\' metrics for %s - %s',
                             app.name, path, start, end)
            metrics = self.appd_client.get_metrics(
                path, app.id, 'BETWEEN_TIMES', None,
                long(utils.unix_time_seconds(start) * 1000),
                long(utils.unix_time_seconds(end) * 1000),
                False)
            for metric in metrics:
                if utils.CANCEL_WORKERS_EVENT.is_set():
                    break
                for value in metric.values:
                    self.send_metric(self.config.namespace + '|' + path,
                                     value.current,
                                     'appd', # the source name
                                     long(value.start_time_ms / 1000),
                                     None, # tags
                                     self.config.get_value_to_send)
Example #16
# The report will generate data for the last 2-hour period before the current hour of the current day.
# It needs to be run for every 2 hours using cron. Prefer to run it at even hours so that it runs 0,2,4,..,22 hours
end_time = datetime.now().replace(minute=0, second=0, microsecond=0)
end_epoch = int(mktime(end_time.timetuple())) * 1000
start_time = end_time - timedelta(hours=2)
start_epoch = int(mktime(start_time.timetuple())) * 1000
# Pulls data from Monday to Sunday into a single file for each backend.
#week_monday = start_time - timedelta(days=start_time.weekday())

# Credentials for the Client connection
username = "******"
password = "******"
baseurl = "https://test.saas.appdynamics.com"
account = "account"

c = AppDynamicsClient(account=account, base_url=baseurl, username=username, password=password)

# Add more backend values to create additional backend extracts
#backends = ['/RequestCenter/nsapi', 'myservices/executeFormRules.execute']

#for backend_value in backends :
metric_path = tier + '|' + appname + '|*'
metric_path = "/controller/rest/applications/eStore-prod/events"
export_header = 'datetime'
md_list = c.get_metrics('eStore-prod', time_range_type='BETWEEN_TIMES', end_time=end_epoch,
                        start_time=start_epoch, severities='APPLICATION_ERROR', eventtypes="ERROR", rollup=False)
freq = freq_to_mins(md_list[0])
mv = [''] * duration_length(start_time, end_time, freq)
for md in md_list:
    if len(md.values) > 0:
        # Get the last two components of the metric path. This should be 'backend_name|metric_name'.
from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient


__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'
__version__ = '0.4.5'


def incr(d, name, amt=1):
    d[name] = d.get(name, 0) + amt


args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

nodes = []
for app in c.get_applications():
    for node in c.get_nodes(app.id):
        # node_type = node.type
        # print node.id, node.machine_id, node.machine_name, node.type
        # print node.type, node.os_type, node.app_agent_version
        if node.has_machine_agent or node.has_app_agent:
            if node.has_app_agent:
                if 'PHP' in node.type:
                    node.group_type = 'PHP App Agent'
                elif 'IIS' in node.type:
                    node.group_type = '.NET App Agent'
                else:
                    node.group_type = 'Java App Agent'
#! /usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'
__version__ = '0.4.5'


args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

for app in c.get_applications():
    metric_data = c.get_metrics('Overall Application Performance|*', app_id=app.id)
    art = metric_data.by_leaf_name(c.AVERAGE_RESPONSE_TIME).first_value()
    cpm = metric_data.by_leaf_name(c.CALLS_PER_MINUTE).first_value()
    epm = metric_data.by_leaf_name(c.ERRORS_PER_MINUTE).first_value()
    error_pct = round(float(epm) / float(cpm) * 100.0, 1) if cpm > 0 else 0
    print(app.name, art, cpm, epm, error_pct)
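
A worked check of the error-percentage formula above, with made-up numbers: 5 errors per minute against 200 calls per minute is a 2.5% error rate.

# Illustration only.
_cpm, _epm = 200, 5
assert round(float(_epm) / float(_cpm) * 100.0, 1) == 2.5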
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to export and re-import a custom dashboard from the specified controller.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.export_custom_dashboard(1)
print(resp)
resp = c.import_custom_dashboard(resp)
print(resp)
Example #20
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to exclude and re-include business transactions for the specified controller and app.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.exclude_bt_list(5, [32])
print(resp)
resp = c.exclude_bt_list(5, [32], False)
print(resp)
Example #21
time_in_mins = 1440
end_time = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
end_epoch = int(mktime(end_time.timetuple())) * 1000

# Helper functions


def now_rfc3339():
    return datetime.now(tzlocal.get_localzone()).isoformat('T')


# Parse command line arguments and create AD client:

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

# Get the list of configured apps, and get backend metrics for each one:

METRIC_MAP = {
    'Average Block Time (ms)': 'abt',
    'Average CPU Used (ms)': 'cpu',
    'Average Request Size': 'req_size',
    'Average Response Time (ms)': 'art',
    'Average Wait Time (ms)': 'wait_time',
    'Calls per Minute': 'cpm',
    'End User Average Response Time (ms)': 'eum_art',
    'End User Network Average Response Time (ms)': 'eum_net',
    'End User Page Render Average Response Time (ms)': 'eum_render',
    'Errors per Minute': 'epm',
    'Normal Average Response Time (ms)': 'norm_art',
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to create a custom event for the specified controller and app.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

apps = c.get_applications()

if len(apps) > 0:
    tiers = c.get_tiers(apps[0].id)
    if len(tiers) > 0:
        bts = c.get_bt_list(apps[0].id)

        if len(bts) > 0:
            resp = c.create_event(
                app_id=apps[0].id,
                summary='Custom Event 1',
                comment='This is an event created by the Python SDK',
                severity='INFO',
                eventtype='CUSTOM',
from __future__ import print_function

from datetime import datetime

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient
from appd.time import from_ts, to_ts

__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'

# Parse the command line arguments and initialize the client
#
args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

# Get the application list and find "MyApp"
#
apps = c.get_applications()
gn_prod = [x for x in apps if x.name == "MyApp"][0]

# Calculate start and end times for the report: 24h period ending with midnight last night
#
today = to_ts(datetime.now().date())
yesterday = today - (86400 * 1000)

# Get the list of nodes so we can look up by node_id
#
all_nodes = c.get_nodes(gn_prod.id)
nodes_by_id = dict(list(zip([x.id for x in all_nodes], all_nodes)))
Example #24
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to print a list of events from the specified controller and app.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

apps = c.get_applications()
if len(apps) > 0:
    resp = c.get_events(apps[0].id,
                        event_types='APPLICATION_CONFIG_CHANGE',
                        severities='INFO,WARN,ERROR',
                        time_range_type='BEFORE_NOW',
                        duration_in_mins='60000')
    print(resp)
else:
    print('Application not found!')
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to export and re-import email action templates from the specified controller.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.export_email_action_templates()
print(resp)
resp = c.import_email_action_templates(resp)
print(resp)
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

import random
import string
import requests

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

apps = c.get_applications()


def get_random_string(length):
    letters = string.ascii_lowercase
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str


if len(apps) > 0:
    group_name = get_random_string(5)
    response = c.create_group(group_name, 'description to test group')
    print(response)
    resp = c.update_group(response['id'], get_random_string(5),
Example #27
0
from datetime import datetime
from time import mktime

import tzlocal

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

time_in_mins = 1440
end_time = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
end_epoch = int(mktime(end_time.timetuple())) * 1000


# Helper functions

def now_rfc3339():
    return datetime.now(tzlocal.get_localzone()).isoformat('T')


# Parse command line arguments and create AD client:

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)


# Get the list of configured apps, and get backend metrics for each one:

METRIC_MAP = {'Average Block Time (ms)': 'abt',
              'Average CPU Used (ms)': 'cpu',
              'Average Request Size': 'req_size',
              'Average Response Time (ms)': 'art',
              'Average Wait Time (ms)': 'wait_time',
              'Calls per Minute': 'cpm',
              'End User Average Response Time (ms)': 'eum_art',
              'End User Network Average Response Time (ms)': 'eum_net',
              'End User Page Render Average Response Time (ms)': 'eum_render',
              'Errors per Minute': 'epm',
              'Normal Average Response Time (ms)': 'norm_art',
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to retrieve audit history from the controller and filter it by user, action, and account.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.get_audit_history('2017-04-27T08:00:00.000-0800',
                           '2017-04-27T20:00:00.000-0800')

for audit in resp.by_user_name("user1"):
    print(audit)
for audit in resp.by_action("LOGIN"):
    print(audit)
for audit in resp.by_account_name("customer1"):
    print(audit)
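
# Hypothetical extra step (not in the original): tally how many of the
# returned audit records were LOGIN actions, using only the by_action()
# helper shown above.
login_count = len(list(resp.by_action('LOGIN')))
print('LOGIN events in range: %d' % login_count)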
Example #29
0
class AppDMetricRetrieverCommand(command.Command):
    """
    Command object for retrieving AppD metrics via the REST API.
    """
    def __init__(self, **kwargs):
        super(AppDMetricRetrieverCommand, self).__init__(**kwargs)
        self.description = 'AppDynamics Metric Retriever'
        self.appd_client = None
        self.config = None
        self.proxy = None

    #pylint: disable=too-many-arguments
    #pylint: disable=bare-except
    def send_metric(self,
                    name,
                    value,
                    host,
                    timestamp,
                    tags=None,
                    value_translator=None):
        """
        Sends the metric to writer.

        Arguments:
        name - the metric name
        value - the numeric value
        host - the source/host
        timestamp - the timestamp (epoch seconds) or datetime object
        tags - dictionary of tags
        value_translator - function pointer to function that will translate
            value from current form to something else
        """

        if not isinstance(timestamp, numbers.Number):
            parsed_date = datetime.datetime.strptime(
                timestamp, '%Y-%m-%dT%H:%M:%S+00:00')
            parsed_date = parsed_date.replace(tzinfo=dateutil.tz.tzutc())
            timestamp = utils.unix_time_seconds(parsed_date)

        if value_translator:
            value = value_translator(name, value)
            if value is None:
                return

        attempts = 0
        while attempts < 5 and not utils.CANCEL_WORKERS_EVENT.is_set():
            try:
                self.proxy.transmit_metric(
                    self.config.namespace + '.' + utils.sanitize_name(name),
                    value, int(timestamp), host, tags)
                break
            except:
                attempts = attempts + 1
                self.logger.warning('Failed to transmit metric %s: %s', name,
                                    str(sys.exc_info()))
                if not utils.CANCEL_WORKERS_EVENT.is_set():
                    time.sleep(1)

    #pylint: disable=no-self-use
    def get_help_text(self):
        """
        Help text for this command.
        """

        return "Pull metrics from AppDynamics"

    def _initialize(self, arg):
        """
        Parses the arguments passed into this command.

        Arguments:
        arg - the argparse parser object returned from argparser
        """

        self.config = AppDPluginConfiguration(arg.config_file_path)
        self.config.validate()
        try:
            logging.config.fileConfig(arg.config_file_path)
        except ConfigParser.NoSectionError:
            pass
        self.logger = logging.getLogger()

    #pylint: disable=too-many-branches
    def _execute(self):
        """
        Execute this command
        """

        # connect to the wf proxy
        self.proxy = WavefrontMetricsWriter(self.config.writer_host,
                                            self.config.writer_port,
                                            self.config.is_dry_run)
        self.proxy.start()

        # connect to appd
        self.appd_client = AppDynamicsClient(self.config.api_url,
                                             self.config.api_username,
                                             self.config.api_password,
                                             self.config.api_account,
                                             self.config.api_debug)

        # construct start time for when to get metrics starting from
        if self.config.start_time:
            start = self.config.start_time
        else:
            start = ((datetime.datetime.utcnow() -
                      datetime.timedelta(seconds=60.0)).replace(
                          microsecond=0, tzinfo=dateutil.tz.tzutc()))
        if self.config.end_time:
            end = self.config.end_time
        else:
            end = None

        if start is not None:
            if end is None:
                end = (datetime.datetime.utcnow().replace(
                    microsecond=0, tzinfo=dateutil.tz.tzutc()))

            if (end - start).total_seconds() < self.config.min_delay:
                self.logger.info('Not running since %s - %s < 60', str(end),
                                 str(start))
                return

        start = start.replace(microsecond=0, tzinfo=dateutil.tz.tzutc())
        self.logger.info('Running %s - %s', str(start), str(end))

        for app in self.appd_client.get_applications():
            if str(app.id) not in self.config.application_ids:
                print('skipping %s (%s)' % (app.name, str(app.id)))
                continue
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            # get a list of metrics available
            # TODO: cache this like New Relic plugin
            self.logger.info('[%s] Getting metric tree', app.name)
            paths = self.get_metric_paths(app, self.config.recurse_metric_tree)
            if not paths:
                self.logger.warn('[%s] no metrics found', app.name)
                return

            # if the time range is more than 10 minutes, AppD rolls the data
            # up to a sample size coarser than one minute, so we grab the
            # data in 10-minute chunks
            curr_start = start
            if not end:
                end = (datetime.datetime.utcnow().replace(
                    microsecond=0, tzinfo=dateutil.tz.tzutc()))
            curr_end = end
            curr_diff = curr_end - curr_start
            while (curr_diff.total_seconds() > 0
                   and not utils.CANCEL_WORKERS_EVENT.is_set()):
                if (curr_diff.total_seconds() > 600
                        or curr_diff.total_seconds() < 60):
                    curr_end = curr_start + datetime.timedelta(minutes=10)

                self._process_metrics(paths, app, curr_start, curr_end)

                # save "last run time" and update curr_* variables
                self.config.set_last_run_time(curr_end)
                curr_start = curr_end
                curr_end = end
                curr_diff = curr_end - curr_start
                if (curr_diff.total_seconds() > 600
                        and not utils.CANCEL_WORKERS_EVENT.is_set()):
                    time.sleep(30)

    def get_metric_paths(self, app, recurse):
        """
        Calls the get_metric_tree() api for the given app and returns
        all paths that are not in the blacklist (or that are blacklisted
        but also matched by the whitelist).
        Arguments:
        app - the application object
        See:
        _get_metric_paths()
        """

        metric_tree = self.appd_client.get_metric_tree(app.id, None, recurse)
        paths = []
        self._get_metric_paths(paths, app, metric_tree)
        return paths

    def _get_metric_paths(self, _rtn_paths, app, metric_tree):
        """
        Gets a list of paths to retrieve from get_metrics()
        Arguments:
        _rtn_paths: out argument to return the list of paths (for recursion)
        app: the application object
        metric_tree: the response from get_metric_tree()
        """

        for node in metric_tree:
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break

            if node.type == 'folder' and node._children:
                self._get_metric_paths(_rtn_paths, app, node._children)
                continue

            # blacklist: drop the path if any blacklist pattern matches ...
            keep = True
            for pattern in self.config.fields_blacklist_regex_compiled:
                if pattern.match(node.path):
                    keep = False
                    break

            # whitelist: ... but keep it if any whitelist pattern matches
            if not keep:
                for pattern in self.config.fields_whitelist_regex_compiled:
                    if pattern.match(node.path):
                        keep = True
                        break

            if keep:
                if node.type == 'folder':
                    _rtn_paths.append(node.path + '|*')
                else:
                    _rtn_paths.append(node.path)

    def _process_metrics(self, paths, app, start, end):
        """
        Processes metrics returned from a get_metrics() api call.
        Arguments:
        paths - list of paths returned from get_metric_paths()
        app - the application object
        start - the start datetime object
        end - the end datetime object
        """

        for path in paths:
            if utils.CANCEL_WORKERS_EVENT.is_set():
                break
            self.logger.info('[%s] Getting \'%s\' metrics for %s - %s',
                             app.name, path, start, end)
            metrics = self.appd_client.get_metrics(
                path, app.id, 'BETWEEN_TIMES', None,
                long(utils.unix_time_seconds(start) * 1000),
                long(utils.unix_time_seconds(end) * 1000), False)
            for metric in metrics:
                if utils.CANCEL_WORKERS_EVENT.is_set():
                    break
                for value in metric.values:
                    self.send_metric(
                        self.config.namespace + '|' + path,
                        value.current,
                        'appd',  # the source name
                        long(value.start_time_ms / 1000),
                        None,  # tags
                        self.config.get_value_to_send)
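
# Hypothetical illustration (not part of the plugin): send_metric() accepts an
# optional value_translator callback that receives (name, value) and returns
# either a converted value or None to drop the reading. A minimal sketch:
def drop_negative_readings(name, value):
    """Coerce readings to float and drop negative or missing ones."""
    if value is None or float(value) < 0:
        return None
    return float(value)

# Usage (cmd is an assumed AppDMetricRetrieverCommand instance):
# cmd.send_metric('MyApp|Calls per Minute', '42', 'appd', 1490000000,
#                 None, drop_negative_readings)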
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to create a new user on the controller.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.create_user('Ash',
                     'AsherBasher',
                     '*****@*****.**',
                     user_password='******',
                     user_roles='Administrator,Universal Agent User')
print(resp)
from appd.request import AppDynamicsClient

usr="******"
pwd="pass"

#url="https://test.saas.appdynamics.com/controller/rest/applications?output=json"
#   #"/eStore-prod/metric-data?metric-path=Business%20Transaction%20Performance%7CBusiness%20Transactions%7CServiceLink%7CEndpointMessageListener%3AISEEInboundQueue%7C95th%20Percentile%20Response%20Time%20%28ms%29&time-range-type=BEFORE_NOW&duration-in-mins=15&output=JSON"


c = AppDynamicsClient(account="test", base_url="https://test.saas.appdynamics.com",
                      username=usr, password=pwd)

# for app in c.get_applications():
#     print app.id, app.name

metrics = c.get_metrics(metric_path='Business Transaction Performance|Business Transactions|ECommerce Server',
                        app_id='22',
                        time_range_type='BEFORE_NOW',
                        duration_in_mins=60,
                        rollup=False)
print(len(metrics))

for point in metrics[0].values:
    print(point.start_time, 'Average Response Time: ', point.value)
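
# Hypothetical follow-up (not in the original): collapse the returned data
# points into a single average, reusing only attributes already shown above.
points = [point.value for point in metrics[0].values]
if points:
    print('Mean response time over the window: %.1f ms' % (sum(points) / float(len(points))))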
Example #32
0
from appd.request import AppDynamicsClient

c = AppDynamicsClient('https://sulamerica.saas.appdynamics.com', '******',
                      '*******', 'sulamerica')


def get_app():
    #return c.get_applications()
    app_list = []
    for app in c.get_applications():
        app_list.append(app.name)
    return app_list
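
# Hypothetical variant (not in the original): return a name -> id mapping so
# callers can resolve an application id without a second round trip.
def get_app_ids():
    return dict((app.name, app.id) for app in c.get_applications())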


if __name__ == "__main__":
    print(', '.join(get_app()))
    #print(get_app())
Example #34
0
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to export and re-import an application's analytics dynamic service configuration.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.export_analytics_dynamic_service_configs(5)
print(resp)
resp = c.import_analytics_dynamic_service_configs(5, resp)
print(resp)
from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

import random
import string
import time
import requests

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

apps = c.get_applications()


def get_random_string(length):
    letters = string.ascii_lowercase
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str


if len(apps) > 0:

    group_name = get_random_string(5)
    response = c.create_group(group_name, 'description to test group')
    print(response)
Example #36
0
"""
Sample script to delete a user.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

import random
import string

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

apps = c.get_applications()


def get_random_string(length):
    letters = string.ascii_lowercase
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str


if len(apps) > 0:

    first_name = get_random_string(5)
    second_name = get_random_string(5)
    email = first_name + '.' + second_name + '@email.com'
# -*- coding: utf-8 -*-

from datetime import datetime, timedelta
from collections import OrderedDict

import pygal
import tzlocal

from appd.request import AppDynamicsClient
import creds.demo2 as creds

__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'

# Set up API client
c = AppDynamicsClient(creds.url, creds.user, creds.password, creds.account)

# Get my tenant account info
my_acct = c.get_my_account()

# Calculate start and end dates - we will start at midnight last night and go back 15 days
days = 15
mytz = tzlocal.get_localzone()
end_dt = datetime.now(mytz).replace(hour=0, minute=0, second=0, microsecond=0)
start_dt = end_dt - timedelta(days)

# Get license usage for my account
usage = c.get_license_usage(my_acct.id, 'java', start_dt, end_dt)


def daterange(start_dt, end_dt):
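    # The listing is cut off here; the body below is a hedged guess at a
    # conventional daterange generator (yielding one datetime per day in
    # [start_dt, end_dt)), not necessarily the author's original code.
    for offset in range((end_dt - start_dt).days):
        yield start_dt + timedelta(days=offset)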
from datetime import datetime, timedelta
from collections import OrderedDict, defaultdict

import pygal
import tzlocal

from appd.request import AppDynamicsClient
import creds.demo2 as creds

__author__ = 'Todd Radel'
__copyright__ = 'Copyright (c) 2013-2015 AppDynamics Inc.'
__version__ = '0.4.5'

# Set up API client
c = AppDynamicsClient(creds.url, creds.user, creds.password, creds.account)

# Get my tenant account info
my_acct = c.get_my_account()

# Calculate start and end dates - we will start at midnight last night and go back 15 days
days = 15
mytz = tzlocal.get_localzone()
end_dt = datetime.now(mytz).replace(hour=0, minute=0, second=0, microsecond=0)
start_dt = end_dt - timedelta(days)

# Get license usage for my account
usage = c.get_license_usage(my_acct.id, None, start_dt, end_dt)


def daterange(start_dt, end_dt):
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to mark nodes as historical on the controller.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

resp = c.mark_nodes_historical('7')
print(resp)
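
# Hypothetical variant (not in the original): the string argument above
# suggests the call accepts a comma-separated list of node ids, so several
# nodes could be marked historical at once (assumes at least one application
# exists on the controller).
node_ids = [node.id for node in c.get_nodes(c.get_applications()[0].id)]
resp = c.mark_nodes_historical(','.join(str(node_id) for node_id in node_ids))
print(resp)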
Example #40
0
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to print a list of groups from the specified controller and app.
"""

from __future__ import print_function
from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

apps = c.get_applications()
if len(apps) > 0:
    allGroups = c.get_groups()
    print('All Groups -- \n%s' % allGroups)
    print('Group using get_group_by_id -- \n%s' %
          c.get_group_by_id(allGroups[1].id))
    print('Group using get_group_by_name -- \n%s' %
          c.get_group_by_name(allGroups[1].name))
else:
    print('No applications found!')
Example #41
0
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

import random
import string
import requests

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

apps = c.get_applications()


def get_random_string(length):
    letters = string.ascii_lowercase
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str


if len(apps) > 0:
    role_name = get_random_string(5)
    role = c.create_role(role_name, 'description to test role')
    group_name = get_random_string(5)
    group = c.create_group(group_name, 'description to test group')
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sample script to create an action suppression for an application.
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

__author__ = 'Kyle Furlong'
__copyright__ = 'Copyright (c) 2013-2017 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

account = c.get_my_account()

action = {
    'name': 'suppress',
    'timeRange': {
        'startTimeMillis': '2017-03-24T16:16:57+0000',
        'endTimeMillis': '2017-10-25T04:16:57+0000'
    },
    'healthRuleIds': '1,2,3',
    'affects': {
        'type': 'APP'
    }
}
resp = c.create_action_suppression(account.id, app_id=16, params=action)
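
# Hypothetical alternative (not in the original): build the suppression window
# programmatically instead of hard-coding ISO-8601 strings, then pass the dict
# via params=action exactly as above.
from datetime import datetime, timedelta

window_start = datetime.utcnow()
window_end = window_start + timedelta(hours=4)
action['timeRange'] = {
    'startTimeMillis': window_start.strftime('%Y-%m-%dT%H:%M:%S+0000'),
    'endTimeMillis': window_end.strftime('%Y-%m-%dT%H:%M:%S+0000')
}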
Example #43
0
"""

from __future__ import print_function

from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient

import random
import string
import requests

__author__ = 'Srikar Achanta'
__copyright__ = 'Copyright (c) 2013-2020 AppDynamics Inc.'

args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account,
                      args.verbose)

apps = c.get_applications()


def get_random_string(length):
    letters = string.ascii_lowercase
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str


if len(apps) > 0:
    role_name = get_random_string(5)
    role = c.create_role(role_name, 'description to test role')
    first_name = get_random_string(5)
    second_name = get_random_string(5)
__author__ = 'Todd Radel <*****@*****.**> & Toby Davies <*****@*****.**>'

import itertools

from datetime import datetime
from appd.cmdline import parse_argv
from appd.request import AppDynamicsClient
from time import mktime

def incr(d, name, amt=1):
    d[name] = d.get(name, 0) + amt


args = parse_argv()
c = AppDynamicsClient(args.url, args.username, args.password, args.account, args.verbose)

time_in_mins = 24 * 60
end_time = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
end_epoch = int(mktime(end_time.timetuple())) * 1000

nodes = []
for app in c.get_applications():
    for tier in c.get_tiers(app.id):
        for node in c.get_nodes(app.id, tier.id):
            av = c.get_metrics('Application Infrastructure Performance|' + tier.name +
                               '|Individual Nodes|' + node.name + '|Agent|App|Availability',
                               app.id, time_range_type='BEFORE_TIME', end_time=end_epoch,
                               duration_in_mins=50, rollup=False)


            # node_type = node.type
            # print node.id, node.machine_id, node.machine_name, node.type
            if (node.has_machine_agent or node.has_app_agent):