from django.conf import settings
from dogapi import dog_http_api, dog_stats_api


def run():
    """
    Initialize connection to datadog during django startup.

    Expects the datadog api key in the DATADOG_API settings key.
    """
    if hasattr(settings, 'DATADOG_API'):
        dog_http_api.api_key = settings.DATADOG_API
        dog_stats_api.start(api_key=settings.DATADOG_API, statsd=True)
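# Illustrative only: the settings value run() above expects, plus a sketch of
# emitting a metric once startup has executed. The key is a placeholder and
# 'myapp.homepage.hits' is a made-up metric name.

# settings.py
DATADOG_API = 'REPLACE-WITH-YOUR-DATADOG-API-KEY'

# anywhere after run() has been called during startup
from dogapi import dog_stats_api
dog_stats_api.increment('myapp.homepage.hits')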
import logging

from django.conf import settings
from dogapi import dog_stats_api

logger = logging.getLogger(__name__)


def get_client():
    """
    Return a started instance of the datadog client api.

    If DATADOG_API_KEY is not set or is None, collection and flushing of
    metrics are disabled - useful for development.
    """
    api_key = getattr(settings, 'DATADOG_API_KEY', None)
    if api_key is None:
        logger.debug("DATADOG_API_KEY not set, disabling datadog")
        dog_stats_api.start(disabled=True)
        # NullReporter is assumed to be defined or imported elsewhere in the
        # original module.
        dog_stats_api.reporter = NullReporter()
    else:
        # if we have the DATADOG_API_KEY set, we use that
        dog_stats_api.start(api_key=api_key)
    return dog_stats_api
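# Illustrative usage sketch: get_client() returns the module-level
# dog_stats_api object, so metrics can be emitted through the return value.
# The metric names below are made up for the example.
stats = get_client()
stats.increment('myapp.tasks.started')
stats.histogram('myapp.tasks.payload_size', 1024)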
def ready(self):
    """
    Initialize connection to datadog during django startup.

    Configure using the DATADOG dictionary in the django project settings.
    """
    # By default use the statsd agent
    options = {'statsd': True}

    if hasattr(settings, 'DATADOG'):
        options.update(settings.DATADOG)

    # Not all arguments are documented.
    # Look at the source code for details.
    dog_stats_api.start(**options)

    dog_http_api.api_key = options.get('api_key')
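# Illustrative only: one possible DATADOG settings dictionary for the ready()
# hook above. Every entry is passed straight through to dog_stats_api.start();
# the values here are placeholders.
DATADOG = {
    'api_key': 'REPLACE-WITH-YOUR-DATADOG-API-KEY',
    'statsd': True,
}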
from django.conf import settings
from dogapi import dog_http_api, dog_stats_api


def run():
    """
    Initialize connection to datadog during django startup.

    Can be configured using a dictionary named DATADOG in the django project
    settings.
    """
    # By default use the statsd agent
    options = {'statsd': True}

    if hasattr(settings, 'DATADOG'):
        options.update(settings.DATADOG)

    # Not all arguments are documented.
    # Look at the source code for details.
    if settings.ENV_TOKENS.get('ENABLE_DOG'):
        dog_stats_api.start(**options)
        dog_http_api.api_key = options.get('api_key')
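# Illustrative only: the feature toggle this variant checks. ENV_TOKENS is
# assumed to behave like a dict of environment settings elsewhere in the
# project; both values below are placeholders.
ENV_TOKENS = {'ENABLE_DOG': True}
DATADOG = {'api_key': 'REPLACE-WITH-YOUR-DATADOG-API-KEY'}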
def setup():
    """
    Initialize connection to datadog during locust startup.

    Reads the datadog api key from (in order):
    1) the DATADOG_API_KEY setting in the settings file
    2) the DATADOG_API_KEY of a yaml file specified by the ANSIBLE_VARS
       setting in the settings file
    """
    api_key = settings.data.get('DATADOG_API_KEY')

    if api_key is None:
        server_vars_path = settings.data['ANSIBLE_VARS']
        with open(server_vars_path, 'r') as server_vars_file:
            server_vars = yaml.safe_load(server_vars_file)
            api_key = server_vars.get('DATADOG_API_KEY')

    # By default use the statsd agent
    dog_stats_api.start(
        statsd=True,
        api_key=api_key,
    )
import os

import yaml
from dogapi import dog_stats_api


def setup():
    """
    Initialize connection to datadog during locust startup.

    Reads the datadog api key from (in order):
    1) An environment variable named DATADOG_API_KEY
    2) the DATADOG_API_KEY of a yaml file at
       2a) the environment variable ANSIBLE_VARS
       2b) /edx/app/edx_ansible/server-vars.yaml
    """
    api_key = os.environ.get('DATADOG_API_KEY')

    if api_key is None:
        server_vars_path = os.environ.get('ANSIBLE_VARS', '/edx/app/edx_ansible/server-vars.yml')
        with open(server_vars_path, 'r') as server_vars_file:
            server_vars = yaml.safe_load(server_vars_file)
            api_key = server_vars.get('DATADOG_API_KEY')

    # By default use the statsd agent
    dog_stats_api.start(
        statsd=True,
        api_key=api_key,
    )
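# Illustrative only: exercising setup() above with the api key supplied via
# the environment, so the yaml fallback is never read. The key is a
# placeholder.
import os

os.environ['DATADOG_API_KEY'] = 'REPLACE-WITH-YOUR-DATADOG-API-KEY'
setup()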
import os
from optparse import make_option
from stat import S_ISDIR

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from dogapi import dog_http_api, dog_stats_api
import paramiko
import boto

dog_http_api.api_key = settings.DATADOG_API
dog_stats_api.start(api_key=settings.DATADOG_API, statsd=True)


class Command(BaseCommand):
    help = """
    This command handles the importing and exporting of student records for
    Pearson. It uses some other Django commands to export and import the
    files and then uploads over SFTP to Pearson and stuffs the entry in an
    S3 bucket for archive purposes.

    Usage: django-admin.py pearson-transfer --mode [import|export|both]
    """

    option_list = BaseCommand.option_list + (make_option(
        '--mode',
        action='store',
        dest='mode',
        default='both',
        choices=('import', 'export', 'both'),
from dogapi import dog_http_api, dog_stats_api
from django.conf import settings
from django.core.cache import get_cache
from django.dispatch import Signal

from request_cache.middleware import RequestCache
from xmodule.modulestore.django import modulestore

cache = get_cache('mongo_metadata_inheritance')
for store_name in settings.MODULESTORE:
    store = modulestore(store_name)
    store.metadata_inheritance_cache_subsystem = cache
    store.request_cache = RequestCache.get_request_cache()

    modulestore_update_signal = Signal(providing_args=['modulestore', 'course_id', 'location'])
    store.modulestore_update_signal = modulestore_update_signal

if hasattr(settings, 'DATADOG_API'):
    dog_http_api.api_key = settings.DATADOG_API
    dog_stats_api.start(api_key=settings.DATADOG_API, statsd=True)
import time

try:
    import json
except ImportError:
    import simplejson as json

# django
from django.conf import settings

# dogapi
from dogapi import dog_http_api as api
from dogapi import dog_stats_api

# init datadog api
api.api_key = settings.DATADOG_API_KEY
api.application_key = settings.DATADOG_APP_KEY
dog_stats_api.start(api_key=settings.DATADOG_API_KEY)


class DatadogMiddleware(object):

    DD_TIMING_ATTRIBUTE = '_dd_start_time'

    def __init__(self):
        app_name = settings.DATADOG_APP_NAME
        self.error_metric = '{0}.errors'.format(app_name)
        self.timing_metric = '{0}.request_time'.format(app_name)
        self.event_tags = [app_name, 'exception']

    def process_request(self, request):
        setattr(request, self.DD_TIMING_ATTRIBUTE, time.time())

    def process_response(self, request, response):
        """
        Submit timing metrics from the current request
        """
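# Illustrative only: settings this middleware assumes. All values are
# placeholders, and the dotted path in MIDDLEWARE_CLASSES depends on where
# the module actually lives in your project.
DATADOG_API_KEY = 'REPLACE-WITH-YOUR-DATADOG-API-KEY'
DATADOG_APP_KEY = 'REPLACE-WITH-YOUR-DATADOG-APP-KEY'
DATADOG_APP_NAME = 'myapp'

MIDDLEWARE_CLASSES = (
    'myapp.middleware.DatadogMiddleware',
    # ... the rest of your middleware classes
)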
env.linewise = True
env.noop = False
env.use_ssh_config = True

FORMAT = '[ %(asctime)s ] : %(message)s'
logging.basicConfig(format=FORMAT, level=logging.WARNING)

# add timestamps to output
sys.stdout = TSWrapper(sys.stdout)
sys.stderr = TSWrapper(sys.stderr)

path = os.path.abspath(__file__)
with open(os.path.join(os.path.dirname(path), '../package_data.yaml')) as f:
    package_data = yaml.load(f)

dog_stats_api.start(api_key=package_data['datadog_api'], statsd=True)
dog_http_api.api_key = package_data['datadog_api']


@task
def noop():
    """
    Disable modification of servers
    """
    env.noop = True
    dog_stats_api.stop()


@task
def quiet():
    """
from dogapi import dog_stats_api as d

d.start(statsd=True, statsd_port=9999)

for i in xrange(10000):
    d.gauge('my.gauge', 1, sample_rate=0.5)
    d.increment('my.counter', sample_rate=0.01)
from random import random

from dogapi import dog_stats_api

dog_stats_api.start(statsd=True, statsd_host='localhost', statsd_port=8125)

while True:
    dog_stats_api.gauge('test.udp.gauge', 1000)
    dog_stats_api.increment('test.udp.counter')
    dog_stats_api.histogram('test.udp.histogram', random() * 1000)