Example #1
def get_client(celery_app):
    try:
        client = _state.clients[celery_app]
    except AttributeError:
        # first call: _state has no clients cache yet, so create it
        client = statsd.StatsClient(celery_app.conf.STATSD_HOST,
                                    celery_app.conf.STATSD_PORT)

        _state.clients = {celery_app: client}
    except KeyError:
        # the cache exists but holds no client for this app yet
        client = statsd.StatsClient(celery_app.conf.STATSD_HOST,
                                    celery_app.conf.STATSD_PORT)

        _state.clients[celery_app] = client

    return client
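
Usage note: a minimal sketch of the state get_client() assumes, where _state is
a plain module-level namespace (the original may well use threading.local) and
celery_app is any Celery app whose conf defines STATSD_HOST and STATSD_PORT:

import types

_state = types.SimpleNamespace()   # hypothetical stand-in; no .clients yet

client_a = get_client(celery_app)  # AttributeError path: builds the cache
client_b = get_client(celery_app)  # cache hit: returns the same object
assert client_a is client_b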
Example #2
def wrap(app):
    addr = _CONF[OPT_GROUP_NAME].address
    port = _CONF[OPT_GROUP_NAME].port
    keys = _CONF[OPT_GROUP_NAME].path_regexes_keys
    values = _CONF[OPT_GROUP_NAME].path_regexes_values
    prefix = _CONF[OPT_GROUP_NAME].prefix
    app_name = _CONF[OPT_GROUP_NAME].app_name

    # materialize the pairs so both this loop and the bucket-initialization
    # loop below can iterate them (a bare zip() is exhausted after one pass
    # on Python 3)
    regex_strings = list(zip(keys, values))
    regex = []
    for (method, pattern) in regex_strings:
        regex.append((method, re.compile(pattern)))

    client = statsd.StatsClient(host=addr, port=port, prefix=prefix)

    # initialize buckets
    for request_method in ["GET", "PUT", "HEAD", "POST", "DELETE", "PATCH"]:
        for name, regexstr in regex_strings:
            for code in ["2xx", "4xx", "5xx"]:
                client.incr(app_name + "." + socket.gethostname() +
                            ".requests." + request_method + "." + name + "." +
                            code)
                client.decr(app_name + "." + socket.gethostname() +
                            ".requests." + request_method + "." + name + "." +
                            code)

    def middleware(env, start_response):

        request_method = env["REQUEST_METHOD"]
        path = env["PATH_INFO"]
        hostname = socket.gethostname()
        api_method = "unknown"

        for (method, regex_pattern) in regex:
            if regex_pattern.match(path):
                api_method = method

        def _start_response(status, headers, *args):
            status_path = (app_name + "." + hostname + ".requests." +
                           request_method + "." + api_method)
            status_code = int(status[:3])
            # bucket by the leading status digit (use integer division so
            # this behaves the same on Python 2 and 3)
            if status_code // 100 == 5:
                client.incr(status_path + ".5xx")
            elif status_code // 100 == 4:
                client.incr(status_path + ".4xx")
            elif status_code // 100 == 2:
                client.incr(status_path + ".2xx")

            return start_response(status, headers, *args)

        start = time.time() * 1000
        response = app(env, _start_response)
        stop = time.time() * 1000

        elapsed = stop - start
        client.timing(app_name + "." + hostname + ".latency." + request_method,
                      elapsed)
        return response

    return middleware
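
Usage note: a hedged sketch of applying the middleware, assuming _CONF has
already been populated with the options read above; hello_app is a hypothetical
WSGI callable:

def hello_app(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

# every request through `application` now emits 2xx/4xx/5xx counters and a
# latency timer keyed by host, request method and matched path
application = wrap(hello_app)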
Example #3
        def __init__(self,
                     max_concurrent_batches=10,
                     block_on_send=False,
                     block_on_response=False,
                     max_batch_size=100,
                     send_frequency=timedelta(seconds=0.25),
                     user_agent_addition=''):
            if not has_tornado:
                raise ImportError(
                    'TornadoTransmission requires tornado, but it was not found.'
                )

            self.block_on_send = block_on_send
            self.block_on_response = block_on_response
            self.max_batch_size = max_batch_size
            self.send_frequency = send_frequency

            user_agent = "libhoney-py/" + VERSION
            if user_agent_addition:
                user_agent += " " + user_agent_addition

            self.http_client = AsyncHTTPClient(
                force_instance=True, defaults=dict(user_agent=user_agent))

            # libhoney adds events to the pending queue for us to send
            self.pending = Queue(maxsize=1000)
            # we hand back responses from the API on the responses queue
            self.responses = Queue(maxsize=2000)

            self.batch_data = {}
            self.sd = statsd.StatsClient(prefix="libhoney")
            self.batch_sem = Semaphore(max_concurrent_batches)
Example #4
def api_init():
    """
    Initializes flask. Call _after_ setting flask config.
    """

    faf.db.init_db(app.config)
    github = api.deployment.github.make_session(app.config['GITHUB_USER'],
                                                app.config['GITHUB_TOKEN'])
    slack = api.deployment.slack.make_session(app.config['SLACK_HOOK_URL'])

    app.deployment_manager = DeploymentManager(app.config['ENVIRONMENT'],
                                               app.config['GITHUB_SECRET'],
                                               app.config['GIT_OWNER'], github,
                                               slack)
    for deploy_configuration in app.config['DEPLOYMENTS']:
        app.deployment_manager.add(deploy_configuration)
    app.github = github

    app.secret_key = app.config['FLASK_LOGIN_SECRET_KEY']
    flask_jwt.init_app(app)
    cache.init_app(app)

    if app.config.get('STATSD_SERVER'):
        host, port = app.config['STATSD_SERVER'].split(':')
        stats = statsd.StatsClient(host, port)

        @app.before_request
        def before_req():
            request._start_time = time.time()

        @app.after_request
        def after_req(response):
            stats.timing('api.request',
                         (time.time() - request._start_time) * 1000)
            return response
Example #5
def action():
    import statsd
    stats = statsd.StatsClient()
    pipe = stats.pipeline()
    counters = psutil.network_io_counters(True)
    pattern = None
    if j.application.config.exists('nic.pattern'):
        pattern = j.application.config.getStr('nic.pattern')

    for nic, stat in counters.iteritems():
        if pattern and j.codetools.regex.match(pattern, nic) == False:
            continue

        result = dict()
        bytes_sent, bytes_recv, packets_sent, packets_recv, errin, errout, dropin, dropout = stat
        result['kbytes_sent'] = int(round(bytes_sent / 1024.0, 0))
        result['kbytes_recv'] = int(round(bytes_recv / 1024.0, 0))
        result['packets_sent'] = packets_sent
        result['packets_recv'] = packets_recv
        result['errin'] = errin
        result['errout'] = errout
        result['dropin'] = dropin
        result['dropout'] = dropout
        for key, value in result.iteritems():
            pipe.gauge(
                "%s_%s_nic_%s_%s" %
                (j.application.whoAmI.gid, j.application.whoAmI.nid, nic, key),
                value)
    pipe.send()
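
Usage note: the pipeline above buffers every gauge and flushes them in as few
UDP packets as possible only when send() is called; statsd pipelines can also
be used as context managers, which flush automatically on exit:

import statsd

stats = statsd.StatsClient()
with stats.pipeline() as pipe:
    pipe.gauge('demo.metric_a', 1)  # buffered, not sent yet
    pipe.gauge('demo.metric_b', 2)
# both gauges go out here, when the with-block exits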
Example #6
def send_email_notification(email, performance_id):
    c = statsd.StatsClient('localhost', 8125)
    start = time.time()
    try:
        user_email = UserEmail.objects.get(email=email)
        performance = Performance.objects.get(pk=performance_id)

        user_email.mail(
            "Es gibt noch Karten für '%s'" % performance.title,
            render_to_string('email/notification.email',
                             {'performance': performance}))

        c.incr('send_email_notification')
        c.gauge('total.send_email_notification', 1, delta=True)
    except UserEmail.DoesNotExist as e:
        c.incr('send_email_notification.no_user')
        logger.error('User does not exist', exc_info=True)
        return
    except Performance.DoesNotExist as e:
        c.incr('send_email_notification.no_performance')
        logger.error('Performance does not exist', exc_info=True)
        return
    except Exception as e:
        c.incr('send_email_notification.failed')
        logger.error('Sending email failed', exc_info=True)
        return

    end = time.time()
    c.timing('send_email_notifications.timed', floor((end - start) * 1000))
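
Usage note: instead of the manual start/end arithmetic used above, the statsd
client's timer can wrap the measured block directly; the bucket name and
workload below are illustrative:

import statsd

c = statsd.StatsClient('localhost', 8125)
with c.timer('send_email_notifications.timed'):
    send_the_email()  # hypothetical workload being measured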
Example #7
    def __init__(self, nodes, graphite_node, graphite_port, graphite_prefix):
        self._nodes = self.parse_nodes(nodes)
        self._graphite_node = graphite_node
        self._statsd = statsd.StatsClient(graphite_node, graphite_port,
                                          graphite_prefix)

        self._stats = ImpalaStats(self._statsd)
Example #8
    def __init__(self, config=None):
        # Shouldn't be trying imports in __init__.
        # It turns what should be a load error into a runtime error
        config = normalize_config(config)
        self.config = config
        self.client = InfluxDBClient(config['host'], config['port'],
                                     config['user'], config['passw'],
                                     config['db'], config['ssl'])
        self.schemas = [(re.compile(patt), step)
                        for (patt, step) in config['schema']]
        try:
            self.statsd_client = (
                statsd.StatsClient(config['statsd'].get('host'),
                                   config['statsd'].get('port', 8125))
                if 'statsd' in config and config['statsd'].get('host')
                else NullStatsd())
        except NameError:
            logger.warning("Statsd client configuration present but 'statsd' "
                           "module not installed - ignoring statsd configuration..")
            self.statsd_client = NullStatsd()
        self._setup_logger(config['log_level'], config['log_file'])
        self.es = None
        if config['es_enabled']:
            try:
                from elasticsearch import Elasticsearch
            except ImportError:
                logger.warning("Elasticsearch configuration present but "
                               "'elasticsearch' module not installed - "
                               "ignoring elasticsearch configuration..")
            else:
                self.es = Elasticsearch(config['es_hosts'])
Example #9
File: __init__.py  Project: yorick-ne/api
def api_init():
    """
    Initializes flask. Call _after_ setting flask config.
    """

    faf.db.init_db(app.config)
    app.github = github.make_session(app.config['GITHUB_USER'],
                                     app.config['GITHUB_TOKEN'])
    app.slack = slack.make_session(app.config['SLACK_HOOK_URL'])

    app.secret_key = app.config['FLASK_LOGIN_SECRET_KEY']
    flask_jwt.init_app(app)


    if app.config.get('STATSD_SERVER'):
        host, port = app.config['STATSD_SERVER'].split(':')
        stats = statsd.StatsClient(host, port)

        @app.before_request
        def before_req():
            request._start_time = time.time()

        @app.after_request
        def after_req(response):
            stats.timing('api.request', (time.time()-request._start_time)*1000)
            return response
Example #10
    def __init__(self,
                 max_concurrent_batches=10,
                 block_on_send=False,
                 block_on_response=False,
                 max_batch_size=100,
                 send_frequency=0.25,
                 user_agent_addition='',
                 debug=False):
        self.max_concurrent_batches = max_concurrent_batches
        self.block_on_send = block_on_send
        self.block_on_response = block_on_response
        self.max_batch_size = max_batch_size
        self.send_frequency = send_frequency

        user_agent = "libhoney-py/" + VERSION
        if user_agent_addition:
            user_agent += " " + user_agent_addition

        session = requests.Session()
        session.headers.update({"User-Agent": user_agent})
        self.session = session

        # libhoney adds events to the pending queue for us to send
        self.pending = queue.Queue(maxsize=1000)
        # we hand back responses from the API on the responses queue
        self.responses = queue.Queue(maxsize=2000)

        self._sending_thread = None
        self.sd = statsd.StatsClient(prefix="libhoney")

        self.debug = debug
        if debug:
            self._init_logger()
Example #11
def online_count():
    c = statsd.StatsClient(STATSD_HOST, 8125, prefix=PREFIX + 'online')
    while True:
        for community, db_num in hosts.items():
            count = r_servers[community].bitcount('users:online:bitmap')
            c.gauge('%s.count' % community, count)
        time.sleep(GRANULARITY)
Example #12
    def __init__(self, logger, session_info=None):
        """
        Initialize instance with a logger to be used, info related to the request and info related to
        the service
        """

        self._logger = logger

        if not session_info:
            session_info = {}

        environment = session_info.get('environment')

        self._extra = {
            'env': environment,
            'service': session_info.get('service'),
            'handler': session_info.get('handler'),
            'requestId': session_info.get('requestId',
                                          constants.NO_REQUEST_ID),
            'details': ''
        }

        self._stats_enabled = settings.STATS_ENABLED

        if self._stats_enabled:
            self._stats_client = statsd.StatsClient(
                host=settings.STATS_SERVICE_HOSTNAME,
                port=8125,
                prefix='prjname.' + environment)
        self._messages_queue = Queue.Queue()
        self.log_entire_request = settings.LOG_LEVEL in ['CRITICAL', 'ERROR']
Example #13
def cpu_times_percent(host, port, prefix, fields, debug=False):
    prefix = '.'.join([prefix, 'cpu']) if prefix else 'cpu'
    client = statsd.StatsClient(host, port, prefix=prefix)
    value = psutil.cpu_percent(interval=1)
    cpu_times_pcnt = psutil.cpu_times_percent(interval=1)

    with client.pipeline() as pipe:
        pipe.gauge('percent{}'.format(fields), value)
        pipe.gauge('percent.user{}'.format(fields), cpu_times_pcnt.user)
        pipe.gauge('percent.system{}'.format(fields), cpu_times_pcnt.system)
        pipe.gauge('percent.idle{}'.format(fields), cpu_times_pcnt.idle)

        if not isWindows:
            pipe.gauge('percent.nice{}'.format(fields), cpu_times_pcnt.nice)

        if isLinux:
            pipe.gauge('percent.iowait{}'.format(fields),
                       cpu_times_pcnt.iowait)
            pipe.gauge('percent.irq{}'.format(fields), cpu_times_pcnt.irq)
            pipe.gauge('percent.softirq{}'.format(fields),
                       cpu_times_pcnt.softirq)
            pipe.gauge('percent.steal{}'.format(fields), cpu_times_pcnt.steal)
            pipe.gauge('percent.guest{}'.format(fields), cpu_times_pcnt.guest)
            pipe.gauge('percent.guest_nice{}'.format(fields),
                       cpu_times_pcnt.guest_nice)
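
Usage note: a hypothetical invocation; fields is appended verbatim to each
bucket name, so passing '' yields plain buckets such as cpu.percent.user:

cpu_times_percent('localhost', 8125, prefix='myhost', fields='')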
Example #14
File: replay.py  Project: a-kr/httpcloner
def main():
    global logger, statsd_client
    options, args = setup_options()
    logging.basicConfig(stream=sys.stdout,
                        level=getattr(logging, options.loglevel),
                        format="%(asctime)s :: %(message)s")
    logger = logging.getLogger()

    if os.path.exists(options.unix_socket):
        os.unlink(options.unix_socket)

    conn = get_dgram_socket(options.unix_socket)
    logger.info('spawning workers...')

    if options.statsd:
        host, port = options.statsd.split(':')
        prefix = 'gor.' + socket.gethostname().split('.')[0] + '.replay'
        statsd_client = statsd.StatsClient(host, int(port), prefix)

    total_output_counter = Counter('worker_output')
    parse_errors_counter = Counter('parse_errors')

    queue = gevent.queue.Queue(maxsize=options.backlog)

    listener = Listener(0, conn, queue, options)
    listener_thread = gevent.spawn(listener.runloop)
    workers = [
        Worker(i, queue, total_output_counter, parse_errors_counter, options)
        for i in xrange(options.threads)
    ]
    threads = [gevent.spawn(worker.runloop) for worker in workers]
    listener_thread.join()
Example #15
def loop():
    task_mq_cfg = cfg['task-mq']
    mq_url = task_mq_cfg['url']
    routing_key = task_mq_cfg['routing-key']
    exchange_name = task_mq_cfg['exchange']
    queue_name = task_mq_cfg['queue']
    insert_sql = cfg['db']['sqls']['insert']

    statsd_cfg = cfg['statsd']
    STATSD_HOST = statsd_cfg['host']
    STATSD_PORT = statsd_cfg['port']
    STATSD_PREFIX = statsd_cfg['prefix']
    STATSD_COMPLETED_KEY = statsd_cfg['completed-key']
    statsd_client = statsd.StatsClient(STATSD_HOST,
                                       STATSD_PORT,
                                       prefix=STATSD_PREFIX)

    while True:
        try:
            purge_conn = mdb.connect(**cfg['db']['args'])
            purge_conn.cursor().execute("set names utf8")

            with Connection(mq_url) as conn:
                exchange = Exchange(exchange_name)
                queue = MqQueue(queue_name, exchange, routing_key=routing_key)
                worker = PurgeWorker(conn, queue, purge_conn, insert_sql,
                                     statsd_client, STATSD_COMPLETED_KEY)
                worker.run()
            purge_conn.close()
        except KeyboardInterrupt:
            raise
        except Exception:
            logger.info('loop() Exception: << %s >>' % traceback.format_exc())
            time.sleep(0.5)
Example #16
    def __init__(
        self,
        statsd_host="cloudmetrics1001.eqiad.wmnet",
        statsd_prefix="tools",
        sleep=60,
        max_tool_restarts=3,
        restart_window=3600,
    ):
        self.sleep = sleep
        self.manifests = []
        self.restarts = collections.defaultdict(list)
        self.max_tool_restarts = max_tool_restarts
        self.restart_window = restart_window

        # Setup logging
        self.log = logging.getLogger(
            "manifestcollector.%s" % self.__class__.__name__
        )
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        self.log.addHandler(handler)
        self.log.setLevel(logging.DEBUG)

        # Setup statsd client
        self.stats = statsd.StatsClient(statsd_host, 8125, prefix=statsd_prefix)
Example #17
    def __init__(self, crawler):
        super(StatsDStatsCollector, self).__init__(crawler)

        self._statsd_client = statsd.StatsClient(
            host=crawler.settings.get('STATSD_HOST', 'localhost'),
            port=crawler.settings.getint('STATSD_PORT', 8125))
        self.escape_dots = crawler.settings.get('STATSD_ESCAPE_DOTS')
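
Usage note: a collector like this is typically wired in through Scrapy's
STATS_CLASS setting; the module path below is hypothetical:

# settings.py
STATS_CLASS = 'myproject.statscollectors.StatsDStatsCollector'
STATSD_HOST = 'localhost'
STATSD_PORT = 8125
STATSD_ESCAPE_DOTS = True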
Example #18
    def __init__(self):
        self.db_connection_dict = {}
        super().__init__()

        # self.mon_con = graphitesend.GraphitePickleClient(
        # 		autoreconnect   = True,
        # 		group           = None,
        # 		prefix          = 'MangaCMS.Scrapers.{tableName}.{pluginName}'.format(
        # 					tableName  = self.tableName.replace(".", "_"),
        # 					pluginName = self.pluginName.replace(".", "_")
        # 				),
        # 		system_name     = '',
        # 		graphite_server = settings.GRAPHITE_DB_IP,
        # 		graphite_port   = 2003,
        # 		debug           = True
        # 	)
        # self.mon_con.connect()
        if settings.GRAPHITE_DB_IP:
            self.mon_con = statsd.StatsClient(
                host=settings.GRAPHITE_DB_IP,
                port=8125,
                prefix='MangaCMS.Scrapers.{tableName}.{pluginName}'.format(
                    tableName=self.tableName.replace(".", "_"),
                    pluginName=self.pluginName.replace(".", "_"),
                ))
        else:
            self.mon_con = None
Example #19
    def test_001_STATSD_Client(self):

        # This unit test is testing the statsd client and UDP simuserver more than anything else

        response = None
        MSG_HELLO = "Hello World"
        MSG_STATSD_1 = 'phenome.Hello World:1|c'
        MSG_STATSD_2 = 'phenome.Metric2:100|g'

        # start the simulator
        simulator = self.startSimulator(None, "UDP_SERVER", self.api_port)

        try:

            import statsd
            c = statsd.StatsClient('localhost', self.api_port, prefix='phenome')

            # Now send the messages

            c.incr(MSG_HELLO)
            time.sleep(1)
            self._check_simulator_message(MSG_STATSD_1)

            # did the UDP server get a Metric2 Gauge? (send a couple times this is UDP)
            c.gauge("Metric2", 100)
            c.gauge("Metric2", 100)
            time.sleep(1)
            self._check_simulator_message(MSG_STATSD_2)

        except Exception as ex:
            print(ex)
        finally:
            # MAKE SURE TO PUT A FINALLY AND STOP otherwise there could be hanging threads
            simulator.stop()
Example #20
def network():
    c = statsd.StatsClient(STATSD_HOST, 8125, prefix=PREFIX + 'system.network')
    t0 = time.time()
    counters = psutil.net_io_counters(pernic=True)

    last_totals = dict()
    totals = dict()
    interfaces = set([key for key in counters.keys() if key != 'lo'])
    for interface in interfaces:
        totals[interface] = (counters[interface].bytes_sent,
                             counters[interface].bytes_recv)
        last_totals[interface] = (counters[interface].bytes_sent,
                                  counters[interface].bytes_recv)

    while True:
        # take one snapshot and one timestamp per sweep so every interface
        # shares the same measurement interval; resetting t0 inside the
        # per-interface loop would wildly inflate the rates of every
        # interface after the first
        counters = psutil.net_io_counters(pernic=True)
        t1 = time.time()
        for interface in interfaces:
            counter = counters[interface]
            totals[interface] = (counter.bytes_sent, counter.bytes_recv)

            ul, dl = [
                (now - last) / (t1 - t0) / 1000.0
                for now, last in zip(totals[interface], last_totals[interface])
            ]

            c.gauge('%s.upload.kbps' % interface, ul)
            c.gauge('%s.download.kbps' % interface, dl)
            last_totals[interface] = totals[interface]

        t0 = t1
        time.sleep(GRANULARITY)
Example #21
def send_verify_email(email, scheme, host, count):
    c = statsd.StatsClient('localhost', 8125)
    try:
        user_email = UserEmail.objects.get(email=email)
        user_email.mail(
            "Willkommen beim TheaterWecker",
            render_to_string(
                'email/welcome.email', {
                    'verification_link':
                    "%s://%s%s" %
                    (scheme, host,
                     reverse('app:verify_email',
                             kwargs={'key': user_email.verification_key})),
                    'unsubscribe_link':
                    "%s://%s%s?email=%s" %
                    (scheme, host, reverse('app:unsubscribe'), email),
                }))
        c.incr('send_verify_email')
        c.gauge('total.send_verify_email', 1, delta=True)
    except UserEmail.DoesNotExist as e:
        c.incr('send_verify_email.no_user')
        logger.error('User does not exist', exc_info=True)
        return
    except Exception as e:
        if count > 9:
            c.incr('send_verify_email.failed_finally')
            logger.error('Sending email failed after 10th retry',
                         exc_info=True)
            return
        c.incr('send_verify_email.failed')
        logger.error('Sending email failed', exc_info=True)
        send_verify_email.apply_async((email, scheme, host, count + 1),
                                      countdown=(2**count) * 60)
Example #22
def connections():
    c = statsd.StatsClient(STATSD_HOST, 8125, prefix=PREFIX + 'system.network')
    to_check = [
        ('conn_established', 'count_established_conn.sh'),
        ('pkts_collapsed', 'count_pkts_collapsed.sh'),
        ('pkts_pruned', 'count_pkts_pruned.sh'),
        ('pkts_pruned_overrun', 'count_pkts_pruned_overrun.sh'),
        ('syn_recv', 'count_syn_recv.sh'),
        ('tcp_in_errs', 'count_in_errs_snmp.sh'),
        ('rx_discards', 'count_rx_discards.sh'),
        ('pkts_recv_errs', 'count_pkts_recv_errors.sh'),
        ('pkts_recv_buff_errs', 'count_pkts_recv_buffer_errors.sh'),
    ]

    while True:
        for key, script in to_check:
            try:
                process = subprocess.Popen([script], stdout=subprocess.PIPE)
                out, _ = process.communicate()
                try:
                    the_count = int(float(str(out, 'utf-8').strip()))
                except (ValueError, TypeError):
                    the_count = 0
                c.gauge(key, the_count)
            except Exception as e:
                print('error: %s' % str(e))
        time.sleep(GRANULARITY)
Example #23
    def __init__(self, env):
        self.env = env

        conf = env.config.get(ConfigKeys.STATS_SERVICE)
        host = conf.get(ConfigKeys.HOST)

        if env.config.get(ConfigKeys.TESTING, False) or host == "mock":
            self.statsd = MockStatsd()
        else:
            import statsd

            port = conf.get(ConfigKeys.PORT)
            prefix = "dinoms"
            if ConfigKeys.PREFIX in conf:
                prefix = conf.get(ConfigKeys.PREFIX)
            if ConfigKeys.INCLUDE_HOST_NAME in conf:
                include_host_name = conf.get(ConfigKeys.INCLUDE_HOST_NAME)
                if include_host_name is not None and \
                        str(include_host_name).strip().lower() in ["yes", "1", "true"]:
                    import socket

                    prefix = "%s.%s" % (prefix, socket.gethostname())

            self.statsd = statsd.StatsClient(host, int(port), prefix=prefix)
Example #24
    def execute_action(self):
        statsd_connection = statsd.StatsClient(self.host, self.port)
        influxdb_measure = f"{self.metric_name}"
        if self.tags is not None:
            # append the tag values (needs the f-prefix, otherwise the
            # literal text "{self.tags}" is sent)
            influxdb_measure = influxdb_measure + f",{self.tags}"
        statsd_connection.gauge(influxdb_measure, self.metric)
        log.info(f"logging metric {influxdb_measure} with value {self.metric}")
Example #25
def _get_stats_client(cfg: StatsConfig):
    global _stats_client
    if cfg.disabled is True:
        _stats_client = DummyStatsClient()
    elif _stats_client is None:
        _stats_client = statsd.StatsClient(cfg.host, cfg.port)
    return _stats_client
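
Usage note: the module-level cache means the real client is built once and
reused; a minimal sketch of the assumed surrounding state, taking cfg.disabled
to be false:

_stats_client = None  # populated by the first _get_stats_client() call

client = _get_stats_client(cfg)          # builds statsd.StatsClient(...)
assert client is _get_stats_client(cfg)  # later calls return the cached one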
Example #26
    def __init__(self):
        self.db_connection_dict = {}
        super().__init__()

        # self.mon_con = graphitesend.GraphitePickleClient(
        # 		autoreconnect   = True,
        # 		group           = None,
        # 		prefix          = 'MangaCMSOld.Scrapers.{tableName}.{pluginName}'.format(
        # 					tableName  = self.tableName.replace(".", "_"),
        # 					pluginName = self.pluginName.replace(".", "_")
        # 				),
        # 		system_name     = '',
        # 		graphite_server = settings.GRAPHITE_DB_IP,
        # 		graphite_port   = 2003,
        # 		debug           = True
        # 	)
        # self.mon_con.connect()
        if settings.GRAPHITE_DB_IP:

            prefix_str = 'MangaCMSOld.Scrapers.{tableName}.{loggerPath}'.format(
                tableName=self.tableName.replace(".", "_").replace(
                    "-", "_").replace(" ", "_"),
                loggerPath=self.loggerPath.replace("-", "_").replace(" ", "_"),
            )

            self.log.info("Using graphite prefix str: '%s'", prefix_str)

            self.mon_con = statsd.StatsClient(host=settings.GRAPHITE_DB_IP,
                                              port=8125,
                                              prefix=prefix_str)
        else:
            self.mon_con = None
Example #27
def subscribe(request):
    c = statsd.StatsClient('localhost', 8125)

    data = json.loads(request.body.decode("utf-8"))

    try:
        user_device = UserDevice.objects.get(device_id=data.get('deviceId', ''))
    except UserDevice.DoesNotExist:
        return HttpResponse("", status=404)

    if not user_device.verified:
        return HttpResponse("", status=412)

    CategoryNotification.objects.filter(device=user_device).delete()

    for category in data.get('categories', []):
        try:
            notification, _ = CategoryNotification.objects.get_or_create(
                device=user_device,
                category=Category.objects.get(id=category),
                verified=True,
                defaults={'interval': timedelta(hours=4)}
            )

        except Exception as e:
            logger.error('Saving category failed', exc_info=True, extra={
                # Optionally pass a request and we'll grab any information we can
                'request': request,
            })
            return HttpResponse("", status=500)

    c.incr('subscribe.device.success')
    c.gauge('total.subscribe.device.success', 1, delta=True)
    return HttpResponse("", status=201)
Example #28
File: stats.py  Project: lyft/osscla
def get_statsd_client():
    global STATS_CLIENT
    if STATS_CLIENT is None:
        STATS_CLIENT = statsd.StatsClient(app.config['STATSD_HOST'],
                                          app.config['STATSD_PORT'],
                                          prefix=app.config['STATSD_PREFIX'])
    return STATS_CLIENT
Example #29
    def __init__(self, prefix="", host="127.0.0.1", port="8125"):
        """Initialize a new StatsD client.

        :prefix: StatsD string prefix that will be used in every bucket name.
        :host: network host address of the StatsD server.
        :port: network port of the StatsD server.
        """
        self.stat = statsd.StatsClient(host=host, port=port, prefix=prefix)
Example #30
class StatsdWatcher:
    c = statsd.StatsClient('myhost.cooldispatcher.com', 8125)
    prefix = 'your_company.test'

    def __init__(self, app_code, env='beta'):
        self.app_code = app_code
        self.env = env

    @error_notice
    def timer(self, target, value):
        self.c.timing('.'.join([self.prefix, self.app_code, self.env, target]),
                      value)

    @error_notice
    def counter(self, target):
        self.c.gauge('.'.join([self.prefix, self.app_code, self.env, target]),
                     1,
                     delta=True)

    @error_notice
    def counter_many(self, target, n):
        self.c.gauge('.'.join([self.prefix, self.app_code, self.env, target]),
                     n,
                     delta=True)

    @error_notice
    def gauge(self, target, value):
        self.c.gauge('.'.join([self.prefix, self.app_code, self.env, target]),
                     value)
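
Usage note: a hypothetical caller, producing buckets such as
your_company.test.myapp.beta.db.latency:

watcher = StatsdWatcher('myapp', env='beta')
watcher.timer('db.latency', 12.5)  # timing value in milliseconds
watcher.counter('requests')        # gauge delta of +1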