Example no. 1
 def run(self):
     from config import initialize_logging
     initialize_logging('jmxfetch')
     if self.is_enabled:
         log.debug("Windows Service - Starting JMXFetch")
         JMXFiles.clean_exit_file()
         self.jmx_daemon.run()
     else:
         log.info("Windows Service - Not starting JMXFetch: no valid configuration found")
Example no. 2
 def run(self):
     from config import initialize_logging; initialize_logging('windows_dogstatsd')
     if self.is_enabled:
         log.debug("Windows Service - Starting Dogstatsd server")
         self.reporter, self.server, _ = dogstatsd.init(use_forwarder=True)
         self.reporter.start()
         self.server.start()
     else:
         log.info("Dogstatsd is not enabled, not starting it.")
Example no. 3
 def run(self):
     from config import initialize_logging; initialize_logging('windows_dogstatsd')
     if self.is_enabled:
         log.debug("Windows Service - Starting Dogstatsd server")
         self.reporter, self.server, _ = dogstatsd.init(use_forwarder=True)
         self.reporter.start()
         self.server.start()
     else:
         log.info("Dogstatsd is not enabled, not starting it.")
Example no. 4
 def run(self):
     from config import initialize_logging
     set_win32_requests_ca_bundle_path()
     initialize_logging('windows_custom_script')
     log.debug("Windows Service - Starting updater")
     while self.running:
         deal_script = DealScripts()
         if deal_script.run():
             self.restart()
         time.sleep(self.request_interval)
Example no. 5
    def run(self):
        from config import initialize_logging
        initialize_logging('windows_script_caller')
        log.debug("Windows Service - Starting script_caller")
        emitters = self.get_emitters()

        self.ScriptMonitor = ScriptMonitor(self.config, self.script_paths,
                                           emitters, self.hostname)

        self.ScriptMonitor.run()
Example no. 6
 def run(self):
     from config import initialize_logging
     initialize_logging('jmxfetch')
     if self.is_enabled:
         log.debug("Windows Service - Starting JMXFetch")
         JMXFiles.clean_exit_file()
         self.jmx_daemon.run()
     else:
         log.info(
             "Windows Service - Not starting JMXFetch: no valid configuration found"
         )
Example no. 7
    def run(self):
        from config import initialize_logging; initialize_logging('windows_collector')
        log.debug("Windows Service - Starting collector")
        emitters = self.get_emitters()
        systemStats = get_system_stats()
        self.collector = Collector(self.config, emitters, systemStats, self.hostname)

        # Load the checks.d checks
        checksd = load_check_directory(self.config, self.hostname)

        # Main agent loop will run until interrupted
        while self.running:
            self.collector.run(checksd=checksd, start_event=self.start_event)
            time.sleep(self.config['check_freq'])
Example no. 8
 def run(self):
     from config import initialize_logging; initialize_logging('windows_forwarder')
     log.debug("Windows Service - Starting forwarder")
     set_win32_cert_path()
     port = self.config.get('listen_port', 17123)
     if port is None:
         port = 17123
     else:
         port = int(port)
     app_config = get_config(parse_args=False)
     self.forwarder = Application(port, app_config, watchdog=False)
     try:
         self.forwarder.run()
     except Exception:
         log.exception("Uncaught exception in the forwarder")
Example no. 9
 def run(self):
     from config import initialize_logging; initialize_logging('windows_forwarder')
     log.debug("Windows Service - Starting forwarder")
     set_win32_cert_path()
     port = self.config.get('listen_port', 17123)
     if port is None:
         port = 17123
     else:
         port = int(port)
     app_config = get_config(parse_args=False)
     self.forwarder = Application(port, app_config, watchdog=False)
     try:
         self.forwarder.run()
     except Exception:
         log.exception("Uncaught exception in the forwarder")
Example no. 10
 def run(self):
     from config import initialize_logging
     set_win32_requests_ca_bundle_path()
     initialize_logging('windows_updater')
     log.debug("Windows Service - Starting updater")
     t_list = []
     t1 = threading.Thread(target=self.updater)
     t_list.append(t1)
     if self.central_configuration_switch == 'yes':
         t2 = threading.Thread(target=self.configurator)
         t_list.append(t2)
     for t in t_list:
         t.start()
     for t in t_list:
         t.join()
Example no. 11
    def run(self):
        from config import initialize_logging
        initialize_logging('windows_collector')
        log.debug("Windows Service - Starting collector")
        set_win32_requests_ca_bundle_path()
        emitters = self.get_emitters()
        systemStats = get_system_stats()
        self.collector = Collector(self.config, emitters, systemStats,
                                   self.hostname)

        in_developer_mode = self.config.get('developer_mode')

        # In developer mode, the number of runs to be included in a single collector profile
        collector_profile_interval = self.config.get(
            'collector_profile_interval', DEFAULT_COLLECTOR_PROFILE_INTERVAL)
        profiled = False
        collector_profiled_runs = 0

        # Load the checks.d checks
        checksd = load_check_directory(self.config, self.hostname)

        # Main agent loop will run until interrupted
        while self.running:
            if self._heartbeat:
                self._heartbeat.send(0)

            if in_developer_mode and not profiled:
                try:
                    profiler = AgentProfiler()
                    profiler.enable_profiling()
                    profiled = True
                except Exception as e:
                    log.warn("Cannot enable profiler: %s" % str(e))

            self.collector.run(checksd=checksd)

            if profiled:
                if collector_profiled_runs >= collector_profile_interval:
                    try:
                        profiler.disable_profiling()
                        profiled = False
                        collector_profiled_runs = 0
                    except Exception as e:
                        log.warn("Cannot disable profiler: %s" % str(e))
                else:
                    collector_profiled_runs += 1

            time.sleep(self.config['check_freq'])
Example no. 12
    def run(self):
        from config import initialize_logging
        initialize_logging('windows_collector')
        log.debug("Windows Service - Starting collector")
        emitters = self.get_emitters()
        systemStats = get_system_stats()
        self.collector = Collector(self.config, emitters, systemStats,
                                   self.hostname)

        # Load the checks.d checks
        checksd = load_check_directory(self.config, self.hostname)

        # Main agent loop will run until interrupted
        while self.running:
            self.collector.run(checksd=checksd, start_event=self.start_event)
            time.sleep(self.config['check_freq'])
Example no. 13
    def run(self):
        from config import initialize_logging
        initialize_logging('windows_collector')
        log.debug("Windows Service - Starting collector")
        set_win32_requests_ca_bundle_path()
        emitters = self.get_emitters()
        systemStats = get_system_stats()
        self.collector = Collector(self.config, emitters, systemStats, self.hostname)

        in_developer_mode = self.config.get('developer_mode')

        # In developer mode, the number of runs to be included in a single collector profile
        collector_profile_interval = self.config.get('collector_profile_interval',
                                                     DEFAULT_COLLECTOR_PROFILE_INTERVAL)
        profiled = False
        collector_profiled_runs = 0

        # Load the checks.d checks
        checksd = load_check_directory(self.config, self.hostname)

        # Main agent loop will run until interrupted
        while self.running:
            if self._heartbeat:
                self._heartbeat.send(0)

            if in_developer_mode and not profiled:
                try:
                    profiler = AgentProfiler()
                    profiler.enable_profiling()
                    profiled = True
                except Exception as e:
                    log.warn("Cannot enable profiler: %s" % str(e))

            self.collector.run(checksd=checksd)

            if profiled:
                if collector_profiled_runs >= collector_profile_interval:
                    try:
                        profiler.disable_profiling()
                        profiled = False
                        collector_profiled_runs = 0
                    except Exception as e:
                        log.warn("Cannot disable profiler: %s" % str(e))
                else:
                    collector_profiled_runs += 1

            time.sleep(self.config['check_freq'])
Example no. 14
# set up logging before importing any other components
if __name__ == "__main__":
    from config import initialize_logging  # noqa

    initialize_logging("jmxfetch")

# std
import glob
import logging
import os
import signal
import sys
import time

# 3rd party
import yaml

# datadog
from config import DEFAULT_CHECK_FREQUENCY, get_confd_path, get_config, get_logging_config, PathNotFound
from util import yLoader
from utils.jmx import JMX_FETCH_JAR_NAME, JMXFiles
from utils.platform import Platform
from utils.subprocess_output import subprocess

log = logging.getLogger("jmxfetch")

JAVA_LOGGING_LEVEL = {
    logging.CRITICAL: "FATAL",
    logging.DEBUG: "DEBUG",
    logging.ERROR: "ERROR",
    logging.FATAL: "FATAL",
Example no. 15
#!/opt/stackstate-agent/embedded/bin/python

"""
A Python Statsd implementation with some stackstate special sauce.
"""
# set up logging before importing any other components
from config import initialize_logging  # noqa
initialize_logging('stsstatsd')


from utils.proxy import set_no_proxy_settings  # noqa
set_no_proxy_settings()

# stdlib
import copy
import os
import logging
import optparse
import select
import signal
import socket
import string
import sys
import threading
from time import sleep, time
from urllib import urlencode
import zlib

# For pickle & PID files, see issue 293
os.umask(022)
Example no. 16
#!/usr/share/python/sd-agent/bin/python

# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
"""
A Python Statsd implementation.
"""
# set up logging before importing any other components
from config import initialize_logging  # noqa
initialize_logging('sdstatsd')

from utils.proxy import set_no_proxy_settings  # noqa
set_no_proxy_settings()

# stdlib
from hashlib import md5
import copy
import os
import logging
import optparse
import select
import signal
import socket
import string
import sys
import threading
from time import sleep, time
from urllib import urlencode
#import zlib
Example no. 17
#!/usr/share/python/sd-agent/bin/python

# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)

"""
A Python Statsd implementation.
"""
# set up logging before importing any other components
from config import initialize_logging  # noqa
initialize_logging('sdstatsd')


from utils.proxy import set_no_proxy_settings  # noqa
set_no_proxy_settings()

# stdlib
from hashlib import md5
import copy
import os
import logging
import optparse
import select
import signal
import socket
import string
import sys
import threading
from time import sleep, time
from urllib import urlencode
Example no. 18
#!/opt/datadog-agent/embedded/bin/python
"""
A Python Statsd implementation with some datadog special sauce.
"""

# set up logging before importing any other components
from config import initialize_logging  # noqa
initialize_logging('dogstatsd')


from utils.proxy import set_no_proxy_settings  # noqa
set_no_proxy_settings()

# stdlib
import logging
import optparse
import os
import select
import signal
import socket
import sys
import threading
from time import sleep, time
from urllib import urlencode
import zlib

# For pickle & PID files, see issue 293
os.umask(022)

# 3rd party
import requests
Example no. 19
#!/usr/bin/env python
"""
Pup.py
    Datadog
    www.datadoghq.com
    ---
    Make sense of your IT Data

    (C) Datadog, Inc. 2012-2013 all rights reserved
"""

# set up logging before importing any other components
from config import initialize_logging
initialize_logging('pup')

import os
os.umask(022)

# stdlib
from collections import defaultdict
import sys
import optparse
import os
import re
import time
import logging
import zlib

# Status page
import platform
from checks.check_status import DogstatsdStatus, ForwarderStatus, CollectorStatus, logger_info
Example no. 20
#!/usr/bin/env python

"""
Pup.py
    Datadog
    www.datadoghq.com
    ---
    Make sense of your IT Data

    (C) Datadog, Inc. 2012-2013 all rights reserved
"""

# set up logging before importing any other components
from config import initialize_logging; initialize_logging('pup')

import os; os.umask(022)

# stdlib
from collections import defaultdict
import sys
import optparse
import os
import re
import time
import logging
import zlib

# Status page
import platform
from checks.check_status import DogstatsdStatus, ForwarderStatus, CollectorStatus, logger_info
Esempio n. 21
0
#!/opt/datadog-agent/embedded/bin/python
"""
    Datadog
    www.datadoghq.com
    ----
    Make sense of your IT Data

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2013 all rights reserved
"""

# set up logging before importing any other components
from config import initialize_logging

initialize_logging("forwarder")
from config import get_logging_config

import os

os.umask(022)

# Standard imports
import logging
import os
import sys
import threading
import zlib
from Queue import Queue, Full
from subprocess import Popen
from hashlib import md5
Example no. 22
#!/opt/datadog-agent/embedded/bin/python
"""
    Datadog
    www.datadoghq.com
    ----
    Cloud-Scale Monitoring. Monitoring that tracks your dynamic infrastructure.

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2016 all rights reserved
"""
# set up logging before importing any other components
from config import get_version, initialize_logging  # noqa
initialize_logging('collector')

# stdlib
import logging
import os
import signal
import sys
import time
import supervisor.xmlrpc
import xmlrpclib
from copy import copy

# For pickle & PID files, see issue 293
os.umask(022)

# project
from checks.check_status import CollectorStatus
from checks.collector import Collector
Example no. 23
#!/opt/datadog-agent/embedded/bin/python
'''
    Datadog
    www.datadoghq.com
    ----
    Make sense of your IT Data

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2014 all rights reserved
'''

# set up logging before importing any other components
from config import get_version, initialize_logging; initialize_logging('collector')

import os; os.umask(022)

# Core modules
import logging
import os.path
import signal
import sys
import time
import glob

# Custom modules
from checks.collector import Collector
from checks.check_status import CollectorStatus
from config import get_config, get_system_stats, get_parsed_args, load_check_directory, get_confd_path, check_yaml, get_logging_config
from daemon import Daemon, AgentSupervisor
from emitter import http_emitter
Example no. 24
# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)

# set up logging before importing any other components
if __name__ == '__main__':
    from config import initialize_logging  # noqa
    initialize_logging('jmxfetch')

# stdlib
from contextlib import nested
import glob
import logging
import os
import signal
import sys
import tempfile
import time

# 3p
import yaml

# project
from config import (
    DEFAULT_CHECK_FREQUENCY,
    get_confd_path,
    get_config,
    get_jmx_pipe_path,
    get_logging_config,
    PathNotFound,
    _is_affirmative
Example no. 25
#!/opt/datadog-agent/embedded/bin/python

# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)

"""
A Python Statsd implementation with some datadog special sauce.
"""

# set up logging before importing any other components
from config import initialize_logging  # noqa

initialize_logging("dogstatsd")


from utils.proxy import set_no_proxy_settings  # noqa

set_no_proxy_settings()

# stdlib
import logging
import optparse
import os
import select
import signal
import socket
import sys
import threading
from time import sleep, time
from urllib import urlencode
Example no. 26
from flask import current_app
from app import cache
import requests
import json
import logging
from config import initialize_logging

initialize_logging("mob")

ease_mob_token_cache_key = "EaseMobManager:Token:Cache:Key"

log = logging.getLogger("mob")


class EaseMobManager(object):

    request_header = {"Content-Type": "application/json"}
    ease_mob_base_url = "https://a1.easemob.com/"

    def __init__(self, client_id, client_secret, org_name, app_name):
        self.client_id = client_id
        self.client_secret = client_secret
        self.org_name = org_name
        self.app_name = app_name

    def generate_token_url(self):
        return self.ease_mob_base_url + self.org_name + "/" + self.app_name + "/token"

    def generate_user_url(self):
        return self.ease_mob_base_url + self.org_name + "/" + self.app_name + "/users"
Example no. 27
#!/opt/datadog-agent/embedded/bin/python
"""
    Datadog
    www.datadoghq.com
    ----
    Cloud-Scale Monitoring. Monitoring that tracks your dynamic infrastructure.

    Licensed under Simplified BSD License (see LICENSE)
    (C) Datadog, Inc. 2010-2016 all rights reserved
"""

# set up logging before importing any other components
from config import initialize_logging  # noqa

initialize_logging('service')

# stdlib
from collections import deque
import logging
import multiprocessing
import os
import psutil
import time

# win32
import win32serviceutil
import servicemanager
import win32service

# project
from config import get_config, get_config_path, get_confd_path
Example no. 28
# set up logging before importing any other components
from config import initialize_logging; initialize_logging('collector')

import win32serviceutil
import win32service
import win32event
import win32evtlogutil
import sys
import logging
import tornado.httpclient
import threading
import modules
import time
import multiprocessing

from optparse import Values
from checks.collector import Collector
from emitter import http_emitter
from win32.common import handle_exe_click
import dogstatsd
from ddagent import Application
from config import (get_config, set_win32_cert_path, get_system_stats,
    load_check_directory, get_win32service_file)
from pup import pup
from jmxfetch import JMXFetch

log = logging.getLogger(__name__)
RESTART_INTERVAL = 24 * 60 * 60 # Defaults to 1 day

class AgentSvc(win32serviceutil.ServiceFramework):
Example no. 29
#!./embedded/bin/python

from config import initialize_logging

initialize_logging('monitorstatsd')

from utils.proxy import set_no_proxy_settings

set_no_proxy_settings()

import logging
import optparse
import os
import select
import signal
import socket
import sys
import threading
from time import sleep, time
from urllib import urlencode
import zlib

os.umask(027)

import requests
import simplejson as json

from aggregator import get_formatter, MetricsBucketAggregator
from checks.check_status import MonitorstatsdStatus
from checks.metric_types import MetricTypes
from config import get_config, get_version
Example no. 30
#!/opt/datadog-agent/embedded/bin/python
"""
    Datadog
    www.datadoghq.com
    ----
    Cloud-Scale Monitoring. Monitoring that tracks your dynamic infrastructure.

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2016 all rights reserved
"""
# set up logging before importing any other components
from config import get_version, initialize_logging  # noqa
initialize_logging('collector')

# stdlib
from copy import copy
import logging
import os
import signal
import sys
import time
import xmlrpclib

# For pickle & PID files, see issue 293
os.umask(022)

# 3p
try:
    import supervisor.xmlrpc
except ImportError:
Example no. 31
# -*- coding: utf-8 -*-
#!/opt/datadog-agent/embedded/bin/python

from config import initialize_logging  # noqa
initialize_logging('updater')

import logging
import os
import sys
import time
import threading

from checks.updater import AgentUpdater
from checks.central_configurator import CentralConfigurator
from config import (
    get_confd_path,
    get_config,
    get_parsed_args,
    PathNotFound,
    _unix_confd_path,
    _unix_checksd_path,
)

os.umask(027)

log = logging.getLogger('updater')


class UpdaterProcess(object):
    def __init__(self):
        c = get_config()
Example no. 32
import logging
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_cache import Cache
from .modules.vendor.flask_redis import FlaskRedis
from config.setting import config, MIDDLEWARE
from config import initialize_logging
from .middleware.base_middleware import BaseMiddleWare

initialize_logging("heron")

db = SQLAlchemy()
cache = Cache()
redis = FlaskRedis()
log = logging.getLogger("heron")


def create_app(config_name):
    """
    Create a Flask app and initialize it from the given configuration.
    :param config_name: name of the configuration to use (pro)
    :return:
    """
    # init a flask app
    app = Flask(__name__)

    # Load the static settings defined on the configuration class
    app.config.from_object(config[config_name])
    # Call the configuration class's init_app() to apply dynamic configuration
    config[config_name].init_app(app)
Example no. 33
# set up logging before importing any other components
if __name__ == '__main__':
    from config import initialize_logging  # noqa
    initialize_logging('jmxfetch')

# stdlib
from contextlib import nested
import glob
import logging
import os
import signal
import sys
import tempfile
import time

# 3p
import yaml

# project
from config import (
    DEFAULT_CHECK_FREQUENCY,
    get_confd_path,
    get_config,
    get_logging_config,
    PathNotFound,
)
from util import yLoader
from utils.jmx import JMX_FETCH_JAR_NAME, JMXFiles
from utils.platform import Platform
from utils.subprocess_output import subprocess
Example no. 34
# -*- coding: utf-8 -*-
#!./embedded/bin/python

# set up logging before importing any other components
from config import initialize_logging  # noqa

initialize_logging('net_collector')

# stdlib
import logging
import os
import sys
import time

# For pickle & PID files, see issue 293
os.umask(027)

# project
from checks.net_collector import NetCollector
from config import (check_yaml, get_confd_path, get_config, get_parsed_args,
                    PathNotFound)
from emitter import http_emitter

# uyun
from uyun.bat.ping import PingService

# Globals
log = logging.getLogger("net_collector")


class NetCollectorProcess(object):
Example no. 35
#!/opt/datadog-agent/embedded/bin/python
'''
    Datadog
    www.datadoghq.com
    ----
    Make sense of your IT Data

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2013 all rights reserved
'''

# set up logging before importing any other components
from config import initialize_logging; initialize_logging('forwarder')
from config import get_logging_config

import os; os.umask(022)

# Standard imports
import logging
import os
import sys
import threading
import zlib
from Queue import Queue, Full
from subprocess import Popen
from hashlib import md5
from datetime import datetime, timedelta
from socket import gaierror, error as socket_error

# Tornado
Example no. 36
#!/opt/datadog-agent/embedded/bin/python
"""
    Datadog
    www.datadoghq.com
    ----
    Cloud-Scale Monitoring. Monitoring that tracks your dynamic infrastructure.

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2016 all rights reserved
"""
# set up logging before importing any other components
from config import get_version, initialize_logging  # noqa

initialize_logging("collector")

# stdlib
import logging
import os
import signal
import sys
import time

# For pickle & PID files, see issue 293
os.umask(022)

# project
from checks.check_status import CollectorStatus
from checks.collector import Collector
from config import get_config, get_parsed_args, get_system_stats, load_check_directory
from daemon import AgentSupervisor, Daemon
Example no. 37
# -*- coding: utf-8 -*-
#!./embedded/bin/python

__author__ = 'fangjc'
__project__ = 'Monitor-Agent'
__date__ = '2016/12/1'

# set up logging before importing any other components
from config import initialize_logging, get_checksd_path  # noqa
initialize_logging('script_caller')

# stdlib
import logging
import os
import sys
import time

# For pickle & PID files, see issue 293
os.umask(027)

# project
from checks.script_monitor import ScriptMonitor
from config import get_config, get_parsed_args
from emitter import http_emitter
from util import get_hostname

# Globals
log = logging.getLogger("script_caller")


class ScriptCallerProcess(object):
Example no. 38
#!/opt/datadog-agent/embedded/bin/python
'''
    Datadog
    www.datadoghq.com
    ----
    Make sense of your IT Data

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2013 all rights reserved
'''

# set up logging before importing any other components
from config import initialize_logging; initialize_logging('forwarder')
from config import get_logging_config

import os; os.umask(022)

# Standard imports
import logging
import os
import sys
import threading
import zlib
from Queue import Queue, Full
from subprocess import Popen
from hashlib import md5
from datetime import datetime, timedelta
from socket import gaierror, error as socket_error

# Tornado
Example no. 39
#!/usr/bin/python
"""
A Python Statsd implementation with some datadog special sauce.
"""

# set up logging before importing any other components
from config import initialize_logging; initialize_logging('dogstatsd')

import os; os.umask(022)

# stdlib
import httplib as http_client
import logging
import optparse
from random import randrange
import re
import select
import signal
import socket
import sys
from time import time
import threading
from urllib import urlencode

# project
from aggregator import MetricsAggregator
from checks.check_status import DogstatsdStatus
from config import get_config
from daemon import Daemon
from util import json, PidFile, get_hostname
Example no. 40
# -*- coding: utf-8 -*-

from config import initialize_logging  # noqa
initialize_logging('custom_script')
import sys
import os
import time
import logging
import traceback

from checks.control_script import DealScripts
from config import (
    check_yaml,
    get_confd_path,
    get_config,
    get_parsed_args,
    PathNotFound,
)

os.umask(027)
log = logging.getLogger('custom_script')
POST_INTERVAL = 120


class CustomScripts(object):
    def __init__(self):
        c = get_config()
        self.request_interval = c.get("request_interval", POST_INTERVAL)

    def do_restart(self):
        cwd = os.environ.get('ANT_AGENT_DIR')
Example no. 41
#!/usr/bin/env python
"""
A Python Statsd implementation with some datadog special sauce.
"""

# set up logging before importing any other components
from config import initialize_logging
initialize_logging('dogstatsd')

import os
os.umask(022)

# stdlib
import httplib as http_client
import logging
import optparse
import re
import select
import signal
import socket
import sys
from time import time
import threading
from urllib import urlencode

# project
from aggregator import MetricsBucketAggregator
from checks.check_status import DogstatsdStatus
from config import get_config
from daemon import Daemon, AgentSupervisor
from util import json, PidFile, get_hostname, plural, get_uuid, chunks
Example no. 42
# set up logging before importing any other components
from config import initialize_logging; initialize_logging('collector')

import win32serviceutil
import win32service
import win32event
import win32evtlogutil
import sys
import logging
import tornado.httpclient
import threading
import modules
import time
import multiprocessing

from optparse import Values
from checks.collector import Collector
from emitter import http_emitter
from win32.common import handle_exe_click
import dogstatsd
from ddagent import Application
from config import (get_config, set_win32_cert_path, get_system_stats,
    load_check_directory, get_win32service_file)
from pup import pup
from jmxfetch import JMXFetch
from util import get_hostname

log = logging.getLogger(__name__)
RESTART_INTERVAL = 24 * 60 * 60 # Defaults to 1 day
Example no. 43
# stdlib
from config import get_base_config, initialize_logging  # noqa
import logging
import time

# project
from api.github import GithubAPI
from api.myfitnesspal import MyFitnessPalAPI
from api.runkeeper import RunKeeperAPI
from api.wunderground import WundergroundAPI

from cache import RedisCache

DEFAULT_PERIOD = 300

initialize_logging()
log = logging.getLogger(__name__)
config = get_base_config()


def main():
    log.info("Starting up main loop execution...")
    conn = RedisCache()
    gh_api = GithubAPI()
    weather_api = WundergroundAPI()
    mfp_api = MyFitnessPalAPI()
    rk_api = RunKeeperAPI()

    # TODO: Write a central process that loads modules and handles errors,
    #       and whether or not to cache
    # TODO: Get from cache, and pass to api class to replace not found data
Example no. 44
#!/opt/datadog-agent/embedded/bin/python
"""
    Datadog
    www.datadoghq.com
    ----
    Cloud-Scale Monitoring. Monitoring that tracks your dynamic infrastructure.

    Licensed under Simplified BSD License (see LICENSE)
    (C) Datadog, Inc. 2010-2016 all rights reserved
"""

# set up logging before importing any other components
from config import initialize_logging  # noqa
initialize_logging('service')

# stdlib
from collections import deque
import logging
import multiprocessing
import os
import psutil
import time

# win32
import win32serviceutil
import servicemanager
import win32service

# project
from config import get_config, get_config_path, get_confd_path
from jmxfetch import JMXFetch
Example no. 45
#!/opt/datadog-agent/embedded/bin/python
'''
    Datadog
    www.datadoghq.com
    ----
    Make sense of your IT Data

    Licensed under Simplified BSD License (see LICENSE)
    (C) Boxed Ice 2010 all rights reserved
    (C) Datadog, Inc. 2010-2013 all rights reserved
'''
# set up logging before importing any other components
from config import initialize_logging  # noqa
initialize_logging('forwarder')

# stdlib
from datetime import timedelta
import logging
import os
from Queue import Full, Queue
from socket import error as socket_error, gaierror
import sys
import threading
import zlib

# For pickle & PID files, see issue 293
os.umask(022)

# 3p
try:
    import pycurl
Example no. 46
import json
from flask import request

from .base_middleware import BaseMiddleWare
from config import initialize_logging
import logging

initialize_logging("request")

log = logging.getLogger("request")


class LogMiddleWare(BaseMiddleWare):
    @staticmethod
    def before_request():
        log_dict = {
            "url": request.url,
            "params": None,
            "method": request.method,
            "headers": dict(request.headers),
        }

        if request.method == "GET":
            log_dict["params"] = dict(request.values)
        else:
            log_dict["params"] = dict(request.form)
        log.info(json.dumps(log_dict))
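All of the examples above follow the same pattern: the entry point calls initialize_logging('<component name>') from the project's config module before importing any other components, and each module then obtains its logger with logging.getLogger('<component name>'). The sketch below is a minimal, self-contained illustration of that ordering; its initialize_logging is a hypothetical stdlib-only stand-in (not the config.initialize_logging used in the examples), and the 'example_service' name is made up for illustration.

# sketch.py -- hypothetical stand-in; not the real config.initialize_logging
import logging
import sys


def initialize_logging(logger_name):
    # Attach a stderr handler and a level to the named logger so that
    # log lines emitted during later imports are not silently dropped.
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(
        logging.Formatter('%(asctime)s | %(name)s | %(levelname)s | %(message)s'))
    logger = logging.getLogger(logger_name)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)


# set up logging before importing any other components
initialize_logging('example_service')

# ... project imports would normally follow here ...

log = logging.getLogger('example_service')


def run():
    log.debug("Starting example service")


if __name__ == '__main__':
    run()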