def test_get_proxy():
    from config import config

    config['proxy'] = None
    proxy_settings = get_proxy()
    assert proxy_settings == {}

    # remove all config options
    del config['proxy']
    del config.defaults['proxy']
    proxy_settings = get_proxy()
    assert proxy_settings == {}

    # restore defaults
    config.defaults['proxy'] = {
        'http': None,
        'https': None,
    }

    config['proxy'] = {
        'http': 'http://foo',
        'https': 'http://bar',
    }
    proxy_settings = get_proxy()
    # compare by value: `is not {}` would always pass, since a fresh dict
    # literal is never the same object
    assert proxy_settings != {}
    assert 'http' in proxy_settings
    assert proxy_settings['http'] == 'http://foo'
    assert 'https' in proxy_settings
    assert proxy_settings['https'] == 'http://bar'
    assert 'no_proxy' in proxy_settings

    no_proxy = proxy_settings['no_proxy'].split(',')
    for host in LOCAL_PROXY_SKIP:
        assert host in no_proxy
def init_dogstatsd(config, forwarder=None):
    api_key = config['api_key']
    recent_point_threshold = config.get('recent_point_threshold', None)
    server_host = config['dogstatsd']['bind_host']
    dd_url = config['dd_url']
    port = config['dogstatsd']['port']
    forward_to_host = config['dogstatsd'].get('forward_host')
    forward_to_port = config['dogstatsd'].get('forward_port')
    non_local_traffic = config['dogstatsd'].get('non_local_traffic')
    so_rcvbuf = config['dogstatsd'].get('so_rcvbuf')
    utf8_decoding = config['dogstatsd'].get('utf8_decoding')

    interval = DOGSTATSD_FLUSH_INTERVAL
    aggregator_interval = DOGSTATSD_AGGREGATOR_BUCKET_SIZE

    hostname = get_hostname()

    # get proxy settings
    proxies = get_proxy()

    # reuse the caller's forwarder when one is passed in (as run() does);
    # otherwise spin up a dedicated one
    if forwarder is None:
        forwarder = Forwarder(
            api_key,
            dd_url,
            proxies=proxies,
        )
        forwarder.start()

    aggregator = MetricsBucketAggregator(
        hostname,
        aggregator_interval,
        recent_point_threshold=recent_point_threshold,
        formatter=get_formatter(config),
        histogram_aggregates=config.get('histogram_aggregates'),
        histogram_percentiles=config.get('histogram_percentiles'),
        utf8_decoding=utf8_decoding,
    )

    # serializer
    serializer = Serializer(
        aggregator,
        forwarder,
    )

    reporter = Reporter(interval, aggregator, serializer, api_key,
                        use_watchdog=False, hostname=hostname)

    # NOTICE: when `non_local_traffic` is passed we need to bind to any interface on the box.
    # The forwarder uses Tornado, which takes care of socket creation (more than one socket can
    # be used at once depending on the network settings), so it's enough to just pass an empty
    # string '' to the library. In Dogstatsd we use a single, full-stack socket, so passing ''
    # as the address doesn't work and we default to '0.0.0.0'. If someone needs to bind
    # Dogstatsd to the IPv6 '::', they need to turn off `non_local_traffic` and use the '::'
    # meta address as `bind_host`.
    if non_local_traffic:
        server_host = '0.0.0.0'

    server = Server(aggregator, server_host, port, forward_to_host=forward_to_host,
                    forward_to_port=forward_to_port, so_rcvbuf=so_rcvbuf)

    return reporter, server, forwarder
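# A minimal standalone wiring sketch for init_dogstatsd() above. The
# reporter lifecycle calls mirror how run() drives them below; treating
# server.start() as a blocking packet loop is an assumption about the
# Server class, not confirmed behavior.
def run_dogstatsd_standalone(config):
    reporter, server, forwarder = init_dogstatsd(config)
    reporter.start()  # periodic flushes from the aggregator through the serializer
    try:
        server.start()  # assumed to block, reading dogstatsd packets
    finally:
        reporter.stop()
        forwarder.stop()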
def test_get_proxy():
    from config import config

    config['proxy'] = None
    # patch the stdlib getproxies (as re-exported by utils.network) so proxies
    # set in the test environment don't leak into the assertions
    with mock.patch('utils.network.getproxies', return_value={}):
        proxy_settings = get_proxy()
        assert proxy_settings == {}

    # remove all config options
    del config['proxy']
    del config.defaults['proxy']
    with mock.patch('utils.network.getproxies', return_value={}):
        proxy_settings = get_proxy()
        assert proxy_settings == {}

    # restore defaults
    config.defaults['proxy'] = {
        'http': None,
        'https': None,
    }

    config['proxy'] = {
        'http': 'http://foo',
        'https': 'http://bar',
    }
    proxy_settings = get_proxy()
    assert proxy_settings != {}
    assert 'http' in proxy_settings
    assert proxy_settings['http'] == 'http://foo'
    assert 'https' in proxy_settings
    assert proxy_settings['https'] == 'http://bar'
    assert 'no_proxy' in proxy_settings

    no_proxy = proxy_settings['no_proxy'].split(',')
    for host in LOCAL_PROXY_SKIP:
        assert host in no_proxy
def test_get_proxy_from_env():
    from config import config

    config.reset('proxy')

    proxy_skip_address = 'http://skipittyskip'
    os.environ['http_proxy'] = 'http://foo'
    os.environ['https_proxy'] = 'http://bar'
    os.environ['no_proxy'] = proxy_skip_address

    proxy_settings = get_proxy()
    assert proxy_settings != {}
    assert 'http' in proxy_settings
    assert proxy_settings['http'] == 'http://foo'
    assert 'https' in proxy_settings
    assert proxy_settings['https'] == 'http://bar'
    assert 'no_proxy' in proxy_settings

    no_proxy = proxy_settings['no_proxy'].split(',')
    for host in LOCAL_PROXY_SKIP + [proxy_skip_address]:
        assert host in no_proxy
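# The test above mutates os.environ without restoring it, which can bleed into
# later tests. A sketch of the same setup isolated with pytest's monkeypatch
# fixture (assumes the suite runs under pytest); the fixture undoes the env
# changes automatically at teardown.
def test_get_proxy_from_env_isolated(monkeypatch):
    from config import config

    config.reset('proxy')
    monkeypatch.setenv('http_proxy', 'http://foo')
    monkeypatch.setenv('https_proxy', 'http://bar')
    monkeypatch.setenv('no_proxy', 'http://skipittyskip')

    proxy_settings = get_proxy()
    assert proxy_settings['http'] == 'http://foo'
    assert proxy_settings['https'] == 'http://bar'
    assert 'http://skipittyskip' in proxy_settings['no_proxy'].split(',')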
def validate_api_key(config):
    try:
        proxy = get_proxy()
        base_uri = get_site_url(config.get('dd_url'), site=config.get('site'))
        r = requests.get("{}/api/v1/validate".format(base_uri.rstrip('/')),
                         params={'api_key': config.get('api_key')},
                         proxies=proxy,
                         timeout=3,
                         verify=(not config.get('skip_ssl_validation', False)))
        # the validate endpoint answers 403 for a bad key; any other non-2xx
        # status is surfaced as a request error below
        if r.status_code == 403:
            return INVALID_API_KEY_MSG
        r.raise_for_status()
    except requests.RequestException:
        return REQUEST_ERROR_MSG
    except Exception:
        log.exception("Unable to validate API Key")
        return OTHER_ERROR_MSG

    return VALID_API_KEY_MSG
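# A short usage sketch for validate_api_key() above. The message constants are
# the ones it returns; printing the message and exiting non-zero on anything
# but VALID_API_KEY_MSG is an illustrative policy, not agent behavior.
import sys

def check_api_key_or_die(config):
    msg = validate_api_key(config)
    print(msg)
    if msg != VALID_API_KEY_MSG:
        sys.exit(1)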
def run(self):
    try:
        hostname = get_hostname()
    except HostnameException as e:
        logging.critical(
            "{} - You can define one in datadog.yaml or in your hosts file"
            .format(e))
        sys.exit(1)

    logging.info("Starting the agent, hostname: %s", hostname)

    # init Forwarder
    logging.info("Starting the Forwarder")
    api_key = config.get('api_key')
    dd_url = config.get('dd_url')
    if not dd_url:
        logging.error('No Datadog URL configured - cannot continue')
        sys.exit(1)
    if not api_key:
        logging.error('No API key configured - cannot continue')
        sys.exit(1)

    # get proxy settings
    proxies = get_proxy()
    logging.debug('Proxy configuration used: %s', proxies)

    # get site url
    forwarder = Forwarder(
        api_key,
        get_site_url(dd_url, site=config.get('site')),
        proxies=proxies,
    )
    forwarder.start()

    # agent aggregator
    aggregator = MetricsAggregator(
        hostname,
        interval=config.get('aggregator_interval'),
        expiry_seconds=(config.get('min_collection_interval') +
                        config.get('aggregator_expiry_seconds')),
        recent_point_threshold=config.get('recent_point_threshold'),
        histogram_aggregates=config.get('histogram_aggregates'),
        histogram_percentiles=config.get('histogram_percentiles'),
    )

    # serializer
    serializer = Serializer(
        aggregator,
        forwarder,
    )

    # instantiate collector
    collector = Collector(config, aggregator)
    collector.load_check_classes()
    collector.instantiate_checks()

    # instantiate AgentRunner
    runner = AgentRunner(collector, serializer, config)

    # instantiate Dogstatsd
    reporter = None
    dsd_server = None
    dsd_enable = config['dogstatsd'].get('enable', False)
    if dsd_enable:
        # share the agent's forwarder instead of starting a second one
        reporter, dsd_server, _ = init_dogstatsd(config, forwarder=forwarder)
        dsd = DogstatsdRunner(dsd_server)

    # instantiate API
    status = {
        'agent': aggregator.stats,
        'forwarder': forwarder.stats,
        'collector': collector.status,
    }
    if dsd_server:
        status['dogstatsd'] = dsd_server.aggregator.stats
    api = APIServer(config, status=status)

    handler = SignalHandler()
    # components
    handler.register('runner', runner)
    handler.register('forwarder', forwarder)
    handler.register('api', api)
    if dsd_enable:
        handler.register('reporter', reporter)
        handler.register('dsd_server', dsd_server)
    # signals
    handler.handle(signal.SIGTERM)
    handler.handle(signal.SIGINT)
    # start signal handler
    handler.start()

    runner.start()
    api.start()

    if dsd_enable:
        reporter.start()
        dsd.start()

        dsd.join()
        logging.info("Dogstatsd server done...")
        try:
            dsd.raise_for_status()
        except Exception as e:
            logging.error("There was a problem with the dogstatsd server: %s", e)
        reporter.stop()

    runner.join()
    logging.info("Collector done...")

    api.join()
    logging.info("API done...")

    handler.stop()
    handler.join()
    logging.info("Signal handler done...")

    logging.info("Thank you for shopping at DataDog! Come back soon!")

    sys.exit(0)
def run(self):
    try:
        hostname = get_hostname()
    except HostnameException as e:
        logging.critical(
            "{} - You can define one in datadog.yaml or in your hosts file"
            .format(e))
        sys.exit(1)

    logging.info("Starting the agent, hostname: %s", hostname)

    # init Forwarder
    logging.info("Starting the Forwarder")
    api_key = config.get('api_key')
    dd_url = config.get('dd_url')
    if not dd_url:
        logging.error('No Datadog URL configured - cannot continue')
        sys.exit(1)
    if not api_key:
        logging.error('No API key configured - cannot continue')
        sys.exit(1)

    # get proxy settings
    proxies = get_proxy()
    logging.debug('Proxy configuration used: %s', proxies)

    forwarder = Forwarder(
        api_key,
        dd_url,
        proxies=proxies,
    )
    forwarder.start()

    # aggregator
    aggregator = MetricsAggregator(
        hostname,
        interval=config.get('aggregator_interval'),
        expiry_seconds=(config.get('min_collection_interval') +
                        config.get('aggregator_expiry_seconds')),
        recent_point_threshold=config.get('recent_point_threshold'),
        histogram_aggregates=config.get('histogram_aggregates'),
        histogram_percentiles=config.get('histogram_percentiles'),
    )

    # serializer
    serializer = Serializer(
        aggregator,
        forwarder,
    )

    # instantiate collector
    collector = Collector(config, aggregator)
    collector.load_check_classes()
    collector.instantiate_checks()

    # instantiate AgentRunner
    runner = AgentRunner(collector, serializer, config)

    # instantiate API
    api = APIServer(config, aggregator.stats)

    handler = SignalHandler()
    # components
    handler.register('runner', runner)
    handler.register('forwarder', forwarder)
    handler.register('api', api)
    # signals
    handler.handle(signal.SIGTERM)
    handler.handle(signal.SIGINT)
    # start signal handler
    handler.start()

    runner.start()
    api.start()

    runner.join()
    logging.info("Agent done...")

    api.join()
    logging.info("API done...")

    handler.stop()
    handler.join()
    logging.info("Signal handler done...")

    logging.info("Thank you for shopping at DataDog! Come back soon!")

    sys.exit(0)
def run(self):
    try:
        hostname = get_hostname()
    except HostnameException as e:
        logging.critical(
            "{} - You can define one in datadog.yaml or in your hosts file"
            .format(e))
        sys.exit(1)

    logging.info("Starting the agent, hostname: %s", hostname)

    # init Forwarder
    logging.info("Starting the Forwarder")
    api_key = config.get('api_key')
    dd_url = config.get('dd_url')
    if not dd_url:
        logging.error('No Datadog URL configured - cannot continue')
        sys.exit(1)
    if not api_key:
        logging.error('No API key configured - cannot continue')
        sys.exit(1)

    # get proxy settings
    proxies = get_proxy()
    logging.debug('Proxy configuration used: %s', proxies)

    forwarder = Forwarder(
        api_key,
        dd_url,
        proxies=proxies,
    )
    forwarder.start()

    # aggregator
    aggregator = MetricsAggregator(
        hostname,
        interval=config.get('aggregator_interval'),
        expiry_seconds=(config.get('min_collection_interval') +
                        config.get('aggregator_expiry_seconds')),
        recent_point_threshold=config.get('recent_point_threshold'),
        histogram_aggregates=config.get('histogram_aggregates'),
        histogram_percentiles=config.get('histogram_percentiles'),
    )

    # serializer
    serializer = Serializer(
        aggregator,
        forwarder,
    )

    # instantiate collector
    collector = Collector(config, aggregator)
    collector.load_check_classes()
    collector.instantiate_checks()

    # instantiate AgentRunner
    runner = AgentRunner(collector, serializer, config)

    # instantiate API
    api = APIServer(8888, aggregator.stats)

    def signal_handler(sig, frame):  # `sig` avoids shadowing the signal module
        logging.info("SIGINT received: stopping the agent")
        logging.info("Stopping the forwarder")
        runner.stop()
        forwarder.stop()
        api.stop()
        logging.info("See you!")
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)

    runner.start()

    api.run()  # blocking tornado in main thread