def test_get_proxy(self):
    agentConfig = {
        "proxy_host": "localhost",
        "proxy_port": 4242,
        "proxy_user": "******",
        "proxy_password": "******"
    }
    proxy_from_config = get_proxy(agentConfig)
    self.assertEqual(proxy_from_config, {
        "host": "localhost",
        "port": 4242,
        "user": "******",
        "password": "******",
    })

    os.environ["HTTPS_PROXY"] = "https://*****:*****@google.com:4444"
    proxy_from_env = get_proxy({})
    self.assertEqual(proxy_from_env, {
        "host": "google.com",
        "port": 4444,
        "user": "******",
        "password": "******"
    })
def __init__(self, name, init_config, agentConfig, instances):
    self.ca_certs = init_config.get('ca_certs', get_ca_certs_path())

    proxy_settings = get_proxy(agentConfig)
    self.proxies = {
        "http": None,
        "https": None,
    }
    if proxy_settings:
        uri = "{host}:{port}".format(
            host=proxy_settings['host'], port=proxy_settings['port'])
        if proxy_settings['user'] and proxy_settings['password']:
            uri = "{user}:{password}@{uri}".format(
                user=proxy_settings['user'],
                password=proxy_settings['password'],
                uri=uri)
        self.proxies['http'] = "http://{uri}".format(uri=uri)
        self.proxies['https'] = "https://{uri}".format(uri=uri)
    else:
        self.proxies['http'] = environ.get('HTTP_PROXY', None)
        self.proxies['https'] = environ.get('HTTPS_PROXY', None)
    self.proxies['no'] = environ.get('no_proxy', environ.get('NO_PROXY', None))

    NetworkCheck.__init__(self, name, init_config, agentConfig, instances)
def get_tags(agentConfig):
    if not agentConfig['collect_instance_metadata']:
        log.info("Instance metadata collection is disabled. Not collecting it.")
        return []

    EC2_tags = []
    socket_to = None
    try:
        socket_to = socket.getdefaulttimeout()
        socket.setdefaulttimeout(EC2.TIMEOUT)
    except Exception:
        pass

    try:
        iam_role = EC2.get_iam_role()
        iam_params = json.loads(
            urllib2.urlopen(EC2.METADATA_URL_BASE + "/iam/security-credentials/" +
                            unicode(iam_role)).read().strip())
        instance_identity = json.loads(
            urllib2.urlopen(EC2.INSTANCE_IDENTITY_URL).read().strip())
        region = instance_identity['region']

        import boto.ec2
        proxy_settings = get_proxy(agentConfig) or {}
        connection = boto.ec2.connect_to_region(
            region,
            aws_access_key_id=iam_params['AccessKeyId'],
            aws_secret_access_key=iam_params['SecretAccessKey'],
            security_token=iam_params['Token'],
            proxy=proxy_settings.get('host'),
            proxy_port=proxy_settings.get('port'),
            proxy_user=proxy_settings.get('user'),
            proxy_pass=proxy_settings.get('password'))

        tag_object = connection.get_all_tags(
            {'resource-id': EC2.metadata['instance-id']})
        EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object]
        if agentConfig.get('collect_security_groups') and EC2.metadata.get('security-groups'):
            EC2_tags.append(u"security-group-name:{0}".format(
                EC2.metadata.get('security-groups')))
    except EC2.NoIAMRole:
        log.warning(
            u"Unable to retrieve AWS EC2 custom tags: "
            u"an IAM role associated with the instance is required")
    except Exception:
        log.exception("Problem retrieving custom EC2 tags")

    try:
        if socket_to is None:
            socket_to = 3
        socket.setdefaulttimeout(socket_to)
    except Exception:
        pass

    return EC2_tags
def validate_api_key(config):
    try:
        proxy = get_proxy(agentConfig=config)
        request_proxy = {}
        if proxy:
            request_proxy = {
                'https': "http://{user}:{password}@{host}:{port}".format(**proxy)
            }
        r = requests.get("%s/api/v1/validate" % config['dd_url'].rstrip('/'),
                         params={'api_key': config.get('api_key')},
                         proxies=request_proxy,
                         timeout=3,
                         verify=(not config.get('skip_ssl_validation', False)))
        if r.status_code == 403:
            return "API Key is invalid"
        r.raise_for_status()
    except requests.RequestException:
        return "Unable to validate API Key. Please try again later"
    except Exception:
        log.exception("Unable to validate API Key")
        return "Unable to validate API Key (unexpected error). Please try again later"
    return "API Key is valid"
def validate_api_key(config):
    try:
        proxy = get_proxy(agentConfig=config)
        request_proxy = {}
        if proxy:
            request_proxy = {
                'https': "http://{user}:{password}@{host}:{port}".format(**proxy)
            }
        r = requests.get("https://app.stackstate.com/api/v1/validate",
                         params={'api_key': config.get('api_key')},
                         proxies=request_proxy,
                         timeout=3)
        if r.status_code == 403:
            return "API Key is invalid"
        r.raise_for_status()
    except requests.RequestException:
        return "Unable to validate API Key. Please try again later"
    except Exception:
        log.exception("Unable to validate API Key")
        return "Unable to validate API Key (unexpected error). Please try again later"
    return "API Key is valid"
def __init__(self, name, init_config, agentConfig, instances=None):
    """
    Initialize a new check.

    :param name: The name of the check
    :param init_config: The config for initializing the check
    :param agentConfig: The global configuration for the agent
    :param instances: A list of configuration objects for each instance.
    """
    from aggregator import MetricsAggregator

    self._enabled_checks.append(name)
    self._enabled_checks = list(set(self._enabled_checks))

    self.name = name
    self.init_config = init_config or {}
    self.agentConfig = agentConfig
    self.in_developer_mode = agentConfig.get("developer_mode") and psutil
    self._internal_profiling_stats = None
    self.default_integration_http_timeout = float(agentConfig.get("default_integration_http_timeout", 9))

    self.hostname = agentConfig.get("checksd_hostname") or get_hostname(agentConfig)
    self.log = logging.getLogger("%s.%s" % (__name__, name))

    self.min_collection_interval = self.init_config.get(
        "min_collection_interval", self.DEFAULT_MIN_COLLECTION_INTERVAL
    )

    self.aggregator = MetricsAggregator(
        self.hostname,
        expiry_seconds=self.min_collection_interval + self.DEFAULT_EXPIRY_SECONDS,
        formatter=agent_formatter,
        recent_point_threshold=agentConfig.get("recent_point_threshold", None),
        histogram_aggregates=agentConfig.get("histogram_aggregates"),
        histogram_percentiles=agentConfig.get("histogram_percentiles"),
    )

    self.events = []
    self.service_checks = []
    self.instances = instances or []
    self.warnings = []
    self.library_versions = None
    self.last_collection_time = defaultdict(int)
    self._instance_metadata = []
    self.svc_metadata = []
    self.historate_dict = {}

    # Set proxy settings
    self.proxy_settings = get_proxy(self.agentConfig)
    self._use_proxy = False if init_config is None else init_config.get("use_agent_proxy", True)
    self.proxies = {"http": None, "https": None}
    if self.proxy_settings and self._use_proxy:
        uri = "{host}:{port}".format(host=self.proxy_settings["host"], port=self.proxy_settings["port"])
        if self.proxy_settings["user"] and self.proxy_settings["password"]:
            uri = "{user}:{password}@{uri}".format(
                user=self.proxy_settings["user"], password=self.proxy_settings["password"], uri=uri
            )
        self.proxies["http"] = "http://{uri}".format(uri=uri)
        self.proxies["https"] = "https://{uri}".format(uri=uri)
def find_item(search_kw, target):
    proxy = get_proxy()
    logger.info(proxy)
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_argument('--proxy-server={h}:{p}'.format(h=proxy[0], p=proxy[1]))
    # Disable image loading to speed up page loads
    chrome_options.add_experimental_option('prefs', {'profile.managed_default_content_settings.images': 2})
    driver = webdriver.Chrome(chrome_options=chrome_options)
    driver.set_page_load_timeout(DRIVER_TIMEOUT)
    try:
        driver.get(AMAZON_URL)
        e = driver.find_element_by_name('field-keywords')
        e.click()
        e.click()
        e.clear()
        e.send_keys(search_kw, Keys.RETURN)

        found = False
        page_num = 0
        while not found:
            page_num += 1
            if page_num >= MAX_PAGE_NUM:
                logger.error('Item still not found after {p} pages'.format(p=page_num))
                return False  # the finally block closes the driver
            try:
                e = driver.find_element_by_link_text(target)
            except NoSuchElementException:
                pass
            else:
                found = True
            if found:
                logger.info('Item found on page {page_num}, opening its page'.format(page_num=page_num))
                driver.get(e.get_property('href'))
            else:
                logger.info('Item not found on page {page_num}, moving to the next page'.format(page_num=page_num))
                next_url = driver.find_element_by_id('pagnNextLink').get_property('href')
                driver.get(next_url)
                driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
    except NoSuchElementException:
        logger.error('Expected element not found')
        ret = False
    except TimeoutException:
        logger.error('Page load timed out')
        ret = False
    except Exception as e:
        logger.error(e)
        ret = False
    else:
        logger.info('Waiting {s} seconds before finishing the task'.format(s=DEFAULT_SLEEP_TIME_IN_PAGE))
        sleep(DEFAULT_SLEEP_TIME_IN_PAGE)
        ret = True
    finally:
        driver.close()
    return ret
def get_tags(agentConfig): """ Retrieve AWS EC2 tags. """ if not agentConfig['collect_instance_metadata']: log.info("Instance metadata collection is disabled. Not collecting it.") return [] EC2_tags = [] socket_to = None try: socket_to = socket.getdefaulttimeout() socket.setdefaulttimeout(EC2.TIMEOUT) except Exception: pass try: iam_role = EC2.get_iam_role() iam_params = json.loads(urllib2.urlopen(EC2.METADATA_URL_BASE + "/iam/security-credentials/" + unicode(iam_role)).read().strip()) instance_identity = json.loads(urllib2.urlopen(EC2.INSTANCE_IDENTITY_URL).read().strip()) region = instance_identity['region'] import boto.ec2 proxy_settings = get_proxy(agentConfig) or {} connection = boto.ec2.connect_to_region( region, aws_access_key_id=iam_params['AccessKeyId'], aws_secret_access_key=iam_params['SecretAccessKey'], security_token=iam_params['Token'], proxy=proxy_settings.get('host'), proxy_port=proxy_settings.get('port'), proxy_user=proxy_settings.get('user'), proxy_pass=proxy_settings.get('password') ) tag_object = connection.get_all_tags({'resource-id': EC2.metadata['instance-id']}) EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object] if agentConfig.get('collect_security_groups') and EC2.metadata.get('security-groups'): EC2_tags.append(u"security-group-name:{0}".format(EC2.metadata.get('security-groups'))) except EC2.NoIAMRole: log.warning( u"Unable to retrieve AWS EC2 custom tags: " u"an IAM role associated with the instance is required" ) except Exception: log.exception("Problem retrieving custom EC2 tags") try: if socket_to is None: socket_to = 3 socket.setdefaulttimeout(socket_to) except Exception: pass return EC2_tags
def get_tags(agentConfig): """ Retrieve AWS EC2 tags. """ if not agentConfig['collect_instance_metadata']: log.info( "Instance metadata collection is disabled. Not collecting it.") return [] EC2_tags = [] try: iam_role = EC2.get_iam_role() iam_url = EC2.METADATA_URL_BASE + "/iam/security-credentials/" + unicode( iam_role) r = requests.get(iam_url, timeout=EC2.TIMEOUT) r.raise_for_status() # Fail on 404 etc iam_params = r.json() r = requests.get(EC2.INSTANCE_IDENTITY_URL, timeout=EC2.TIMEOUT) r.raise_for_status() instance_identity = r.json() region = instance_identity['region'] import boto.ec2 proxy_settings = get_proxy(agentConfig) or {} connection = boto.ec2.connect_to_region( region, aws_access_key_id=iam_params['AccessKeyId'], aws_secret_access_key=iam_params['SecretAccessKey'], security_token=iam_params['Token'], proxy=proxy_settings.get('host'), proxy_port=proxy_settings.get('port'), proxy_user=proxy_settings.get('user'), proxy_pass=proxy_settings.get('password')) tag_object = connection.get_all_tags( {'resource-id': EC2.metadata['instance-id']}) EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object] if agentConfig.get('collect_security_groups') and EC2.metadata.get( 'security-groups'): EC2_tags.append(u"security-group-name:{0}".format( EC2.metadata.get('security-groups'))) except EC2.NoIAMRole: log.warning( u"Unable to retrieve AWS EC2 custom tags: " u"an IAM role associated with the instance is required") except Exception: log.exception("Problem retrieving custom EC2 tags") return EC2_tags
def __init__(self, name, init_config, agentConfig, instances):
    self.ca_certs = init_config.get("ca_certs", get_ca_certs_path())

    proxy_settings = get_proxy(agentConfig)
    if not proxy_settings:
        self.proxies = None
    else:
        uri = "{host}:{port}".format(host=proxy_settings["host"], port=proxy_settings["port"])
        if proxy_settings["user"] and proxy_settings["password"]:
            uri = "{user}:{password}@{uri}".format(
                user=proxy_settings["user"], password=proxy_settings["password"], uri=uri
            )
        self.proxies = {"http": "http://{uri}".format(uri=uri), "https": "https://{uri}".format(uri=uri)}

    NetworkCheck.__init__(self, name, init_config, agentConfig, instances)
def get_tags(agentConfig): """ Retrieve AWS EC2 tags. """ if not agentConfig["collect_instance_metadata"]: log.info("Instance metadata collection is disabled. Not collecting it.") return [] EC2_tags = [] try: iam_role = EC2.get_iam_role() iam_url = EC2.METADATA_URL_BASE + "/iam/security-credentials/" + unicode(iam_role) r = requests.get(iam_url, timeout=EC2.TIMEOUT) r.raise_for_status() # Fail on 404 etc iam_params = r.json() r = requests.get(EC2.INSTANCE_IDENTITY_URL, timeout=EC2.TIMEOUT) r.raise_for_status() instance_identity = r.json() region = instance_identity["region"] import boto.ec2 proxy_settings = get_proxy(agentConfig) or {} connection = boto.ec2.connect_to_region( region, aws_access_key_id=iam_params["AccessKeyId"], aws_secret_access_key=iam_params["SecretAccessKey"], security_token=iam_params["Token"], proxy=proxy_settings.get("host"), proxy_port=proxy_settings.get("port"), proxy_user=proxy_settings.get("user"), proxy_pass=proxy_settings.get("password"), ) tag_object = connection.get_all_tags({"resource-id": EC2.metadata["instance-id"]}) EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object] if agentConfig.get("collect_security_groups") and EC2.metadata.get("security-groups"): EC2_tags.append(u"security-group-name:{0}".format(EC2.metadata.get("security-groups"))) except EC2.NoIAMRole: log.warning( u"Unable to retrieve AWS EC2 custom tags: " u"an IAM role associated with the instance is required" ) except Exception: log.exception("Problem retrieving custom EC2 tags") return EC2_tags
def __init__(self, name, init_config, agentConfig, instances):
    self.ca_certs = init_config.get('ca_certs', get_ca_certs_path())

    proxy_settings = get_proxy(agentConfig)
    if not proxy_settings:
        self.proxies = None
    else:
        uri = "{host}:{port}".format(host=proxy_settings['host'], port=proxy_settings['port'])
        if proxy_settings['user'] and proxy_settings['password']:
            uri = "{user}:{password}@{uri}".format(
                user=proxy_settings['user'],
                password=proxy_settings['password'],
                uri=uri)
        self.proxies = {
            'http': "http://{uri}".format(uri=uri),
            'https': "https://{uri}".format(uri=uri)
        }

    NetworkCheck.__init__(self, name, init_config, agentConfig, instances)
def get_html(self, url):
    try:
        proxy = get_proxy()['proxy']
        proxies = {
            'http': 'http://' + proxy
        }
        logger.info(f'Using proxy: {proxy}')
        retry_count = 5
        while retry_count > 0:
            try:
                response = requests.get(url=url, headers=self.headers, proxies=proxies)
                response.encoding = "utf-8"
                return response.text
            except requests.RequestException:
                retry_count -= 1
        # All retries failed: discard this proxy
        delete_proxy(proxy)
        logger.info(f'Deleted proxy: {proxy}')
        return ''
    except Exception:
        logger.warning('No proxy available')
        return ''
def validate_api_key(config):
    try:
        proxy = get_proxy(agentConfig=config)
        request_proxy = {}
        if proxy:
            request_proxy = {'https': "http://{user}:{password}@{host}:{port}".format(**proxy)}
        r = requests.get("%s/api/v1/validate" % config['dd_url'].rstrip('/'),
                         params={'api_key': config.get('api_key')},
                         proxies=request_proxy,
                         timeout=3)
        if r.status_code == 403:
            return "API Key is invalid"
        r.raise_for_status()
    except requests.RequestException:
        return "Unable to validate API Key. Please try again later"
    except Exception:
        log.exception("Unable to validate API Key")
        return "Unable to validate API Key (unexpected error). Please try again later"
    return "API Key is valid"
def validate_api_key(config):
    try:
        proxy = get_proxy(agentConfig=config)
        request_proxy = {}
        if proxy:
            # keys might be set to None
            user = proxy.get("user", "") or ""
            password = proxy.get("password", "") or ""
            if user:
                if password:
                    user += ":" + password
                user += "@"
            host = proxy.get("host", "") or ""
            port = proxy.get("port", "") or ""
            if host and port:
                host += ":" + str(proxy["port"])
            request_proxy = {'https': "http://%s%s" % (user, host)}
        r = requests.get("%s/api/v1/validate" % config['dd_url'].rstrip('/'),
                         params={'api_key': config.get('api_key')},
                         proxies=request_proxy,
                         timeout=3,
                         verify=(not config.get('skip_ssl_validation', False)))
        if r.status_code == 403:
            return "[ERROR] API Key is invalid"
        r.raise_for_status()
    except requests.RequestException:
        return "[ERROR] Unable to validate API Key. Please try again later"
    except Exception:
        log.exception("Unable to validate API Key")
        return "[ERROR] Unable to validate API Key (unexpected error). Please try again later"
    return "API Key is valid"
def get_config(parse_args=True, cfg_path=None, options=None): if parse_args: options, _ = get_parsed_args() # General config agentConfig = { 'check_freq': DEFAULT_CHECK_FREQUENCY, 'monitorstatsd_port': 8125, 'monitorstatsd_target': 'http://localhost:17123', 'graphite_listen_port': None, 'hostname': None, 'listen_port': None, 'tags': None, 'use_ec2_instance_id': False, # DEPRECATED 'version': get_version(), 'watchmonitor': True, 'additional_checksd': '/etc/monitor-agent/checks.d/', 'bind_host': get_default_bind_host(), 'statsd_metric_namespace': None, 'utf8_decoding': False } if Platform.is_mac(): agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d' # Config handling try: # Find the right config file path = os.path.realpath(__file__) path = os.path.dirname(path) config_path = get_config_path(cfg_path, os_name=get_os()) config = ConfigParser.ConfigParser() config.readfp(skip_leading_wsp(open(config_path))) # bulk import for option in config.options('Main'): agentConfig[option] = config.get('Main', option) # Store developer mode setting in the agentConfig if config.has_option('Main', 'developer_mode'): agentConfig['developer_mode'] = _is_affirmative( config.get('Main', 'developer_mode')) # Allow an override with the --profile option if options is not None and options.profile: agentConfig['developer_mode'] = True # Get check frequency if config.has_option("Main", "frequency"): agentConfig['check_freq'] = config.get("Main", "frequency") # # Core config # # FIXME unnecessarily complex agentConfig['use_forwarder'] = False if options is not None and options.use_forwarder: listen_port = 17123 if config.has_option('Main', 'listen_port'): listen_port = int(config.get('Main', 'listen_port')) agentConfig['m_url'] = "http://" + agentConfig[ 'bind_host'] + ":" + str(listen_port) agentConfig['use_forwarder'] = True elif options is not None and not options.disable_dd and options.m_url: agentConfig['m_url'] = options.m_url else: agentConfig['m_url'] = config.get('Main', 'm_url') if agentConfig['m_url'].endswith('/'): agentConfig['m_url'] = agentConfig['m_url'][:-1] # Extra checks.d path # the linux directory is set by default if config.has_option('Main', 'additional_checksd'): agentConfig['additional_checksd'] = config.get( 'Main', 'additional_checksd') elif get_os() == 'windows': # default windows location common_path = _windows_commondata_path() agentConfig['additional_checksd'] = os.path.join( common_path, 'Datamonitor', 'checks.d') if config.has_option('Main', 'use_monitorstatsd'): agentConfig['use_monitorstatsd'] = config.get( 'Main', 'use_monitorstatsd').lower() in ("yes", "true") else: agentConfig['use_monitorstatsd'] = True # Concerns only Windows if config.has_option('Main', 'use_web_info_page'): agentConfig['use_web_info_page'] = config.get( 'Main', 'use_web_info_page').lower() in ("yes", "true") else: agentConfig['use_web_info_page'] = True # Which API key to use agentConfig['api_key'] = config.get('Main', 'api_key') # local traffic only? 
Default to no agentConfig['non_local_traffic'] = False if config.has_option('Main', 'non_local_traffic'): agentConfig['non_local_traffic'] = config.get( 'Main', 'non_local_traffic').lower() in ("yes", "true") # DEPRECATED if config.has_option('Main', 'use_ec2_instance_id'): use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id') # translate yes into True, the rest into False agentConfig['use_ec2_instance_id'] = ( use_ec2_instance_id.lower() == 'yes') if config.has_option('Main', 'check_freq'): try: agentConfig['check_freq'] = int( config.get('Main', 'check_freq')) except Exception: pass # Custom histogram aggregate/percentile metrics if config.has_option('Main', 'histogram_aggregates'): agentConfig['histogram_aggregates'] = get_histogram_aggregates( config.get('Main', 'histogram_aggregates')) if config.has_option('Main', 'histogram_percentiles'): agentConfig['histogram_percentiles'] = get_histogram_percentiles( config.get('Main', 'histogram_percentiles')) # Disable Watchmonitor (optionally) if config.has_option('Main', 'watchmonitor'): if config.get('Main', 'watchmonitor').lower() in ('no', 'false'): agentConfig['watchmonitor'] = False # Optional graphite listener if config.has_option('Main', 'graphite_listen_port'): agentConfig['graphite_listen_port'] = \ int(config.get('Main', 'graphite_listen_port')) else: agentConfig['graphite_listen_port'] = None # monitorstatsd config monitorstatsd_defaults = { 'monitorstatsd_port': 8125, 'monitorstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123', } for key, value in monitorstatsd_defaults.iteritems(): if config.has_option('Main', key): agentConfig[key] = config.get('Main', key) else: agentConfig[key] = value # Create app:xxx tags based on monitored apps agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \ _is_affirmative(config.get('Main', 'create_dd_check_tags')) # Forwarding to external statsd server if config.has_option('Main', 'statsd_forward_host'): agentConfig['statsd_forward_host'] = config.get( 'Main', 'statsd_forward_host') if config.has_option('Main', 'statsd_forward_port'): agentConfig['statsd_forward_port'] = int( config.get('Main', 'statsd_forward_port')) # optionally send monitorstatsd data directly to the agent. if config.has_option('Main', 'monitorstatsd_use_murl'): if _is_affirmative(config.get('Main', 'monitorstatsd_use_murl')): agentConfig['monitorstatsd_target'] = agentConfig['m_url'] # Optional config # FIXME not the prettiest code ever... 
if config.has_option('Main', 'use_mount'): agentConfig['use_mount'] = _is_affirmative( config.get('Main', 'use_mount')) if options is not None and options.autorestart: agentConfig['autorestart'] = True elif config.has_option('Main', 'autorestart'): agentConfig['autorestart'] = _is_affirmative( config.get('Main', 'autorestart')) if config.has_option('Main', 'check_timings'): agentConfig['check_timings'] = _is_affirmative( config.get('Main', 'check_timings')) if config.has_option('Main', 'exclude_process_args'): agentConfig['exclude_process_args'] = _is_affirmative( config.get('Main', 'exclude_process_args')) try: filter_device_re = config.get('Main', 'device_blacklist_re') agentConfig['device_blacklist_re'] = re.compile(filter_device_re) except ConfigParser.NoOptionError: pass if config.has_option('datamonitor', 'ddforwarder_log'): agentConfig['has_datamonitor'] = True # monitorstream config if config.has_option("Main", "monitorstream_log"): # Older version, single log support log_path = config.get("Main", "monitorstream_log") if config.has_option("Main", "monitorstream_line_parser"): agentConfig["monitorstreams"] = ':'.join([ log_path, config.get("Main", "monitorstream_line_parser") ]) else: agentConfig["monitorstreams"] = log_path elif config.has_option("Main", "monitorstreams"): agentConfig["monitorstreams"] = config.get("Main", "monitorstreams") if config.has_option("Main", "nagios_perf_cfg"): agentConfig["nagios_perf_cfg"] = config.get( "Main", "nagios_perf_cfg") if config.has_option("Main", "use_curl_http_client"): agentConfig["use_curl_http_client"] = _is_affirmative( config.get("Main", "use_curl_http_client")) else: # Default to False as there are some issues with the curl client and ELB agentConfig["use_curl_http_client"] = False if config.has_section('WMI'): agentConfig['WMI'] = {} for key, value in config.items('WMI'): agentConfig['WMI'][key] = value if (config.has_option("Main", "limit_memory_consumption") and config.get("Main", "limit_memory_consumption") is not None): agentConfig["limit_memory_consumption"] = int( config.get("Main", "limit_memory_consumption")) else: agentConfig["limit_memory_consumption"] = None if config.has_option("Main", "skip_ssl_validation"): agentConfig["skip_ssl_validation"] = _is_affirmative( config.get("Main", "skip_ssl_validation")) agentConfig["collect_instance_metadata"] = True if config.has_option("Main", "collect_instance_metadata"): agentConfig["collect_instance_metadata"] = _is_affirmative( config.get("Main", "collect_instance_metadata")) agentConfig["proxy_forbid_method_switch"] = False if config.has_option("Main", "proxy_forbid_method_switch"): agentConfig["proxy_forbid_method_switch"] = _is_affirmative( config.get("Main", "proxy_forbid_method_switch")) agentConfig["collect_ec2_tags"] = False if config.has_option("Main", "collect_ec2_tags"): agentConfig["collect_ec2_tags"] = _is_affirmative( config.get("Main", "collect_ec2_tags")) agentConfig["utf8_decoding"] = False if config.has_option("Main", "utf8_decoding"): agentConfig["utf8_decoding"] = _is_affirmative( config.get("Main", "utf8_decoding")) agentConfig["gce_updated_hostname"] = False if config.has_option("Main", "gce_updated_hostname"): agentConfig["gce_updated_hostname"] = _is_affirmative( config.get("Main", "gce_updated_hostname")) except ConfigParser.NoSectionError as e: sys.stderr.write('Config file not found or incorrectly formatted.\n') sys.exit(2) except ConfigParser.ParsingError as e: sys.stderr.write('Config file not found or incorrectly formatted.\n') sys.exit(2) except 
ConfigParser.NoOptionError as e: sys.stderr.write( 'There are some items missing from your config file, but nothing fatal [%s]' % e) # Storing proxy settings in the agentConfig agentConfig['proxy_settings'] = get_proxy(agentConfig) if agentConfig.get('ca_certs', None) is None: agentConfig['ssl_certificate'] = get_ssl_certificate( get_os(), 'datamonitor-cert.pem') else: agentConfig['ssl_certificate'] = agentConfig['ca_certs'] # self-updater relative conf agentConfig['interval'] = config.get('Main', 'updater_interval') return agentConfig
def __init__(self, name, init_config, agentConfig, instances=None): """ Initialize a new check. :param name: The name of the check :param init_config: The config for initializing the check :param agentConfig: The global configuration for the agent :param instances: A list of configuration objects for each instance. """ from aggregator import MetricsAggregator self._enabled_checks.append(name) self._enabled_checks = list(set(self._enabled_checks)) self.name = name self.init_config = init_config or {} self.agentConfig = agentConfig self.in_developer_mode = agentConfig.get('developer_mode') and psutil self._internal_profiling_stats = None self.default_integration_http_timeout = float( agentConfig.get('default_integration_http_timeout', 9)) self.hostname = agentConfig.get('checksd_hostname') or get_hostname( agentConfig) self.log = logging.getLogger('%s.%s' % (__name__, name)) self.min_collection_interval = self.init_config.get( 'min_collection_interval', self.DEFAULT_MIN_COLLECTION_INTERVAL) self.aggregator = MetricsAggregator( self.hostname, expiry_seconds=self.min_collection_interval + self.DEFAULT_EXPIRY_SECONDS, formatter=agent_formatter, recent_point_threshold=agentConfig.get('recent_point_threshold', None), histogram_aggregates=agentConfig.get('histogram_aggregates'), histogram_percentiles=agentConfig.get('histogram_percentiles')) self.events = [] self.service_checks = [] self.instances = instances or [] self.warnings = [] self.library_versions = None self.last_collection_time = defaultdict(int) self._instance_metadata = [] self.svc_metadata = [] self.historate_dict = {} # Set proxy settings self.proxy_settings = get_proxy(self.agentConfig) self._use_proxy = False if init_config is None else init_config.get( "use_agent_proxy", True) self.proxies = { "http": None, "https": None, } if self.proxy_settings and self._use_proxy: uri = "{host}:{port}".format(host=self.proxy_settings['host'], port=self.proxy_settings['port']) if self.proxy_settings['user'] and self.proxy_settings['password']: uri = "{user}:{password}@{uri}".format( user=self.proxy_settings['user'], password=self.proxy_settings['password'], uri=uri) self.proxies['http'] = "http://{uri}".format(uri=uri) self.proxies['https'] = "https://{uri}".format(uri=uri)
if config.has_option("Main", "utf8_decoding"): agentConfig["utf8_decoding"] = _is_affirmative(config.get("Main", "utf8_decoding")) except ConfigParser.NoSectionError, e: sys.stderr.write('Config file not found or incorrectly formatted.\n') sys.exit(2) except ConfigParser.ParsingError, e: sys.stderr.write('Config file not found or incorrectly formatted.\n') sys.exit(2) except ConfigParser.NoOptionError, e: sys.stderr.write('There are some items missing from your config file, but nothing fatal [%s]' % e) # Storing proxy settings in the agentConfig agentConfig['proxy_settings'] = get_proxy(agentConfig) if agentConfig.get('ca_certs', None) is None: agentConfig['ssl_certificate'] = get_ssl_certificate(get_os(), 'datadog-cert.pem') else: agentConfig['ssl_certificate'] = agentConfig['ca_certs'] return agentConfig def get_system_stats(): systemStats = { 'machine': platform.machine(), 'platform': sys.platform, 'processor': platform.processor(), 'pythonV': platform.python_version(), }
if config.has_option("Main", "gce_updated_hostname"): agentConfig["gce_updated_hostname"] = _is_affirmative(config.get("Main", "gce_updated_hostname")) except ConfigParser.NoSectionError, e: sys.stderr.write("Config file not found or incorrectly formatted.\n") sys.exit(2) except ConfigParser.ParsingError, e: sys.stderr.write("Config file not found or incorrectly formatted.\n") sys.exit(2) except ConfigParser.NoOptionError, e: sys.stderr.write("There are some items missing from your config file, but nothing fatal [%s]" % e) # Storing proxy settings in the agentConfig agentConfig["proxy_settings"] = get_proxy(agentConfig) if agentConfig.get("ca_certs", None) is None: agentConfig["ssl_certificate"] = get_ssl_certificate(get_os(), "datadog-cert.pem") else: agentConfig["ssl_certificate"] = agentConfig["ca_certs"] return agentConfig def get_system_stats(): systemStats = { "machine": platform.machine(), "platform": sys.platform, "processor": platform.processor(), "pythonV": platform.python_version(), }
def __init__(self, name, init_config, agentConfig, instances=None): """ Initialize a new check. :param name: The name of the check :param init_config: The config for initializing the check :param agentConfig: The global configuration for the agent :param instances: A list of configuration objects for each instance. """ from aggregator import MetricsAggregator self._enabled_checks.append(name) self._enabled_checks = list(set(self._enabled_checks)) self.name = name self.init_config = init_config or {} self.agentConfig = agentConfig self.in_developer_mode = agentConfig.get('developer_mode') and psutil self._internal_profiling_stats = None self.default_integration_http_timeout = float(agentConfig.get('default_integration_http_timeout', 9)) self.hostname = agentConfig.get('checksd_hostname') or get_hostname(agentConfig) self.log = logging.getLogger('%s.%s' % (__name__, name)) self.min_collection_interval = self.init_config.get('min_collection_interval', self.DEFAULT_MIN_COLLECTION_INTERVAL) self.aggregator = MetricsAggregator( self.hostname, expiry_seconds = self.min_collection_interval + self.DEFAULT_EXPIRY_SECONDS, formatter=agent_formatter, recent_point_threshold=agentConfig.get('recent_point_threshold', None), histogram_aggregates=agentConfig.get('histogram_aggregates'), histogram_percentiles=agentConfig.get('histogram_percentiles') ) if Platform.is_linux() and psutil is not None: procfs_path = self.agentConfig.get('procfs_path', '/proc').rstrip('/') psutil.PROCFS_PATH = procfs_path self.events = [] self.service_checks = [] self.instances = instances or [] self.warnings = [] self.library_versions = None self.last_collection_time = defaultdict(int) self._instance_metadata = [] self.svc_metadata = [] self.historate_dict = {} # Set proxy settings self.proxy_settings = get_proxy(self.agentConfig) self._use_proxy = False if init_config is None else init_config.get("use_agent_proxy", True) self.proxies = { "http": None, "https": None, } if self.proxy_settings and self._use_proxy: uri = "{host}:{port}".format( host=self.proxy_settings['host'], port=self.proxy_settings['port']) if self.proxy_settings['user'] and self.proxy_settings['password']: uri = "{user}:{password}@{uri}".format( user=self.proxy_settings['user'], password=self.proxy_settings['password'], uri=uri) self.proxies['http'] = "http://{uri}".format(uri=uri) self.proxies['https'] = "https://{uri}".format(uri=uri)
def get_config(parse_args=True, cfg_path=None, options=None): if parse_args: options, _ = get_parsed_args() agentConfig = { 'check_freq': DEFAULT_CHECK_FREQUENCY, 'monitorstatsd_port': 8125, 'monitorstatsd_target': 'http://localhost:17123', 'graphite_listen_port': None, 'hostname': None, 'listen_port': None, 'tags': None, 'use_ec2_instance_id': False, 'version': get_version(), 'watchmonitor': True, 'additional_checksd': '/etc/monitor-agent/checks.d/', 'bind_host': get_default_bind_host(), 'statsd_metric_namespace': None, 'utf8_decoding': False } if Platform.is_mac(): agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d' try: path = os.path.realpath(__file__) path = os.path.dirname(path) config_path = get_config_path(cfg_path, os_name=get_os()) config = ConfigParser.ConfigParser() config.readfp(skip_leading_wsp(open(config_path))) for option in config.options('Main'): agentConfig[option] = config.get('Main', option) if config.has_option('Main', 'developer_mode'): agentConfig['developer_mode'] = _is_affirmative( config.get('Main', 'developer_mode')) if options is not None and options.profile: agentConfig['developer_mode'] = True if config.has_option("Main", "frequency"): agentConfig['check_freq'] = config.get("Main", "frequency") agentConfig['use_forwarder'] = False if options is not None and options.use_forwarder: listen_port = 17123 if config.has_option('Main', 'listen_port'): listen_port = int(config.get('Main', 'listen_port')) agentConfig['m_url'] = "http://" + agentConfig[ 'bind_host'] + ":" + str(listen_port) agentConfig['use_forwarder'] = True elif options is not None and not options.disable_dd and options.m_url: agentConfig['m_url'] = options.m_url else: agentConfig['m_url'] = config.get('Main', 'm_url') if agentConfig['m_url'].endswith('/'): agentConfig['m_url'] = agentConfig['m_url'][:-1] if config.has_option('Main', 'additional_checksd'): agentConfig['additional_checksd'] = config.get( 'Main', 'additional_checksd') elif get_os() == 'windows': common_path = _windows_commondata_path() agentConfig['additional_checksd'] = os.path.join( common_path, 'Datamonitor', 'checks.d') if config.has_option('Main', 'use_monitorstatsd'): agentConfig['use_monitorstatsd'] = config.get( 'Main', 'use_monitorstatsd').lower() in ("yes", "true") else: agentConfig['use_monitorstatsd'] = True if config.has_option('Main', 'use_web_info_page'): agentConfig['use_web_info_page'] = config.get( 'Main', 'use_web_info_page').lower() in ("yes", "true") else: agentConfig['use_web_info_page'] = True agentConfig['api_key'] = config.get('Main', 'api_key') agentConfig['non_local_traffic'] = False if config.has_option('Main', 'non_local_traffic'): agentConfig['non_local_traffic'] = config.get( 'Main', 'non_local_traffic').lower() in ("yes", "true") if config.has_option('Main', 'use_ec2_instance_id'): use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id') agentConfig['use_ec2_instance_id'] = ( use_ec2_instance_id.lower() == 'yes') if config.has_option('Main', 'check_freq'): try: agentConfig['check_freq'] = int( config.get('Main', 'check_freq')) except Exception: pass if config.has_option('Main', 'histogram_aggregates'): agentConfig['histogram_aggregates'] = get_histogram_aggregates( config.get('Main', 'histogram_aggregates')) if config.has_option('Main', 'histogram_percentiles'): agentConfig['histogram_percentiles'] = get_histogram_percentiles( config.get('Main', 'histogram_percentiles')) if config.has_option('Main', 'watchmonitor'): if config.get('Main', 'watchmonitor').lower() in ('no', 
'false'): agentConfig['watchmonitor'] = False if config.has_option('Main', 'graphite_listen_port'): agentConfig['graphite_listen_port'] = \ int(config.get('Main', 'graphite_listen_port')) else: agentConfig['graphite_listen_port'] = None monitorstatsd_defaults = { 'monitorstatsd_port': 8125, 'monitorstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123', } for key, value in monitorstatsd_defaults.iteritems(): if config.has_option('Main', key): agentConfig[key] = config.get('Main', key) else: agentConfig[key] = value agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \ _is_affirmative(config.get('Main', 'create_dd_check_tags')) if config.has_option('Main', 'statsd_forward_host'): agentConfig['statsd_forward_host'] = config.get( 'Main', 'statsd_forward_host') if config.has_option('Main', 'statsd_forward_port'): agentConfig['statsd_forward_port'] = int( config.get('Main', 'statsd_forward_port')) if config.has_option('Main', 'monitorstatsd_use_murl'): if _is_affirmative(config.get('Main', 'monitorstatsd_use_murl')): agentConfig['monitorstatsd_target'] = agentConfig['m_url'] if config.has_option('Main', 'use_mount'): agentConfig['use_mount'] = _is_affirmative( config.get('Main', 'use_mount')) if options is not None and options.autorestart: agentConfig['autorestart'] = True elif config.has_option('Main', 'autorestart'): agentConfig['autorestart'] = _is_affirmative( config.get('Main', 'autorestart')) if config.has_option('Main', 'check_timings'): agentConfig['check_timings'] = _is_affirmative( config.get('Main', 'check_timings')) if config.has_option('Main', 'exclude_process_args'): agentConfig['exclude_process_args'] = _is_affirmative( config.get('Main', 'exclude_process_args')) try: filter_device_re = config.get('Main', 'device_blacklist_re') agentConfig['device_blacklist_re'] = re.compile(filter_device_re) except ConfigParser.NoOptionError: pass if config.has_option('datamonitor', 'ddforwarder_log'): agentConfig['has_datamonitor'] = True if config.has_option("Main", "monitorstream_log"): log_path = config.get("Main", "monitorstream_log") if config.has_option("Main", "monitorstream_line_parser"): agentConfig["monitorstreams"] = ':'.join([ log_path, config.get("Main", "monitorstream_line_parser") ]) else: agentConfig["monitorstreams"] = log_path elif config.has_option("Main", "monitorstreams"): agentConfig["monitorstreams"] = config.get("Main", "monitorstreams") if config.has_option("Main", "nagios_perf_cfg"): agentConfig["nagios_perf_cfg"] = config.get( "Main", "nagios_perf_cfg") if config.has_option("Main", "use_curl_http_client"): agentConfig["use_curl_http_client"] = _is_affirmative( config.get("Main", "use_curl_http_client")) else: agentConfig["use_curl_http_client"] = False if config.has_section('WMI'): agentConfig['WMI'] = {} for key, value in config.items('WMI'): agentConfig['WMI'][key] = value if (config.has_option("Main", "limit_memory_consumption") and config.get("Main", "limit_memory_consumption") is not None): agentConfig["limit_memory_consumption"] = int( config.get("Main", "limit_memory_consumption")) else: agentConfig["limit_memory_consumption"] = None if config.has_option("Main", "skip_ssl_validation"): agentConfig["skip_ssl_validation"] = _is_affirmative( config.get("Main", "skip_ssl_validation")) agentConfig["collect_instance_metadata"] = True if config.has_option("Main", "collect_instance_metadata"): agentConfig["collect_instance_metadata"] = _is_affirmative( config.get("Main", "collect_instance_metadata")) 
agentConfig["proxy_forbid_method_switch"] = False if config.has_option("Main", "proxy_forbid_method_switch"): agentConfig["proxy_forbid_method_switch"] = _is_affirmative( config.get("Main", "proxy_forbid_method_switch")) agentConfig["collect_ec2_tags"] = False if config.has_option("Main", "collect_ec2_tags"): agentConfig["collect_ec2_tags"] = _is_affirmative( config.get("Main", "collect_ec2_tags")) agentConfig["utf8_decoding"] = False if config.has_option("Main", "utf8_decoding"): agentConfig["utf8_decoding"] = _is_affirmative( config.get("Main", "utf8_decoding")) agentConfig["gce_updated_hostname"] = False if config.has_option("Main", "gce_updated_hostname"): agentConfig["gce_updated_hostname"] = _is_affirmative( config.get("Main", "gce_updated_hostname")) except ConfigParser.NoSectionError as e: sys.stderr.write('Config file not found or incorrectly formatted.\n') sys.exit(2) except ConfigParser.ParsingError as e: sys.stderr.write('Config file not found or incorrectly formatted.\n') sys.exit(2) except ConfigParser.NoOptionError as e: sys.stderr.write( 'There are some items missing from your config file, but nothing fatal [%s]' % e) agentConfig['proxy_settings'] = get_proxy(agentConfig) if agentConfig.get('ca_certs', None) is None: agentConfig['ssl_certificate'] = get_ssl_certificate( get_os(), 'datamonitor-cert.pem') else: agentConfig['ssl_certificate'] = agentConfig['ca_certs'] agentConfig['interval'] = config.get('Main', 'updater_interval') return agentConfig
def get_tags(agentConfig): """ Retrieve AWS EC2 tags. """ if not agentConfig['collect_instance_metadata']: log.info( "Instance metadata collection is disabled. Not collecting it.") return [] EC2_tags = [] try: iam_role = EC2.get_iam_role() iam_url = EC2.METADATA_URL_BASE + "/iam/security-credentials/" + unicode( iam_role) r = requests.get(iam_url, timeout=EC2.TIMEOUT) r.raise_for_status() # Fail on 404 etc iam_params = r.json() r = requests.get(EC2.INSTANCE_IDENTITY_URL, timeout=EC2.TIMEOUT) r.raise_for_status() instance_identity = r.json() region = instance_identity['region'] import boto.ec2 proxy_settings = get_proxy(agentConfig) or {} proxy_host = proxy_settings.get('host') if proxy_host is not None: proxy_host = re.sub(r'^http(s?)://', '', proxy_host) connection = boto.ec2.connect_to_region( region, aws_access_key_id=iam_params['AccessKeyId'], aws_secret_access_key=iam_params['SecretAccessKey'], security_token=iam_params['Token'], proxy=proxy_host, proxy_port=proxy_settings.get('port'), proxy_user=proxy_settings.get('user'), proxy_pass=proxy_settings.get('password')) tag_object = connection.get_all_tags( {'resource-id': EC2.metadata['instance-id']}) EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object] if agentConfig.get('collect_security_groups') and EC2.metadata.get( 'security-groups'): EC2_tags.append(u"security-group-name:{0}".format( EC2.metadata.get('security-groups'))) except EC2.NoIAMRole: log.warning( u"Unable to retrieve AWS EC2 custom tags: " u"an IAM role associated with the instance is required") except Exception: log.exception("Problem retrieving custom EC2 tags") if EC2.is_openstack is True and agentConfig[ 'openstack_use_metadata_tags']: log.info( u"Attempting to collect tags from OpenStack meta_data.json") openstack_metadata_url = EC2.EC2_METADATA_HOST + "/openstack/latest/meta_data.json" try: r = requests.get(openstack_metadata_url, timeout=EC2.TIMEOUT) r.raise_for_status() # Fail on 404 etc openstack_metadata = r.json() EC2_tags = [ u"%s:%s" % (tag, openstack_metadata['meta'][tag]) for tag in openstack_metadata['meta'] ] if 'project_id' in openstack_metadata: EC2_tags.append(u"project_id:%s" % openstack_metadata['project_id']) # Map the OS availability_zone to Datadog's use of availability-zone for UI defaults EC2_tags.append(u"availability-zone:%s" % openstack_metadata['availability_zone']) # Even though the name is set in EC2.metadata it also needs to be a tag for filters if 'name' not in openstack_metadata['meta']: EC2_tags.append(u"name:%s" % openstack_metadata['name']) except Exception: log.warning( u"Problem retrieving tags from OpenStack meta_data.json") return EC2_tags
def get_tags(agentConfig): """ Retrieve AWS EC2 tags. """ if not agentConfig['collect_instance_metadata']: log.info("Instance metadata collection is disabled. Not collecting it.") return [] EC2_tags = [] try: iam_role = EC2.get_iam_role() iam_url = EC2.METADATA_URL_BASE + "/iam/security-credentials/" + unicode(iam_role) r = requests.get(iam_url, timeout=EC2.TIMEOUT) r.raise_for_status() # Fail on 404 etc iam_params = r.json() r = requests.get(EC2.INSTANCE_IDENTITY_URL, timeout=EC2.TIMEOUT) r.raise_for_status() instance_identity = r.json() region = instance_identity['region'] import boto.ec2 proxy_settings = get_proxy(agentConfig) or {} connection = boto.ec2.connect_to_region( region, aws_access_key_id=iam_params['AccessKeyId'], aws_secret_access_key=iam_params['SecretAccessKey'], security_token=iam_params['Token'], proxy=proxy_settings.get('host'), proxy_port=proxy_settings.get('port'), proxy_user=proxy_settings.get('user'), proxy_pass=proxy_settings.get('password') ) tag_object = connection.get_all_tags({'resource-id': EC2.metadata['instance-id']}) EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object] if agentConfig.get('collect_security_groups') and EC2.metadata.get('security-groups'): EC2_tags.append(u"security-group-name:{0}".format(EC2.metadata.get('security-groups'))) except EC2.NoIAMRole: log.warning( u"Unable to retrieve AWS EC2 custom tags: " u"an IAM role associated with the instance is required" ) except Exception: log.exception("Problem retrieving custom EC2 tags") if EC2.is_openstack is True and agentConfig['openstack_use_metadata_tags']: log.info(u"Attempting to collect tags from OpenStack meta_data.json") openstack_metadata_url = EC2.EC2_METADATA_HOST + "/openstack/latest/meta_data.json" try: r = requests.get(openstack_metadata_url, timeout=EC2.TIMEOUT) r.raise_for_status() # Fail on 404 etc openstack_metadata = r.json() EC2_tags = [u"%s:%s" % (tag, openstack_metadata['meta'][tag]) for tag in openstack_metadata['meta']] if 'project_id' in openstack_metadata: EC2_tags.append(u"project_id:%s" % openstack_metadata['project_id']) # Map the OS availability_zone to Datadog's use of availability-zone for UI defaults EC2_tags.append(u"availability-zone:%s" % openstack_metadata['availability_zone']) # Even though the name is set in EC2.metadata it also needs to be a tag for filters if 'name' not in openstack_metadata['meta']: EC2_tags.append(u"name:%s" % openstack_metadata['name']) except Exception: log.warning(u"Problem retrieving tags from OpenStack meta_data.json") return EC2_tags
def get_config(parse_args=True, cfg_path=None, options=None): if parse_args: options, _ = get_parsed_args() # General config agentConfig = { 'check_freq': DEFAULT_CHECK_FREQUENCY, 'dogstatsd_port': 8125, 'dogstatsd_target': 'http://localhost:17123', 'graphite_listen_port': None, 'hostname': None, 'listen_port': None, 'tags': None, 'use_ec2_instance_id': False, # DEPRECATED 'version': get_version(), 'watchdog': True, 'additional_checksd': '/etc/dd-agent/checks.d/', 'bind_host': get_default_bind_host(), 'statsd_metric_namespace': None, 'utf8_decoding': False } if Platform.is_mac(): agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d' # Config handling try: # Find the right config file path = os.path.realpath(__file__) path = os.path.dirname(path) config_path = get_config_path(cfg_path, os_name=get_os()) config = ConfigParser.ConfigParser() config.readfp(skip_leading_wsp(open(config_path))) # bulk import for option in config.options('Main'): agentConfig[option] = config.get('Main', option) # Store developer mode setting in the agentConfig if config.has_option('Main', 'developer_mode'): agentConfig['developer_mode'] = _is_affirmative(config.get('Main', 'developer_mode')) # Allow an override with the --profile option if options is not None and options.profile: agentConfig['developer_mode'] = True # # Core config #ap if not config.has_option('Main', 'api_key'): log.warning(u"No API key was found. Aborting.") sys.exit(2) if not config.has_option('Main', 'dd_url'): log.warning(u"No dd_url was found. Aborting.") sys.exit(2) # Endpoints dd_urls = map(clean_dd_url, config.get('Main', 'dd_url').split(',')) api_keys = map(lambda el: el.strip(), config.get('Main', 'api_key').split(',')) # For collector and dogstatsd agentConfig['dd_url'] = dd_urls[0] agentConfig['api_key'] = api_keys[0] # Forwarder endpoints logic # endpoints is: # { # 'https://app.datadoghq.com': ['api_key_abc', 'api_key_def'], # 'https://app.example.com': ['api_key_xyz'] # } endpoints = {} dd_urls = remove_empty(dd_urls) api_keys = remove_empty(api_keys) if len(dd_urls) == 1: if len(api_keys) > 0: endpoints[dd_urls[0]] = api_keys else: assert len(dd_urls) == len(api_keys), 'Please provide one api_key for each url' for i, dd_url in enumerate(dd_urls): endpoints[dd_url] = endpoints.get(dd_url, []) + [api_keys[i]] agentConfig['endpoints'] = endpoints # Forwarder or not forwarder agentConfig['use_forwarder'] = options is not None and options.use_forwarder if agentConfig['use_forwarder']: listen_port = 17123 if config.has_option('Main', 'listen_port'): listen_port = int(config.get('Main', 'listen_port')) agentConfig['dd_url'] = "http://{}:{}".format(agentConfig['bind_host'], listen_port) # FIXME: Legacy dd_url command line switch elif options is not None and options.dd_url is not None: agentConfig['dd_url'] = options.dd_url # Forwarder timeout agentConfig['forwarder_timeout'] = 20 if config.has_option('Main', 'forwarder_timeout'): agentConfig['forwarder_timeout'] = int(config.get('Main', 'forwarder_timeout')) # Extra checks.d path # the linux directory is set by default if config.has_option('Main', 'additional_checksd'): agentConfig['additional_checksd'] = config.get('Main', 'additional_checksd') elif get_os() == 'windows': # default windows location common_path = _windows_commondata_path() agentConfig['additional_checksd'] = os.path.join(common_path, 'Datadog', 'checks.d') if config.has_option('Main', 'use_dogstatsd'): agentConfig['use_dogstatsd'] = config.get('Main', 'use_dogstatsd').lower() in ("yes", "true") else: 
        agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except:
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get('Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get('Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(config.get('Main', 'check_freq'))
            except Exception:
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get('Main', 'statsd_forward_host')

        if config.has_option('Main', 'statsd_forward_port'):
            agentConfig['statsd_forward_port'] = int(config.get('Main', 'statsd_forward_port'))

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join([log_path, config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path
        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get("Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(config.get("Main", "gce_updated_hostname"))

    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.ParsingError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.NoOptionError as e:
        sys.stderr.write('There are some items missing from your config file, but nothing fatal [%s]' % e)

    # Storing proxy settings in the agentConfig
    agentConfig['proxy_settings'] = get_proxy(agentConfig)
    if agentConfig.get('ca_certs', None) is None:
        agentConfig['ssl_certificate'] = get_ssl_certificate(get_os(), 'datadog-cert.pem')
    else:
        agentConfig['ssl_certificate'] = agentConfig['ca_certs']

    return agentConfig
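
# The block above leans heavily on boolean coercion of config values, either via
# the "lower() in ('yes', 'true')" pattern or via the agent's _is_affirmative
# helper, which is not shown in this file. As a rough, hedged sketch only: a
# helper with equivalent behaviour could look like the following; the exact set
# of accepted values in the real helper may differ.
def _is_affirmative_sketch(value):
    # Treat common "truthy" config spellings as True, everything else as False.
    if value is None:
        return False
    return str(value).lower() in ('yes', 'true', '1', 'on')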
    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.ParsingError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.NoOptionError as e:
        sys.stderr.write(
            'There are some items missing from your config file, but nothing fatal [%s]' % e)

    # Storing proxy settings in the agentConfig
    agentConfig['proxy_settings'] = get_proxy(agentConfig)
    if agentConfig.get('ca_certs', None) is None:
        agentConfig['ssl_certificate'] = get_ssl_certificate(
            get_os(), 'sd-cert.pem')
    else:
        agentConfig['ssl_certificate'] = agentConfig['ca_certs']

    return agentConfig


def get_system_stats():
    systemStats = {
        'machine': platform.machine(),
        'platform': sys.platform,
        'processor': platform.processor(),
        'pythonV': platform.python_version(),
def get_config(parse_args=True, cfg_path=None, options=None):
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #

        if not config.has_option('Main', 'api_key'):
            log.warning(u"No API key was found. Aborting.")
            sys.exit(2)

        if not config.has_option('Main', 'dd_url'):
            log.warning(u"No dd_url was found. Aborting.")
            sys.exit(2)

        # Endpoints
        dd_url = clean_dd_url(config.get('Main', 'dd_url'))
        api_key = config.get('Main', 'api_key').strip()

        # For collector and dogstatsd
        agentConfig['api_key'] = api_key
        agentConfig['dd_url'] = dd_url

        # multiple endpoints
        if config.has_option('Main', 'other_dd_urls'):
            other_dd_urls = map(clean_dd_url, config.get('Main', 'other_dd_urls').split(','))
        else:
            other_dd_urls = []
        if config.has_option('Main', 'other_api_keys'):
            other_api_keys = map(lambda x: x.strip(), config.get('Main', 'other_api_keys').split(','))
        else:
            other_api_keys = []

        # Forwarder endpoints logic
        # endpoints is:
        # {
        #     'https://app.datadoghq.com': ['api_key_abc', 'api_key_def'],
        #     'https://app.example.com': ['api_key_xyz']
        # }
        endpoints = {dd_url: [api_key]}
        if len(other_dd_urls) == 0:
            endpoints[dd_url] += other_api_keys
        else:
            assert len(other_dd_urls) == len(other_api_keys), 'Please provide one api_key for each url'
            for i, other_dd_url in enumerate(other_dd_urls):
                endpoints[other_dd_url] = endpoints.get(other_dd_url, []) + [other_api_keys[i]]

        agentConfig['endpoints'] = endpoints

        # Forwarder or not forwarder
        agentConfig['use_forwarder'] = options is not None and options.use_forwarder
        if agentConfig['use_forwarder']:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://{}:{}".format(agentConfig['bind_host'], listen_port)
        # FIXME: Legacy dd_url command line switch
        elif options is not None and options.dd_url is not None:
            agentConfig['dd_url'] = options.dd_url

        # Forwarder timeout
        agentConfig['forwarder_timeout'] = 20
        if config.has_option('Main', 'forwarder_timeout'):
            agentConfig['forwarder_timeout'] = int(config.get('Main', 'forwarder_timeout'))

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get('Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(common_path, 'Datadog', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get('Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except:
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get('Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get('Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(config.get('Main', 'check_freq'))
            except Exception:
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get('Main', 'statsd_forward_host')

        if config.has_option('Main', 'statsd_forward_port'):
            agentConfig['statsd_forward_port'] = int(config.get('Main', 'statsd_forward_port'))

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            if _is_affirmative(config.get('Main', 'dogstatsd_use_ddurl')):
                agentConfig['dogstatsd_target'] = agentConfig['dd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        if config.has_option('datadog', 'ddforwarder_log'):
            agentConfig['has_datadog'] = True

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join([log_path, config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path
        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get("Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(config.get("Main", "gce_updated_hostname"))

    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.ParsingError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.NoOptionError as e:
        sys.stderr.write(
            'There are some items missing from your config file, but nothing fatal [%s]' % e)

    # Storing proxy settings in the agentConfig
    agentConfig['proxy_settings'] = get_proxy(agentConfig)
    if agentConfig.get('ca_certs', None) is None:
        agentConfig['ssl_certificate'] = get_ssl_certificate(get_os(), 'datadog-cert.pem')
    else:
        agentConfig['ssl_certificate'] = agentConfig['ca_certs']

    return agentConfig
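
# For reference, the forwarder endpoint merging in get_config() above can be
# exercised in isolation. This is only an illustrative sketch: build_endpoints()
# is not part of the agent code, and the URLs and API keys below are placeholder
# values taken from the comment inside get_config().
def build_endpoints(dd_url, api_key, other_dd_urls, other_api_keys):
    # The primary endpoint always carries the primary API key.
    endpoints = {dd_url: [api_key]}
    if len(other_dd_urls) == 0:
        # Extra API keys without extra URLs all target the primary endpoint.
        endpoints[dd_url] += other_api_keys
    else:
        # Otherwise each extra URL must come with exactly one extra API key,
        # and keys for an already-known URL are appended to its list.
        assert len(other_dd_urls) == len(other_api_keys), 'Please provide one api_key for each url'
        for i, other_dd_url in enumerate(other_dd_urls):
            endpoints[other_dd_url] = endpoints.get(other_dd_url, []) + [other_api_keys[i]]
    return endpoints


if __name__ == '__main__':
    # Resulting mapping (key order may vary):
    # {'https://app.datadoghq.com': ['api_key_abc', 'api_key_def'],
    #  'https://app.example.com': ['api_key_xyz']}
    print(build_endpoints('https://app.datadoghq.com', 'api_key_abc',
                          ['https://app.example.com', 'https://app.datadoghq.com'],
                          ['api_key_xyz', 'api_key_def']))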