def load_check(agentConfig, hostname, checkname):
    """Same logic as load_check_directory except it loads one specific check.

    Returns the loaded check instance, the invalid-check record for a broken
    config, the load-failure record when loading fails, or None when no
    config matches `checkname`.
    """
    agentConfig['checksd_hostname'] = hostname
    osname = get_os()
    checks_places = get_checks_places(osname, agentConfig)

    for config_path in _file_configs_paths(osname, agentConfig):
        check_name = _conf_path_to_check_name(config_path)
        if check_name == checkname:
            conf_is_valid, check_config, invalid_check = _load_file_config(
                config_path, check_name, agentConfig)
            if invalid_check and not conf_is_valid:
                return invalid_check
            # try to load the check and return the result
            load_success, load_failure = load_check_from_places(
                check_config, check_name, checks_places, agentConfig)
            # BUGFIX: load_success may be an empty dict when loading failed
            # outright; indexing values()[0] would raise IndexError
            if load_success:
                return load_success.values()[0] or load_failure
            return load_failure

    # the check was not found, try with service discovery
    for check_name, service_disco_check_config in _service_disco_configs(agentConfig).iteritems():
        if check_name == checkname:
            sd_init_config, sd_instances = service_disco_check_config
            check_config = {'init_config': sd_init_config, 'instances': sd_instances}
            # try to load the check and return the result
            load_success, load_failure = load_check_from_places(
                check_config, check_name, checks_places, agentConfig)
            if load_success:
                return load_success.values()[0] or load_failure
            return load_failure

    return None
def get_config_path(cfg_path=None, os_name=None):
    """Locate the agent configuration file, or exit(3) when none exists."""
    # An explicit override wins when it points at an existing file.
    if cfg_path is not None and os.path.exists(cfg_path):
        return cfg_path

    # Next preference: a config sitting next to the deployed agent code.
    try:
        agent_dir = os.path.dirname(os.path.realpath(__file__))
        return _config_path(agent_dir)
    except PathNotFound:
        pass

    if os_name is None:
        os_name = get_os()

    # Last resort: the OS-specific default location.
    bad_path = ''
    try:
        if os_name == 'windows':
            return _windows_config_path()
        if os_name == 'mac':
            return _mac_config_path()
        return _unix_config_path()
    except PathNotFound as e:
        if len(e.args) > 0:
            bad_path = e.args[0]

    # Every search failed: report where we looked and stop the agent.
    sys.stderr.write("Please supply a configuration file at %s or in the directory where "
                     "the Agent is currently deployed.\n" % bad_path)
    sys.exit(3)
def load_check(agentConfig, hostname, checkname):
    """Same logic as load_check_directory except it loads one specific check.

    Returns the check instance on success, an invalid-check record for a
    broken config, the load-failure record on a failed load, or None when
    nothing matches `checkname`.
    """
    agentConfig['checksd_hostname'] = hostname
    osname = get_os()
    checks_places = get_checks_places(osname, agentConfig)

    for config_path in _file_configs_paths(osname, agentConfig):
        check_name = _conf_path_to_check_name(config_path)
        if check_name == checkname:
            conf_is_valid, check_config, invalid_check = _load_file_config(config_path, check_name, agentConfig)
            if invalid_check and not conf_is_valid:
                return invalid_check
            # try to load the check and return the result
            load_success, load_failure = load_check_from_places(check_config, check_name, checks_places, agentConfig)
            # BUGFIX: guard against an empty load_success dict, which made
            # values()[0] raise IndexError instead of reporting the failure
            if load_success:
                return load_success.values()[0] or load_failure
            return load_failure

    # the check was not found, try with service discovery
    for check_name, service_disco_check_config in _service_disco_configs(agentConfig).iteritems():
        if check_name == checkname:
            sd_init_config, sd_instances = service_disco_check_config
            check_config = {'init_config': sd_init_config, 'instances': sd_instances}
            # try to load the check and return the result
            load_success, load_failure = load_check_from_places(check_config, check_name, checks_places, agentConfig)
            if load_success:
                return load_success.values()[0] or load_failure
            return load_failure

    return None
def run_check(name, path=None): """ Test custom checks on Windows. """ # Read the config file confd_path = path or os.path.join(get_confd_path(get_os()), '%s.yaml' % name) try: f = open(confd_path) except IOError: raise Exception('Unable to open configuration at %s' % confd_path) config_str = f.read() f.close() # Run the check check, instances = get_check(name, config_str) if not instances: raise Exception('YAML configuration returned no instances.') for instance in instances: check.check(instance) if check.has_events(): print "Events:\n" pprint(check.get_events(), indent=4) print "Metrics:\n" pprint(check.get_metrics(), indent=4)
def load_check(name, config, agentConfig):
    """Load check module `name` (from checks.d or the SDK), find its
    AgentCheck subclass, and return a configured instance of it.

    Raises Exception when no suitable class exists or the class uses the
    old constructor API.
    """
    if not _is_sdk():
        checksd_path = agentConfig.get('additional_checksd', get_checksd_path(get_os()))
        # find (in checksd_path) and load the check module
        fd, filename, desc = imp.find_module(name, [checksd_path])
        check_module = imp.load_module(name, fd, filename, desc)
    else:
        check_module = _load_sdk_module(name)  # parent module

    check_class = None
    classes = inspect.getmembers(check_module, inspect.isclass)
    for _, clsmember in classes:
        if clsmember == AgentCheck:
            continue
        if issubclass(clsmember, AgentCheck):
            check_class = clsmember
            # keep scanning while the candidate is a *direct* child of
            # AgentCheck so that a deeper subclass later in the list wins;
            # stop at the first indirect subclass
            if AgentCheck in clsmember.__bases__:
                continue
            else:
                break

    if check_class is None:
        raise Exception("Unable to import check %s. Missing a class that inherits AgentCheck" % name)

    init_config = config.get('init_config', {})
    instances = config.get('instances')
    agentConfig['checksd_hostname'] = get_hostname(agentConfig)

    # init the check class
    try:
        return check_class(name, init_config, agentConfig, instances=instances)
    except TypeError as e:
        # old-style checks have a different constructor signature
        raise Exception("Check is using old API, {0}".format(e))
    except Exception:
        raise
def get_config_path(cfg_path=None, os_name=None):
    """Find the agent config file; exit(3) with a message when not found."""
    # Honour an explicit override that points at a real file.
    if cfg_path is not None and os.path.exists(cfg_path):
        return cfg_path

    # Then try a config stored alongside the current agent directory.
    try:
        here = os.path.dirname(os.path.realpath(__file__))
        return _config_path(here)
    except PathNotFound:
        pass

    if os_name is None:
        os_name = get_os()

    # Fall back to the per-OS default; remember the path that was missing.
    bad_path = ''
    try:
        if os_name == 'windows':
            return _windows_config_path()
        elif os_name == 'mac':
            return _mac_config_path()
        else:
            return _unix_config_path()
    except PathNotFound as e:
        if e.args:
            bad_path = e.args[0]

    # Nothing worked: abort with a pointer to the expected location.
    sys.stderr.write(
        "Please supply a configuration file at %s or in the directory where "
        "the Agent is currently deployed.\n" % bad_path)
    sys.exit(3)
def get_checksd_path(osname=None):
    """Return the checks.d directory for `osname` (defaults to current OS)."""
    if not osname:
        osname = get_os()
    if osname == 'windows':
        return _windows_checksd_path()
    if osname == 'mac':
        return _mac_checksd_path()
    # everything else is treated as unix-like
    return _unix_checksd_path()
def get_sdk_integrations_path(osname=None):
    """Return the SDK integrations dir next to this file; raise PathNotFound otherwise."""
    if not osname:
        osname = get_os()
    # SDK integrations are only resolved from the source tree on unix-likes
    if osname in ('windows', 'mac'):
        raise PathNotFound()

    here = os.path.dirname(os.path.realpath(__file__))
    candidate = os.path.join(here, '..', SDK_INTEGRATIONS_DIR)
    if not os.path.exists(candidate):
        raise PathNotFound(candidate)
    return candidate
def get_config(parse_args=True, cfg_path=None, options=None):
    """Parse the agent config file and return the agentConfig dict.

    Exits with status 2 when a mandatory option (api_key, secret_key,
    linklog_url) is missing from the [Main] section.
    """
    if parse_args:
        options, _ = get_parsed_args()

    # General config defaults
    agentConfig = {
        'version': AGENT_VERSION,
        'recv_port': 8225,
        'hostname': None,
        'utf8_decoding': False,
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'run_plugins': [],
    }

    # Find the right config file
    config_path = get_config_path(cfg_path, os_name=get_os())
    config = ConfigParser.ConfigParser()
    # BUGFIX: close the config file handle instead of leaking it
    config_file = open(config_path)
    try:
        config.readfp(skip_leading_wsp(config_file))
    finally:
        config_file.close()

    # bulk import of everything under [Main]
    for option in config.options('Main'):
        agentConfig[option] = config.get('Main', option)

    # Allow an override with the --profile option
    if options is not None and options.profile:
        agentConfig['developer_mode'] = True

    # Core config: these options are mandatory
    if not config.has_option('Main', 'api_key'):
        log.warning(u"No API key was found. Aborting.")
        sys.exit(2)
    if not config.has_option('Main', 'secret_key'):
        log.warning(u"No SECRET key was found. Aborting.")
        sys.exit(2)
    if not config.has_option('Main', 'linklog_url'):
        log.warning(u"No linklog_url was found. Aborting.")
        sys.exit(2)

    if config.has_option('Main', 'check_freq'):
        try:
            agentConfig['check_freq'] = int(config.get('Main', 'check_freq'))
        except Exception:
            # keep the default frequency on a malformed value
            pass

    if config.has_option('Main', 'run_plugins'):
        try:
            agentConfig['run_plugins'] = config.get('Main', 'run_plugins').split(',')
        except Exception:
            pass

    return agentConfig
def load_class(check_name, class_name):
    """
    Retrieve a class with the given name within the given check module.
    """
    checksd_path = get_checksd_path(get_os())
    if checksd_path not in sys.path:
        sys.path.append(checksd_path)

    module = __import__(check_name)
    # scan every class defined in (or imported by) the module for a name match
    for member_name, member in inspect.getmembers(module, inspect.isclass):
        if member_name == class_name:
            return member
    raise Exception(u"Unable to import class {0} from the check module.".format(class_name))
def _get_logging_config(cfg_path=None):
    """Build a minimal logging config dict: log level, file-logging toggle,
    and per-component log file paths under BASE_LOG_DIR.

    Falls back to a relative "logs" directory (mutating the module-level
    BASE_LOG_DIR) when the default directory is not read/writeable.
    """
    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    config_path = get_config_path(cfg_path, os_name=get_os())
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    logging_config = {
        'log_level': logging.INFO,
    }
    if config.has_option('Main', 'log_level'):
        # NOTE(review): levels.get() yields None for an unknown level name,
        # silently clobbering the INFO default -- confirm intended
        logging_config['log_level'] = levels.get(config.get('Main', 'log_level'))
    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get('Main', 'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False

    system_os = get_os()
    global BASE_LOG_DIR
    if system_os != 'windows' and not logging_config['disable_file_logging']:
        # redirect to a local "logs" dir when the default isn't writeable
        if not os.access(BASE_LOG_DIR, os.R_OK | os.W_OK):
            print("{0} dir is not writeable, so change it to local".format(BASE_LOG_DIR))
            BASE_LOG_DIR = "logs"
        logging_config['collector_log_file'] = '{0}/collector.log'.format(BASE_LOG_DIR)
        logging_config['forwarder_log_file'] = '{0}/forwarder.log'.format(BASE_LOG_DIR)
        logging_config['{0}_log_file'.format(__name__)] = '{0}/monitor.log'.format(BASE_LOG_DIR)

    return logging_config
def _load_sdk_module(name):
    """Import the `check` submodule of SDK integration `name` and return it.

    The module is registered in sys.modules under "_<name>"; repeated calls
    return the cached module instead of re-importing.
    """
    sdk_path = get_sdk_integrations_path(get_os())
    module_path = os.path.join(sdk_path, name)
    sdk_module_name = "_{}".format(name)

    # already imported once: reuse the cached module
    if sdk_module_name in sys.modules:
        return sys.modules[sdk_module_name]

    # make both the SDK root and the integration dir importable
    if sdk_path not in sys.path:
        sys.path.append(sdk_path)
    if module_path not in sys.path:
        sys.path.append(module_path)

    fd, filename, desc = imp.find_module('check', [module_path])
    try:
        # BUGFIX: close fd even when load_module raises (per imp docs)
        module = imp.load_module(sdk_module_name, fd, filename, desc)
    finally:
        if fd:
            fd.close()
    return module
def get_check_class(name):
    """Import check module `name` from checks.d and return its AgentCheck
    subclass, or None when the module defines no suitable class.
    """
    checksd_path = get_checksd_path(get_os())
    if checksd_path not in sys.path:
        sys.path.append(checksd_path)

    check_module = __import__(name)
    check_class = None
    classes = inspect.getmembers(check_module, inspect.isclass)
    for _, clsmember in classes:
        if clsmember == AgentCheck:
            continue
        if issubclass(clsmember, AgentCheck):
            check_class = clsmember
            # a direct child of AgentCheck may be superseded by a deeper
            # subclass later in the list, so keep scanning; stop at the
            # first indirect subclass
            if AgentCheck in clsmember.__bases__:
                continue
            else:
                break
    return check_class
def get_confd_path(osname=None):
    """Return the conf.d directory, preferring one next to the agent code."""
    # First choice: a conf.d relative to this file's directory.
    try:
        here = os.path.dirname(os.path.realpath(__file__))
        return _confd_path(here)
    except PathNotFound:
        pass

    if not osname:
        osname = get_os()

    # Otherwise use the per-OS default; keep the missing path for the error.
    bad_path = ''
    try:
        if osname == 'windows':
            return _windows_confd_path()
        if osname == 'mac':
            return _mac_confd_path()
        return _unix_confd_path()
    except PathNotFound as e:
        if len(e.args) > 0:
            bad_path = e.args[0]
    raise PathNotFound(bad_path)
def _load_sdk_module(name):
    """Import SDK integration `name`, preferring an installed wheel package.

    Falls back to loading the integration's `check` submodule from the SDK
    integrations directory, caching it in sys.modules as "_<name>".
    """
    try:
        # see whether the check was installed as a wheel package
        return import_module("datadog_checks.{}".format(name))
    except ImportError:
        sdk_path = get_sdk_integrations_path(get_os())
        module_path = os.path.join(sdk_path, name)
        sdk_module_name = "_{}".format(name)

        # reuse a previously loaded module
        if sdk_module_name in sys.modules:
            return sys.modules[sdk_module_name]

        if sdk_path not in sys.path:
            sys.path.append(sdk_path)
        if module_path not in sys.path:
            sys.path.append(module_path)

        fd, filename, desc = imp.find_module('check', [module_path])
        try:
            # BUGFIX: close fd even when load_module raises (per imp docs)
            return imp.load_module(sdk_module_name, fd, filename, desc)
        finally:
            if fd:
                fd.close()
def get_check(name, config_str):
    """Load check class `name` from checks.d and build it from YAML config.

    Returns whatever check_class.from_yaml returns for `config_str`.
    """
    from checks import AgentCheck

    checksd_path = get_checksd_path(get_os())
    if checksd_path not in sys.path:
        sys.path.append(checksd_path)

    check_module = __import__(name)
    check_class = None
    classes = inspect.getmembers(check_module, inspect.isclass)
    # BUGFIX: the loop variable used to be `name`, shadowing the parameter --
    # after the loop the error message and the check_name passed to
    # from_yaml carried the class name instead of the check name.
    for _cls_name, clsmember in classes:
        if issubclass(clsmember, AgentCheck) and clsmember != AgentCheck:
            check_class = clsmember
            break
    if check_class is None:
        raise Exception("Unable to import check %s. Missing a class that inherits AgentCheck" % name)

    agentConfig = {
        'version': '0.1',
        'api_key': 'tota'
    }

    return check_class.from_yaml(yaml_text=config_str, check_name=name,
                                 agentConfig=agentConfig)
def get_sdk_integrations_path(osname=None):
    """Return the SDK integrations checkout directory (CI-aware).

    Raises PathNotFound when the resolved directory does not exist.
    """
    if not osname:
        osname = get_os()

    if os.environ.get('INTEGRATIONS_DIR'):
        # Running against an explicit integrations checkout; each CI system
        # exposes the checkout location through its own variable.
        if os.environ.get('TRAVIS'):
            path = os.environ['TRAVIS_BUILD_DIR']
        elif os.environ.get('CIRCLECI'):
            path = os.path.join(os.environ['HOME'],
                                os.environ['CIRCLE_PROJECT_REPONAME'])
        elif os.environ.get('APPVEYOR'):
            path = os.environ['APPVEYOR_BUILD_FOLDER']
        else:
            # might need tweaking in the future.
            path = os.path.join(os.environ['INTEGRATIONS_DIR'], '..')
    else:
        # Default: the integrations dir sitting next to this source tree.
        here = os.path.dirname(os.path.realpath(__file__))
        path = os.path.join(here, '..', SDK_INTEGRATIONS_DIR)

    if os.path.exists(path):
        return path
    raise PathNotFound(path)
def get_config(parse_args=True, cfg_path=None, options=None):
    """Parse the main agent configuration file into the agentConfig dict.

    Reads [Main] (and [WMI]) from the config file located by
    get_config_path, layering file values over built-in defaults and
    command-line overrides. Exits with status 2 on a missing/unparseable
    config file or when the mandatory api_key / dd_url options are absent.
    """
    if parse_args:
        options, _ = get_parsed_args()

    # General config defaults
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)
        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #ap
        if not config.has_option('Main', 'api_key'):
            log.warning(u"No API key was found. Aborting.")
            sys.exit(2)

        if not config.has_option('Main', 'dd_url'):
            log.warning(u"No dd_url was found. Aborting.")
            sys.exit(2)

        # Endpoints: both dd_url and api_key accept comma-separated lists
        dd_urls = map(clean_dd_url, config.get('Main', 'dd_url').split(','))
        api_keys = map(lambda el: el.strip(), config.get('Main', 'api_key').split(','))

        # For collector and dogstatsd
        agentConfig['dd_url'] = dd_urls[0]
        agentConfig['api_key'] = api_keys[0]

        # Forwarder endpoints logic
        # endpoints is:
        # {
        #     'https://app.datadoghq.com': ['api_key_abc', 'api_key_def'],
        #     'https://app.example.com': ['api_key_xyz']
        # }
        endpoints = {}
        dd_urls = remove_empty(dd_urls)
        api_keys = remove_empty(api_keys)
        if len(dd_urls) == 1:
            if len(api_keys) > 0:
                endpoints[dd_urls[0]] = api_keys
        else:
            assert len(dd_urls) == len(api_keys), 'Please provide one api_key for each url'
            for i, dd_url in enumerate(dd_urls):
                endpoints[dd_url] = endpoints.get(dd_url, []) + [api_keys[i]]

        agentConfig['endpoints'] = endpoints

        # Forwarder or not forwarder
        agentConfig['use_forwarder'] = options is not None and options.use_forwarder
        if agentConfig['use_forwarder']:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://{}:{}".format(agentConfig['bind_host'], listen_port)
        # FIXME: Legacy dd_url command line switch
        elif options is not None and options.dd_url is not None:
            agentConfig['dd_url'] = options.dd_url

        # Forwarder timeout
        agentConfig['forwarder_timeout'] = 20
        if config.has_option('Main', 'forwarder_timeout'):
            agentConfig['forwarder_timeout'] = int(config.get('Main', 'forwarder_timeout'))

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get('Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(common_path, 'Datadog', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get('Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except:
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get('Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get('Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(config.get('Main', 'check_freq'))
            except Exception:
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get('Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(config.get('Main', 'statsd_forward_port'))

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join([log_path, config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path
        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get("Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(config.get("Main", "gce_updated_hostname"))

    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.ParsingError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.NoOptionError as e:
        # non-fatal: carry on with whatever was parsed
        sys.stderr.write('There are some items missing from your config file, but nothing fatal [%s]' % e)

    # Storing proxy settings in the agentConfig
    agentConfig['proxy_settings'] = get_proxy(agentConfig)
    if agentConfig.get('ca_certs', None) is None:
        agentConfig['ssl_certificate'] = get_ssl_certificate(get_os(), 'datadog-cert.pem')
    else:
        agentConfig['ssl_certificate'] = agentConfig['ca_certs']

    return agentConfig
def initialize_logging(logger_name):
    """Configure logging for `logger_name`: basic stdout, optional rotating
    file handler, optional syslog, and (on Windows) the NT event viewer.

    Any failure falls back to plain stdout logging; the module-level `log`
    is re-bound once configuration is done.
    """
    try:
        logging_config = get_logging_config()
        logging.basicConfig(
            format=get_log_format(logger_name),
            level=logging_config['log_level'] or logging.INFO,
        )

        log_file = logging_config.get('%s_log_file' % logger_name)
        if log_file is not None and not logging_config['disable_file_logging']:
            # make sure the log directory is writeable
            # NOTE: the entire directory needs to be writable so that rotation works
            if os.access(os.path.dirname(log_file), os.R_OK | os.W_OK):
                file_handler = logging.handlers.RotatingFileHandler(log_file, maxBytes=LOGGING_MAX_BYTES, backupCount=1)
                formatter = logging.Formatter(get_log_format(logger_name), get_log_date_format())
                file_handler.setFormatter(formatter)

                root_log = logging.getLogger()
                root_log.addHandler(file_handler)
            else:
                sys.stderr.write("Log file is unwritable: '%s'\n" % log_file)

        # set up syslog
        if logging_config['log_to_syslog']:
            try:
                from logging.handlers import SysLogHandler

                if logging_config['syslog_host'] is not None and logging_config['syslog_port'] is not None:
                    sys_log_addr = (logging_config['syslog_host'], logging_config['syslog_port'])
                else:
                    sys_log_addr = "/dev/log"
                    # Special-case BSDs
                    if Platform.is_darwin():
                        sys_log_addr = "/var/run/syslog"
                    elif Platform.is_freebsd():
                        sys_log_addr = "/var/run/log"

                handler = SysLogHandler(address=sys_log_addr, facility=SysLogHandler.LOG_DAEMON)
                handler.setFormatter(logging.Formatter(get_syslog_format(logger_name), get_log_date_format()))
                root_log = logging.getLogger()
                root_log.addHandler(handler)
            except Exception as e:
                sys.stderr.write("Error setting up syslog: '%s'\n" % str(e))
                traceback.print_exc()

        # Setting up logging in the event viewer for windows
        if get_os() == 'windows' and logging_config['log_to_event_viewer']:
            try:
                from logging.handlers import NTEventLogHandler
                nt_event_handler = NTEventLogHandler(logger_name, get_win32service_file('windows', 'win32service.pyd'), 'Application')
                nt_event_handler.setFormatter(logging.Formatter(get_syslog_format(logger_name), get_log_date_format()))
                # only errors (and worse) go to the event viewer
                nt_event_handler.setLevel(logging.ERROR)
                app_log = logging.getLogger(logger_name)
                app_log.addHandler(nt_event_handler)
            except Exception as e:
                sys.stderr.write("Error setting up Event viewer logging: '%s'\n" % str(e))
                traceback.print_exc()

    except Exception as e:
        sys.stderr.write("Couldn't initialize logging: %s\n" % str(e))
        traceback.print_exc()

        # if config fails entirely, enable basic stdout logging as a fallback
        logging.basicConfig(
            format=get_log_format(logger_name),
            level=logging.INFO,
        )

    # re-get the log after logging is initialized
    global log
    log = logging.getLogger(__name__)
def get_os_freebsd(platform):
    # NOTE(review): missing the `test_` prefix, so pytest will not collect
    # this -- compare test_get_os_linux; confirm whether that is intentional.
    # `platform` is presumably a fixture faking a FreeBSD platform -- verify.
    assert get_os() == "freebsd"
def test_get_os_aix7():
    """get_os() should identify an AIX 7 host as "aix"."""
    expected = "aix"
    assert get_os() == expected
def get_log_format(logger_name):
    """Return the log-line format string appropriate for the current OS."""
    if get_os() == 'windows':
        return '%(asctime)s | %(levelname)s | %(name)s(%(filename)s:%(lineno)s) | %(message)s'
    # Non-windows lines carry a dd.<logger_name> component; the doubled %%
    # escapes survive the interpolation of logger_name below.
    return '%%(asctime)s | %%(levelname)s | dd.%s | %%(name)s(%%(filename)s:%%(lineno)s) | %%(message)s' % logger_name
def test_get_os_linux():
    """get_os() should identify a Linux host as "linux"."""
    expected = "linux"
    assert get_os() == expected
def get_os_sunos(platform):
    # NOTE(review): missing the `test_` prefix, so pytest will not collect
    # this -- compare test_get_os_linux; confirm whether that is intentional.
    # `platform` is presumably a fixture faking a SunOS platform -- verify.
    assert get_os() == "sunos"
def get_logging_config(cfg_path=None):
    """Build the logging configuration dict from the StackState config file.

    Provides OS-specific defaults for per-component log file paths, then
    overlays log level, syslog and event-viewer settings from [Main].
    """
    system_os = get_os()
    logging_config = {
        'log_level': None,
        'log_to_event_viewer': False,
        'log_to_syslog': False,
        'syslog_host': None,
        'syslog_port': None,
    }
    if system_os == 'windows':
        logging_config['windows_collector_log_file'] = os.path.join(
            _windows_commondata_path(), 'StackState', 'logs', 'collector.log')
        logging_config['windows_forwarder_log_file'] = os.path.join(
            _windows_commondata_path(), 'StackState', 'logs', 'forwarder.log')
        logging_config['windows_dogstatsd_log_file'] = os.path.join(
            _windows_commondata_path(), 'StackState', 'logs', 'dogstatsd.log')
        logging_config['jmxfetch_log_file'] = os.path.join(
            _windows_commondata_path(), 'StackState', 'logs', 'jmxfetch.log')
    else:
        logging_config['collector_log_file'] = '/var/log/stackstate/collector.log'
        logging_config['forwarder_log_file'] = '/var/log/stackstate/forwarder.log'
        logging_config['dogstatsd_log_file'] = '/var/log/stackstate/stsstatsd.log'
        logging_config['jmxfetch_log_file'] = '/var/log/stackstate/jmxfetch.log'
        logging_config['go-metro_log_file'] = '/var/log/stackstate/go-metro.log'
        # syslog is on by default on unix-likes
        logging_config['log_to_syslog'] = True

    config_path = get_config_path(cfg_path, os_name=system_os)
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    # Legacy python-logging-style sections are no longer honoured.
    if config.has_section('handlers') or config.has_section('loggers') or config.has_section('formatters'):
        sys.stderr.write("""Python logging config is no longer supported and will be ignored.
To configure logging, update the logging portion of 'stackstate.conf'.""")

    # Bulk-copy any [Main] option that matches a known logging key.
    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(config.get('Main', 'log_level'))

    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get('Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'log_to_event_viewer'):
        logging_config['log_to_event_viewer'] = config.get('Main', 'log_to_event_viewer').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except Exception:
            # malformed port: fall back to the default address
            logging_config['syslog_port'] = None

    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get('Main', 'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False

    return logging_config
def initialize_logging(logger_name):
    """Set up logging for `logger_name`: basic stdout, optional rotating
    file handler, optional syslog and (on Windows) the NT event viewer.

    On any failure, falls back to basic stdout logging; finally re-binds
    the module-level `log`.
    """
    try:
        logging_config = get_logging_config()
        logging.basicConfig(
            format=get_log_format(logger_name),
            level=logging_config['log_level'] or logging.INFO,
        )

        log_file = logging_config.get('%s_log_file' % logger_name)
        if log_file is not None and not logging_config['disable_file_logging']:
            # make sure the log directory is writeable
            # NOTE: the entire directory needs to be writable so that rotation works
            if os.access(os.path.dirname(log_file), os.R_OK | os.W_OK):
                file_handler = logging.handlers.RotatingFileHandler(
                    log_file, maxBytes=LOGGING_MAX_BYTES, backupCount=1)
                formatter = logging.Formatter(get_log_format(logger_name), get_log_date_format())
                file_handler.setFormatter(formatter)

                root_log = logging.getLogger()
                root_log.addHandler(file_handler)
            else:
                sys.stderr.write("Log file is unwritable: '%s'\n" % log_file)

        # set up syslog
        if logging_config['log_to_syslog']:
            try:
                from logging.handlers import SysLogHandler

                if logging_config['syslog_host'] is not None and logging_config['syslog_port'] is not None:
                    sys_log_addr = (logging_config['syslog_host'], logging_config['syslog_port'])
                else:
                    sys_log_addr = "/dev/log"
                    # Special-case BSDs
                    if Platform.is_darwin():
                        sys_log_addr = "/var/run/syslog"
                    elif Platform.is_freebsd():
                        sys_log_addr = "/var/run/log"

                handler = SysLogHandler(address=sys_log_addr, facility=SysLogHandler.LOG_DAEMON)
                handler.setFormatter(
                    logging.Formatter(get_syslog_format(logger_name), get_log_date_format()))
                root_log = logging.getLogger()
                root_log.addHandler(handler)
            except Exception as e:
                sys.stderr.write("Error setting up syslog: '%s'\n" % str(e))
                traceback.print_exc()

        # Setting up logging in the event viewer for windows
        if get_os() == 'windows' and logging_config['log_to_event_viewer']:
            try:
                from logging.handlers import NTEventLogHandler
                nt_event_handler = NTEventLogHandler(
                    logger_name, get_win32service_file('windows', 'win32service.pyd'), 'Application')
                nt_event_handler.setFormatter(
                    logging.Formatter(get_syslog_format(logger_name), get_log_date_format()))
                # only errors (and worse) are mirrored to the event viewer
                nt_event_handler.setLevel(logging.ERROR)
                app_log = logging.getLogger(logger_name)
                app_log.addHandler(nt_event_handler)
            except Exception as e:
                sys.stderr.write(
                    "Error setting up Event viewer logging: '%s'\n" % str(e))
                traceback.print_exc()

    except Exception as e:
        sys.stderr.write("Couldn't initialize logging: %s\n" % str(e))
        traceback.print_exc()

        # if config fails entirely, enable basic stdout logging as a fallback
        logging.basicConfig(
            format=get_log_format(logger_name),
            level=logging.INFO,
        )

    # re-get the log after logging is initialized
    global log
    log = logging.getLogger(__name__)
def __init__(self, agentConfig, emitters, systemStats, hostname):
    """Initialize the agent's main check-running object.

    NOTE(review): the enclosing class header is outside this chunk —
    presumably the Collector; confirm against the full file.

    :param agentConfig: parsed agent configuration dict (see get_config)
    :param emitters: callables used to push payloads out
    :param systemStats: dict produced by config.get_system_stats
    :param hostname: resolved hostname to attach to payloads

    Side effects: sets the process-wide socket timeout and, on Linux,
    psutil's PROCFS_PATH.
    """
    self.emit_duration = None
    self.agentConfig = agentConfig
    self.hostname = hostname
    # system stats is generated by config.get_system_stats
    self.agentConfig['system_stats'] = systemStats
    # agent config is used during checks, system_stats can be accessed through the config
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.check_timings = agentConfig.get('check_timings')
    # schedule of periodic metadata pushes: each entry tracks the last run
    # ('start') and how often to repeat ('interval', seconds)
    self.push_times = {
        'host_metadata': {
            'start': time.time(),
            'interval': int(agentConfig.get('metadata_interval', 4 * 60 * 60))
        },
        'external_host_tags': {
            'start': time.time() - 3 * 60,  # Wait for the checks to init
            'interval': int(agentConfig.get('external_host_tags', 5 * 60))
        },
        'agent_checks': {
            'start': time.time(),
            'interval': int(agentConfig.get('agent_checks_interval', 10 * 60))
        },
        'processes': {
            'start': time.time(),
            'interval': int(agentConfig.get('processes_interval', 60))
        }
    }
    # global default timeout for all sockets created by this process
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.hostname_metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = {}

    # honor a custom /proc mount point (e.g. inside containers)
    if Platform.is_linux() and psutil is not None:
        procfs_path = agentConfig.get('procfs_path', '/proc').rstrip('/')
        psutil.PROCFS_PATH = procfs_path

    # Unix System Checks
    self._unix_system_checks = {
        'io': u.IO(log),
        'load': u.Load(log),
        'memory': u.Memory(log),
        'processes': u.Processes(log),
        'cpu': u.Cpu(log),
        'system': u.System(log)
    }

    # Win32 System Checks
    self._win32_system_checks = {
        'io': w32.IO(log),
        'proc': w32.Processes(log),
        'memory': w32.Memory(log),
        'network': w32.Network(log),
        'cpu': w32.Cpu(log),
        'system': w32.System(log)
    }

    # Old-style metric checks (only enabled when configured)
    self._ganglia = Ganglia(log) if self.agentConfig.get('ganglia_host', '') != '' else None
    self._dogstream = None if self.agentConfig.get('dogstreams') is None else Dogstreams.init(log, self.agentConfig)

    # Agent performance metrics check
    self._agent_metrics = None

    self._metrics_checks = []

    # Custom metric checks: comma-separated 'custom_checks' module specs,
    # each expected to expose a 'Check' class taking a logger
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning("Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version")
        except Exception:
            # a broken custom check must not prevent agent startup
            log.exception('Unable to load custom check module %s' % module_spec)
def test_get_os_custom():
    """get_os() must report the overridden platform value 'test-value'."""
    observed = get_os()
    assert observed == "test-value"
def load_check_directory(agentConfig, hostname):
    '''Return the initialized checks from checks.d, and a mapping of checks that
    failed to initialize. Only checks that have a configuration file in conf.d
    (or a service-discovery config) will be returned.

    :param agentConfig: agent configuration dict; 'checksd_hostname' is set here
    :param hostname: hostname injected into each check's config
    :returns: normally {'initialized_checks': [...], 'init_failed_checks': {...}};
        when agentConfig[TRACE_CONFIG] is set, returns a
        {check_name: (config_source, config)} mapping instead.
    '''
    from checks import AGENT_METRICS_CHECK_NAME
    initialized_checks = {}
    init_failed_checks = {}
    deprecated_checks = {}
    agentConfig['checksd_hostname'] = hostname
    osname = get_os()

    # the TRACE_CONFIG flag is used by the configcheck to trace config object loading and
    # where they come from (service discovery, auto config or config file)
    if agentConfig.get(TRACE_CONFIG):
        configs_and_sources = {
            # check_name: (config_source, config)
        }

    deprecated_checks.update(_deprecated_configs(agentConfig))

    checks_places = get_checks_places(osname, agentConfig)

    # Pass 1: file-based configs from conf.d
    for config_path in _file_configs_paths(osname, agentConfig):
        # '/etc/dd-agent/checks.d/my_check.py' -> 'my_check'
        check_name = _conf_path_to_check_name(config_path)

        conf_is_valid, check_config, invalid_check = _load_file_config(
            config_path, check_name, agentConfig)
        # record the failure even if we then skip the check
        init_failed_checks.update(invalid_check)
        if not conf_is_valid:
            continue

        if agentConfig.get(TRACE_CONFIG):
            configs_and_sources[check_name] = (CONFIG_FROM_FILE, check_config)

        # load the check
        load_success, load_failure = load_check_from_places(
            check_config, check_name, checks_places, agentConfig)

        initialized_checks.update(load_success)
        init_failed_checks.update(load_failure)

    # Pass 2: service-discovery configs (file configs take precedence)
    for check_name, service_disco_check_config in _service_disco_configs(
            agentConfig).iteritems():
        # ignore this config from service disco if the check has been loaded through a file config
        if check_name in initialized_checks or check_name in init_failed_checks:
            continue

        # if TRACE_CONFIG is set, service_disco_check_config looks like:
        # (config_src, (sd_init_config, sd_instances)) instead of
        # (sd_init_config, sd_instances)
        if agentConfig.get(TRACE_CONFIG):
            sd_init_config, sd_instances = service_disco_check_config[1]
            configs_and_sources[check_name] = (service_disco_check_config[0],
                                               {
                                                   'init_config': sd_init_config,
                                                   'instances': sd_instances
                                               })
        else:
            sd_init_config, sd_instances = service_disco_check_config

        check_config = {
            'init_config': sd_init_config,
            'instances': sd_instances
        }

        # load the check
        load_success, load_failure = load_check_from_places(
            check_config, check_name, checks_places, agentConfig)

        initialized_checks.update(load_success)
        init_failed_checks.update(load_failure)

    # deprecated configs count as init failures so they get surfaced
    init_failed_checks.update(deprecated_checks)
    log.info('initialized checks.d checks: %s' % [
        k for k in initialized_checks.keys() if k != AGENT_METRICS_CHECK_NAME
    ])
    log.info('initialization failed checks.d checks: %s' %
             init_failed_checks.keys())

    if agentConfig.get(TRACE_CONFIG):
        return configs_and_sources

    return {
        'initialized_checks': initialized_checks.values(),
        'init_failed_checks': init_failed_checks,
    }
def load_check_directory(agentConfig, hostname):
    '''Return the initialized checks from checks.d, and a mapping of checks that
    failed to initialize. Only checks that have a configuration file in conf.d
    (or a service-discovery config) will be returned.

    NOTE(review): this is a byte-for-byte duplicate (modulo formatting) of an
    earlier load_check_directory definition in this file; at import time this
    later definition shadows the earlier one — one of the two should be removed.

    :param agentConfig: agent configuration dict; 'checksd_hostname' is set here
    :param hostname: hostname injected into each check's config
    :returns: normally {'initialized_checks': [...], 'init_failed_checks': {...}};
        when agentConfig[TRACE_CONFIG] is set, returns a
        {check_name: (config_source, config)} mapping instead.
    '''
    from checks import AGENT_METRICS_CHECK_NAME
    initialized_checks = {}
    init_failed_checks = {}
    deprecated_checks = {}
    agentConfig['checksd_hostname'] = hostname
    osname = get_os()

    # the TRACE_CONFIG flag is used by the configcheck to trace config object loading and
    # where they come from (service discovery, auto config or config file)
    if agentConfig.get(TRACE_CONFIG):
        configs_and_sources = {
            # check_name: (config_source, config)
        }

    deprecated_checks.update(_deprecated_configs(agentConfig))

    checks_places = get_checks_places(osname, agentConfig)

    # Pass 1: file-based configs from conf.d
    for config_path in _file_configs_paths(osname, agentConfig):
        # '/etc/dd-agent/checks.d/my_check.py' -> 'my_check'
        check_name = _conf_path_to_check_name(config_path)

        conf_is_valid, check_config, invalid_check = _load_file_config(config_path, check_name, agentConfig)
        # record the failure even if we then skip the check
        init_failed_checks.update(invalid_check)
        if not conf_is_valid:
            continue

        if agentConfig.get(TRACE_CONFIG):
            configs_and_sources[check_name] = (CONFIG_FROM_FILE, check_config)

        # load the check
        load_success, load_failure = load_check_from_places(check_config, check_name, checks_places, agentConfig)

        initialized_checks.update(load_success)
        init_failed_checks.update(load_failure)

    # Pass 2: service-discovery configs (file configs take precedence)
    for check_name, service_disco_check_config in _service_disco_configs(agentConfig).iteritems():
        # ignore this config from service disco if the check has been loaded through a file config
        if check_name in initialized_checks or check_name in init_failed_checks:
            continue

        # if TRACE_CONFIG is set, service_disco_check_config looks like:
        # (config_src, (sd_init_config, sd_instances)) instead of
        # (sd_init_config, sd_instances)
        if agentConfig.get(TRACE_CONFIG):
            sd_init_config, sd_instances = service_disco_check_config[1]
            configs_and_sources[check_name] = (
                service_disco_check_config[0],
                {'init_config': sd_init_config, 'instances': sd_instances})
        else:
            sd_init_config, sd_instances = service_disco_check_config

        check_config = {'init_config': sd_init_config, 'instances': sd_instances}

        # load the check
        load_success, load_failure = load_check_from_places(check_config, check_name, checks_places, agentConfig)

        initialized_checks.update(load_success)
        init_failed_checks.update(load_failure)

    # deprecated configs count as init failures so they get surfaced
    init_failed_checks.update(deprecated_checks)
    log.info('initialized checks.d checks: %s' %
             [k for k in initialized_checks.keys() if k != AGENT_METRICS_CHECK_NAME])
    log.info('initialization failed checks.d checks: %s' % init_failed_checks.keys())

    if agentConfig.get(TRACE_CONFIG):
        return configs_and_sources

    return {'initialized_checks': initialized_checks.values(),
            'init_failed_checks': init_failed_checks,
            }
def test_get_os_sunos():
    """SunOS platforms must be reported by get_os() as 'solaris'."""
    reported = get_os()
    assert reported == "solaris"
def get_logging_config(cfg_path=None):
    """Build the logging configuration dict from defaults + datadog.conf.

    :param cfg_path: optional explicit config-file path override passed
        through to get_config_path.
    :returns: dict with log level, per-component log-file paths, and
        syslog / event-viewer settings.
    """
    system_os = get_os()
    # defaults, overridden below by [Main] options of the same name
    logging_config = {
        'log_level': None,
        'log_to_event_viewer': False,
        'log_to_syslog': False,
        'syslog_host': None,
        'syslog_port': None,
    }
    if system_os == 'windows':
        # per-component log files live under the common appdata directory
        logging_config['windows_collector_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'collector.log')
        logging_config['windows_forwarder_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'forwarder.log')
        logging_config['windows_dogstatsd_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'dogstatsd.log')
        logging_config['jmxfetch_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'jmxfetch.log')
    else:
        logging_config['collector_log_file'] = '/var/log/datadog/collector.log'
        logging_config['forwarder_log_file'] = '/var/log/datadog/forwarder.log'
        logging_config['dogstatsd_log_file'] = '/var/log/datadog/dogstatsd.log'
        logging_config['jmxfetch_log_file'] = '/var/log/datadog/jmxfetch.log'
        logging_config['go-metro_log_file'] = '/var/log/datadog/go-metro.log'
        # syslog is on by default on unix
        logging_config['log_to_syslog'] = True

    config_path = get_config_path(cfg_path, os_name=system_os)
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    # old-style Python logging config sections are rejected with a warning
    if config.has_section('handlers') or config.has_section('loggers') or config.has_section('formatters'):
        if system_os == 'windows':
            config_example_file = "https://github.com/DataDog/dd-agent/blob/master/packaging/datadog-agent/win32/install_files/datadog_win32.conf"
        else:
            config_example_file = "https://github.com/DataDog/dd-agent/blob/master/datadog.conf.example"

        sys.stderr.write("""Python logging config is no longer supported and will be ignored.
            To configure logging, update the logging portion of 'datadog.conf' to match:
             '%s'.
            """ % config_example_file)

    # bulk override: any [Main] option whose name matches a default key
    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        # unknown level names resolve to None (caller falls back to INFO)
        logging_config['log_level'] = levels.get(config.get('Main', 'log_level'))

    # NOTE(review): config.get returns strings, so the int 1 in these
    # membership lists can never match — only 'yes'/'true' enable the flag
    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get('Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'log_to_event_viewer'):
        logging_config['log_to_event_viewer'] = config.get('Main', 'log_to_event_viewer').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except Exception:
            # non-numeric port: fall back to the local syslog socket
            logging_config['syslog_port'] = None

    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get('Main', 'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False

    return logging_config
def get_os_linux(platform):
    """Linux platforms must be reported by get_os() as 'linux'.

    NOTE(review): missing the 'test_' prefix, so pytest will not collect
    this function; 'platform' is presumably a fixture — confirm upstream.
    """
    result = get_os()
    assert result == "linux"
def get_config(parse_args=True, cfg_path=None, options=None):
    """Read the agent configuration file and return the agentConfig dict.

    :param parse_args: when True, re-parse command-line options (overwrites
        the ``options`` argument).
    :param cfg_path: optional explicit path to the config file.
    :param options: pre-parsed command-line options (used for --profile,
        --use-forwarder, --dd_url, --autorestart overrides).
    :returns: dict of all agent settings, with defaults applied.

    Exits the process (sys.exit) when the config file is missing/unparsable
    or when api_key / dd_url are absent.
    """
    if parse_args:
        options, _ = get_parsed_args()

    # General config defaults; [Main] options override these below
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8225,
        'dogstatsd_target': 'http://localhost:18123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/sts-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig[
            'additional_checksd'] = '/opt/stackstate-agent/etc/checks.d'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import: every [Main] option lands in agentConfig as a string
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(
                config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        # Core config: api_key and dd_url are mandatory
        if not config.has_option('Main', 'api_key'):
            log.warning(u"No API key was found. Aborting.")
            sys.exit(2)

        if not config.has_option('Main', 'dd_url'):
            log.warning(u"No dd_url was found. Aborting.")
            sys.exit(2)

        # Endpoints: both options accept comma-separated lists
        dd_urls = map(clean_dd_url, config.get('Main', 'dd_url').split(','))
        api_keys = map(lambda el: el.strip(), config.get('Main', 'api_key').split(','))

        # For collector and dogstatsd
        agentConfig['dd_url'] = dd_urls[0]
        agentConfig['api_key'] = api_keys[0]

        # Forwarder endpoints logic
        # endpoints is:
        # {
        #     'https://app.example.com': ['api_key_xyz']
        # }
        endpoints = {}
        dd_urls = remove_empty(dd_urls)
        api_keys = remove_empty(api_keys)
        if len(dd_urls) == 1:
            # single url: all api keys map to it
            if len(api_keys) > 0:
                endpoints[dd_urls[0]] = api_keys
        else:
            # multiple urls: require a 1:1 pairing with api keys
            assert len(dd_urls) == len(
                api_keys), 'Please provide one api_key for each url'
            for i, dd_url in enumerate(dd_urls):
                endpoints[dd_url] = endpoints.get(dd_url, []) + [api_keys[i]]
        agentConfig['endpoints'] = endpoints

        # Forwarder or not forwarder: when routing through the forwarder,
        # point the collector/dogstatsd url at the local listen port
        agentConfig[
            'use_forwarder'] = options is not None and options.use_forwarder
        if agentConfig['use_forwarder']:
            listen_port = 18123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://{}:{}".format(
                agentConfig['bind_host'], listen_port)
        # FIXME: Legacy dd_url command line switch
        elif options is not None and options.dd_url is not None:
            agentConfig['dd_url'] = options.dd_url

        # Forwarder timeout
        agentConfig['forwarder_timeout'] = 20
        if config.has_option('Main', 'forwarder_timeout'):
            agentConfig['forwarder_timeout'] = int(
                config.get('Main', 'forwarder_timeout'))

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get(
                'Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(
                common_path, 'StackState', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get(
                'Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except:
                # best-effort: a broken SD config disables SD, nothing else
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get(
                'Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get(
                'Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (
                use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(
                    config.get('Main', 'check_freq'))
            except Exception:
                # keep the default frequency on a malformed value
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(
                config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(
                config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8225,
            'dogstatsd_target': 'http://' + agentConfig['bind_host'] + ':18123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get(
                'Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(
                    config.get('Main', 'statsd_forward_port'))

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(
                config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(
                config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(
                config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(
                config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            # option is optional; no filtering when absent
            pass

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join(
                    [log_path, config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path
        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get(
                "Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(
                config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(
                config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(
                config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(
                config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(
                config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(
                config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(
                config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(
                config.get("Main", "gce_updated_hostname"))

    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.ParsingError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.NoOptionError as e:
        # non-fatal: report the missing option and continue with defaults
        sys.stderr.write(
            'There are some items missing from your config file, but nothing fatal [%s]' % e)

    # Storing proxy settings in the agentConfig
    agentConfig['proxy_settings'] = get_proxy(agentConfig)
    if agentConfig.get('ca_certs', None) is None:
        agentConfig['ssl_certificate'] = get_ssl_certificate(
            get_os(), 'stackstate-cert.pem')
    else:
        agentConfig['ssl_certificate'] = agentConfig['ca_certs']

    return agentConfig
def get_os_custom(platform):
    """get_os() must report the overridden platform value 'test-value'.

    NOTE(review): missing the 'test_' prefix, so pytest will not collect
    this function; 'platform' is presumably a fixture — confirm upstream.
    """
    observed = get_os()
    assert observed == "test-value"
def test_get_os_freebsd():
    """FreeBSD platforms must be reported by get_os() as 'freebsd'."""
    reported = get_os()
    assert reported == "freebsd"