def _set_up_environment():
    # Trace variables need to be set in the global environment
    # since the Datadog Java Trace Agent does not live inside
    # the Datadog Agent process
    if _is_dd_tracing_enabled():
        os.environ["DD_SERVICE_NAME"] = _get_service()
        os.environ["DD_JMXFETCH_ENABLED"] = "false"
        dbconfig = database.get_config()
        if dbconfig:
            os.environ["DD_SERVICE_MAPPING"] = "{}:{}.db".format(
                dbconfig["DatabaseType"].lower(), _get_service()
            )

    e = dict(os.environ.copy())

    # Everything in datadog.yaml can be configured with environment variables
    # This is the "official way" of working with the DD buildpack,
    # so let's do this to ensure forward compatibility
    e["DD_API_KEY"] = get_api_key()
    e["DD_HOSTNAME"] = util.get_hostname()

    # Explicitly turn off tracing to ensure backward compatibility
    if not _is_dd_tracing_enabled():
        e["DD_TRACE_ENABLED"] = "false"

    e["DD_LOGS_ENABLED"] = "true"
    e["DD_LOG_FILE"] = "/dev/null"

    tags = util.get_tags()
    if tags:
        e["DD_TAGS"] = ",".join(tags)

    e["DD_PROCESS_CONFIG_LOG_FILE"] = "/dev/null"
    e["DD_DOGSTATSD_PORT"] = str(_get_statsd_port())

    # Include for forward-compatibility with DD buildpack
    e["DD_ENABLE_CHECKS"] = "true"
    e["DATADOG_DIR"] = str(os.path.abspath(DD_AGENT_DIR))

    return e
def _set_up_environment():
    # Trace variables need to be set in the global environment
    # since the Datadog Java Trace Agent does not live inside
    # the Datadog Agent process
    e = dict(os.environ.copy())

    # Everything in datadog.yaml can be configured with environment variables
    # This is the "official way" of working with the DD buildpack,
    # so let's do this to ensure forward compatibility
    e["DD_API_KEY"] = get_api_key()
    e["DD_HOSTNAME"] = util.get_hostname()
    e["DD_LOG_FILE"] = "/dev/null"
    e["DD_PROCESS_CONFIG_LOG_FILE"] = "/dev/null"
    e["DD_DOGSTATSD_PORT"] = str(get_statsd_port())

    # Transform and append tags
    e["DD_TAGS"] = _get_datadog_tags()
    if "TAGS" in e:
        del e["TAGS"]

    # Explicitly enable or disable tracing
    e["DD_TRACE_ENABLED"] = str(_is_tracing_enabled()).lower()

    # Set Datadog Cloud Foundry Buildpack specific environment variables
    e["DATADOG_DIR"] = str(_get_agent_dir())
    e["RUN_AGENT"] = "true"
    e["DD_LOGS_ENABLED"] = "true"
    e["DD_ENABLE_CHECKS"] = "false"
    e["LOGS_CONFIG"] = json.dumps(_get_logging_config())

    return e
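# A minimal sketch of how the environment assembled by _set_up_environment()
# could be consumed: the agent runs as a separate process with this dict as
# its environment. The script name "run-datadog.sh" and the helper below are
# illustrative assumptions, not the buildpack's actual launch code.
import os
import subprocess


def _start_agent_sketch():
    env = _set_up_environment()
    # All values in the dict are strings, as a subprocess environment requires
    return subprocess.Popen(
        [os.path.join(env["DATADOG_DIR"], "run-datadog.sh")], env=env
    )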
def update_config(m2ee, app_name):
    if not is_enabled() or not _is_installed():
        return

    # Telegraf config, taking over defaults from telegraf.conf from the distro
    logging.debug("creating telegraf config")
    _create_config_file(
        {
            "interval": "10s",
            "round_interval": True,
            "metric_batch_size": 1000,
            "metric_buffer_limit": 10000,
            "collection_jitter": "0s",
            "flush_interval": "10s",
            "flush_jitter": "5s",
            "precision": "",
            "debug": False,
            "logfile": "",
            "hostname": util.get_hostname(),
            "omit_hostname": False,
        }
    )

    _write_config("[global_tags]", _get_tags())
    _write_config(
        "[[inputs.statsd]]",
        {
            "protocol": "udp",
            "max_tcp_connections": 250,
            "tcp_keep_alive": False,
            "service_address": ":8125",
            "delete_gauges": True,
            "delete_counters": True,
            "delete_sets": True,
            "delete_timings": True,
            "percentiles": [90],
            "metric_separator": ".",
            "parse_data_dog_tags": True,
            "allowed_pending_messages": 10000,
            "percentile_limit": 1000,
        },
    )

    # Forward metrics also to Datadog when enabled
    if datadog.is_enabled():
        _write_config("[[outputs.datadog]]", {"apikey": datadog.get_api_key()})

    # Write http_outputs (one or array)
    http_configs = json.loads(_get_appmetrics_target())
    if isinstance(http_configs, list):
        for http_config in http_configs:
            _write_http_output_config(http_config)
    else:
        _write_http_output_config(http_configs)

    # Enable the Datadog Java Agent on the Mendix runtime
    datadog.enable_mx_java_agent(m2ee)
def _set_up_environment(model_version, runtime_version):
    e = dict(os.environ.copy())

    # Everything in datadog.yaml can be configured with environment variables
    # This is the "official way" of working with the DD buildpack,
    # so let's do this to ensure forward compatibility

    # Trace variables need to be set in the global environment
    # since the Datadog Java Trace Agent does not live inside
    # the Datadog Agent process
    e["DD_API_KEY"] = get_api_key()
    e["DD_HOSTNAME"] = util.get_hostname()
    e["DD_LOG_FILE"] = "/dev/null"
    e["DD_PROCESS_CONFIG_LOG_FILE"] = "/dev/null"
    e["DD_DOGSTATSD_PORT"] = str(get_statsd_port())

    # Transform and append tags
    e["DD_TAGS"] = _get_datadog_tags(model_version)
    if "TAGS" in e:
        del e["TAGS"]

    # Explicitly add reserved tags
    e["DD_ENV"] = get_env_tag()
    e["DD_VERSION"] = get_version_tag(model_version)
    e["DD_SERVICE"] = get_service_tag()
    if "DD_SERVICE_NAME" in e:
        del e["DD_SERVICE_NAME"]

    # Explicitly enable or disable tracing and profiling
    e["DD_TRACE_ENABLED"] = str(bool(_is_tracing_enabled())).lower()
    e["DD_PROFILING_ENABLED"] = str(
        bool(_is_profiling_enabled(runtime_version))
    ).lower()

    # Set Datadog Cloud Foundry Buildpack specific environment variables
    e["DATADOG_DIR"] = str(_get_agent_dir())
    e["RUN_AGENT"] = "true"
    e["DD_LOGS_ENABLED"] = "true"
    e["DD_ENABLE_CHECKS"] = str(bool(_is_checks_enabled())).lower()
    e["LOGS_CONFIG"] = json.dumps(_get_logging_config())

    # Has to be set explicitly since DD buildpack 4.22.0
    e["SUPPRESS_DD_AGENT_OUTPUT"] = "false"

    return e
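# The str(bool(...)).lower() idiom used above normalizes any truthy or falsy
# Python value into the lowercase "true" / "false" strings the Datadog Agent
# expects in its environment variables:
assert str(bool(1)).lower() == "true"
assert str(bool(None)).lower() == "false"
assert str(bool("")).lower() == "false"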
def update_config(m2ee, app_name):
    if not is_enabled() or not _is_installed():
        return

    # Populate Telegraf config template
    statsd_port = None
    if mx_java_agent.meets_version_requirements(
        m2ee.config.get_runtime_version()
    ):
        statsd_port = get_statsd_port()

    template_path = os.path.join(CONFIG_FILE_DIR, TEMPLATE_FILENAME)

    tags = util.get_tags()
    if datadog.is_enabled() and "service" not in tags:
        # app and / or service tag not set
        tags["service"] = datadog.get_service()

    with open(template_path, "r") as file_:
        template = Template(
            file_.read(), trim_blocks=True, lstrip_blocks=True
        )

    rendered = template.render(
        interval=10,  # in seconds
        tags=tags,
        hostname=util.get_hostname(),
        statsd_port=statsd_port,
        db_config=_get_db_config(),
        database_diskstorage_metric_enabled=datadog.is_database_diskstorage_metric_enabled(),
        database_rate_count_metrics_enabled=datadog.is_database_rate_count_metrics_enabled(),
        datadog_api_key=datadog.get_api_key(),
        datadog_url="{}series/".format(datadog.get_api_url()),
        http_outputs=_get_http_outputs(),
    )

    logging.debug("Writing Telegraf configuration file...")
    with open(CONFIG_FILE_PATH, "w") as file_:
        file_.write(rendered)
    logging.debug("Telegraf configuration file written")
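# A self-contained illustration of the Jinja2 rendering pattern used above.
# The template string is a made-up stand-in for the real Telegraf template;
# trim_blocks and lstrip_blocks strip block-tag newlines and indentation so
# the rendered TOML stays clean.
from jinja2 import Template

_example = Template(
    'interval = "{{ interval }}s"\n'
    "{% if statsd_port %}\n"
    "[[inputs.statsd]]\n"
    '  service_address = ":{{ statsd_port }}"\n'
    "{% endif %}\n",
    trim_blocks=True,
    lstrip_blocks=True,
)
print(_example.render(interval=10, statsd_port=8125))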
def update_config(m2ee, app_name):
    if not is_enabled() or not _is_installed():
        return

    # Telegraf config, taking over defaults from telegraf.conf from the distro
    logging.debug("creating telegraf config")
    _create_config_file(
        {
            "interval": "10s",
            "round_interval": True,
            "metric_batch_size": 1000,
            "metric_buffer_limit": 10000,
            "collection_jitter": "0s",
            "flush_interval": "10s",
            "flush_jitter": "5s",
            "precision": "",
            "debug": False,
            "logfile": "",
            "hostname": util.get_hostname(),
            "omit_hostname": False,
        }
    )

    _write_config("[global_tags]", _get_tags())
    _write_config(
        "[[inputs.statsd]]",
        {
            "protocol": "udp",
            "max_tcp_connections": 250,
            "tcp_keep_alive": False,
            "service_address": ":8125",
            "delete_gauges": True,
            "delete_counters": True,
            "delete_sets": True,
            "delete_timings": True,
            "percentiles": [90],
            "metric_separator": ".",
            "parse_data_dog_tags": True,
            "allowed_pending_messages": 10000,
            "percentile_limit": 1000,
        },
    )

    # Configure PostgreSQL input plugin
    if include_db_metrics():
        db_config = database.get_config()
        if db_config:
            _write_config(
                "[[inputs.postgresql]]",
                {
                    "address": "postgres://{}:{}@{}/{}".format(
                        db_config["DatabaseUserName"],
                        db_config["DatabasePassword"],
                        db_config["DatabaseHost"],
                        db_config["DatabaseName"],
                    )
                },
            )

    # Forward metrics also to Datadog when enabled
    if datadog.is_enabled():
        _write_config("[[outputs.datadog]]", {"apikey": datadog.get_api_key()})

    # Write http_outputs (one or array)
    try:
        http_configs = json.loads(_get_appmetrics_target())
    except ValueError:
        logging.error(
            "Invalid APPMETRICS_TARGET set. "
            "Please check if it contains valid JSON."
        )
        return

    if isinstance(http_configs, list):
        for http_config in http_configs:
            _write_http_output_config(http_config)
    else:
        _write_http_output_config(http_configs)
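# APPMETRICS_TARGET may hold either a single JSON object or a JSON array of
# objects, which is why both branches above exist. Only the object-or-array
# shape is taken from the code above; the "url" key is an illustrative
# assumption:
import json

for raw in (
    '{"url": "https://metrics.example.com/write"}',
    '[{"url": "https://a.example.com"}, {"url": "https://b.example.com"}]',
):
    parsed = json.loads(raw)
    targets = parsed if isinstance(parsed, list) else [parsed]
    for target in targets:
        print(target["url"])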