# ddtrace gRPC integration bootstrap (legacy version): registers default
# client/server configuration and exposes patch() to install interceptors.
import grpc
import os

from ddtrace.vendor.wrapt import wrap_function_wrapper as _w

from ddtrace import config, Pin

from ...utils.wrappers import unwrap as _u

from . import constants
from .client_interceptor import create_client_interceptor, intercept_channel
from .server_interceptor import create_server_interceptor


# Server spans use DATADOG_SERVICE_NAME when set, else the integration's
# server-service constant.
config._add('grpc_server', dict(
    service_name=os.environ.get('DATADOG_SERVICE_NAME', constants.GRPC_SERVICE_SERVER),
    distributed_tracing_enabled=True,
))

# TODO[tbutt]: keeping name for client config unchanged to maintain backwards
# compatibility but should change in future
config._add('grpc', dict(
    # "<DATADOG_SERVICE_NAME>-<client constant>" when the env var is set,
    # otherwise just the client constant.
    service_name='{}-{}'.format(
        os.environ.get('DATADOG_SERVICE_NAME'), constants.GRPC_SERVICE_CLIENT
    ) if os.environ.get('DATADOG_SERVICE_NAME') else constants.GRPC_SERVICE_CLIENT,
    distributed_tracing_enabled=True,
))


def patch():
    # Install both client- and server-side instrumentation.
    _patch_client()
    _patch_server()
# ddtrace aredis integration: wraps the async StrictRedis client so commands
# and pipelines are traced via the shared redis helpers.
import aredis

from ddtrace import config
from ddtrace.vendor import wrapt

from ...internal.utils.wrappers import unwrap
from ...pin import Pin
from ..redis.util import _trace_redis_cmd
from ..redis.util import _trace_redis_execute_pipeline
from ..redis.util import format_command_args


# aredis spans report under the "redis" service by default.
config._add("aredis", dict(_default_service="redis"))


def patch():
    """Patch the instrumented methods"""
    # Idempotency guard: repeated patch() calls must not double-wrap.
    if getattr(aredis, "_datadog_patch", False):
        return
    setattr(aredis, "_datadog_patch", True)

    _w = wrapt.wrap_function_wrapper
    _w("aredis.client", "StrictRedis.execute_command", traced_execute_command)
    _w("aredis.client", "StrictRedis.pipeline", traced_pipeline)
    _w("aredis.pipeline", "StrictPipeline.execute", traced_execute_pipeline)
    _w("aredis.pipeline", "StrictPipeline.immediate_execute_command", traced_execute_command)
    # Pin lets users override service/tracer on the client class.
    Pin(service=None).onto(aredis.StrictRedis)


def unpatch():
from .wrappers import wrap_signal

log = get_logger(__name__)

# Span tag names used by the Flask integration.
FLASK_ENDPOINT = "flask.endpoint"
FLASK_VIEW_ARGS = "flask.view_args"
FLASK_URL_RULE = "flask.url_rule"
FLASK_VERSION = "flask.version"

# Configure default configuration
config._add(
    "flask",
    dict(
        # Flask service configuration
        _default_service="flask",
        app="flask",
        collect_view_args=True,
        distributed_tracing_enabled=True,
        template_default_name="<memory>",
        trace_signals=True,
    ),
)

# Extract flask version into a tuple e.g. (0, 12, 1) or (1, 0, 2)
# DEV: This makes it so we can do `if flask_version >= (0, 12, 0):`
# DEV: Example tests:
#      (0, 10, 0) > (0, 10)
#      (0, 10, 0) >= (0, 10, 0)
#      (0, 10, 1) >= (0, 10)
#      (0, 11, 1) >= (0, 10)
#      (0, 11, 1) >= (0, 10, 2)
#      (1, 0, 0) >= (0, 10)
# ddtrace pynamodb integration: traces PynamoDB API calls by wrapping the
# low-level Connection._make_api_call.
from ...constants import ANALYTICS_SAMPLE_RATE_KEY
from ...constants import SPAN_MEASURED_KEY
from ...ext import SpanTypes
from ...pin import Pin
from ...utils import ArgumentError
from ...utils import get_argument_value
from ...utils.formats import deep_getattr
from ...utils.wrappers import unwrap

# Pynamodb connection class
_PynamoDB_client = pynamodb.connection.base.Connection

config._add(
    "pynamodb",
    {
        "_default_service": "pynamodb",
    },
)


def patch():
    # Idempotency guard: repeated patch() calls must not double-wrap.
    if getattr(pynamodb.connection.base, "_datadog_patch", False):
        return
    setattr(pynamodb.connection.base, "_datadog_patch", True)

    wrapt.wrap_function_wrapper("pynamodb.connection.base", "Connection._make_api_call", patched_api_call)
    # Pin lets users override service/tracer on the Connection class.
    Pin(service=None).onto(pynamodb.connection.base.Connection)


def unpatch():
    if getattr(pynamodb.connection.base, "_datadog_patch", False):
# ddtrace sanic integration head: default config plus the response-callback
# wrapper used to finish request spans.
import ddtrace
from ddtrace import config
from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY
from ddtrace.ext import SpanTypes
from ddtrace.propagation.http import HTTPPropagator
from ddtrace.utils.wrappers import unwrap as _u
from ddtrace.vendor import wrapt
from ddtrace.vendor.wrapt import wrap_function_wrapper as _w

from .. import trace_utils
from ...internal.logger import get_logger


log = get_logger(__name__)

config._add("sanic", dict(_default_service="sanic", distributed_tracing=True))


def _wrap_response_callback(span, callback):
    # wrap response callbacks (either sync or async function) to set span tags
    # based on response and finish span before returning response

    def update_span(response):
        if isinstance(response, sanic.response.BaseHTTPResponse):
            status_code = response.status
            response_headers = response.headers
        else:
            # invalid response causes ServerError exception which must be handled
            status_code = 500
            response_headers = None
        trace_utils.set_http_meta(span,
    This option can also be set with the ``DD_PYTEST_SERVICE`` environment
    variable.

    Default: ``"pytest"``

.. py:data:: ddtrace.config.pytest["operation_name"]

   The operation name reported by default for pytest traces.

   This option can also be set with the ``DD_PYTEST_OPERATION_NAME`` environment
   variable.

   Default: ``"pytest.test"``
"""

from ddtrace import config

from ...utils.formats import get_env


# pytest default settings
config._add(
    "pytest",
    dict(
        _default_service="pytest",
        # Operation name may be overridden via DD_PYTEST_OPERATION_NAME.
        operation_name=get_env("pytest", "operation_name", default="pytest.test"),
    ),
)
# ddtrace elasticsearch integration head: default config plus discovery of
# the per-major-version client packages.
from ddtrace.contrib.trace_utils import ext_service
from ddtrace.vendor.wrapt import wrap_function_wrapper as _w

from ...constants import ANALYTICS_SAMPLE_RATE_KEY
from ...constants import SPAN_MEASURED_KEY
from ...ext import SpanTypes
from ...ext import elasticsearch as metadata
from ...ext import http
from ...internal.compat import urlencode
from ...internal.utils.wrappers import unwrap as _u
from ...pin import Pin
from .quantize import quantize


config._add(
    "elasticsearch",
    {
        "_default_service": "elasticsearch",
    },
)


def _es_modules():
    # Each elasticsearch major version can ship under its own top-level
    # module name; try them all.
    module_names = (
        "elasticsearch",
        "elasticsearch1",
        "elasticsearch2",
        "elasticsearch5",
        "elasticsearch6",
        "elasticsearch7",
    )
    for module_name in module_names:
        try:
from ...utils.formats import get_env
from ...utils.wrappers import unwrap

# Original botocore client class
_Botocore_client = botocore.client.BaseClient

ARGS_NAME = ("action", "params", "path", "verb")
TRACED_ARGS = {"params", "path", "verb"}

log = get_logger(__name__)

# Botocore default settings
config._add(
    "botocore",
    {
        "distributed_tracing": get_env("botocore", "distributed_tracing", default=True),
    },
)


def inject_trace_data_to_message_attributes(trace_data, entry):
    """Inject trace context into an SQS message entry's MessageAttributes."""
    if "MessageAttributes" not in entry:
        entry["MessageAttributes"] = {}
    # An Amazon SQS message can contain up to 10 metadata attributes.
    if len(entry["MessageAttributes"]) < 10:
        # Trace context is serialized as a JSON string under the "_datadog" key.
        entry["MessageAttributes"]["_datadog"] = {
            "DataType": "String",
            "StringValue": json.dumps(trace_data)
        }
    else:
# instrument external packages only if they're available import aiohttp_jinja2 from .template import _trace_render_template template_module = True except ImportError: template_module = False except Exception: log.warning( "aiohttp_jinja2 could not be imported and will not be instrumented.", exc_info=True) template_module = False config._add( "aiohttp", dict(distributed_tracing=True, ), ) def patch(): """ Patch aiohttp third party modules: * aiohttp_jinja2 """ if template_module: if getattr(aiohttp_jinja2, "__datadog_patch", False): return setattr(aiohttp_jinja2, "__datadog_patch", True) _w = wrapt.wrap_function_wrapper _w("aiohttp_jinja2", "render_template", _trace_render_template)
# ddtrace jinja2 integration (newer version): traces template render,
# generate, compile and load operations.
from ddtrace import config
from ddtrace.vendor.wrapt import wrap_function_wrapper as _w

from ...constants import SPAN_MEASURED_KEY
from ...ext import SpanTypes
from ...internal.utils import ArgumentError
from ...internal.utils import get_argument_value
from ...pin import Pin
from ..trace_utils import unwrap as _u
from .constants import DEFAULT_TEMPLATE_NAME


# default settings
config._add(
    "jinja2",
    {
        # Service may be overridden via DD_JINJA2_SERVICE_NAME; None otherwise.
        "service_name": os.getenv("DD_JINJA2_SERVICE_NAME"),
    },
)


def patch():
    if getattr(jinja2, "__datadog_patch", False):
        # already patched
        return
    setattr(jinja2, "__datadog_patch", True)
    # Pin carries the integration config onto every Environment instance.
    Pin(
        service=config.jinja2["service_name"],
        _config=config.jinja2,
    ).onto(jinja2.environment.Environment)
    _w(jinja2, "environment.Template.render", _wrap_render)
    _w(jinja2, "environment.Template.generate", _wrap_render)
from ...internal.compat import reraise
from ...internal.logger import get_logger
from .utils import guarantee_single_callable


if TYPE_CHECKING:
    from typing import Any
    from typing import Mapping
    from typing import Optional

    from ddtrace import Span


log = get_logger(__name__)

config._add(
    "asgi",
    dict(service_name=config._get_service(default="asgi"), request_span_name="asgi.request", distributed_tracing=True),
)

# Span tag keys for the ASGI version/spec reported in the server scope.
ASGI_VERSION = "asgi.version"
ASGI_SPEC_VERSION = "asgi.spec_version"


def bytes_to_str(str_or_bytes):
    """Return the value decoded to ``str`` (no-op if it is already a string)."""
    return str_or_bytes.decode() if isinstance(str_or_bytes, bytes) else str_or_bytes


def _extract_versions_from_scope(scope, integration_config):
    tags = {}
# ddtrace CherryPy integration head: default config and the start of the
# TraceTool plugin.
from ddtrace import config
from ddtrace.constants import ERROR_MSG
from ddtrace.constants import ERROR_STACK
from ddtrace.constants import ERROR_TYPE

from .. import trace_utils
from ...ext import SpanTypes
from ...internal import compat
from ...internal.utils.formats import asbool


log = logging.getLogger(__name__)

# Configure default configuration
config._add(
    "cherrypy",
    dict(distributed_tracing=asbool(
        os.getenv("DD_CHERRYPY_DISTRIBUTED_TRACING", default=True)),
    ),
)

SPAN_NAME = "cherrypy.request"


class TraceTool(cherrypy.Tool):
    def __init__(self, app, tracer, service, use_distributed_tracing=None):
        self.app = app
        self._tracer = tracer
        self.service = service
        # Only override the class-level default when explicitly provided.
        if use_distributed_tracing is not None:
            self.use_distributed_tracing = use_distributed_tracing

        # CherryPy uses priority to determine which tools act first on each event. The lower the number, the higher
# ddtrace gRPC integration bootstrap (newer version): client/server default
# configuration using _default_service.
from ddtrace import Pin
from ddtrace import config
from ddtrace.vendor.wrapt import wrap_function_wrapper as _w

from . import constants
from . import utils
from ...utils.wrappers import unwrap as _u
from .client_interceptor import create_client_interceptor
from .client_interceptor import intercept_channel
from .server_interceptor import create_server_interceptor


config._add(
    "grpc_server",
    dict(
        _default_service=constants.GRPC_SERVICE_SERVER,
        distributed_tracing_enabled=True,
    ),
)

# TODO[tbutt]: keeping name for client config unchanged to maintain backwards
# compatibility but should change in future
config._add(
    "grpc",
    dict(
        _default_service=constants.GRPC_SERVICE_CLIENT,
        distributed_tracing_enabled=True,
    ),
)
from ...ext import sql
from ...internal.logger import get_logger
from ...pin import Pin
from ...utils.attrdict import AttrDict
from ...utils.formats import asbool
from ...utils.formats import get_env
from ...vendor import wrapt
from ..trace_utils import ext_service
from ..trace_utils import iswrapped


log = get_logger(__name__)

config._add(
    "dbapi2",
    dict(
        _default_service="db",
        trace_fetch_methods=asbool(
            get_env("dbapi2", "trace_fetch_methods", default=False)),
    ),
)


class TracedCursor(wrapt.ObjectProxy):
    """TracedCursor wraps a psql cursor and traces its queries."""

    def __init__(self, cursor, pin, cfg):
        super(TracedCursor, self).__init__(cursor)
        pin.onto(self)
        # Span name is "<app>.query"; falls back to "sql" when the Pin has no app.
        name = pin.app or "sql"
        self._self_datadog_name = "{}.query".format(name)
        self._self_last_execute_operation = None
        self._self_config = _override_dbapi2_config(cfg)
# ddtrace jinja2 integration (older version): traces template render,
# generate, compile and load operations.
import jinja2

from ddtrace.vendor.wrapt import wrap_function_wrapper as _w
from ddtrace import config

from ...ext import http
from ...utils.formats import get_env
from ...pin import Pin
from ...utils.wrappers import unwrap as _u
from .constants import DEFAULT_TEMPLATE_NAME


# default settings
config._add('jinja2', {
    # Service may be overridden via the integration env var; None otherwise.
    'service_name': get_env('jinja2', 'service_name', None),
})


def patch():
    if getattr(jinja2, '__datadog_patch', False):
        # already patched
        return
    setattr(jinja2, '__datadog_patch', True)
    # Pin carries the integration config onto every Environment instance.
    Pin(
        service=config.jinja2['service_name'],
        _config=config.jinja2,
    ).onto(jinja2.environment.Environment)
    _w(jinja2, 'environment.Template.render', _wrap_render)
    _w(jinja2, 'environment.Template.generate', _wrap_render)
    _w(jinja2, 'environment.Environment.compile', _wrap_compile)
    _w(jinja2, 'environment.Environment._load_template', _wrap_load_template)
from ...internal.logger import get_logger
from ...pin import Pin
from ...utils import ArgumentError
from ...utils import get_argument_value
from ...vendor import wrapt
from ..trace_utils import ext_service
from ..trace_utils import iswrapped


log = get_logger(__name__)

config._add(
    "dbapi2",
    dict(
        _default_service="db",
        trace_fetch_methods=None,  # Part of the API. Should be implemented at the integration level.
    ),
)


class TracedCursor(wrapt.ObjectProxy):
    """TracedCursor wraps a psql cursor and traces its queries."""

    def __init__(self, cursor, pin, cfg):
        super(TracedCursor, self).__init__(cursor)
        pin.onto(self)
        # Span name is "<app>.query"; falls back to "sql" when the Pin has no app.
        name = pin.app or "sql"
        self._self_datadog_name = "{}.query".format(name)
        self._self_last_execute_operation = None
        # Explicit per-cursor cfg wins; otherwise use the global dbapi2 config.
        self._self_config = cfg or config.dbapi2
# ddtrace FastAPI integration: installs the ASGI TraceMiddleware into new
# FastAPI apps via a wrapped __init__.
from fastapi.middleware import Middleware

from ddtrace import config
from ddtrace.contrib.asgi.middleware import TraceMiddleware
from ddtrace.contrib.starlette.patch import get_resource
from ddtrace.internal.logger import get_logger
from ddtrace.utils.wrappers import unwrap as _u
from ddtrace.vendor.wrapt import wrap_function_wrapper as _w


log = get_logger(__name__)

config._add(
    "fastapi",
    dict(
        _default_service="fastapi",
        request_span_name="fastapi.request",
        distributed_tracing=True,
        aggregate_resources=True,
    ),
)


def span_modifier(span, scope):
    """Set the span resource to "<METHOD> <route>" when a route resource is found."""
    resource = get_resource(scope)
    if config.fastapi["aggregate_resources"] and resource:
        span.resource = "{} {}".format(scope["method"], resource)


def traced_init(wrapped, instance, args, kwargs):
    mw = kwargs.pop("middleware", [])
    mw.insert(
from ...propagation.http import HTTPPropagator
from ...utils.formats import get_env
from ...utils.wrappers import unwrap
from .constants import DEFAULT_SERVICE
from .utils import HEADER_POS
from .utils import extract_conn_tags
from .utils import get_body_length_from_args
from .utils import get_exchange_from_args
from .utils import get_routing_key_from_args


# kombu default settings
config._add(
    "kombu",
    {
        # The global service name takes precedence over the integration-level
        # env var; DEFAULT_SERVICE is the final fallback.
        "service_name": config.service or get_env("kombu", "service_name", default=DEFAULT_SERVICE),
    },
)

propagator = HTTPPropagator


def patch():
    """Patch the instrumented methods

    This duplication doesn't look nice. The nicer alternative is to use an ObjectProxy on top
    of Kombu. However, it means that any "import kombu.Connection" won't be instrumented.
    """
    if getattr(kombu, "_datadog_patch", False):
        return
# ddtrace pymysql integration: wraps pymysql.connect so every new connection
# is returned as a TracedConnection.
# 3p
from ddtrace.vendor import wrapt
import pymysql

# project
from ddtrace import config, Pin
from ddtrace.contrib.dbapi import TracedConnection
from ...ext import net, db


config._add(
    "pymysql",
    dict(
        # TODO[v1.0] this should be "mysql"
        _default_service="pymysql",
    ))

# Maps a span tag name to the pymysql Connection attribute that supplies it.
CONN_ATTR_BY_TAG = {
    net.TARGET_HOST: 'host',
    net.TARGET_PORT: 'port',
    # BUG FIX: this value had been clobbered to '******' (a redaction
    # placeholder, not a valid attribute); pymysql exposes the login name
    # on `Connection.user`.
    db.USER: 'user',
    db.NAME: 'db',
}


def patch():
    """Activate tracing for pymysql connections.

    BUG FIX: added the idempotency guard used by the sibling integrations —
    without it, calling patch() twice stacks wrapt proxies on
    ``pymysql.connect`` and every query produces duplicate spans.
    """
    if getattr(pymysql, '_datadog_patch', False):
        return
    setattr(pymysql, '_datadog_patch', True)
    wrapt.wrap_function_wrapper('pymysql', 'connect', _connect)


def unpatch():
    """Undo patch(): restore the original ``pymysql.connect``."""
    setattr(pymysql, '_datadog_patch', False)
    if isinstance(pymysql.connect, wrapt.ObjectProxy):
        pymysql.connect = pymysql.connect.__wrapped__
# ddtrace mysql-connector integration head: default config plus the tag/
# attribute map used when building connection tags.
import mysql.connector

from ddtrace import Pin
from ddtrace import config
from ddtrace.contrib.dbapi import TracedConnection
from ddtrace.vendor import wrapt

from ...ext import db
from ...ext import net
from ...internal.utils.formats import asbool


config._add(
    "mysql",
    dict(
        _default_service="mysql",
        _dbapi_span_name_prefix="mysql",
        # Fetch-method tracing is opt-in via DD_MYSQL_TRACE_FETCH_METHODS.
        trace_fetch_methods=asbool(
            os.getenv("DD_MYSQL_TRACE_FETCH_METHODS", default=False)),
    ),
)

# Maps a span tag name to the mysql.connector connection attribute that
# supplies it.
CONN_ATTR_BY_TAG = {
    net.TARGET_HOST: "server_host",
    net.TARGET_PORT: "server_port",
    # BUG FIX: this value had been clobbered to "******" (a redaction
    # placeholder, not a valid attribute); mysql.connector exposes the login
    # name via the connection's `.user` property.
    db.USER: "user",
    db.NAME: "database",
}


def patch():
    wrapt.wrap_function_wrapper("mysql.connector", "connect", _connect)
# ddtrace requests integration (older version): wraps Session.request and
# pins the configured service onto the Session class.
from wrapt import wrap_function_wrapper as _w

from ddtrace import config

from ...pin import Pin
from ...utils.formats import asbool, get_env
from ...utils.wrappers import unwrap as _u
from .legacy import _distributed_tracing, _distributed_tracing_setter
from .constants import DEFAULT_SERVICE
from .connection import _wrap_request
from ...ext import AppTypes


# requests default settings
config._add('requests', {
    'service_name': get_env('requests', 'service_name', DEFAULT_SERVICE),
    'distributed_tracing': asbool(get_env('requests', 'distributed_tracing', False)),
    'split_by_domain': asbool(get_env('requests', 'split_by_domain', False)),
})


def patch():
    """Activate http calls tracing"""
    # Idempotency guard: repeated patch() calls must not double-wrap.
    if getattr(requests, '__datadog_patch', False):
        return
    setattr(requests, '__datadog_patch', True)

    _w('requests', 'Session.request', _wrap_request)
    Pin(
        service=config.requests['service_name'],
        app='requests',
        app_type=AppTypes.web,
from ...utils.wrappers import unwrap # Original botocore client class _Botocore_client = botocore.client.BaseClient ARGS_NAME = ("action", "params", "path", "verb") TRACED_ARGS = {"params", "path", "verb"} log = get_logger(__name__) # Botocore default settings config._add( "botocore", { "distributed_tracing": get_env("botocore", "distributed_tracing", default=True), "invoke_with_legacy_context": get_env("botocore", "invoke_with_legacy_context", default=False), }, ) def inject_trace_data_to_message_attributes(trace_data, entry): if "MessageAttributes" not in entry: entry["MessageAttributes"] = {} # An Amazon SQS message can contain up to 10 metadata attributes. if len(entry["MessageAttributes"]) < 10: entry["MessageAttributes"]["_datadog"] = { "DataType": "String", "StringValue": json.dumps(trace_data) }
# ddtrace requests integration (newer version): wraps Session.send and pins
# the integration config onto the Session class.
from ddtrace.vendor.wrapt import wrap_function_wrapper as _w

from ddtrace import config

from ...pin import Pin
from ...utils.formats import asbool, get_env
from ...utils.wrappers import unwrap as _u
from .legacy import _distributed_tracing, _distributed_tracing_setter
from .connection import _wrap_send


# requests default settings
config._add(
    "requests",
    {
        "distributed_tracing": asbool(get_env("requests", "distributed_tracing", default=True)),
        "split_by_domain": asbool(get_env("requests", "split_by_domain", default=False)),
    },
)


def patch():
    """Activate http calls tracing"""
    # Idempotency guard: repeated patch() calls must not double-wrap.
    if getattr(requests, "__datadog_patch", False):
        return
    setattr(requests, "__datadog_patch", True)

    _w("requests", "Session.send", _wrap_send)
    # Pin carries the integration config onto every Session instance.
    Pin(app="requests", _config=config.requests).onto(requests.Session)

    # [Backward compatibility]: `session.distributed_tracing` should point and
# Span tag names used by the Flask integration.
FLASK_ENDPOINT = 'flask.endpoint'
FLASK_VIEW_ARGS = 'flask.view_args'
FLASK_URL_RULE = 'flask.url_rule'
FLASK_VERSION = 'flask.version'

# Configure default configuration
config._add(
    'flask',
    dict(
        # Flask service configuration
        # DEV: Environment variable 'DATADOG_SERVICE_NAME' used for backwards compatibility
        service_name=os.environ.get('DATADOG_SERVICE_NAME') or 'flask',
        app='flask',
        app_type=AppTypes.web,
        collect_view_args=True,
        distributed_tracing_enabled=True,
        template_default_name='<memory>',
        trace_signals=True,

        # We mark 5xx responses as errors, these codes are additional status codes to mark as errors
        # DEV: This is so that if a user wants to see `401` or `403` as an error, they can configure that
        extra_error_codes=set(),
    ))

# Extract flask version into a tuple e.g. (0, 12, 1) or (1, 0, 2)
# DEV: This makes it so we can do `if flask_version >= (0, 12, 0):`
# DEV: Example tests:
#      (0, 10, 0) > (0, 10)
#      (0, 10, 0) >= (0, 10, 0)
#      (0, 10, 1) >= (0, 10)
from ...internal.utils.formats import get_env
from .app import patch_app
from .app import unpatch_app
from .constants import PRODUCER_SERVICE
from .constants import WORKER_SERVICE


# NOTE(review): presumably relaxes ddtrace's fork handling for Celery's
# prefork workers — confirm against ddtrace.internal.forksafe.
forksafe._soft = True

# Celery default settings
config._add(
    "celery",
    {
        "distributed_tracing": get_env("celery", "distributed_tracing", default=False),
        "producer_service_name": get_env("celery", "producer_service_name", default=PRODUCER_SERVICE),
        "worker_service_name": get_env("celery", "worker_service_name", default=WORKER_SERVICE),
    },
)


def patch():
    """Instrument Celery base application and the `TaskRegistry` so
    that any new registered task is automatically instrumented. In the
    case of Django-Celery integration, also the `@shared_task` decorator
    must be instrumented because Django doesn't use the Celery registry.
    """
    patch_app(celery.Celery)
# ddtrace mariadb integration: wraps mariadb.connect so new connections are
# traced.
import mariadb

from ddtrace import Pin
from ddtrace import config
from ddtrace.contrib.dbapi import TracedConnection
from ddtrace.ext import db
from ddtrace.ext import net
from ddtrace.utils.formats import asbool
from ddtrace.utils.formats import get_env
from ddtrace.utils.wrappers import unwrap
from ddtrace.vendor import wrapt


config._add(
    "mariadb",
    dict(
        # Fetch-method tracing is opt-in via the integration env var.
        trace_fetch_methods=asbool(
            get_env("mariadb", "trace_fetch_methods", default=False)),
        _default_service="mariadb",
    ),
)


def patch():
    # Idempotency guard: repeated patch() calls must not double-wrap.
    if getattr(mariadb, "_datadog_patch", False):
        return
    setattr(mariadb, "_datadog_patch", True)
    wrapt.wrap_function_wrapper("mariadb", "connect", _connect)


def unpatch():
    if getattr(mariadb, "_datadog_patch", False):
        setattr(mariadb, "_datadog_patch", False)
# ddtrace Pylons integration: wraps PylonsApp.__init__ so new applications
# get the trace middleware installed.
import pylons.wsgiapp

from ddtrace import Pin
from ddtrace import config
from ddtrace import tracer
from ddtrace.vendor import wrapt

from ...utils.formats import asbool
from ...utils.formats import get_env
from ...utils.wrappers import unwrap as _u
from .middleware import PylonsTraceMiddleware


config._add(
    "pylons",
    dict(distributed_tracing=asbool(
        get_env("pylons", "distributed_tracing", default=True)),
    ),
)


def patch():
    """Instrument Pylons applications"""
    # Idempotency guard: repeated patch() calls must not double-wrap.
    if getattr(pylons.wsgiapp, "_datadog_patch", False):
        return

    setattr(pylons.wsgiapp, "_datadog_patch", True)
    wrapt.wrap_function_wrapper("pylons.wsgiapp", "PylonsApp.__init__", traced_init)


def unpatch():
    """Disable Pylons tracing"""
    This option can also be set with the integration specific
    ``DD_PYTEST_SERVICE`` environment variable, or more generally with the
    `DD_SERVICE` environment variable.

    Default: Name of the repository being tested, otherwise ``"pytest"`` if
    the repository name cannot be found.

.. py:data:: ddtrace.config.pytest["operation_name"]

   The operation name reported by default for pytest traces.

   This option can also be set with the ``DD_PYTEST_OPERATION_NAME`` environment
   variable.

   Default: ``"pytest.test"``
"""

import os

from ddtrace import config


# pytest default settings
config._add(
    "pytest",
    dict(
        _default_service="pytest",
        # Operation name may be overridden via DD_PYTEST_OPERATION_NAME.
        operation_name=os.getenv("DD_PYTEST_OPERATION_NAME", default="pytest.test"),
    ),
)
log = get_logger(__name__)

# Default Django integration configuration.
config._add(
    "django",
    dict(
        _default_service="django",
        # Cache and database spans may use their own service names.
        cache_service_name=get_env("django", "cache_service_name") or "django",
        database_service_name_prefix=get_env("django", "database_service_name_prefix", default=""),
        database_service_name=get_env("django", "database_service_name", default=""),
        distributed_tracing_enabled=True,
        instrument_middleware=asbool(
            get_env("django", "instrument_middleware", default=True)),
        instrument_databases=True,
        instrument_caches=True,
        analytics_enabled=None,  # None allows the value to be overridden by the global config
        analytics_sample_rate=None,
        trace_query_string=None,  # Default to global config
        include_user_name=True,
        # Two mutually-exclusive resource-naming schemes, both off by default.
        use_handler_resource_format=asbool(
            get_env("django", "use_handler_resource_format", default=False)),
        use_legacy_resource_format=asbool(
            get_env("django", "use_legacy_resource_format", default=False)),
    ),
)

propagator = HTTPPropagator()
import MySQLdb # project from ddtrace import Pin from ddtrace import config from ddtrace.contrib.dbapi import TracedConnection from ddtrace.vendor.wrapt import wrap_function_wrapper as _w from ...ext import db from ...ext import net from ...utils.wrappers import unwrap as _u config._add( "mysqldb", dict( _default_service="mysql", ), ) KWPOS_BY_TAG = { net.TARGET_HOST: ("host", 0), db.USER: ("user", 1), db.NAME: ("db", 3), } def patch(): # patch only once if getattr(MySQLdb, "__datadog_patch", False): return setattr(MySQLdb, "__datadog_patch", True)