def __attrs_post_init__(self):
    """Complete attrs initialization.

    Derives a default upload-retry budget from the timeout, builds the
    tenacity retry policy used for uploads, and replaces ``self.tags``
    with a merged, bytes-valued tag mapping (env tags, then user tags,
    then fixed runtime tags, then version/env overrides).
    """
    if self.max_retry_delay is None:
        self.max_retry_delay = self.timeout * 3

    self._retry_upload = tenacity.Retrying(
        # Retry after 1s, 2s, 4s, 8s with some randomness
        wait=tenacity.wait_random_exponential(multiplier=0.5),
        stop=tenacity.stop_after_delay(self.max_retry_delay),
        retry_error_cls=UploadFailed,
        retry=tenacity.retry_if_exception_type((http_client.HTTPException, OSError, IOError)),
    )

    # Environment tags: DD_PROFILING_TAGS entries override DD_TAGS entries.
    env_tags = parse_tags_str(os.environ.get("DD_TAGS"))
    env_tags.update(parse_tags_str(os.environ.get("DD_PROFILING_TAGS")))

    # User-supplied tags (self.tags) take precedence over env tags; all
    # values are normalized to bytes.
    merged = {}
    for source in (env_tags, self.tags):
        for key, value in source.items():
            merged[key] = six.ensure_binary(value)

    # Fixed runtime metadata wins over anything user-supplied.
    merged["host"] = HOSTNAME.encode("utf-8")
    merged["language"] = b"python"
    merged["runtime"] = PYTHON_IMPLEMENTATION
    merged["runtime_version"] = PYTHON_VERSION
    merged["profiler_version"] = ddtrace.__version__.encode("ascii")

    if self.version:
        merged["version"] = self.version.encode("utf-8")
    if self.env:
        merged["env"] = self.env.encode("utf-8")

    self.tags = merged
def test_parse_env_tags(self):
    """Exercise parse_tags_str on well-formed and malformed tag strings."""
    # Well-formed strings parse into the expected mappings.
    assert parse_tags_str("key:val") == {"key": "val"}
    assert parse_tags_str("key:val,key2:val2") == {"key": "val", "key2": "val2"}
    assert parse_tags_str("key:val,key2:val2,key3:1234.23") == {
        "key": "val",
        "key2": "val2",
        "key3": "1234.23",
    }

    # Empty values and trailing separators are dropped with an error logged
    # per malformed pair.
    with mock.patch("ddtrace.utils.formats.log") as log:
        assert parse_tags_str("key:,key3:val1,") == {"key3": "val1"}
        assert log.error.call_count == 2

    # An empty string is valid and produces no tags and no errors.
    with mock.patch("ddtrace.utils.formats.log") as log:
        assert parse_tags_str("") == {}
        assert log.error.call_count == 0

    with mock.patch("ddtrace.utils.formats.log") as log:
        assert parse_tags_str(",") == {}
        assert log.error.call_count == 2

    with mock.patch("ddtrace.utils.formats.log") as log:
        assert parse_tags_str(":,:") == {}
        assert log.error.call_count == 2

    # A bare key (no colon) is skipped; valid pairs in the same string
    # are still parsed.
    with mock.patch("ddtrace.utils.formats.log") as log:
        assert parse_tags_str("key,key2:val1") == {"key2": "val1"}
        log.error.assert_called_once_with(
            "Malformed tag in tag pair '%s' from tag string '%s'.", "key", "key,key2:val1"
        )

    # A pair with an extra colon is rejected wholesale.
    with mock.patch("ddtrace.utils.formats.log") as log:
        assert parse_tags_str("key2:val1:") == {}
        log.error.assert_called_once_with(
            "Malformed tag in tag pair '%s' from tag string '%s'.", "key2:val1:", "key2:val1:"
        )

    # Every malformed pair in the string gets its own error call.
    with mock.patch("ddtrace.utils.formats.log") as log:
        assert parse_tags_str("key,key2,key3") == {}
        log.error.assert_has_calls(
            [
                mock.call("Malformed tag in tag pair '%s' from tag string '%s'.", "key", "key,key2,key3"),
                mock.call("Malformed tag in tag pair '%s' from tag string '%s'.", "key2", "key,key2,key3"),
                mock.call("Malformed tag in tag pair '%s' from tag string '%s'.", "key3", "key,key2,key3"),
            ]
        )
def update_patched_modules():
    """Merge DATADOG_PATCH_MODULES into EXTRA_PATCHED_MODULES.

    The env var uses the same ``name:value`` comma-separated syntax as
    tag strings; each value is coerced to a bool patch on/off flag.
    """
    raw = os.environ.get("DATADOG_PATCH_MODULES")
    if not raw:
        # Nothing configured; leave the existing module flags untouched.
        return
    for name, flag in parse_tags_str(raw).items():
        EXTRA_PATCHED_MODULES[name] = asbool(flag)
def test_parse_env_tags(tag_str, expected_tags, error_calls):
    """Parametrized check of parse_tags_str output and its error logging."""
    with mock.patch("ddtrace.utils.formats.log") as log:
        parsed = parse_tags_str(tag_str)
        assert parsed == expected_tags
        if not error_calls:
            # Well-formed input must not log anything.
            assert log.error.call_count == 0
        else:
            # One log.error per malformed pair, in order.
            assert log.error.call_count == len(error_calls)
            log.error.assert_has_calls(error_calls)
def _get_tags(self, service):
    """Build the tag mapping (bytes values) sent with profiles for *service*.

    Precedence, lowest to highest: DD_TAGS, DD_PROFILING_TAGS, self.tags,
    then the version/env overrides.
    """
    tags = {
        "service": service.encode("utf-8"),
        "runtime-id": runtime.get_runtime_id().encode("ascii"),
    }
    # BUGFIX: the default passed to os.environ.get must be a string —
    # parse_tags_str is a tag-*string* parser (see its tests); the previous
    # `{}` default only worked because an empty dict is falsy.
    user_tags = parse_tags_str(os.environ.get("DD_TAGS", ""))
    user_tags.update(parse_tags_str(os.environ.get("DD_PROFILING_TAGS", "")))
    tags.update({k: six.ensure_binary(v) for k, v in user_tags.items()})
    # Explicit exporter tags override anything from the environment.
    tags.update({k: six.ensure_binary(v) for k, v in self.tags.items()})
    if self.version:
        tags["version"] = self.version.encode("utf-8")
    if self.env:
        tags["env"] = self.env.encode("utf-8")
    return tags
def _get_tags(self, service):
    """Build the tag mapping (bytes values) sent with profiles for *service*.

    Fixed runtime metadata and version/env are set first; user tags from
    DD_TAGS / DD_PROFILING_TAGS are applied last and therefore win
    (DD_PROFILING_TAGS overriding DD_TAGS).
    """
    tags = {
        "service": service.encode("utf-8"),
        "host": HOSTNAME.encode("utf-8"),
        "runtime-id": runtime.get_runtime_id().encode("ascii"),
        "language": b"python",
        "runtime": PYTHON_IMPLEMENTATION,
        "runtime_version": PYTHON_VERSION,
        "profiler_version": ddtrace.__version__.encode("utf-8"),
    }
    if self.version:
        tags["version"] = self.version.encode("utf-8")
    if self.env:
        tags["env"] = self.env.encode("utf-8")
    # BUGFIX: the default passed to os.environ.get must be a string —
    # parse_tags_str is a tag-*string* parser (see its tests); the previous
    # `{}` default only worked because an empty dict is falsy.
    user_tags = parse_tags_str(os.environ.get("DD_TAGS", ""))
    user_tags.update(parse_tags_str(os.environ.get("DD_PROFILING_TAGS", "")))
    tags.update({k: six.ensure_binary(v) for k, v in user_tags.items()})
    return tags
# Apply any tracer options collected earlier (``opts`` is built above this
# span — not visible here).
if opts:
    tracer.configure(**opts)

# Optionally auto-patch supported integrations, honoring per-module
# overrides from DATADOG_PATCH_MODULES.
if patch:
    update_patched_modules()
    from ddtrace import patch_all

    patch_all(**EXTRA_PATCHED_MODULES)

# Legacy env-var support: DATADOG_ENV sets the global env tag.
if "DATADOG_ENV" in os.environ:
    tracer.set_tags({constants.ENV_KEY: os.environ["DATADOG_ENV"]})

# DD_TRACE_GLOBAL_TAGS uses the same ``key:val,...`` syntax as DD_TAGS.
if "DD_TRACE_GLOBAL_TAGS" in os.environ:
    env_tags = os.getenv("DD_TRACE_GLOBAL_TAGS")
    tracer.set_tags(parse_tags_str(env_tags))

# Ensure sitecustomize.py is properly called if available in application directories:
# * exclude `bootstrap_dir` from the search
# * find a user `sitecustomize.py` module
# * import that module via `imp`
bootstrap_dir = os.path.dirname(__file__)
path = list(sys.path)

# Remove this bootstrap package's own directory so we don't find *this*
# sitecustomize again instead of the application's.
if bootstrap_dir in path:
    path.remove(bootstrap_dir)

try:
    # NOTE: rebinds ``path`` to the found module's file path.
    (f, path, description) = imp.find_module("sitecustomize", path)
except ImportError:
    # No user sitecustomize — nothing more to do here.
    pass
def snapshot_context(token, ignores=None, tracer=None, async_mode=True, variants=None):
    """Generator-based context for running a snapshot test against the test agent.

    Registers the test ``token`` with the test agent (via a header in
    async mode, or an explicit ``/test/start`` request otherwise), yields a
    ``SnapshotTest``, then flushes traces and queries ``/test/snapshot``
    for the comparison result, failing the pytest test on mismatch.
    ``ignores`` lists span fields excluded from comparison; ``variants``
    maps variant-id -> applicability and exactly one must apply.
    """
    # Use variant that applies to update test token. One must apply. If none
    # apply, the test should have been marked as skipped.
    if variants:
        applicable_variant_ids = [k for (k, v) in variants.items() if v]
        assert len(applicable_variant_ids) == 1
        variant_id = applicable_variant_ids[0]
        token = "{}_{}".format(token, variant_id) if variant_id else token

    ignores = ignores or []
    if not tracer:
        tracer = ddtrace.tracer

    # The test agent is reached at the same address the writer sends traces to.
    parsed = parse.urlparse(tracer.writer.agent_url)
    conn = httplib.HTTPConnection(parsed.hostname, parsed.port)
    try:
        # clear queue in case traces have been generated before test case is
        # itself run
        try:
            tracer.writer.flush_queue()
        except Exception as e:
            pytest.fail("Could not flush the queue before test case: %s" % str(e), pytrace=True)

        if async_mode:
            # Patch the tracer writer to include the test token header for all requests.
            tracer.writer._headers["X-Datadog-Test-Token"] = token

            # Also add a header to the environment for subprocesses test cases that might use snapshotting.
            # Reuses the tag-string syntax (``name:value,...``) to encode headers.
            existing_headers = parse_tags_str(os.environ.get("_DD_TRACE_WRITER_ADDITIONAL_HEADERS", ""))
            existing_headers.update({"X-Datadog-Test-Token": token})
            os.environ["_DD_TRACE_WRITER_ADDITIONAL_HEADERS"] = ",".join(
                ["%s:%s" % (k, v) for k, v in existing_headers.items()]
            )
        else:
            # Signal the start of this test case to the test agent.
            try:
                conn.request("GET", "/test/start?token=%s" % token)
            except Exception as e:
                pytest.fail("Could not connect to test agent: %s" % str(e), pytrace=False)
            else:
                r = conn.getresponse()
                if r.status != 200:
                    # The test agent returns nice error messages we can forward to the user.
                    raise SnapshotFailed(r.read())

        try:
            yield SnapshotTest(
                tracer=tracer,
                token=token,
            )
        finally:
            # Force a flush so all traces are submitted.
            tracer.writer.flush_queue()

        if async_mode:
            # Undo the header/env-var side effects set up above.
            del tracer.writer._headers["X-Datadog-Test-Token"]
            del os.environ["_DD_TRACE_WRITER_ADDITIONAL_HEADERS"]

        # Query for the results of the test.
        conn = httplib.HTTPConnection(parsed.hostname, parsed.port)
        conn.request("GET", "/test/snapshot?ignores=%s&token=%s" % (",".join(ignores), token))
        r = conn.getresponse()
        if r.status != 200:
            raise SnapshotFailed(r.read())
    except SnapshotFailed as e:
        # Fail the test if a failure has occurred and print out the
        # message we got from the test agent.
        pytest.fail(to_unicode(e.args[0]), pytrace=False)
    except Exception as e:
        # Even though it's unlikely any traces have been sent, make the
        # final request to the test agent so that the test case is finished.
        conn = httplib.HTTPConnection(parsed.hostname, parsed.port)
        conn.request("GET", "/test/snapshot?ignores=%s&token=%s" % (",".join(ignores), token))
        conn.getresponse()
        pytest.fail("Unexpected test failure during snapshot test: %s" % str(e), pytrace=True)
    finally:
        conn.close()