Code example #1
def test_status_code_exceptions(status_code, expected_exc, log_level, caplog):
    caplog.set_level(logging.INFO)
    HttpClientRecorder.STATUS_CODE = status_code
    settings = finalize_application_settings({
        "license_key": "123LICENSEKEY",
    })
    protocol = AgentProtocol(settings, client_cls=HttpClientRecorder)

    internal_metrics = CustomMetrics()
    with InternalTraceContext(internal_metrics):
        with pytest.raises(expected_exc):
            protocol.send("analytic_event_data")

    internal_metrics = dict(internal_metrics.metrics())
    if status_code == 413:
        assert internal_metrics[
            "Supportability/Python/Collector/MaxPayloadSizeLimit/analytic_event_data"] == [
                1, 0, 0, 0, 0, 0
            ]
    else:
        assert (
            "Supportability/Python/Collector/MaxPayloadSizeLimit/analytic_event_data"
            not in internal_metrics)

    assert len(HttpClientRecorder.SENT) == 1
    request = HttpClientRecorder.SENT[0]
    assert request.params["method"] == "analytic_event_data"

    assert len(caplog.records) == 1
    assert caplog.records[0].levelname == log_level
    message = caplog.records[0].getMessage()
    assert "123LICENSEKEY" not in message
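The excerpt does not show the parametrization this test depends on. A minimal sketch of what such a decorator could look like is below; the exception classes here are placeholders defined for illustration, not the agent's real types.

import pytest

# Placeholder exceptions standing in for the agent-specific ones that the
# real test presumably maps each HTTP status code to (assumption).
class PayloadTooLargeError(Exception): pass
class RetryableServerError(Exception): pass

@pytest.mark.parametrize(
    "status_code,expected_exc,log_level",
    [
        (413, PayloadTooLargeError, "WARNING"),   # oversized payload is dropped
        (500, RetryableServerError, "ERROR"),     # server error is retried later
    ],
)
def test_status_code_exceptions(status_code, expected_exc, log_level, caplog):
    ...  # body as in the example above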
Code example #2
def test_default_cert_path(monkeypatch, system_certs_available):
    if system_certs_available:
        cert_file = "foo"
    else:
        cert_file = None

    class DefaultVerifyPaths(object):
        cafile = cert_file

        def __init__(self, *args, **kwargs):
            pass

    monkeypatch.setattr(ssl, "DefaultVerifyPaths", DefaultVerifyPaths)
    internal_metrics = CustomMetrics()
    with InternalTraceContext(internal_metrics):
        client = HttpClient("localhost", ca_bundle_path=None)

    internal_metrics = dict(internal_metrics.metrics())
    cert_metric = "Supportability/Python/Certificate/BundleRequired"
    if system_certs_available:
        assert "ca_certs" not in client._connection_kwargs
        assert cert_metric not in internal_metrics
    else:
        assert client._connection_kwargs["ca_certs"] == certs.where()
        assert internal_metrics[cert_metric][-3:-1] == [1, 1]
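The assertions above treat each metric value as a six-element stats list. Judging from the values asserted across these examples, the layout appears to be [call count, total, exclusive, min, max, sum of squares], so the slice [-3:-1] picks out the min and max samples. A toy stand-in that mirrors only the behavior these tests rely on (not the agent's actual implementation):

class ToyCustomMetrics(object):
    """Minimal stand-in mimicking the CustomMetrics interface used here."""

    def __init__(self):
        self._metrics = {}

    def record_custom_metric(self, name, value):
        stats = self._metrics.setdefault(name, [0, 0.0, 0.0, value, value, 0.0])
        stats[0] += 1                    # call count
        stats[1] += value                # total value
        stats[2] += value                # exclusive (same as total here)
        stats[3] = min(stats[3], value)  # min sample
        stats[4] = max(stats[4], value)  # max sample
        stats[5] += value ** 2           # sum of squares

    def metrics(self):
        return self._metrics.items()

metrics = ToyCustomMetrics()
metrics.record_custom_metric("Supportability/Python/Certificate/BundleRequired", 1)
assert dict(metrics.metrics())[
    "Supportability/Python/Certificate/BundleRequired"][-3:-1] == [1, 1]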
Code example #3
def test_audit_logging(server, insecure_server, client_cls, proxy_host,
                       exception):
    audit_log_fp = StringIO()
    params = {"method": "metric_data"}
    prefix = getattr(client_cls, "PREFIX_SCHEME", "https://")
    if exception:
        port = MockExternalHTTPServer.get_open_port()
    elif prefix == "https://":
        port = server.port
    else:
        port = insecure_server.port

    internal_metrics = CustomMetrics()

    with client_cls(
            "localhost",
            port,
            proxy_scheme="https",
            proxy_host=proxy_host,
            proxy_port=server.port if not exception else port,
            audit_log_fp=audit_log_fp,
            disable_certificate_validation=True,
    ) as client:
        with InternalTraceContext(internal_metrics):
            try:
                client.send_request(params=params)
                exc = ""
            except Exception as e:
                exc = callable_name(type(e.args[0]))

    internal_metrics = dict(internal_metrics.metrics())
    if exception and client_cls is ApplicationModeClient:
        if proxy_host:
            connection = "https-proxy"
        else:
            connection = "direct"
        assert internal_metrics == {
            "Supportability/Python/Collector/Failures": [1, 0, 0, 0, 0, 0],
            "Supportability/Python/Collector/Failures/%s" % connection:
            [1, 0, 0, 0, 0, 0],
            "Supportability/Python/Collector/Exception/%s" % exc:
            [1, 0, 0, 0, 0, 0],
        }
    else:
        assert not internal_metrics

    # Verify the audit log isn't empty
    assert audit_log_fp.tell()

    audit_log_fp.seek(0)
    audit_log_contents = audit_log_fp.read()
    assert prefix in audit_log_contents
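The exception metric name above is produced by callable_name(). Based on the module:name convention used by record_exception() further down this page, a rough approximation (not the agent's helper) looks like this:

def callable_name_approx(obj):
    # Approximation only: format an object as "module:name", falling back
    # to the bare name when no module is available.
    module = getattr(obj, "__module__", None)
    name = getattr(obj, "__qualname__", None) or getattr(obj, "__name__", repr(obj))
    return "%s:%s" % (module, name) if module else name

# e.g. callable_name_approx(ConnectionError) == "builtins:ConnectionError"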
Code example #4
def test_non_ok_response(client_cls, server):
    internal_metrics = CustomMetrics()

    with client_cls(
        "localhost", server.port, disable_certificate_validation=True
    ) as client:
        with InternalTraceContext(internal_metrics):
            status, _ = client.send_request(method="PUT")

    assert status != 200
    internal_metrics = dict(internal_metrics.metrics())

    if client_cls is ApplicationModeClient:
        assert internal_metrics == {
            "Supportability/Python/Collector/Failures": [1, 0, 0, 0, 0, 0],
            "Supportability/Python/Collector/Failures/direct": [1, 0, 0, 0, 0, 0],
            "Supportability/Python/Collector/HTTPError/%d" % status: [1, 0, 0, 0, 0, 0],
        }
    else:
        assert not internal_metrics
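This test and the previous one rely on server fixtures that are not shown in the excerpt. A self-contained sketch of a stand-in fixture is below; it serves plain HTTP and always answers with a non-OK status, whereas the real fixtures presumably wrap MockExternalHTTPServer and serve TLS as well.

import threading

import pytest
from http.server import BaseHTTPRequestHandler, HTTPServer

class _NonOKHandler(BaseHTTPRequestHandler):
    def do_PUT(self):
        # Always answer with a non-200 status so the client-side error
        # handling paths are exercised.
        self.send_response(418)
        self.end_headers()

    do_GET = do_POST = do_PUT

    def log_message(self, *args):
        pass  # keep test output quiet

@pytest.fixture
def server():
    httpd = HTTPServer(("localhost", 0), _NonOKHandler)  # port 0 = any free port
    httpd.port = httpd.server_address[1]
    thread = threading.Thread(target=httpd.serve_forever, daemon=True)
    thread.start()
    yield httpd
    httpd.shutdown()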
Code example #5
File: transaction.py  Project: jacalata/optimization
    def __init__(self, application, enabled=None):

        self._application = application

        self.thread_id = transaction_cache().current_thread_id()

        self._transaction_id = id(self)
        self._transaction_lock = threading.Lock()

        self._dead = False

        self._state = STATE_PENDING
        self._settings = None

        self._priority = 0
        self._group = None
        self._name = None

        self._frameworks = set()

        self._frozen_path = None

        self._node_stack = []

        self._request_uri = None

        self.queue_start = 0.0

        self.start_time = 0.0
        self.end_time = 0.0

        self.stopped = False

        self._errors = []
        self._slow_sql = []

        self._stack_trace_count = 0
        self._explain_plan_count = 0

        self._string_cache = {}

        self._custom_params = {}
        self._request_params = {}

        self._utilization_tracker = None

        self._thread_utilization_start = None
        self._thread_utilization_end = None
        self._thread_utilization_value = None

        self._cpu_user_time_start = None
        self._cpu_user_time_end = None
        self._cpu_user_time_value = 0.0

        self._read_length = None

        self._read_start = None
        self._read_end = None

        self._sent_start = None
        self._sent_end = None

        self._bytes_read = 0
        self._bytes_sent = 0

        self._calls_read = 0
        self._calls_readline = 0
        self._calls_readlines = 0

        self._calls_write = 0
        self._calls_yield = 0

        self._request_environment = {}
        self._response_properties = {}
        self._transaction_metrics = {}

        self.background_task = False

        self.enabled = False
        self.autorum_disabled = False

        self.ignore_transaction = False
        self.suppress_apdex = False
        self.suppress_transaction_trace = False

        self.capture_params = False
        self.ignored_params = []

        self.response_code = 0

        self.apdex = 0

        self.rum_token = None
        self.rum_trace = False

        # 16-digit random hex. Padded with zeros in the front.
        self.guid = '%016x' % random.getrandbits(64)

        self.client_cross_process_id = None
        self.client_account_id = None
        self.client_application_id = None
        self.referring_transaction_guid = None
        self.record_tt = False

        self._custom_metrics = CustomMetrics()

        self._profile_samples = deque()
        self._profile_frames = {}
        self._profile_skip = 1
        self._profile_count = 0

        global_settings = application.global_settings

        if global_settings.enabled:
            if enabled or (enabled is None and application.enabled):
                self._settings = application.settings
                if not self._settings:
                    application.activate()

                    # We check again whether the settings are now valid
                    # in case startup timeout had been specified
                    # and registration had been started and
                    # completed within the timeout.

                    self._settings = application.settings

                if self._settings:
                    self.enabled = True
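The tail of this constructor is what decides whether the transaction will record anything at all. A standalone restatement of that decision, written here purely for illustration:

def resolve_enabled(global_enabled, enabled, application_enabled, settings):
    # Mirrors the logic at the end of __init__ above: the transaction is
    # enabled only when the agent is globally enabled, the caller (or the
    # application) opted in, and valid settings are available.  In the real
    # constructor a missing settings object additionally triggers
    # application.activate() followed by a second read of the settings.
    if not global_enabled:
        return False
    if enabled or (enabled is None and application_enabled):
        return settings is not None
    return False

# enabled=None defers to the application; an explicit False always wins.
assert resolve_enabled(True, None, True, object()) is True
assert resolve_enabled(True, False, True, object()) is False
assert resolve_enabled(True, None, True, None) is False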
Code example #6
File: transaction.py  Project: jacalata/optimization
class Transaction(object):

    def __init__(self, application, enabled=None):

        self._application = application

        self.thread_id = transaction_cache().current_thread_id()

        self._transaction_id = id(self)
        self._transaction_lock = threading.Lock()

        self._dead = False

        self._state = STATE_PENDING
        self._settings = None

        self._priority = 0
        self._group = None
        self._name = None

        self._frameworks = set()

        self._frozen_path = None

        self._node_stack = []

        self._request_uri = None

        self.queue_start = 0.0

        self.start_time = 0.0
        self.end_time = 0.0

        self.stopped = False

        self._errors = []
        self._slow_sql = []

        self._stack_trace_count = 0
        self._explain_plan_count = 0

        self._string_cache = {}

        self._custom_params = {}
        self._request_params = {}

        self._utilization_tracker = None

        self._thread_utilization_start = None
        self._thread_utilization_end = None
        self._thread_utilization_value = None

        self._cpu_user_time_start = None
        self._cpu_user_time_end = None
        self._cpu_user_time_value = 0.0

        self._read_length = None

        self._read_start = None
        self._read_end = None

        self._sent_start = None
        self._sent_end = None

        self._bytes_read = 0
        self._bytes_sent = 0

        self._calls_read = 0
        self._calls_readline = 0
        self._calls_readlines = 0

        self._calls_write = 0
        self._calls_yield = 0

        self._request_environment = {}
        self._response_properties = {}
        self._transaction_metrics = {}

        self.background_task = False

        self.enabled = False
        self.autorum_disabled = False

        self.ignore_transaction = False
        self.suppress_apdex = False
        self.suppress_transaction_trace = False

        self.capture_params = False
        self.ignored_params = []

        self.response_code = 0

        self.apdex = 0

        self.rum_token = None
        self.rum_trace = False

        # 16-digit random hex. Padded with zeros in the front.
        self.guid = '%016x' % random.getrandbits(64)

        self.client_cross_process_id = None
        self.client_account_id = None
        self.client_application_id = None
        self.referring_transaction_guid = None
        self.record_tt = False

        self._custom_metrics = CustomMetrics()

        self._profile_samples = deque()
        self._profile_frames = {}
        self._profile_skip = 1
        self._profile_count = 0

        global_settings = application.global_settings

        if global_settings.enabled:
            if enabled or (enabled is None and application.enabled):
                self._settings = application.settings
                if not self._settings:
                    application.activate()

                    # We check again whether the settings are now valid
                    # in case startup timeout had been specified
                    # and registration had been started and
                    # completed within the timeout.

                    self._settings = application.settings

                if self._settings:
                    self.enabled = True

    def __del__(self):
        self._dead = True
        if self._state == STATE_RUNNING:
            self.__exit__(None, None, None)

    def save_transaction(self):
        transaction_cache().save_transaction(self)

    def drop_transaction(self):
        transaction_cache().drop_transaction(self)

    def __enter__(self):

        assert(self._state == STATE_PENDING)

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return self

        # Mark transaction as active and update state
        # used to validate correct usage of class.

        self._state = STATE_RUNNING

        # Cache transaction in thread/coroutine local
        # storage so that it can be accessed from
        # anywhere in the context of the transaction.

        try:
            self.save_transaction()
        except:  # Catch all
            self._state = STATE_PENDING
            self.enabled = False
            raise

        # Record the start time for transaction.

        self.start_time = time.time()

        # Record initial CPU user time.

        self._cpu_user_time_start = os.times()[0]

        # Calculate initial thread utilisation factor.
        # For now we only do this if we know it is an
        # actual thread and not a greenlet.

        if (not hasattr(sys, '_current_frames') or
                self.thread_id in sys._current_frames()):
            thread_instance = threading.currentThread()
            self._utilization_tracker = utilization_tracker(
                    self.application.name)
            if self._utilization_tracker:
                self._utilization_tracker.enter_transaction(thread_instance)
                self._thread_utilization_start = \
                        self._utilization_tracker.utilization_count()

        # We need to push an object onto the top of the
        # node stack so that children can reach back and
        # add themselves as children to the parent. We
        # can't use ourself though as we then end up
        # with a reference count cycle which will cause
        # the destructor to never be called if the
        # __exit__() function is never called. We
        # instead push on to the top of the node stack a
        # dummy time trace object and when done we will
        # just grab what we need from that.

        self._node_stack.append(newrelic.api.time_trace.TimeTrace(None))

        return self

    def __exit__(self, exc, value, tb):

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return

        # Mark as stopped and drop the transaction from
        # thread/coroutine local storage.
        #
        # Note that we validate the saved transaction ID
        # against that for the current transaction object
        # to protect against situations where a copy was
        # made of the transaction object for some reason.
        # Such a copy when garbage collected could trigger
        # this function and cause a deadlock if it occurs
        # while original transaction was being recorded.

        self._state = STATE_STOPPED

        if self._transaction_id != id(self):
            return

        if not self._settings:
            return

        if not self._dead:
            try:
                self.drop_transaction()
            except:  # Catch all
                _logger.exception('Unable to drop transaction.')
                raise

        # Record error if one was registered.

        if exc is not None and value is not None and tb is not None:
            self.record_exception(exc, value, tb)

        # Record the end time for transaction and then
        # calculate the duration.

        if not self.stopped:
            self.end_time = time.time()

        duration = self.end_time - self.start_time

        # Calculate overall user time.

        if not self._cpu_user_time_end:
            self._cpu_user_time_end = os.times()[0]

        if duration and self._cpu_user_time_end:
            self._cpu_user_time_value = (self._cpu_user_time_end -
                    self._cpu_user_time_start)

        # Calculate thread utilisation factor. Note that even if
        # we are tracking thread utilization we skip calculation
        # if duration is zero. Under normal circumstances this
        # should not occur but may if the system clock is wound
        # backwards and duration was squashed to zero due to the
        # request appearing to finish before it started. It may
        # also occur if true response time came in under the
        # resolution of the clock being used, but that is highly
        # unlikely as the overhead of the agent itself should
        # always ensure that that is hard to achieve.

        if self._utilization_tracker:
            self._utilization_tracker.exit_transaction()
            if self._thread_utilization_start and duration > 0.0:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = (
                            self._utilization_tracker.utilization_count())
                self._thread_utilization_value = (
                        self._thread_utilization_end -
                        self._thread_utilization_start) / duration

        # Derive generated values from the raw data. The
        # dummy root node has exclusive time of children
        # as negative number. Add our own duration to get
        # our own exclusive time.

        root = self._node_stack.pop()
        children = root.children

        exclusive = duration + root.exclusive

        # Construct final root node of transaction trace.
        # Freeze path in case not already done. This will
        # construct our path.

        self._freeze_path()

        if self.background_task:
            transaction_type = 'OtherTransaction'
        else:
            transaction_type = 'WebTransaction'

        group = self._group

        if group is None:
            if self.background_task:
                group = 'Python'
            else:
                group = 'Uri'

        if self.response_code != 0:
            self._response_properties['STATUS'] = str(self.response_code)

        metrics = self._transaction_metrics

        if self._bytes_read != 0:
            metrics['WSGI/Input/Bytes'] = self._bytes_read
        if self._bytes_sent != 0:
            metrics['WSGI/Output/Bytes'] = self._bytes_sent
        if self._calls_read != 0:
            metrics['WSGI/Input/Calls/read'] = self._calls_read
        if self._calls_readline != 0:
            metrics['WSGI/Input/Calls/readline'] = self._calls_readline
        if self._calls_readlines != 0:
            metrics['WSGI/Input/Calls/readlines'] = self._calls_readlines
        if self._calls_write != 0:
            metrics['WSGI/Output/Calls/write'] = self._calls_write
        if self._calls_yield != 0:
            metrics['WSGI/Output/Calls/yield'] = self._calls_yield

        if self._thread_utilization_value:
            metrics['Thread/Concurrency'] = \
                    '%.4f' % self._thread_utilization_value

        read_duration = 0
        if self._read_start:
            read_duration = self._read_end - self._read_start
            metrics['WSGI/Input/Time'] = '%.4f' % read_duration
        self.record_custom_metric('Python/WSGI/Input/Time', read_duration)

        sent_duration = 0
        if self._sent_start:
            if not self._sent_end:
                self._sent_end = time.time()
            sent_duration = self._sent_end - self._sent_start
            metrics['WSGI/Output/Time'] = '%.4f' % sent_duration
        self.record_custom_metric('Python/WSGI/Output/Time',
                           sent_duration)

        if self.queue_start:
            queue_wait = self.start_time - self.queue_start
            if queue_wait < 0:
                queue_wait = 0
            metrics['WebFrontend/QueueTime'] = '%.4f' % queue_wait

        self.record_custom_metric('Python/WSGI/Input/Bytes',
                           self._bytes_read)

        self.record_custom_metric('Python/WSGI/Input/Calls/read',
                           self._calls_read)
        self.record_custom_metric('Python/WSGI/Input/Calls/readline',
                           self._calls_readline)
        self.record_custom_metric('Python/WSGI/Input/Calls/readlines',
                           self._calls_readlines)

        self.record_custom_metric('Python/WSGI/Output/Bytes',
                           self._bytes_sent)
        self.record_custom_metric('Python/WSGI/Output/Calls/yield',
                           self._calls_yield)
        self.record_custom_metric('Python/WSGI/Output/Calls/write',
                           self._calls_write)

        if self._frameworks:
            for framework, version in self._frameworks:
                self.record_custom_metric('Python/Framework/%s/%s' %
                    (framework, version), 1)

        request_params = {}
        parameter_groups = {}

        if self.capture_params:
            request_params = self._request_params

        if self._request_environment:
            parameter_groups['Request environment'] = self._request_environment
        if self._response_properties:
            parameter_groups['Response properties'] = self._response_properties

        if self._transaction_metrics:
            parameter_groups['Transaction metrics'] = self._transaction_metrics

        node = newrelic.core.transaction_node.TransactionNode(
                settings=self._settings,
                path=self.path,
                type=transaction_type,
                group=group,
                name=self._name,
                request_uri=self._request_uri,
                response_code=self.response_code,
                request_params=request_params,
                custom_params=self._custom_params,
                queue_start=self.queue_start,
                start_time=self.start_time,
                end_time=self.end_time,
                duration=duration,
                exclusive=exclusive,
                children=tuple(children),
                errors=tuple(self._errors),
                slow_sql=tuple(self._slow_sql),
                apdex_t=self.apdex,
                suppress_apdex=self.suppress_apdex,
                custom_metrics=self._custom_metrics,
                parameter_groups=parameter_groups,
                guid=self.guid,
                rum_trace=self.rum_trace,
                cpu_time=self._cpu_user_time_value,
                suppress_transaction_trace=self.suppress_transaction_trace,
                client_cross_process_id=self.client_cross_process_id,
                referring_transaction_guid=self.referring_transaction_guid,
                record_tt=self.record_tt,
                )

        # Clear settings as we are all done and don't need it
        # anymore.

        self._settings = None
        self.enabled = False

        # Unless we are ignoring the transaction, record it. We
        # need to lock the profile samples and replace it with
        # an empty list just in case the thread profiler kicks
        # in just as we are trying to record the transaction.
        # If we don't, when processing the samples, addition of
        # new samples can cause an error.

        if not self.ignore_transaction:
            profile_samples = []

            if self._profile_samples:
                with self._transaction_lock:
                    profile_samples = self._profile_samples
                    self._profile_samples = deque()

            self._application.record_transaction(node,
                    (self.background_task, profile_samples))

    @property
    def state(self):
        return self._state

    @property
    def settings(self):
        return self._settings

    @property
    def application(self):
        return self._application

    @property
    def name(self):
        return self._name

    @property
    def group(self):
        return self._group

    @property
    def path(self):
        if self._frozen_path:
            return self._frozen_path

        if self.background_task:
            transaction_type = 'OtherTransaction'
        else:
            transaction_type = 'WebTransaction'

        group = self._group

        if group is None:
            if self.background_task:
                group = 'Python'
            else:
                group = 'Uri'

        name = self._name

        if name is None:
            name = '<undefined>'

        # Stripping the leading slash on the request URL held by
        # name when type is 'Uri' is to keep compatibility with
        # PHP agent and also possibly other agents. Leading
        # slash is not deleted for other category groups as the
        # leading slash may be significant in that situation.

        if self._group in ['Uri', 'NormalizedUri'] and name[:1] == '/':
            path = '%s/%s%s' % (transaction_type, group, name)
        else:
            path = '%s/%s/%s' % (transaction_type, group, name)

        return path
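    # For illustration (not part of the original source): with
    # background_task False, group 'Uri' and name '/users/42', the property
    # above yields 'WebTransaction/Uri/users/42' (the name's leading slash
    # supplies the separator), while group 'Function' and name 'views:index'
    # yields 'WebTransaction/Function/views:index'.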

    @property
    def profile_sample(self):
        return self._profile_samples

    def add_profile_sample(self, stack_trace):
        if self._state != STATE_RUNNING:
            return

        self._profile_count += 1

        if self._profile_count < self._profile_skip:
            return

        self._profile_count = 0

        with self._transaction_lock:
            new_stack_trace = tuple(self._profile_frames.setdefault(
                    frame, frame) for frame in stack_trace)
            self._profile_samples.append(new_stack_trace)

            agent_limits = self._application.global_settings.agent_limits
            profile_maximum = agent_limits.xray_profile_maximum

            if len(self._profile_samples) >= profile_maximum:
                self._profile_samples = deque(itertools.islice(
                        self._profile_samples, 0,
                        len(self._profile_samples), 2))
                self._profile_skip = 2 * self._profile_skip

    def _freeze_path(self):
        if self._frozen_path is None:
            self._priority = None

            if self._group == 'Uri' and self._name != '/':
                # Apply URL normalization rules. We would only have raw
                # URLs where we were not specifically naming the web
                # transactions for a specific web framework to be a code
                # handler or otherwise.

                name, ignore = self._application.normalize_name(
                        self._name, 'url')

                if self._name != name:
                    self._group = 'NormalizedUri'
                    self._name = name

                self.ignore_transaction = self.ignore_transaction or ignore

            # Apply transaction rules on the full transaction name.
            # The path is frozen at this point and cannot be further
            # changed.

            self._frozen_path, ignore = self._application.normalize_name(
                    self.path, 'transaction')

            self.ignore_transaction = self.ignore_transaction or ignore

            # Look up the apdex from the table of key transactions. If
            # current transaction is not a key transaction then use the
            # default apdex from settings. The path used at this point
            # is the frozen path.

            self.apdex = (self._settings.web_transactions_apdex.get(
                self.path) or self._settings.apdex_t)

    def set_transaction_name(self, name, group=None, priority=None):

        # Always perform this operation even if the transaction
        # is not active at the time, as it will be called from the
        # constructor. If the path has been frozen do not allow
        # name/group to be overridden. New priority then must be
        # same or greater than existing priority. If no priority
        # always override the existing name/group if not frozen.

        if self._priority is None:
            return

        if priority is not None and priority < self._priority:
            return

        if group is None:
            group = 'Function'

        if priority is not None:
            self._priority = priority

        # The name can be a URL for the default case. URLs are
        # supposed to be ASCII but can get a URL with illegal
        # non ASCII characters. As the rule patterns and
        # replacements are Unicode, we can get Unicode
        # conversion warnings or errors when URL is converted to
        # Unicode and default encoding is ASCII. Thus need to
        # convert URL to Unicode as Latin-1 explicitly to avoid
        # problems with illegal characters.

        if isinstance(name, bytes):
            name = name.decode('Latin-1')

        self._group = group
        self._name = name
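        # For illustration (not part of the original source): once the path
        # has been frozen, _priority is None and any later naming call is a
        # no-op; a call made with priority=2 is not overridden by a later
        # call with priority=1, while a call made without a priority always
        # replaces the current name and group.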

    def name_transaction(self, name, group=None, priority=None):
        #warnings.warn('Internal API change. Use set_transaction_name() '
        #        'instead of name_transaction().', DeprecationWarning,
        #        stacklevel=2)

        return self.set_transaction_name(name, group, priority)

    def record_exception(self, exc=None, value=None, tb=None,
                         params={}, ignore_errors=[]):

        # Bail out if the transaction is not active or
        # collection of errors not enabled.

        if not self._settings:
            return

        settings = self._settings
        error_collector = settings.error_collector

        if not error_collector.enabled or not settings.collect_errors:
            return

        # If no exception details provided, use current exception.

        if exc is None and value is None and tb is None:
            exc, value, tb = sys.exc_info()

        # Has to be an error to be logged.

        if exc is None or value is None or tb is None:
            return

        # 'should_ignore' is a tri-state variable with the following behavior.
        # 'True' - ignore the error.
        # 'False'- record the error.
        # 'None' - Use the default ignore rules.

        should_ignore = None

        if callable(ignore_errors):
            should_ignore = ignore_errors(exc, value, tb)
            if should_ignore:
                return

        if should_ignore is None:
            # We need to check for module.name and module:name.
            # Originally we used module.class but that was
            # inconsistent with everything else which used
            # module:name. So changed to use ':' as separator, but
            # for backward compatibility need to support '.' as
            # separator for time being. Check that with the ':'
            # last as we will use that name as the exception type.

            module = value.__class__.__module__
            name = value.__class__.__name__

            if module:
                fullname = '%s.%s' % (module, name)
            else:
                fullname = name

            if (not callable(ignore_errors)) and (fullname in ignore_errors):
                return

            if fullname in error_collector.ignore_errors:
                return

            if module:
                fullname = '%s:%s' % (module, name)
            else:
                fullname = name

            if (not callable(ignore_errors)) and (fullname in ignore_errors):
                return

            if fullname in error_collector.ignore_errors:
                return

        # Only remember up to limit of what can be caught for a
        # single transaction. This could be trimmed further
        # later if there are already recorded errors and would
        # go over the harvest limit.

        if len(self._errors) >= settings.agent_limits.errors_per_transaction:
            return

        if params:
            custom_params = dict(self._custom_params)
            custom_params.update(params)
        else:
            custom_params = self._custom_params

        exc_type = exc.__name__

        try:
            message = str(value)
        except Exception:
            try:
                # Assume JSON encoding can handle unicode.
                message = six.text_type(value)
            except Exception:
                message = '<unprintable %s object>' % type(value).__name__

        # Check that we have not recorded this exception
        # previously for this transaction due to multiple
        # error traces triggering. This is not going to be
        # exact but the UI hides exceptions of same type
        # anyway. Better that we under count exceptions of
        # same type and message rather than count same one
        # multiple times.

        for error in self._errors:
            if error.type == exc_type and error.message == message:
                return

        stack_trace = traceback.format_exception(exc, value, tb)

        node = newrelic.core.error_node.ErrorNode(
                timestamp=time.time(),
                type=fullname,
                message=message,
                stack_trace=stack_trace,
                custom_params=custom_params,
                file_name=None,
                line_number=None,
                source=None)

        # TODO Errors are recorded in time order. If
        # there are two exceptions of same type and
        # different message, the UI displays the first
        # one. In the PHP agent it was recording the
        # errors in reverse time order and so the UI
        # displayed the last one. What is the
        # official order in which they should be sent?

        self._errors.append(node)

    def notice_error(self, exc, value, tb, params={}, ignore_errors=[]):
        warnings.warn('Internal API change. Use record_exception() '
                'instead of notice_error().', DeprecationWarning,
                stacklevel=2)

        self.record_exception(exc, value, tb, params, ignore_errors)

    def record_custom_metric(self, name, value):
        self._custom_metrics.record_custom_metric(name, value)

    def record_custom_metrics(self, metrics):
        for name, value in metrics:
            self._custom_metrics.record_custom_metric(name, value)

    def record_metric(self, name, value):
        warnings.warn('Internal API change. Use record_custom_metric() '
                'instead of record_metric().', DeprecationWarning,
                stacklevel=2)

        return self.record_custom_metric(name, value)

    def active_node(self):
        if self._node_stack:
            return self._node_stack[-1]

    def _intern_string(self, value):
        return self._string_cache.setdefault(value, value)

    def _push_current(self, node):
        self._node_stack.append(node)

    def _pop_current(self, node):
        last = self._node_stack.pop()
        assert last == node
        parent = self._node_stack[-1]
        return parent

    def _process_node(self, node):
        if type(node) is newrelic.core.database_node.DatabaseNode:
            settings = self._settings
            if not settings.collect_traces:
                return
            if not settings.slow_sql.enabled:
                return
            if settings.transaction_tracer.record_sql == 'off':
                return
            if node.duration < settings.transaction_tracer.explain_threshold:
                return
            self._slow_sql.append(node)

    def stop_recording(self):
        if not self.enabled:
            return

        if self.stopped:
            return

        if self.end_time:
            return

        self.end_time = time.time()
        self.stopped = True

        if self._utilization_tracker:
            if self._thread_utilization_start:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = (
                            self._utilization_tracker.utilization_count())

        self._cpu_user_time_end = os.times()[0]

    def add_custom_parameter(self, name, value):
        self._custom_params[name] = value

    def add_custom_parameters(self, items):
        for name, value in items:
            self._custom_params[name] = value

    def add_user_attribute(self, name, value):
        #warnings.warn('Internal API change. Use add_custom_parameter() '
        #        'instead of add_user_attribute().', DeprecationWarning,
        #        stacklevel=2)
        self.add_custom_parameter(name, value)

    def add_user_attributes(self, items):
        #warnings.warn('Internal API change. Use add_custom_parameters() '
        #        'instead of add_user_attributes().', DeprecationWarning,
        #        stacklevel=2)
        self.add_custom_parameters(items)

    def dump(self, file):
        """Dumps details about the transaction to the file object."""

        print >> file, 'Application: %s' % (
                self.application.name)
        print >> file, 'Time Started: %s' % (
                time.asctime(time.localtime(self.start_time)))
        print >> file, 'Thread Id: %r' % (
                self.thread_id,)
        print >> file, 'Current Status: %d' % (
                self._state)
        print >> file, 'Recording Enabled: %s' % (
                self.enabled)
        print >> file, 'Ignore Transaction: %s' % (
                self.ignore_transaction)
        print >> file, 'Transaction Dead: %s' % (
                self._dead)
        print >> file, 'Transaction Stopped: %s' % (
                self.stopped)
        print >> file, 'Background Task: %s' % (
                self.background_task)
        print >> file, 'Request URI: %s' % (
                self._request_uri)
        print >> file, 'Transaction Group: %s' % (
                self._group)
        print >> file, 'Transaction Name: %s' % (
                self._name)
        print >> file, 'Name Priority: %r' % (
                self._priority)
        print >> file, 'Frozen Path: %s' % (
                self._frozen_path)
        print >> file, 'AutoRUM Disabled: %s' % (
                self.autorum_disabled)
        print >> file, 'Suppress Apdex: %s' % (
                self.suppress_apdex)

        print >> file, 'Node Stack:'
        for node in self._node_stack[1:]:
            node.dump(file)
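Taken together, the class above is meant to be driven as a context manager. A short usage sketch, assuming an already-registered agent application object (how that object is obtained is outside this excerpt):

def handle_request(application):
    transaction = Transaction(application)
    with transaction:
        # Name the transaction and attach data; __exit__() takes care of
        # building the TransactionNode and recording it.
        transaction.set_transaction_name('users/show', group='Function')
        transaction.add_custom_parameter('user_id', 42)
        transaction.record_custom_metric('Custom/Handler/Calls', 1)
        ...  # perform the actual work of the request here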
Code example #7
    def __init__(self, application, enabled=None):

        self._application = application

        self.thread_id = transaction_cache().current_thread_id()

        self._transaction_id = id(self)
        self._transaction_lock = threading.Lock()

        self._dead = False

        self._state = self.STATE_PENDING
        self._settings = None

        self._priority = 0
        self._group = None
        self._name = None

        self._frameworks = set()

        self._frozen_path = None

        self._node_stack = []

        self._request_uri = None

        self.queue_start = 0.0

        self.start_time = 0.0
        self.end_time = 0.0

        self.stopped = False

        self._trace_node_count = 0

        self._errors = []
        self._slow_sql = []

        self._stack_trace_count = 0
        self._explain_plan_count = 0

        self._string_cache = {}

        self._custom_params = {}
        self._request_params = {}

        self._utilization_tracker = None

        self._thread_utilization_start = None
        self._thread_utilization_end = None
        self._thread_utilization_value = None

        self._cpu_user_time_start = None
        self._cpu_user_time_end = None
        self._cpu_user_time_value = 0.0

        self._read_length = None

        self._read_start = None
        self._read_end = None

        self._sent_start = None
        self._sent_end = None

        self._bytes_read = 0
        self._bytes_sent = 0

        self._calls_read = 0
        self._calls_readline = 0
        self._calls_readlines = 0

        self._calls_write = 0
        self._calls_yield = 0

        self._request_environment = {}
        self._response_properties = {}
        self._transaction_metrics = {}

        self.background_task = False

        self.enabled = False
        self.autorum_disabled = False

        self.ignore_transaction = False
        self.suppress_apdex = False
        self.suppress_transaction_trace = False

        self.capture_params = False
        self.ignored_params = []

        self.response_code = 0

        self.apdex = 0

        self.rum_token = None
        self.rum_trace = False

        # 16-digit random hex. Padded with zeros in the front.
        self.guid = '%016x' % random.getrandbits(64)

        self.client_cross_process_id = None
        self.client_account_id = None
        self.client_application_id = None
        self.referring_transaction_guid = None
        self.record_tt = False
        self._trip_id = None
        self._referring_path_hash = None
        self._alternate_path_hashes = {}
        self.is_part_of_cat = False

        self.synthetics_resource_id = None
        self.synthetics_job_id = None
        self.synthetics_monitor_id = None
        self.synthetics_header = None

        self._custom_metrics = CustomMetrics()

        self._profile_samples = deque()
        self._profile_frames = {}
        self._profile_skip = 1
        self._profile_count = 0

        global_settings = application.global_settings

        if global_settings.enabled:
            if enabled or (enabled is None and application.enabled):
                self._settings = application.settings
                if not self._settings:
                    application.activate()

                    # We check again whether the settings are now valid
                    # in case startup timeout had been specified
                    # and registration had been started and
                    # completed within the timeout.

                    self._settings = application.settings

                if self._settings:
                    self.enabled = True
Code example #8
class Transaction(object):

    STATE_PENDING = 0
    STATE_RUNNING = 1
    STATE_STOPPED = 2

    def __init__(self, application, enabled=None):

        self._application = application

        self.thread_id = transaction_cache().current_thread_id()

        self._transaction_id = id(self)
        self._transaction_lock = threading.Lock()

        self._dead = False

        self._state = self.STATE_PENDING
        self._settings = None

        self._priority = 0
        self._group = None
        self._name = None

        self._frameworks = set()

        self._frozen_path = None

        self._node_stack = []

        self._request_uri = None

        self.queue_start = 0.0

        self.start_time = 0.0
        self.end_time = 0.0

        self.stopped = False

        self._trace_node_count = 0

        self._errors = []
        self._slow_sql = []

        self._stack_trace_count = 0
        self._explain_plan_count = 0

        self._string_cache = {}

        self._custom_params = {}
        self._request_params = {}

        self._utilization_tracker = None

        self._thread_utilization_start = None
        self._thread_utilization_end = None
        self._thread_utilization_value = None

        self._cpu_user_time_start = None
        self._cpu_user_time_end = None
        self._cpu_user_time_value = 0.0

        self._read_length = None

        self._read_start = None
        self._read_end = None

        self._sent_start = None
        self._sent_end = None

        self._bytes_read = 0
        self._bytes_sent = 0

        self._calls_read = 0
        self._calls_readline = 0
        self._calls_readlines = 0

        self._calls_write = 0
        self._calls_yield = 0

        self._request_environment = {}
        self._response_properties = {}
        self._transaction_metrics = {}

        self.background_task = False

        self.enabled = False
        self.autorum_disabled = False

        self.ignore_transaction = False
        self.suppress_apdex = False
        self.suppress_transaction_trace = False

        self.capture_params = False
        self.ignored_params = []

        self.response_code = 0

        self.apdex = 0

        self.rum_token = None
        self.rum_trace = False

        # 16-digit random hex. Padded with zeros in the front.
        self.guid = '%016x' % random.getrandbits(64)

        self.client_cross_process_id = None
        self.client_account_id = None
        self.client_application_id = None
        self.referring_transaction_guid = None
        self.record_tt = False
        self._trip_id = None
        self._referring_path_hash = None
        self._alternate_path_hashes = {}
        self.is_part_of_cat = False

        self.synthetics_resource_id = None
        self.synthetics_job_id = None
        self.synthetics_monitor_id = None
        self.synthetics_header = None

        self._custom_metrics = CustomMetrics()

        self._profile_samples = deque()
        self._profile_frames = {}
        self._profile_skip = 1
        self._profile_count = 0

        global_settings = application.global_settings

        if global_settings.enabled:
            if enabled or (enabled is None and application.enabled):
                self._settings = application.settings
                if not self._settings:
                    application.activate()

                    # We check again whether the settings are now valid
                    # in case startup timeout had been specified
                    # and registration had been started and
                    # completed within the timeout.

                    self._settings = application.settings

                if self._settings:
                    self.enabled = True

    def __del__(self):
        self._dead = True
        if self._state == self.STATE_RUNNING:
            self.__exit__(None, None, None)

    def save_transaction(self):
        transaction_cache().save_transaction(self)

    def drop_transaction(self):
        transaction_cache().drop_transaction(self)

    def __enter__(self):

        assert(self._state == self.STATE_PENDING)

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return self

        # Mark transaction as active and update state
        # used to validate correct usage of class.

        self._state = self.STATE_RUNNING

        # Cache transaction in thread/coroutine local
        # storage so that it can be accessed from
        # anywhere in the context of the transaction.

        try:
            self.save_transaction()
        except:  # Catch all
            self._state = self.STATE_PENDING
            self.enabled = False
            raise

        # Record the start time for transaction.

        self.start_time = time.time()

        # Record initial CPU user time.

        self._cpu_user_time_start = os.times()[0]

        # Calculate initial thread utilisation factor.
        # For now we only do this if we know it is an
        # actual thread and not a greenlet.

        if (not hasattr(sys, '_current_frames') or
                self.thread_id in sys._current_frames()):
            thread_instance = threading.currentThread()
            self._utilization_tracker = utilization_tracker(
                    self.application.name)
            if self._utilization_tracker:
                self._utilization_tracker.enter_transaction(thread_instance)
                self._thread_utilization_start = \
                        self._utilization_tracker.utilization_count()

        # We need to push an object onto the top of the
        # node stack so that children can reach back and
        # add themselves as children to the parent. We
        # can't use ourself though as we then end up
        # with a reference count cycle which will cause
        # the destructor to never be called if the
        # __exit__() function is never called. We
        # instead push on to the top of the node stack a
        # dummy time trace object and when done we will
        # just grab what we need from that.

        self._node_stack.append(Sentinel())

        return self

    def __exit__(self, exc, value, tb):

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return

        # Ensure that we are actually back at the top of
        # transaction call stack. Assume that it is an
        # instrumentation error and return with hope that
        # will recover later.

        if not len(self._node_stack) == 1:
            _logger.error('Runtime instrumentation error. Exiting the '
                    'transaction but the node stack is not empty. '
                    'Node stack is %r. Report this issue to New Relic '
                    'support.\n%s', self._node_stack, ''.join(
                    traceback.format_stack()[:-1]))

            return

        # Mark as stopped and drop the transaction from
        # thread/coroutine local storage.
        #
        # Note that we validate the saved transaction ID
        # against that for the current transaction object
        # to protect against situations where a copy was
        # made of the transaction object for some reason.
        # Such a copy when garbage collected could trigger
        # this function and cause a deadlock if it occurs
        # while original transaction was being recorded.

        self._state = self.STATE_STOPPED

        if self._transaction_id != id(self):
            return

        if not self._settings:
            return

        if not self._dead:
            try:
                self.drop_transaction()
            except:  # Catch all
                _logger.exception('Unable to drop transaction.')
                raise

        # Record error if one was registered.

        if exc is not None and value is not None and tb is not None:
            self.record_exception(exc, value, tb)

        # Record the end time for transaction and then
        # calculate the duration.

        if not self.stopped:
            self.end_time = time.time()

        duration = self.end_time - self.start_time

        # Calculate overall user time.

        if not self._cpu_user_time_end:
            self._cpu_user_time_end = os.times()[0]

        if duration and self._cpu_user_time_end:
            self._cpu_user_time_value = (self._cpu_user_time_end -
                    self._cpu_user_time_start)

        # Calculate thread utilisation factor. Note that even if
        # we are tracking thread utilization we skip calculation
        # if duration is zero. Under normal circumstances this
        # should not occur but may if the system clock is wound
        # backwards and duration was squashed to zero due to the
        # request appearing to finish before it started. It may
        # also occur if true response time came in under the
        # resolution of the clock being used, but that is highly
        # unlikely as the overhead of the agent itself should
        # always ensure that that is hard to achieve.

        if self._utilization_tracker:
            self._utilization_tracker.exit_transaction()
            if self._thread_utilization_start and duration > 0.0:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = (
                            self._utilization_tracker.utilization_count())
                self._thread_utilization_value = (
                        self._thread_utilization_end -
                        self._thread_utilization_start) / duration

        # Derive generated values from the raw data. The
        # dummy root node has exclusive time of children
        # as negative number. Add our own duration to get
        # our own exclusive time.

        root = self._node_stack.pop()
        children = root.children

        exclusive = duration + root.exclusive

        # Construct final root node of transaction trace.
        # Freeze path in case not already done. This will
        # construct our path.

        self._freeze_path()

        if self.background_task:
            transaction_type = 'OtherTransaction'
        else:
            transaction_type = 'WebTransaction'

        group = self._group

        if group is None:
            if self.background_task:
                group = 'Python'
            else:
                group = 'Uri'

        if self.response_code != 0:
            self._response_properties['STATUS'] = str(self.response_code)

        metrics = self._transaction_metrics

        if self._bytes_read != 0:
            metrics['WSGI/Input/Bytes'] = self._bytes_read
        if self._bytes_sent != 0:
            metrics['WSGI/Output/Bytes'] = self._bytes_sent
        if self._calls_read != 0:
            metrics['WSGI/Input/Calls/read'] = self._calls_read
        if self._calls_readline != 0:
            metrics['WSGI/Input/Calls/readline'] = self._calls_readline
        if self._calls_readlines != 0:
            metrics['WSGI/Input/Calls/readlines'] = self._calls_readlines
        if self._calls_write != 0:
            metrics['WSGI/Output/Calls/write'] = self._calls_write
        if self._calls_yield != 0:
            metrics['WSGI/Output/Calls/yield'] = self._calls_yield

        if self._thread_utilization_value:
            metrics['Thread/Concurrency'] = \
                    '%.4f' % self._thread_utilization_value

        read_duration = 0
        if self._read_start:
            read_duration = self._read_end - self._read_start
            metrics['WSGI/Input/Time'] = '%.4f' % read_duration
        self.record_custom_metric('Python/WSGI/Input/Time', read_duration)

        sent_duration = 0
        if self._sent_start:
            if not self._sent_end:
                self._sent_end = time.time()
            sent_duration = self._sent_end - self._sent_start
            metrics['WSGI/Output/Time'] = '%.4f' % sent_duration
        self.record_custom_metric('Python/WSGI/Output/Time',
                           sent_duration)

        if self.queue_start:
            queue_wait = self.start_time - self.queue_start
            if queue_wait < 0:
                queue_wait = 0
            metrics['WebFrontend/QueueTime'] = '%.4f' % queue_wait

        self.record_custom_metric('Python/WSGI/Input/Bytes',
                           self._bytes_read)

        self.record_custom_metric('Python/WSGI/Input/Calls/read',
                           self._calls_read)
        self.record_custom_metric('Python/WSGI/Input/Calls/readline',
                           self._calls_readline)
        self.record_custom_metric('Python/WSGI/Input/Calls/readlines',
                           self._calls_readlines)

        self.record_custom_metric('Python/WSGI/Output/Bytes',
                           self._bytes_sent)
        self.record_custom_metric('Python/WSGI/Output/Calls/yield',
                           self._calls_yield)
        self.record_custom_metric('Python/WSGI/Output/Calls/write',
                           self._calls_write)

        if self._frameworks:
            for framework, version in self._frameworks:
                self.record_custom_metric('Python/Framework/%s/%s' %
                    (framework, version), 1)

        request_params = {}
        parameter_groups = {}

        if self.capture_params:
            request_params = self._request_params

        if self._request_environment:
            parameter_groups['Request environment'] = self._request_environment
        if self._response_properties:
            parameter_groups['Response properties'] = self._response_properties

        if self._transaction_metrics:
            parameter_groups['Transaction metrics'] = self._transaction_metrics

        node = newrelic.core.transaction_node.TransactionNode(
                settings=self._settings,
                path=self.path,
                type=transaction_type,
                group=group,
                name=self._name,
                request_uri=self._request_uri,
                response_code=self.response_code,
                request_params=request_params,
                custom_params=self._custom_params,
                queue_start=self.queue_start,
                start_time=self.start_time,
                end_time=self.end_time,
                duration=duration,
                exclusive=exclusive,
                children=tuple(children),
                errors=tuple(self._errors),
                slow_sql=tuple(self._slow_sql),
                apdex_t=self.apdex,
                suppress_apdex=self.suppress_apdex,
                custom_metrics=self._custom_metrics,
                parameter_groups=parameter_groups,
                guid=self.guid,
                rum_trace=self.rum_trace,
                cpu_time=self._cpu_user_time_value,
                suppress_transaction_trace=self.suppress_transaction_trace,
                client_cross_process_id=self.client_cross_process_id,
                referring_transaction_guid=self.referring_transaction_guid,
                record_tt=self.record_tt,
                synthetics_resource_id=self.synthetics_resource_id,
                synthetics_job_id=self.synthetics_job_id,
                synthetics_monitor_id=self.synthetics_monitor_id,
                synthetics_header=self.synthetics_header,
                is_part_of_cat=self.is_part_of_cat,
                trip_id=self.trip_id,
                path_hash=self.path_hash,
                referring_path_hash=self._referring_path_hash,
                alternate_path_hashes=self.alternate_path_hashes,
                )

        # Clear settings as we are all done and don't need it
        # anymore.

        self._settings = None
        self.enabled = False

        # Unless we are ignoring the transaction, record it. We
        # need to lock the profile samples and replace it with
        # an empty list just in case the thread profiler kicks
        # in just as we are trying to record the transaction.
        # If we don't, when processing the samples, addition of
        # new samples can cause an error.

        if not self.ignore_transaction:
            profile_samples = []

            if self._profile_samples:
                with self._transaction_lock:
                    profile_samples = self._profile_samples
                    self._profile_samples = deque()

            self._application.record_transaction(node,
                    (self.background_task, profile_samples))

    @property
    def state(self):
        return self._state

    @property
    def settings(self):
        return self._settings

    @property
    def application(self):
        return self._application

    @property
    def name(self):
        return self._name

    @property
    def group(self):
        return self._group

    @property
    def path(self):
        if self._frozen_path:
            return self._frozen_path

        if self.background_task:
            transaction_type = 'OtherTransaction'
        else:
            transaction_type = 'WebTransaction'

        group = self._group

        if group is None:
            if self.background_task:
                group = 'Python'
            else:
                group = 'Uri'

        name = self._name

        if name is None:
            name = '<undefined>'

        # Stripping the leading slash on the request URL held by
        # name when type is 'Uri' is to keep compatibility with
        # PHP agent and also possibly other agents. Leading
        # slash is not deleted for other category groups as the
        # leading slash may be significant in that situation.

        if self._group in ['Uri', 'NormalizedUri'] and name[:1] == '/':
            path = '%s/%s%s' % (transaction_type, group, name)
        else:
            path = '%s/%s/%s' % (transaction_type, group, name)

        return path

    @property
    def profile_sample(self):
        return self._profile_samples

    @property
    def trip_id(self):
        return self._trip_id or self.guid

    @property
    def alternate_path_hashes(self):
        """Return the alternate path hashes but not including the current path
        hash.

        """
        return sorted(set(self._alternate_path_hashes.values()) -
                set([self.path_hash]))

    @property
    def path_hash(self):
        """Path hash is a 32-bit digest of the string "appname;txn_name"
        XORed with the referring_path_hash. Since the txn_name can change
        during the course of a transaction, up to 10 path_hashes are stored
        in _alternate_path_hashes. Before generating the path hash, check the
        _alternate_path_hashes to determine if we've seen this identifier and
        return the value.

        """

        if not self.is_part_of_cat:
            return None

        identifier = '%s;%s' % (self.application.name, self.path)

        # Check if identifier is already part of the _alternate_path_hashes and
        # return the value if available.

        if self._alternate_path_hashes.get(identifier):
            return self._alternate_path_hashes[identifier]

        # If the referring_path_hash is unavailable then we use '0' as the
        # seed.

        try:
            seed = int((self._referring_path_hash or '0'), base=16)
        except Exception:
            seed = 0

        path_hash = generate_path_hash(identifier, seed)

        # Only store up to 10 alternate path hashes.

        if len(self._alternate_path_hashes) < 10:
            self._alternate_path_hashes[identifier] = path_hash

        return path_hash

    def add_profile_sample(self, stack_trace):
        if self._state != self.STATE_RUNNING:
            return

        self._profile_count += 1

        if self._profile_count < self._profile_skip:
            return

        self._profile_count = 0

        with self._transaction_lock:
            new_stack_trace = tuple(self._profile_frames.setdefault(
                    frame, frame) for frame in stack_trace)
            self._profile_samples.append(new_stack_trace)

            agent_limits = self._application.global_settings.agent_limits
            profile_maximum = agent_limits.xray_profile_maximum

            if len(self._profile_samples) >= profile_maximum:
                self._profile_samples = deque(itertools.islice(
                        self._profile_samples, 0,
                        len(self._profile_samples), 2))
                self._profile_skip = 2 * self._profile_skip

    def _freeze_path(self):
        if self._frozen_path is None:
            self._priority = None

            if self._group == 'Uri' and self._name != '/':
                # Apply URL normalization rules. We would only have raw
                # URLs where we were not specifically naming the web
                # transactions for a specific web framework to be a code
                # handler or otherwise.

                name, ignore = self._application.normalize_name(
                        self._name, 'url')

                if self._name != name:
                    self._group = 'NormalizedUri'
                    self._name = name

                self.ignore_transaction = self.ignore_transaction or ignore

            # Apply transaction rules on the full transaction name.

            path, ignore = self._application.normalize_name(
                    self.path, 'transaction')

            self.ignore_transaction = self.ignore_transaction or ignore

            # Apply segment whitelist rule to the segments on the full
            # transaction name. The path is frozen at this point and cannot be
            # further changed.

            self._frozen_path, ignore = self._application.normalize_name(
                    path, 'segment')

            self.ignore_transaction = self.ignore_transaction or ignore

            # Look up the apdex from the table of key transactions. If
            # current transaction is not a key transaction then use the
            # default apdex from settings. The path used at this point
            # is the frozen path.

            self.apdex = (self._settings.web_transactions_apdex.get(
                self.path) or self._settings.apdex_t)

    def set_transaction_name(self, name, group=None, priority=None):

        # Always perform this operation even if the transaction
        # is not active at the time, as it will be called from the
        # constructor. If the path has been frozen do not allow
        # name/group to be overridden. New priority then must be
        # same or greater than existing priority. If no priority
        # always override the existing name/group if not frozen.

        if self._priority is None:
            return

        if priority is not None and priority < self._priority:
            return

        if priority is not None:
            self._priority = priority

        # The name can be a URL for the default case. URLs are
        # supposed to be ASCII but we can get a URL with illegal
        # non-ASCII characters. As the rule patterns and
        # replacements are Unicode, we can get Unicode conversion
        # warnings or errors when the URL is converted to Unicode
        # and the default encoding is ASCII. Thus we need to
        # convert the URL to Unicode as Latin-1 explicitly to avoid
        # problems with illegal characters.

        if isinstance(name, bytes):
            name = name.decode('Latin-1')

        # Deal with users who use group wrongly and add a leading
        # slash on it. This will cause an empty segment which we
        # want to avoid. In that case insert back in Function as
        # the leading segment.

        group = group or 'Function'

        if group.startswith('/'):
            group = 'Function' + group

        self._group = group
        self._name = name

    def name_transaction(self, name, group=None, priority=None):
        #warnings.warn('Internal API change. Use set_transaction_name() '
        #        'instead of name_transaction().', DeprecationWarning,
        #        stacklevel=2)

        return self.set_transaction_name(name, group, priority)

    def record_exception(self, exc=None, value=None, tb=None,
                         params={}, ignore_errors=[]):

        # Bail out if the transaction is not active or
        # collection of errors not enabled.

        if not self._settings:
            return

        settings = self._settings
        error_collector = settings.error_collector

        if not error_collector.enabled or not settings.collect_errors:
            return

        # If no exception details provided, use current exception.

        if exc is None and value is None and tb is None:
            exc, value, tb = sys.exc_info()

        # Has to be an error to be logged.

        if exc is None or value is None or tb is None:
            return

        # Where ignore_errors is a callable it should return a
        # tri-state variable with the following behavior.
        #
        #   True - Ignore the error.
        #   False- Record the error.
        #   None - Use the default ignore rules.

        should_ignore = None

        if callable(ignore_errors):
            should_ignore = ignore_errors(exc, value, tb)
            if should_ignore:
                return

        module = value.__class__.__module__
        name = value.__class__.__name__

        if should_ignore is None:
            # We need to check for module.name and module:name.
            # Originally we used module.class but that was
            # inconsistent with everything else which used
            # module:name. So changed to use ':' as separator, but
            # for backward compatibility we need to support '.' as a
            # separator for the time being. Check the ':' form last as
            # we will use that name as the exception type.

            if module:
                fullname = '%s.%s' % (module, name)
            else:
                fullname = name

            if not callable(ignore_errors) and fullname in ignore_errors:
                return

            if fullname in error_collector.ignore_errors:
                return

            if module:
                fullname = '%s:%s' % (module, name)
            else:
                fullname = name

            if not callable(ignore_errors) and fullname in ignore_errors:
                return

            if fullname in error_collector.ignore_errors:
                return

        else:
            if module:
                fullname = '%s:%s' % (module, name)
            else:
                fullname = name

        # Only remember up to limit of what can be caught for a
        # single transaction. This could be trimmed further
        # later if there are already recorded errors and would
        # go over the harvest limit.

        if len(self._errors) >= settings.agent_limits.errors_per_transaction:
            return

        # Only add params if High Security Mode is off.

        if settings.high_security:
            custom_params = {}

        else:
            if params:
                custom_params = dict(self._custom_params)
                custom_params.update(params)
            else:
                custom_params = self._custom_params

        exc_type = exc.__name__

        try:
            message = str(value)
        except Exception:
            try:
                # Assume JSON encoding can handle unicode.
                message = six.text_type(value)
            except Exception:
                message = '<unprintable %s object>' % type(value).__name__

        # Check that we have not recorded this exception
        # previously for this transaction due to multiple
        # error traces triggering. This is not going to be
        # exact but the UI hides exceptions of same type
        # anyway. Better to undercount exceptions of the same type
        # and message than to count the same one multiple times.

        for error in self._errors:
            if error.type == fullname and error.message == message:
                return

        node = newrelic.core.error_node.ErrorNode(
                timestamp=time.time(),
                type=fullname,
                message=message,
                stack_trace=exception_stack(tb),
                custom_params=custom_params,
                file_name=None,
                line_number=None,
                source=None)

        # TODO Errors are recorded in time order. If
        # there are two exceptions of same type and
        # different message, the UI displays the first
        # one. In the PHP agent it was recording the
        # errors in reverse time order and so the UI
        # displayed the last one. What is the
        # official order in which they should be sent?

        self._errors.append(node)

    def notice_error(self, exc, value, tb, params={}, ignore_errors=[]):
        warnings.warn('Internal API change. Use record_exception() '
                'instead of notice_error().', DeprecationWarning,
                stacklevel=2)

        self.record_exception(exc, value, tb, params, ignore_errors)

    def record_custom_metric(self, name, value):
        self._custom_metrics.record_custom_metric(name, value)

    def record_custom_metrics(self, metrics):
        for name, value in metrics:
            self._custom_metrics.record_custom_metric(name, value)

    def record_metric(self, name, value):
        warnings.warn('Internal API change. Use record_custom_metric() '
                'instead of record_metric().', DeprecationWarning,
                stacklevel=2)

        return self.record_custom_metric(name, value)

    def active_node(self):
        if self._node_stack:
            return self._node_stack[-1]

    def _intern_string(self, value):
        return self._string_cache.setdefault(value, value)

    def _push_current(self, node):
        self._node_stack.append(node)

    def _pop_current(self, node):
        last = self._node_stack.pop()

        try:
            assert last == node

        except Exception:
            _logger.error('Runtime instrumentation error. Object on '
                    'transaction node stack when removing last value is '
                    'not the expected one, found %r, expected %r. Report '
                    'this issue to New Relic support.\n%s', last, node,
                    ''.join(traceback.format_stack()[:-1]))

            raise

        parent = self._node_stack[-1]

        return parent

    def _process_node(self, node):
        self._trace_node_count += 1
        node.node_count = self._trace_node_count

        if type(node) is newrelic.core.database_node.DatabaseNode:
            settings = self._settings
            if not settings.collect_traces:
                return
            if (not settings.slow_sql.enabled and
                    not settings.transaction_tracer.explain_enabled):
                return
            if settings.transaction_tracer.record_sql == 'off':
                return
            if node.duration < settings.transaction_tracer.explain_threshold:
                return
            self._slow_sql.append(node)

    def stop_recording(self):
        if not self.enabled:
            return

        if self.stopped:
            return

        if self.end_time:
            return

        self.end_time = time.time()
        self.stopped = True

        if self._utilization_tracker:
            if self._thread_utilization_start:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = (
                            self._utilization_tracker.utilization_count())

        self._cpu_user_time_end = os.times()[0]

    def add_custom_parameter(self, name, value):
        if not self._settings:
            return

        if not self._settings.high_security:
            self._custom_params[name] = value

    def add_custom_parameters(self, items):
        for name, value in items:
            self._custom_params[name] = value

    def add_user_attribute(self, name, value):
        #warnings.warn('Internal API change. Use add_custom_parameter() '
        #        'instead of add_user_attribute().', DeprecationWarning,
        #        stacklevel=2)
        self.add_custom_parameter(name, value)

    def add_user_attributes(self, items):
        #warnings.warn('Internal API change. Use add_custom_parameters() '
        #        'instead of add_user_attributes().', DeprecationWarning,
        #        stacklevel=2)
        self.add_custom_parameters(items)

    def add_framework_info(self, name, version=None):
        if name:
            self._frameworks.add((name, version))

    def dump(self, file):
        """Dumps details about the transaction to the file object."""

        print >> file, 'Application: %s' % (
                self.application.name)
        print >> file, 'Time Started: %s' % (
                time.asctime(time.localtime(self.start_time)))
        print >> file, 'Thread Id: %r' % (
                self.thread_id,)
        print >> file, 'Current Status: %d' % (
                self._state)
        print >> file, 'Recording Enabled: %s' % (
                self.enabled)
        print >> file, 'Ignore Transaction: %s' % (
                self.ignore_transaction)
        print >> file, 'Transaction Dead: %s' % (
                self._dead)
        print >> file, 'Transaction Stopped: %s' % (
                self.stopped)
        print >> file, 'Background Task: %s' % (
                self.background_task)
        print >> file, 'Request URI: %s' % (
                self._request_uri)
        print >> file, 'Transaction Group: %s' % (
                self._group)
        print >> file, 'Transaction Name: %s' % (
                self._name)
        print >> file, 'Name Priority: %r' % (
                self._priority)
        print >> file, 'Frozen Path: %s' % (
                self._frozen_path)
        print >> file, 'AutoRUM Disabled: %s' % (
                self.autorum_disabled)
        print >> file, 'Suppress Apdex: %s' % (
                self.suppress_apdex)

        print >> file, 'Node Stack:'
        for node in self._node_stack[1:]:
            print >> file, node
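
The path_hash property in the example above describes a 32-bit digest of the string "appname;txn_name" combined with the referring path hash. The real generate_path_hash helper is imported from another module and is not shown here; the following is only a minimal sketch of that kind of calculation, assuming (not taken from this code) an MD5-based digest and a one-bit left rotation of the 32-bit seed:

import hashlib

def sketch_generate_path_hash(identifier, seed):
    # Hypothetical reconstruction for illustration only; the agent's real
    # generate_path_hash is defined in a module not shown in this example.
    rotated = ((seed << 1) | (seed >> 31)) & 0xffffffff  # rotate 32-bit seed left by 1
    digest = int(hashlib.md5(identifier.encode('utf-8')).hexdigest()[-8:], 16)
    return '%08x' % (rotated ^ digest)

# Mirrors the seed handling in path_hash: '0' is used when there is no
# referring_path_hash.
print(sketch_generate_path_hash('MyApp;WebTransaction/Uri/index', int('0', 16)))
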
Code example #10
0
class _GCDataSource(object):
    def __init__(self, settings, environ):
        self.gc_time_metrics = CustomMetrics()
        self.start_time = 0.0
        self.previous_stats = {}
        self.pid = os.getpid()

    @property
    def enabled(self):
        if platform.python_implementation() == "PyPy":
            return False
        else:
            settings = global_settings()
            return settings.gc_runtime_metrics.enabled

    @property
    def top_object_count_limit(self):
        settings = global_settings()
        return settings.gc_runtime_metrics.top_object_count_limit

    def record_gc(self, phase, info):
        if not self.enabled:
            return

        current_generation = info["generation"]

        if phase == "start":
            self.start_time = time.time()
        elif phase == "stop":
            total_time = time.time() - self.start_time
            self.gc_time_metrics.record_custom_metric(
                "GC/time/%d/all" % self.pid, total_time)
            for gen in range(0, 3):
                if gen <= current_generation:
                    self.gc_time_metrics.record_custom_metric(
                        "GC/time/%d/%d" % (self.pid, gen), total_time)
                else:
                    self.gc_time_metrics.record_custom_metric(
                        "GC/time/%d/%d" % (self.pid, gen), 0)

    def start(self):
        if hasattr(gc, "callbacks"):
            gc.callbacks.append(self.record_gc)

    def stop(self):
        # The callback must be removed before resetting the metrics tables.
        # If it isn't, it's possible to be interrupted by the gc and to have
        # metrics appear in a table that should be empty.
        if hasattr(gc, "callbacks") and self.record_gc in gc.callbacks:
            gc.callbacks.remove(self.record_gc)

        self.gc_time_metrics.reset_metric_stats()
        self.start_time = 0.0

    def __call__(self):
        if not self.enabled:
            return

        # Record object count in total and per generation
        if hasattr(gc, "get_count"):
            counts = gc.get_count()
            yield ("GC/objects/%d/all" % self.pid, {"count": sum(counts)})
            for gen, count in enumerate(counts):
                yield (
                    "GC/objects/%d/generation/%d" % (self.pid, gen),
                    {
                        "count": count
                    },
                )

        # Record object counts for the types with the highest counts, up to the configured limit
        if hasattr(gc, "get_objects"):
            object_types = map(type, gc.get_objects())
            if self.top_object_count_limit > 0:
                highest_types = Counter(object_types).most_common(
                    self.top_object_count_limit)
                for obj_type, count in highest_types:
                    yield (
                        "GC/objects/%d/type/%s" %
                        (self.pid, callable_name(obj_type)),
                        {
                            "count": count
                        },
                    )

        if hasattr(gc, "get_stats"):
            stats_by_gen = gc.get_stats()
            if isinstance(stats_by_gen, list):
                for stat_name in stats_by_gen[0].keys():
                    # Aggregate metrics for /all
                    count = sum(stats[stat_name] for stats in stats_by_gen)
                    previous_value = self.previous_stats.get(
                        (stat_name, "all"), 0)
                    self.previous_stats[(stat_name, "all")] = count
                    change_in_value = count - previous_value
                    yield (
                        "GC/%s/%d/all" % (stat_name, self.pid),
                        {
                            "count": change_in_value
                        },
                    )

                    # Breakdowns by generation
                    for gen, stats in enumerate(stats_by_gen):
                        previous_value = self.previous_stats.get(
                            (stat_name, gen), 0)
                        self.previous_stats[(stat_name,
                                             gen)] = stats[stat_name]
                        change_in_value = stats[stat_name] - previous_value

                        yield (
                            "GC/%s/%d/%d" % (stat_name, self.pid, gen),
                            {
                                "count": change_in_value
                            },
                        )

        # In order to avoid a concurrency issue with getting interrupted by the
        # garbage collector, we save a reference to the old metrics table, and overwrite
        # self.gc_time_metrics with a new empty table via reset_metric_stats().
        # This guards against losing data points, or having inconsistent data points
        # reported between /all and the totals of /generation/%d metrics.
        gc_time_metrics = self.gc_time_metrics.metrics()
        self.gc_time_metrics.reset_metric_stats()

        for metric in gc_time_metrics:
            raw_metric = metric[1]
            yield metric[0], {
                "count": raw_metric.call_count,
                "total": raw_metric.total_call_time,
                "min": raw_metric.min_call_time,
                "max": raw_metric.max_call_time,
                "sum_of_squares": raw_metric.sum_of_squares,
            }
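
A quick way to see what _GCDataSource yields is to drive it by hand: register the callback, force a collection, then iterate the generator. This is only a sketch; it assumes the imports used by the class above are in scope, that gc_runtime_metrics.enabled is turned on in global_settings() (otherwise nothing is recorded or yielded), and that passing None for the unused settings/environ arguments is acceptable.

import gc

# Assumes gc_runtime_metrics.enabled is True in global_settings(); otherwise
# record_gc and __call__ return without yielding anything.
source = _GCDataSource(settings=None, environ=None)
source.start()                  # appends record_gc to gc.callbacks
gc.collect()                    # fires the "start"/"stop" callbacks
for name, value in source():    # yields (metric_name, value) tuples
    print(name, value)
source.stop()                   # removes the callback and resets the tables
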
Code example #11
0
def test_http_payload_compression(server, client_cls, method, threshold):
    payload = b"*" * 20

    internal_metrics = CustomMetrics()

    with client_cls(
        "localhost",
        server.port,
        disable_certificate_validation=True,
        compression_method=method,
        compression_threshold=threshold,
    ) as client:
        with InternalTraceContext(internal_metrics):
            status, data = client.send_request(
                payload=payload, params={"method": "test"}
            )

    assert status == 200
    data = data.split(b"\n")
    sent_payload = data[-1]
    payload_byte_len = len(sent_payload)

    internal_metrics = dict(internal_metrics.metrics())
    if client_cls is ApplicationModeClient:
        assert internal_metrics["Supportability/Python/Collector/Output/Bytes/test"][
            :2
        ] == [1, payload_byte_len,]

        if threshold < 20:
            # Verify compression time is recorded
            assert (
                internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][
                    0
                ]
                == 1
            )
            assert (
                internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][
                    1
                ]
                > 0
            )

            # Verify the original payload length is recorded
            assert internal_metrics["Supportability/Python/Collector/ZLIB/Bytes/test"][
                :2
            ] == [1, len(payload)]

            assert len(internal_metrics) == 3
        else:
            # Verify no ZLIB compression metrics were sent
            assert len(internal_metrics) == 1
    else:
        assert not internal_metrics

    if threshold < 20:
        expected_content_encoding = method.encode("utf-8")
        assert sent_payload != payload
        if method == "deflate":
            sent_payload = zlib.decompress(sent_payload)
        elif method == "gzip":
            decompressor = zlib.decompressobj(31)
            sent_payload = decompressor.decompress(sent_payload)
            sent_payload += decompressor.flush()
    else:
        expected_content_encoding = b"Identity"

    for header in data[1:-1]:
        if header.lower().startswith(b"content-encoding"):
            _, content_encoding = header.split(b":", 1)
            content_encoding = content_encoding.strip()
            break
    else:
        assert False, "Missing content-encoding header"

    assert content_encoding == expected_content_encoding
    assert sent_payload == payload
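
The decompression branch above relies on the standard library: output from zlib.compress ("deflate") round-trips through zlib.decompress, while zlib.decompressobj(31) (wbits = 16 + 15) expects the gzip wrapper. A standalone sketch of the same round trip the test verifies, using only the standard library and independent of the client classes:

import gzip
import zlib

payload = b"*" * 20

# "deflate": zlib.compress output decompresses directly.
deflated = zlib.compress(payload)
assert zlib.decompress(deflated) == payload

# "gzip": needs the gzip wrapper; decompressobj(31) expects a gzip header,
# matching the branch in the test above.
gzipped = gzip.compress(payload)
decompressor = zlib.decompressobj(31)
assert decompressor.decompress(gzipped) + decompressor.flush() == payload
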
Code example #12
0
def transaction_node(request):
    default_capacity = SampledDataSet().capacity
    num_events = default_capacity + 1

    custom_events = SampledDataSet(capacity=num_events)
    for _ in range(num_events):
        event = create_custom_event("Custom", {})
        custom_events.add(event)

    error = ErrorNode(
        timestamp=0,
        type="foo:bar",
        message="oh no! your foo had a bar",
        expected=False,
        span_id=None,
        stack_trace="",
        custom_params={},
        file_name=None,
        line_number=None,
        source=None,
    )

    errors = tuple(error for _ in range(num_events))

    function = FunctionNode(
        group="Function",
        name="foo",
        children=(),
        start_time=0,
        end_time=1,
        duration=1,
        exclusive=1,
        label=None,
        params=None,
        rollup=None,
        guid="GUID",
        agent_attributes={},
        user_attributes={},
    )

    children = tuple(function for _ in range(num_events))

    root = RootNode(
        name="Function/main",
        children=children,
        start_time=1524764430.0,
        end_time=1524764430.1,
        duration=0.1,
        exclusive=0.1,
        guid=None,
        agent_attributes={},
        user_attributes={},
        path="OtherTransaction/Function/main",
        trusted_parent_span=None,
        tracing_vendors=None,
    )

    node = TransactionNode(
        settings=finalize_application_settings({"agent_run_id": "1234567"}),
        path="OtherTransaction/Function/main",
        type="OtherTransaction",
        group="Function",
        base_name="main",
        name_for_metric="Function/main",
        port=None,
        request_uri=None,
        queue_start=0.0,
        start_time=1524764430.0,
        end_time=1524764430.1,
        last_byte_time=0.0,
        total_time=0.1,
        response_time=0.1,
        duration=0.1,
        exclusive=0.1,
        root=root,
        errors=errors,
        slow_sql=(),
        custom_events=custom_events,
        apdex_t=0.5,
        suppress_apdex=False,
        custom_metrics=CustomMetrics(),
        guid="4485b89db608aece",
        cpu_time=0.0,
        suppress_transaction_trace=False,
        client_cross_process_id=None,
        referring_transaction_guid=None,
        record_tt=False,
        synthetics_resource_id=None,
        synthetics_job_id=None,
        synthetics_monitor_id=None,
        synthetics_header=None,
        is_part_of_cat=False,
        trip_id="4485b89db608aece",
        path_hash=None,
        referring_path_hash=None,
        alternate_path_hashes=[],
        trace_intrinsics={},
        distributed_trace_intrinsics={},
        agent_attributes=[],
        user_attributes=[],
        priority=1.0,
        parent_transport_duration=None,
        parent_span=None,
        parent_type=None,
        parent_account=None,
        parent_app=None,
        parent_tx=None,
        parent_transport_type=None,
        sampled=True,
        root_span_guid=None,
        trace_id="4485b89db608aece",
        loop_time=0.0,
    )
    return node
Code example #13
0
class Transaction(object):

    STATE_PENDING = 0
    STATE_RUNNING = 1
    STATE_STOPPED = 2

    def __init__(self, application, enabled=None):

        self._application = application

        self.thread_id = transaction_cache().current_thread_id()

        self._transaction_id = id(self)
        self._transaction_lock = threading.Lock()

        self._dead = False

        self._state = self.STATE_PENDING
        self._settings = None

        self._priority = 0
        self._group = None
        self._name = None

        self._frameworks = set()

        self._frozen_path = None

        self.current_node = None

        self._request_uri = None
        self._port = None

        self.queue_start = 0.0

        self.start_time = 0.0
        self.end_time = 0.0
        self.last_byte_time = 0.0

        self.total_time = None

        self.stopped = False

        self._trace_node_count = 0

        self._errors = []
        self._slow_sql = []
        self._custom_events = SampledDataSet(capacity=DEFAULT_RESERVOIR_SIZE)

        self._stack_trace_count = 0
        self._explain_plan_count = 0

        self._string_cache = {}

        self._custom_params = {}
        self._request_params = {}

        self._utilization_tracker = None

        self._thread_utilization_start = None
        self._thread_utilization_end = None
        self._thread_utilization_value = None

        self._cpu_user_time_start = None
        self._cpu_user_time_end = None
        self._cpu_user_time_value = 0.0

        self._read_length = None

        self._read_start = None
        self._read_end = None

        self._sent_start = None
        self._sent_end = None

        self._bytes_read = 0
        self._bytes_sent = 0

        self._calls_read = 0
        self._calls_readline = 0
        self._calls_readlines = 0

        self._calls_write = 0
        self._calls_yield = 0

        self._request_environment = {}
        self._response_properties = {}
        self._transaction_metrics = {}

        self.background_task = False

        self.enabled = False
        self.autorum_disabled = False

        self.ignore_transaction = False
        self.suppress_apdex = False
        self.suppress_transaction_trace = False

        self.capture_params = None

        self.response_code = 0

        self.apdex = 0

        self.rum_token = None

        # 16-digit random hex. Padded with zeros in the front.
        self.guid = '%016x' % random.getrandbits(64)

        self.client_cross_process_id = None
        self.client_account_id = None
        self.client_application_id = None
        self.referring_transaction_guid = None
        self.record_tt = False
        self._trip_id = None
        self._referring_path_hash = None
        self._alternate_path_hashes = {}
        self.is_part_of_cat = False

        self.synthetics_resource_id = None
        self.synthetics_job_id = None
        self.synthetics_monitor_id = None
        self.synthetics_header = None

        self._custom_metrics = CustomMetrics()

        self._profile_samples = deque()
        self._profile_frames = {}
        self._profile_skip = 1
        self._profile_count = 0

        global_settings = application.global_settings

        if global_settings.enabled:
            if enabled or (enabled is None and application.enabled):
                self._settings = application.settings
                if not self._settings:
                    application.activate()

                    # Check again whether the settings are now valid
                    # in case a startup timeout had been specified
                    # and registration had been started and
                    # completed within the timeout.

                    self._settings = application.settings

                if self._settings:
                    self.enabled = True

    def __del__(self):
        self._dead = True
        if self._state == self.STATE_RUNNING:
            self.__exit__(None, None, None)

    def save_transaction(self):
        transaction_cache().save_transaction(self)

    def drop_transaction(self):
        transaction_cache().drop_transaction(self)

    def __enter__(self):

        assert (self._state == self.STATE_PENDING)

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return self

        # Mark transaction as active and update state
        # used to validate correct usage of class.

        self._state = self.STATE_RUNNING

        # Cache transaction in thread/coroutine local
        # storage so that it can be accessed from
        # anywhere in the context of the transaction.

        try:
            self.save_transaction()
        except:  # Catch all
            self._state = self.STATE_PENDING
            self.enabled = False
            raise

        # Record the start time for transaction.

        self.start_time = time.time()

        # Record initial CPU user time.

        self._cpu_user_time_start = os.times()[0]

        # Calculate initial thread utilisation factor.
        # For now we only do this if we know it is an
        # actual thread and not a greenlet.

        if (not hasattr(sys, '_current_frames')
                or self.thread_id in sys._current_frames()):
            thread_instance = threading.currentThread()
            self._utilization_tracker = utilization_tracker(
                self.application.name)
            if self._utilization_tracker:
                self._utilization_tracker.enter_transaction(thread_instance)
                self._thread_utilization_start = \
                        self._utilization_tracker.utilization_count()

        # We need to push an object onto the top of the
        # node stack so that children can reach back and
        # add themselves as children to the parent. We
        # can't use ourself though as we then end up
        # with a reference count cycle which will cause
        # the destructor to never be called if the
        # __exit__() function is never called. We
        # instead push on to the top of the node stack a
        # dummy time trace object and when done we will
        # just grab what we need from that.

        self.current_node = Sentinel()

        return self

    def __exit__(self, exc, value, tb):

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return

        # Ensure that we are actually back at the top of
        # the transaction call stack. If not, assume it is an
        # instrumentation error and return in the hope that
        # things will recover later.

        if not isinstance(self.current_node, Sentinel):
            _logger.error(
                'Transaction ended but current_node is not Sentinel.'
                ' Current node is %r. Report this issue to New Relic '
                'support.\n%s', self.current_node,
                ''.join(traceback.format_stack()[:-1]))

            return

        # Mark as stopped and drop the transaction from
        # thread/coroutine local storage.
        #
        # Note that we validate the saved transaction ID
        # against that for the current transaction object
        # to protect against situations where a copy was
        # made of the transaction object for some reason.
        # Such a copy when garbage collected could trigger
        # this function and cause a deadlock if it occurs
        # while original transaction was being recorded.

        self._state = self.STATE_STOPPED

        if self._transaction_id != id(self):
            return

        if not self._settings:
            return

        if not self._dead:
            try:
                self.drop_transaction()
            except:  # Catch all
                _logger.exception('Unable to drop transaction.')
                raise

        # Record error if one was registered.

        if exc is not None and value is not None and tb is not None:
            self.record_exception(exc, value, tb)

        # Record the end time for transaction and then
        # calculate the duration.

        if not self.stopped:
            self.end_time = time.time()

        # Calculate transaction duration

        duration = self.end_time - self.start_time

        # Calculate response time. Calculation depends on whether
        # a web response was sent back.

        if self.last_byte_time == 0.0:
            response_time = duration
        else:
            response_time = self.last_byte_time - self.start_time

        # Calculate overall user time.

        if not self._cpu_user_time_end:
            self._cpu_user_time_end = os.times()[0]

        if duration and self._cpu_user_time_end:
            self._cpu_user_time_value = (self._cpu_user_time_end -
                                         self._cpu_user_time_start)

        # Calculate thread utilisation factor. Note that even if
        # we are tracking thread utilization we skip calculation
        # if duration is zero. Under normal circumstances this
        # should not occur but may if the system clock is wound
        # backwards and duration was squashed to zero due to the
        # request appearing to finish before it started. It may
        # also occur if true response time came in under the
        # resolution of the clock being used, but that is highly
        # unlikely as the overhead of the agent itself should
        # always ensure that that is hard to achieve.

        if self._utilization_tracker:
            self._utilization_tracker.exit_transaction()
            if self._thread_utilization_start is not None and duration > 0.0:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = (
                        self._utilization_tracker.utilization_count())
                self._thread_utilization_value = (
                    self._thread_utilization_end -
                    self._thread_utilization_start) / duration

        # Derive generated values from the raw data. The
        # dummy root node has exclusive time of children
        # as negative number. Add our own duration to get
        # our own exclusive time.

        root = self.current_node
        children = root.children

        exclusive = duration + root.exclusive

        # Calculate total time.
        #
        # Because we do not track activity on threads, and we currently
        # don't allocate waiting time in the IOLoop to separate segments
        # (like External or Datastore), for right now, our total_time is
        # equal to the duration of the transaction.

        self.total_time = duration

        # Construct final root node of transaction trace.
        # Freeze the path in case that has not already been done.
        # This will construct our path.

        self._freeze_path()

        if self.background_task:
            transaction_type = 'OtherTransaction'
        else:
            transaction_type = 'WebTransaction'

        group = self._group

        if group is None:
            if self.background_task:
                group = 'Python'
            else:
                group = 'Uri'

        if self.response_code != 0:
            self._response_properties['STATUS'] = str(self.response_code)

        # _sent_end should already be set by this point, but in case it
        # isn't, set it now before we record the custom metrics.

        if self._sent_start:
            if not self._sent_end:
                self._sent_end = time.time()

        self.record_custom_metric('Python/WSGI/Input/Bytes', self._bytes_read)
        self.record_custom_metric('Python/WSGI/Input/Time', self.read_duration)
        self.record_custom_metric('Python/WSGI/Input/Calls/read',
                                  self._calls_read)
        self.record_custom_metric('Python/WSGI/Input/Calls/readline',
                                  self._calls_readline)
        self.record_custom_metric('Python/WSGI/Input/Calls/readlines',
                                  self._calls_readlines)

        self.record_custom_metric('Python/WSGI/Output/Bytes', self._bytes_sent)
        self.record_custom_metric('Python/WSGI/Output/Time',
                                  self.sent_duration)
        self.record_custom_metric('Python/WSGI/Output/Calls/yield',
                                  self._calls_yield)
        self.record_custom_metric('Python/WSGI/Output/Calls/write',
                                  self._calls_write)

        if self._frameworks:
            for framework, version in self._frameworks:
                self.record_custom_metric(
                    'Python/Framework/%s/%s' % (framework, version), 1)

        node = newrelic.core.transaction_node.TransactionNode(
            settings=self._settings,
            path=self.path,
            type=transaction_type,
            group=group,
            base_name=self._name,
            name_for_metric=self.name_for_metric,
            port=self._port,
            request_uri=self._request_uri,
            response_code=self.response_code,
            queue_start=self.queue_start,
            start_time=self.start_time,
            end_time=self.end_time,
            last_byte_time=self.last_byte_time,
            total_time=self.total_time,
            response_time=response_time,
            duration=duration,
            exclusive=exclusive,
            children=tuple(children),
            errors=tuple(self._errors),
            slow_sql=tuple(self._slow_sql),
            custom_events=self._custom_events,
            apdex_t=self.apdex,
            suppress_apdex=self.suppress_apdex,
            custom_metrics=self._custom_metrics,
            guid=self.guid,
            cpu_time=self._cpu_user_time_value,
            suppress_transaction_trace=self.suppress_transaction_trace,
            client_cross_process_id=self.client_cross_process_id,
            referring_transaction_guid=self.referring_transaction_guid,
            record_tt=self.record_tt,
            synthetics_resource_id=self.synthetics_resource_id,
            synthetics_job_id=self.synthetics_job_id,
            synthetics_monitor_id=self.synthetics_monitor_id,
            synthetics_header=self.synthetics_header,
            is_part_of_cat=self.is_part_of_cat,
            trip_id=self.trip_id,
            path_hash=self.path_hash,
            referring_path_hash=self._referring_path_hash,
            alternate_path_hashes=self.alternate_path_hashes,
            trace_intrinsics=self.trace_intrinsics,
            agent_attributes=self.agent_attributes,
            user_attributes=self.user_attributes,
        )

        # Clear settings as we are all done and don't need it
        # anymore.

        self._settings = None
        self.enabled = False

        # Unless we are ignoring the transaction, record it. We
        # need to lock the profile samples and replace it with
        # an empty list just in case the thread profiler kicks
        # in just as we are trying to record the transaction.
        # If we don't, when processing the samples, addition of
        # new samples can cause an error.

        if not self.ignore_transaction:
            profile_samples = []

            if self._profile_samples:
                with self._transaction_lock:
                    profile_samples = self._profile_samples
                    self._profile_samples = deque()

            self._application.record_transaction(
                node, (self.background_task, profile_samples))

    @property
    def state(self):
        return self._state

    @property
    def settings(self):
        return self._settings

    @property
    def application(self):
        return self._application

    @property
    def type(self):
        if self.background_task:
            transaction_type = 'OtherTransaction'
        else:
            transaction_type = 'WebTransaction'
        return transaction_type

    @property
    def name(self):
        return self._name

    @property
    def group(self):
        return self._group

    @property
    def name_for_metric(self):
        """Combine group and name for use as transaction name in metrics."""

        group = self._group

        if group is None:
            if self.background_task:
                group = 'Python'
            else:
                group = 'Uri'

        transaction_name = self._name

        if transaction_name is None:
            transaction_name = '<undefined>'

        # Stripping the leading slash on the request URL held by
        # transaction_name when type is 'Uri' is to keep compatibility
        # with PHP agent and also possibly other agents. Leading
        # slash is not deleted for other category groups as the
        # leading slash may be significant in that situation.

        if (group in ('Uri', 'NormalizedUri')
                and transaction_name.startswith('/')):
            name = '%s%s' % (group, transaction_name)
        else:
            name = '%s/%s' % (group, transaction_name)

        return name

    @property
    def path(self):
        if self._frozen_path:
            return self._frozen_path

        return '%s/%s' % (self.type, self.name_for_metric)

    @property
    def profile_sample(self):
        return self._profile_samples

    @property
    def trip_id(self):
        return self._trip_id or self.guid

    @property
    def alternate_path_hashes(self):
        """Return the alternate path hashes but not including the current path
        hash.

        """
        return sorted(
            set(self._alternate_path_hashes.values()) - set([self.path_hash]))

    @property
    def path_hash(self):
        """Path hash is a 32-bit digest of the string "appname;txn_name"
        XORed with the referring_path_hash. Since the txn_name can change
        during the course of a transaction, up to 10 path_hashes are stored
        in _alternate_path_hashes. Before generating the path hash, check the
        _alternate_path_hashes to determine if we've seen this identifier and
        return the value.

        """

        if not self.is_part_of_cat:
            return None

        identifier = '%s;%s' % (self.application.name, self.path)

        # Check if identifier is already part of the _alternate_path_hashes and
        # return the value if available.

        if self._alternate_path_hashes.get(identifier):
            return self._alternate_path_hashes[identifier]

        # If the referring_path_hash is unavailable then we use '0' as the
        # seed.

        try:
            seed = int((self._referring_path_hash or '0'), base=16)
        except Exception:
            seed = 0

        path_hash = generate_path_hash(identifier, seed)

        # Only store up to 10 alternate path hashes.

        if len(self._alternate_path_hashes) < 10:
            self._alternate_path_hashes[identifier] = path_hash

        return path_hash

    @property
    def attribute_filter(self):
        return self._settings.attribute_filter

    @property
    def read_duration(self):
        read_duration = 0
        if self._read_start and self._read_end:
            read_duration = self._read_end - self._read_start
        return read_duration

    @property
    def sent_duration(self):
        sent_duration = 0
        if self._sent_start and self._sent_end:
            sent_duration = self._sent_end - self._sent_start
        return sent_duration

    @property
    def queue_wait(self):
        queue_wait = 0
        if self.queue_start:
            queue_wait = self.start_time - self.queue_start
            if queue_wait < 0:
                queue_wait = 0
        return queue_wait

    @property
    def trace_intrinsics(self):
        """Intrinsic attributes for transaction traces and error traces"""
        i_attrs = {}

        if self.referring_transaction_guid:
            i_attrs['referring_transaction_guid'] = \
                    self.referring_transaction_guid
        if self.client_cross_process_id:
            i_attrs['client_cross_process_id'] = self.client_cross_process_id
        if self.trip_id:
            i_attrs['trip_id'] = self.trip_id
        if self.path_hash:
            i_attrs['path_hash'] = self.path_hash
        if self.synthetics_resource_id:
            i_attrs['synthetics_resource_id'] = self.synthetics_resource_id
        if self.synthetics_job_id:
            i_attrs['synthetics_job_id'] = self.synthetics_job_id
        if self.synthetics_monitor_id:
            i_attrs['synthetics_monitor_id'] = self.synthetics_monitor_id
        if self.total_time:
            i_attrs['totalTime'] = self.total_time

        # Add in special CPU time value for UI to display CPU burn.

        # XXX Disable cpu time value for CPU burn as was
        # previously reporting incorrect value and we need to
        # fix it, at least on Linux to report just the CPU time
        # for the executing thread.

        # if self._cpu_user_time_value:
        #     i_attrs['cpu_time'] = self._cpu_user_time_value

        return i_attrs

    @property
    def request_parameters_attributes(self):
        # Request parameters are a special case of agent attributes, so
        # they must be added on to agent_attributes separately

        # There are 3 cases we need to handle:
        #
        # 1. LEGACY: capture_params = False
        #
        #    Don't add request parameters at all, which means they will not
        #    go through the AttributeFilter.
        #
        # 2. LEGACY: capture_params = True
        #
        #    Filter request parameters through the AttributeFilter, but
        #    set the destinations to `TRANSACTION_TRACER | ERROR_COLLECTOR`.
        #
        #    If the user does not add any additional attribute filtering
        #    rules, this will result in the same outcome as the old
        #    capture_params = True behavior. They will be added to transaction
        #    traces and error traces.
        #
        # 3. CURRENT: capture_params is None
        #
        #    Filter request parameters through the AttributeFilter, but set
        #    the destinations to NONE.
        #
        #    That means by default, request parameters won't get included in
        #    any destination. But, it will allow user added include/exclude
        #    attribute filtering rules to be applied to the request parameters.

        attributes_request = []

        if (self.capture_params is None) or self.capture_params:

            if self._request_params:

                r_attrs = {}

                for k, v in self._request_params.items():
                    new_key = 'request.parameters.%s' % k
                    new_val = ",".join(v)

                    final_key, final_val = process_user_attribute(
                        new_key, new_val)

                    if final_key:
                        r_attrs[final_key] = final_val

                if self.capture_params is None:
                    attributes_request = create_attributes(
                        r_attrs, DST_NONE, self.attribute_filter)
                elif self.capture_params:
                    attributes_request = create_attributes(
                        r_attrs, DST_ERROR_COLLECTOR | DST_TRANSACTION_TRACER,
                        self.attribute_filter)

        return attributes_request

    @property
    def agent_attributes(self):
        a_attrs = {}
        settings = self._settings
        req_env = self._request_environment

        if req_env.get('HTTP_ACCEPT', None):
            a_attrs['request.headers.accept'] = req_env['HTTP_ACCEPT']
        if req_env.get('CONTENT_LENGTH', None):
            a_attrs['request.headers.contentLength'] = \
                    req_env['CONTENT_LENGTH']
        if req_env.get('CONTENT_TYPE', None):
            a_attrs['request.headers.contentType'] = req_env['CONTENT_TYPE']
        if req_env.get('HTTP_HOST', None):
            a_attrs['request.headers.host'] = req_env['HTTP_HOST']
        if req_env.get('HTTP_REFERER', None):
            a_attrs['request.headers.referer'] = req_env['HTTP_REFERER']
        if req_env.get('HTTP_USER_AGENT', None):
            a_attrs['request.headers.userAgent'] = req_env['HTTP_USER_AGENT']
        if req_env.get('REQUEST_METHOD', None):
            a_attrs['request.method'] = req_env['REQUEST_METHOD']

        resp_props = self._response_properties

        if resp_props.get('CONTENT_LENGTH', None):
            a_attrs['response.headers.contentLength'] = \
                    resp_props['CONTENT_LENGTH']
        if resp_props.get('CONTENT_TYPE', None):
            a_attrs['response.headers.contentType'] = \
                    resp_props['CONTENT_TYPE']
        if resp_props.get('STATUS', None):
            a_attrs['response.status'] = resp_props['STATUS']

        if self.read_duration != 0:
            a_attrs['wsgi.input.seconds'] = self.read_duration
        if self._bytes_read != 0:
            a_attrs['wsgi.input.bytes'] = self._bytes_read
        if self._calls_read != 0:
            a_attrs['wsgi.input.calls.read'] = self._calls_read
        if self._calls_readline != 0:
            a_attrs['wsgi.input.calls.readline'] = self._calls_readline
        if self._calls_readlines != 0:
            a_attrs['wsgi.input.calls.readlines'] = self._calls_readlines

        if self.sent_duration != 0:
            a_attrs['wsgi.output.seconds'] = self.sent_duration
        if self._bytes_sent != 0:
            a_attrs['wsgi.output.bytes'] = self._bytes_sent
        if self._calls_write != 0:
            a_attrs['wsgi.output.calls.write'] = self._calls_write
        if self._calls_yield != 0:
            a_attrs['wsgi.output.calls.yield'] = self._calls_yield

        if self._settings.process_host.display_name:
            a_attrs['host.displayName'] = settings.process_host.display_name
        if self._thread_utilization_value:
            a_attrs['thread.concurrency'] = self._thread_utilization_value
        if self.queue_wait != 0:
            a_attrs['webfrontend.queue.seconds'] = self.queue_wait

        agent_attributes = create_agent_attributes(a_attrs,
                                                   self.attribute_filter)

        # Include request parameters in agent attributes

        agent_attributes.extend(self.request_parameters_attributes)

        return agent_attributes

    @property
    def user_attributes(self):
        return create_user_attributes(self._custom_params,
                                      self.attribute_filter)

    def add_profile_sample(self, stack_trace):
        if self._state != self.STATE_RUNNING:
            return

        self._profile_count += 1

        if self._profile_count < self._profile_skip:
            return

        self._profile_count = 0

        with self._transaction_lock:
            new_stack_trace = tuple(
                self._profile_frames.setdefault(frame, frame)
                for frame in stack_trace)
            self._profile_samples.append(new_stack_trace)

            agent_limits = self._application.global_settings.agent_limits
            profile_maximum = agent_limits.xray_profile_maximum

            if len(self._profile_samples) >= profile_maximum:
                self._profile_samples = deque(
                    itertools.islice(self._profile_samples, 0,
                                     len(self._profile_samples), 2))
                self._profile_skip = 2 * self._profile_skip

    def _freeze_path(self):
        if self._frozen_path is None:
            self._priority = None

            if self._group == 'Uri' and self._name != '/':
                # Apply URL normalization rules. We would only have raw
                # URLs where we were not specifically naming the web
                # transactions for a specific web framework to be a code
                # handler or otherwise.

                name, ignore = self._application.normalize_name(
                    self._name, 'url')

                if self._name != name:
                    self._group = 'NormalizedUri'
                    self._name = name

                self.ignore_transaction = self.ignore_transaction or ignore

            # Apply transaction rules on the full transaction name.

            path, ignore = self._application.normalize_name(
                self.path, 'transaction')

            self.ignore_transaction = self.ignore_transaction or ignore

            # Apply segment whitelist rule to the segments on the full
            # transaction name. The path is frozen at this point and cannot be
            # further changed.

            self._frozen_path, ignore = self._application.normalize_name(
                path, 'segment')

            self.ignore_transaction = self.ignore_transaction or ignore

            # Look up the apdex from the table of key transactions. If
            # current transaction is not a key transaction then use the
            # default apdex from settings. The path used at this point
            # is the frozen path.

            self.apdex = (self._settings.web_transactions_apdex.get(self.path)
                          or self._settings.apdex_t)

    def set_transaction_name(self, name, group=None, priority=None):

        # Always perform this operation even if the transaction
        # is not active at the time, as this will be called from
        # the constructor. If the path has been frozen do not allow
        # name/group to be overridden. A new priority must be the
        # same as or greater than the existing priority. If no
        # priority is given, always override the existing name/group
        # if not frozen.
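        #
        # For example, once a name has been recorded with priority 2,
        # a later call passing priority 1 is ignored, while a call
        # passing priority 2 or higher, or passing no priority at all,
        # replaces the name and group.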

        if self._priority is None:
            return

        if priority is not None and priority < self._priority:
            return

        if priority is not None:
            self._priority = priority

        # The name can be a URL for the default case. URLs are
        # supposed to be ASCII but can get a URL with illegal
        # non ASCII characters. As the rule patterns and
        # replacements are Unicode then can get Unicode
        # conversion warnings or errors when URL is converted to
        # Unicode and default encoding is ASCII. Thus need to
        # convert URL to Unicode as Latin-1 explicitly to avoid
        # problems with illegal characters.

        if isinstance(name, bytes):
            name = name.decode('Latin-1')

        # Deal with users who use group wrongly and add a leading
        # slash on it. This will cause an empty segment which we
        # want to avoid. In that case insert back in Function as
        # the leading segment.

        group = group or 'Function'

        if group.startswith('/'):
            group = 'Function' + group

        self._group = group
        self._name = name

    def name_transaction(self, name, group=None, priority=None):
        return self.set_transaction_name(name, group, priority)

    def record_exception(self,
                         exc=None,
                         value=None,
                         tb=None,
                         params={},
                         ignore_errors=[]):

        # Bail out if the transaction is not active or
        # collection of errors not enabled.

        if not self._settings:
            return

        settings = self._settings
        error_collector = settings.error_collector

        if not error_collector.enabled:
            return

        if not settings.collect_errors and not settings.collect_error_events:
            return

        # If no exception details provided, use current exception.

        if exc is None and value is None and tb is None:
            exc, value, tb = sys.exc_info()

        # Has to be an error to be logged.

        if exc is None or value is None or tb is None:
            return

        # Where ignore_errors is a callable it should return a
        # tri-state variable with the following behavior.
        #
        #   True - Ignore the error.
        #   False - Record the error.
        #   None - Use the default ignore rules.

        should_ignore = None

        if callable(ignore_errors):
            should_ignore = ignore_errors(exc, value, tb)
            if should_ignore:
                return

        module = value.__class__.__module__
        name = value.__class__.__name__

        if should_ignore is None:
            # We need to check for module.name and module:name.
            # Originally we used module.class but that was
            # inconsistent with everything else which used
            # module:name. So changed to use ':' as separator, but
            # for backward compatibility need to support '.' as
            # separator for time being. Check that with the ':'
            # last as we will use that name as the exception type.

            if module:
                fullname = '%s.%s' % (module, name)
            else:
                fullname = name

            if not callable(ignore_errors) and fullname in ignore_errors:
                return

            if fullname in error_collector.ignore_errors:
                return

            if module:
                fullname = '%s:%s' % (module, name)
            else:
                fullname = name

            if not callable(ignore_errors) and fullname in ignore_errors:
                return

            if fullname in error_collector.ignore_errors:
                return

        else:
            if module:
                fullname = '%s:%s' % (module, name)
            else:
                fullname = name

        # Only remember up to limit of what can be caught for a
        # single transaction. This could be trimmed further
        # later if there are already recorded errors and would
        # go over the harvest limit.

        if len(self._errors) >= settings.agent_limits.errors_per_transaction:
            return

        # Only add params if High Security Mode is off.

        custom_params = {}

        if settings.high_security:
            if params:
                _logger.debug('Cannot add custom parameters in '
                              'High Security Mode.')
        else:
            try:
                for k, v in params.items():
                    name, val = process_user_attribute(k, v)
                    if name:
                        custom_params[name] = val
            except Exception:
                _logger.debug(
                    'Parameters failed to validate for unknown '
                    'reason. Dropping parameters for error: %r. Check '
                    'traceback for clues.',
                    fullname,
                    exc_info=True)
                custom_params = {}

        # Check to see if we need to strip the message before recording it.

        if (settings.strip_exception_messages.enabled and fullname
                not in settings.strip_exception_messages.whitelist):
            message = STRIP_EXCEPTION_MESSAGE
        else:
            try:

                # Favor unicode in exception messages.

                message = six.text_type(value)

            except Exception:
                try:

                    # If exception cannot be represented in unicode, this means
                    # that it is a byte string encoded with an encoding
                    # that is not compatible with the default system encoding.
                    # So, just pass this byte string along.

                    message = str(value)

                except Exception:
                    message = '<unprintable %s object>' % type(value).__name__

        # Check that we have not recorded this exception
        # previously for this transaction due to multiple
        # error traces triggering. This is not going to be
        # exact but the UI hides exceptions of same type
        # anyway. Better that we under count exceptions of
        # same type and message rather than count same one
        # multiple times.

        for error in self._errors:
            if error.type == fullname and error.message == message:
                return

        node = newrelic.core.error_node.ErrorNode(
            timestamp=time.time(),
            type=fullname,
            message=message,
            stack_trace=exception_stack(tb),
            custom_params=custom_params,
            file_name=None,
            line_number=None,
            source=None)

        # TODO Errors are recorded in time order. If
        # there are two exceptions of same type and
        # different message, the UI displays the first
        # one. In the PHP agent it was recording the
        # errors in reverse time order and so the UI
        # displayed the last one. What is the
        # official order in which they should be sent?

        self._errors.append(node)

    def notice_error(self, exc, value, tb, params={}, ignore_errors=[]):
        warnings.warn(
            'Internal API change. Use record_exception() '
            'instead of notice_error().',
            DeprecationWarning,
            stacklevel=2)

        self.record_exception(exc, value, tb, params, ignore_errors)

    def record_custom_metric(self, name, value):
        self._custom_metrics.record_custom_metric(name, value)

    def record_custom_metrics(self, metrics):
        for name, value in metrics:
            self._custom_metrics.record_custom_metric(name, value)

    def record_custom_event(self, event_type, params):
        settings = self._settings

        if not settings:
            return

        if not settings.custom_insights_events.enabled:
            return

        event = create_custom_event(event_type, params)
        if event:
            self._custom_events.add(event)

    def record_metric(self, name, value):
        warnings.warn(
            'Internal API change. Use record_custom_metric() '
            'instead of record_metric().',
            DeprecationWarning,
            stacklevel=2)

        return self.record_custom_metric(name, value)

    def active_node(self):
        return self.current_node

    def _intern_string(self, value):
        return self._string_cache.setdefault(value, value)

    def _push_current(self, node):
        self.current_node = node

    def _pop_current(self, node):
        parent = node.parent
        self.current_node = parent

        return parent

    def _process_node(self, node):
        self._trace_node_count += 1
        node.node_count = self._trace_node_count

        if type(node) is newrelic.core.database_node.DatabaseNode:
            settings = self._settings
            if not settings.collect_traces:
                return
            if (not settings.slow_sql.enabled
                    and not settings.transaction_tracer.explain_enabled):
                return
            if settings.transaction_tracer.record_sql == 'off':
                return
            if node.duration < settings.transaction_tracer.explain_threshold:
                return
            self._slow_sql.append(node)

    def stop_recording(self):
        if not self.enabled:
            return

        if self.stopped:
            return

        if self.end_time:
            return

        self.end_time = time.time()
        self.stopped = True

        if self._utilization_tracker:
            if self._thread_utilization_start:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = (
                        self._utilization_tracker.utilization_count())

        self._cpu_user_time_end = os.times()[0]

    def add_custom_parameter(self, name, value):
        if not self._settings:
            return False

        if self._settings.high_security:
            _logger.debug('Cannot add custom parameter in High Security Mode.')
            return False

        if len(self._custom_params) >= MAX_NUM_USER_ATTRIBUTES:
            _logger.debug(
                'Maximum number of custom attributes already '
                'added. Dropping attribute: %r=%r', name, value)
            return False

        key, val = process_user_attribute(name, value)

        if key is None:
            return False
        else:
            self._custom_params[key] = val
            return True

    def add_custom_parameters(self, items):
        # items is a list of (name, value) tuples.
        for name, value in items:
            self.add_custom_parameter(name, value)

    def add_user_attribute(self, name, value):
        self.add_custom_parameter(name, value)

    def add_user_attributes(self, items):
        self.add_custom_parameters(items)

    def add_framework_info(self, name, version=None):
        if name:
            self._frameworks.add((name, version))

    def dump(self, file):
        """Dumps details about the transaction to the file object."""

        print >> file, 'Application: %s' % (self.application.name)
        print >> file, 'Time Started: %s' % (time.asctime(
            time.localtime(self.start_time)))
        print >> file, 'Thread Id: %r' % (self.thread_id, )
        print >> file, 'Current Status: %d' % (self._state)
        print >> file, 'Recording Enabled: %s' % (self.enabled)
        print >> file, 'Ignore Transaction: %s' % (self.ignore_transaction)
        print >> file, 'Transaction Dead: %s' % (self._dead)
        print >> file, 'Transaction Stopped: %s' % (self.stopped)
        print >> file, 'Background Task: %s' % (self.background_task)
        print >> file, 'Request URI: %s' % (self._request_uri)
        print >> file, 'Transaction Group: %s' % (self._group)
        print >> file, 'Transaction Name: %s' % (self._name)
        print >> file, 'Name Priority: %r' % (self._priority)
        print >> file, 'Frozen Path: %s' % (self._frozen_path)
        print >> file, 'AutoRUM Disabled: %s' % (self.autorum_disabled)
        print >> file, 'Suppress Apdex: %s' % (self.suppress_apdex)
        print >> file, 'Current Node: %s' % (self.current_node)
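
The decimation step in add_profile_sample above halves the retained samples and doubles the skip interval once the sample cap is reached. Below is a minimal, standalone sketch of that behaviour; PROFILE_MAXIMUM is an invented stand-in for agent_limits.xray_profile_maximum and the integers stand in for interned stack-trace tuples.

import itertools
from collections import deque

# Hypothetical stand-in for agent_limits.xray_profile_maximum.
PROFILE_MAXIMUM = 8

samples = deque(range(10))   # pretend these are interned stack traces
profile_skip = 1

if len(samples) >= PROFILE_MAXIMUM:
    # Keep every second retained sample (indices 0, 2, 4, ...).
    samples = deque(itertools.islice(samples, 0, len(samples), 2))
    # Double the skip so future samples are recorded half as often.
    profile_skip = 2 * profile_skip

print(list(samples))   # [0, 2, 4, 6, 8]
print(profile_skip)    # 2
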
Code example #14
0
File: transaction.py  Project: tasnim07/BlackFly
class Transaction(object):

    STATE_PENDING = 0
    STATE_RUNNING = 1
    STATE_STOPPED = 2

    def __init__(self, application, enabled=None):

        self._application = application

        self.thread_id = transaction_cache().current_thread_id()

        self._transaction_id = id(self)
        self._transaction_lock = threading.Lock()

        self._dead = False

        self._state = self.STATE_PENDING
        self._settings = None

        self._priority = 0
        self._group = None
        self._name = None

        self._frameworks = set()

        self._frozen_path = None

        self._node_stack = []

        self._request_uri = None

        self.queue_start = 0.0

        self.start_time = 0.0
        self.end_time = 0.0

        self.stopped = False

        self._trace_node_count = 0

        self._errors = []
        self._slow_sql = []

        self._stack_trace_count = 0
        self._explain_plan_count = 0

        self._string_cache = {}

        self._custom_params = {}
        self._request_params = {}

        self._utilization_tracker = None

        self._thread_utilization_start = None
        self._thread_utilization_end = None
        self._thread_utilization_value = None

        self._cpu_user_time_start = None
        self._cpu_user_time_end = None
        self._cpu_user_time_value = 0.0

        self._read_length = None

        self._read_start = None
        self._read_end = None

        self._sent_start = None
        self._sent_end = None

        self._bytes_read = 0
        self._bytes_sent = 0

        self._calls_read = 0
        self._calls_readline = 0
        self._calls_readlines = 0

        self._calls_write = 0
        self._calls_yield = 0

        self._request_environment = {}
        self._response_properties = {}
        self._transaction_metrics = {}

        self.background_task = False

        self.enabled = False
        self.autorum_disabled = False

        self.ignore_transaction = False
        self.suppress_apdex = False
        self.suppress_transaction_trace = False

        self.capture_params = None

        self.response_code = 0

        self.apdex = 0

        self.rum_token = None

        # 16-digit random hex. Padded with zeros in the front.
        self.guid = "%016x" % random.getrandbits(64)

        self.client_cross_process_id = None
        self.client_account_id = None
        self.client_application_id = None
        self.referring_transaction_guid = None
        self.record_tt = False
        self._trip_id = None
        self._referring_path_hash = None
        self._alternate_path_hashes = {}
        self.is_part_of_cat = False

        self.synthetics_resource_id = None
        self.synthetics_job_id = None
        self.synthetics_monitor_id = None
        self.synthetics_header = None

        self._custom_metrics = CustomMetrics()

        self._profile_samples = deque()
        self._profile_frames = {}
        self._profile_skip = 1
        self._profile_count = 0

        global_settings = application.global_settings

        if global_settings.enabled:
            if enabled or (enabled is None and application.enabled):
                self._settings = application.settings
                if not self._settings:
                    application.activate()

                    # We see again if the settings is now valid
                    # in case startup timeout had been specified
                    # and registration had been started and
                    # completed within the timeout.

                    self._settings = application.settings

                if self._settings:
                    self.enabled = True

    def __del__(self):
        self._dead = True
        if self._state == self.STATE_RUNNING:
            self.__exit__(None, None, None)

    def save_transaction(self):
        transaction_cache().save_transaction(self)

    def drop_transaction(self):
        transaction_cache().drop_transaction(self)

    def __enter__(self):

        assert self._state == self.STATE_PENDING

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return self

        # Mark transaction as active and update state
        # used to validate correct usage of class.

        self._state = self.STATE_RUNNING

        # Cache transaction in thread/coroutine local
        # storage so that it can be accessed from
        # anywhere in the context of the transaction.

        try:
            self.save_transaction()
        except:  # Catch all
            self._state = self.STATE_PENDING
            self.enabled = False
            raise

        # Record the start time for transaction.

        self.start_time = time.time()

        # Record initial CPU user time.

        self._cpu_user_time_start = os.times()[0]

        # Calculate initial thread utilisation factor.
        # For now we only do this if we know it is an
        # actual thread and not a greenlet.

        if not hasattr(sys, "_current_frames") or self.thread_id in sys._current_frames():
            thread_instance = threading.currentThread()
            self._utilization_tracker = utilization_tracker(self.application.name)
            if self._utilization_tracker:
                self._utilization_tracker.enter_transaction(thread_instance)
                self._thread_utilization_start = self._utilization_tracker.utilization_count()

        # We need to push an object onto the top of the
        # node stack so that children can reach back and
        # add themselves as children to the parent. We
        # can't use ourself though as we then end up
        # with a reference count cycle which will cause
        # the destructor to never be called if the
        # __exit__() function is never called. We
        # instead push on to the top of the node stack a
        # dummy time trace object and when done we will
        # just grab what we need from that.

        self._node_stack.append(Sentinel())

        return self

    def __exit__(self, exc, value, tb):

        # Bail out if the transaction is not enabled.

        if not self.enabled:
            return

        # Ensure that we are actually back at the top of the
        # transaction call stack. If not, assume that it is an
        # instrumentation error and return, in the hope that
        # things will recover later.

        if not len(self._node_stack) == 1:
            _logger.error(
                "Runtime instrumentation error. Exiting the "
                "transaction but the node stack is not empty. "
                "Node stack is %r. Report this issue to New Relic "
                "support.\n%s",
                self._node_stack,
                "".join(traceback.format_stack()[:-1]),
            )

            return

        # Mark as stopped and drop the transaction from
        # thread/coroutine local storage.
        #
        # Note that we validate the saved transaction ID
        # against that for the current transaction object
        # to protect against situations where a copy was
        # made of the transaction object for some reason.
        # Such a copy when garbage collected could trigger
        # this function and cause a deadlock if it occurs
        # while original transaction was being recorded.

        self._state = self.STATE_STOPPED

        if self._transaction_id != id(self):
            return

        if not self._settings:
            return

        if not self._dead:
            try:
                self.drop_transaction()
            except:  # Catch all
                _logger.exception("Unable to drop transaction.")
                raise

        # Record error if one was registered.

        if exc is not None and value is not None and tb is not None:
            self.record_exception(exc, value, tb)

        # Record the end time for transaction and then
        # calculate the duration.

        if not self.stopped:
            self.end_time = time.time()

        duration = self.end_time - self.start_time

        # Calculate overall user time.

        if not self._cpu_user_time_end:
            self._cpu_user_time_end = os.times()[0]

        if duration and self._cpu_user_time_end:
            self._cpu_user_time_value = self._cpu_user_time_end - self._cpu_user_time_start

        # Calculate thread utilisation factor. Note that even if
        # we are tracking thread utilization we skip calculation
        # if duration is zero. Under normal circumstances this
        # should not occur but may if the system clock is wound
        # backwards and duration was squashed to zero due to the
        # request appearing to finish before it started. It may
        # also occur if true response time came in under the
        # resolution of the clock being used, but that is highly
        # unlikely as the overhead of the agent itself should
        # always ensure that that is hard to achieve.

        if self._utilization_tracker:
            self._utilization_tracker.exit_transaction()
            if self._thread_utilization_start and duration > 0.0:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = self._utilization_tracker.utilization_count()
                self._thread_utilization_value = (
                    self._thread_utilization_end - self._thread_utilization_start
                ) / duration

        # Derive generated values from the raw data. The
        # dummy root node has exclusive time of children
        # as negative number. Add our own duration to get
        # our own exclusive time.

        root = self._node_stack.pop()
        children = root.children

        exclusive = duration + root.exclusive

        # Construct final root node of transaction trace.
        # Freeze path in case not already done. This will
        # construct our path.

        self._freeze_path()

        if self.background_task:
            transaction_type = "OtherTransaction"
        else:
            transaction_type = "WebTransaction"

        group = self._group

        if group is None:
            if self.background_task:
                group = "Python"
            else:
                group = "Uri"

        if self.response_code != 0:
            self._response_properties["STATUS"] = str(self.response_code)

        read_duration = 0
        if self._read_start:
            read_duration = self._read_end - self._read_start

        sent_duration = 0
        if self._sent_start:
            if not self._sent_end:
                self._sent_end = time.time()
            sent_duration = self._sent_end - self._sent_start

        if self.queue_start:
            queue_wait = self.start_time - self.queue_start
            if queue_wait < 0:
                queue_wait = 0

        # _sent_end should already be set by this point, but in case it
        # isn't, set it now before we record the custom metrics.

        if self._sent_start:
            if not self._sent_end:
                self._sent_end = time.time()

        self.record_custom_metric("Python/WSGI/Input/Bytes", self._bytes_read)
        self.record_custom_metric("Python/WSGI/Input/Time", self.read_duration)
        self.record_custom_metric("Python/WSGI/Input/Calls/read", self._calls_read)
        self.record_custom_metric("Python/WSGI/Input/Calls/readline", self._calls_readline)
        self.record_custom_metric("Python/WSGI/Input/Calls/readlines", self._calls_readlines)

        self.record_custom_metric("Python/WSGI/Output/Bytes", self._bytes_sent)
        self.record_custom_metric("Python/WSGI/Output/Time", self.sent_duration)
        self.record_custom_metric("Python/WSGI/Output/Calls/yield", self._calls_yield)
        self.record_custom_metric("Python/WSGI/Output/Calls/write", self._calls_write)

        if self._frameworks:
            for framework, version in self._frameworks:
                self.record_custom_metric("Python/Framework/%s/%s" % (framework, version), 1)

        request_params = {}

        if self.capture_params:
            request_params = self._request_params

        node = newrelic.core.transaction_node.TransactionNode(
            settings=self._settings,
            path=self.path,
            type=transaction_type,
            group=group,
            name=self._name,
            request_uri=self._request_uri,
            response_code=self.response_code,
            queue_start=self.queue_start,
            start_time=self.start_time,
            end_time=self.end_time,
            duration=duration,
            exclusive=exclusive,
            children=tuple(children),
            errors=tuple(self._errors),
            slow_sql=tuple(self._slow_sql),
            apdex_t=self.apdex,
            suppress_apdex=self.suppress_apdex,
            custom_metrics=self._custom_metrics,
            guid=self.guid,
            cpu_time=self._cpu_user_time_value,
            suppress_transaction_trace=self.suppress_transaction_trace,
            client_cross_process_id=self.client_cross_process_id,
            referring_transaction_guid=self.referring_transaction_guid,
            record_tt=self.record_tt,
            synthetics_resource_id=self.synthetics_resource_id,
            synthetics_job_id=self.synthetics_job_id,
            synthetics_monitor_id=self.synthetics_monitor_id,
            synthetics_header=self.synthetics_header,
            is_part_of_cat=self.is_part_of_cat,
            trip_id=self.trip_id,
            path_hash=self.path_hash,
            referring_path_hash=self._referring_path_hash,
            alternate_path_hashes=self.alternate_path_hashes,
            trace_intrinsics=self.trace_intrinsics,
            agent_attributes=self.agent_attributes,
            user_attributes=self.user_attributes,
        )

        # Clear settings as we are all done and don't need it
        # anymore.

        self._settings = None
        self.enabled = False

        # Unless we are ignoring the transaction, record it. We
        # need to lock the profile samples and replace it with
        # an empty list just in case the thread profiler kicks
        # in just as we are trying to record the transaction.
        # If we don't, when processing the samples, addition of
        # new samples can cause an error.

        if not self.ignore_transaction:
            profile_samples = []

            if self._profile_samples:
                with self._transaction_lock:
                    profile_samples = self._profile_samples
                    self._profile_samples = deque()

            self._application.record_transaction(node, (self.background_task, profile_samples))

    @property
    def state(self):
        return self._state

    @property
    def settings(self):
        return self._settings

    @property
    def application(self):
        return self._application

    @property
    def name(self):
        return self._name

    @property
    def group(self):
        return self._group

    @property
    def path(self):
        if self._frozen_path:
            return self._frozen_path

        if self.background_task:
            transaction_type = "OtherTransaction"
        else:
            transaction_type = "WebTransaction"

        group = self._group

        if group is None:
            if self.background_task:
                group = "Python"
            else:
                group = "Uri"

        name = self._name

        if name is None:
            name = "<undefined>"

        # Stripping the leading slash on the request URL held by
        # name when type is 'Uri' is to keep compatibility with
        # PHP agent and also possibly other agents. Leading
        # slash is not deleted for other category groups as the
        # leading slash may be significant in that situation.

        if self._group in ["Uri", "NormalizedUri"] and name[:1] == "/":
            path = "%s/%s%s" % (transaction_type, group, name)
        else:
            path = "%s/%s/%s" % (transaction_type, group, name)

        return path

    @property
    def profile_sample(self):
        return self._profile_samples

    @property
    def trip_id(self):
        return self._trip_id or self.guid

    @property
    def alternate_path_hashes(self):
        """Return the alternate path hashes but not including the current path
        hash.

        """
        return sorted(set(self._alternate_path_hashes.values()) - set([self.path_hash]))

    @property
    def path_hash(self):
        """Path hash is a 32-bit digest of the string "appname;txn_name"
        XORed with the referring_path_hash. Since the txn_name can change
        during the course of a transaction, up to 10 path_hashes are stored
        in _alternate_path_hashes. Before generating the path hash, check the
        _alternate_path_hashes to determine if we've seen this identifier and
        return the value.

        """

        if not self.is_part_of_cat:
            return None

        identifier = "%s;%s" % (self.application.name, self.path)

        # Check if identifier is already part of the _alternate_path_hashes and
        # return the value if available.

        if self._alternate_path_hashes.get(identifier):
            return self._alternate_path_hashes[identifier]

        # If the referring_path_hash is unavailable then we use '0' as the
        # seed.

        try:
            seed = int((self._referring_path_hash or "0"), base=16)
        except Exception:
            seed = 0

        path_hash = generate_path_hash(identifier, seed)

        # Only store up to 10 alternate path hashes.

        if len(self._alternate_path_hashes) < 10:
            self._alternate_path_hashes[identifier] = path_hash

        return path_hash

    @property
    def attribute_filter(self):
        return self._settings.attribute_filter

    @property
    def read_duration(self):
        read_duration = 0
        if self._read_start and self._read_end:
            read_duration = self._read_end - self._read_start
        return read_duration

    @property
    def sent_duration(self):
        sent_duration = 0
        if self._sent_start and self._sent_end:
            sent_duration = self._sent_end - self._sent_start
        return sent_duration

    @property
    def queue_wait(self):
        queue_wait = 0
        if self.queue_start:
            queue_wait = self.start_time - self.queue_start
            if queue_wait < 0:
                queue_wait = 0
        return queue_wait

    @property
    def trace_intrinsics(self):
        """Intrinsic attributes for transaction traces and error traces"""
        i_attrs = {}

        if self.referring_transaction_guid:
            i_attrs["referring_transaction_guid"] = self.referring_transaction_guid
        if self.client_cross_process_id:
            i_attrs["client_cross_process_id"] = self.client_cross_process_id
        if self.trip_id:
            i_attrs["trip_id"] = self.trip_id
        if self.path_hash:
            i_attrs["path_hash"] = self.path_hash
        if self.synthetics_resource_id:
            i_attrs["synthetics_resource_id"] = self.synthetics_resource_id
        if self.synthetics_job_id:
            i_attrs["synthetics_job_id"] = self.synthetics_job_id
        if self.synthetics_monitor_id:
            i_attrs["synthetics_monitor_id"] = self.synthetics_monitor_id

        # Add in special CPU time value for UI to display CPU burn.

        # XXX Disable cpu time value for CPU burn as was
        # previously reporting incorrect value and we need to
        # fix it, at least on Linux to report just the CPU time
        # for the executing thread.

        # if self._cpu_user_time_value:
        #     i_attrs['cpu_time'] = self._cpu_user_time_value

        return i_attrs

    @property
    def request_parameters_attributes(self):
        # Request parameters are a special case of agent attributes, so
        # they must be added on to agent_attributes separately

        # There are 3 cases we need to handle:
        #
        # 1. LEGACY: capture_params = False
        #
        #    Don't add request parameters at all, which means they will not
        #    go through the AttributeFilter.
        #
        # 2. LEGACY: capture_params = True
        #
        #    Filter request parameters through the AttributeFilter, but
        #    set the destinations to `TRANSACTION_TRACER | ERROR_COLLECTOR`.
        #
        #    If the user does not add any additional attribute filtering
        #    rules, this will result in the same outcome as the old
        #    capture_params = True behavior. They will be added to transaction
        #    traces and error traces.
        #
        # 3. CURRENT: capture_params is None
        #
        #    Filter request parameters through the AttributeFilter, but set
        #    the destinations to NONE.
        #
        #    That means by default, request parameters won't get included in
        #    any destination. But, it will allow user added include/exclude
        #    attribute filtering rules to be applied to the request parameters.

        attributes_request = []

        if (self.capture_params is None) or self.capture_params:

            if self._request_params:

                r_attrs = {}

                for k, v in self._request_params.items():
                    new_key = "request.parameters.%s" % k
                    new_val = ",".join(v)

                    final_key, final_val = process_user_attribute(new_key, new_val)

                    if final_key:
                        r_attrs[final_key] = final_val

                if self.capture_params is None:
                    attributes_request = create_attributes(r_attrs, DST_NONE, self.attribute_filter)
                elif self.capture_params:
                    attributes_request = create_attributes(
                        r_attrs, DST_ERROR_COLLECTOR | DST_TRANSACTION_TRACER, self.attribute_filter
                    )

        return attributes_request

    @property
    def agent_attributes(self):
        a_attrs = {}
        req_env = self._request_environment

        if req_env.get("REQUEST_METHOD", None):
            a_attrs["request.method"] = req_env["REQUEST_METHOD"]
        if req_env.get("HTTP_USER_AGENT", None):
            a_attrs["request.headers.userAgent"] = req_env["HTTP_USER_AGENT"]
        if req_env.get("HTTP_REFERER", None):
            a_attrs["request.headers.referer"] = req_env["HTTP_REFERER"]
        if req_env.get("CONTENT_TYPE", None):
            a_attrs["request.headers.contentType"] = req_env["CONTENT_TYPE"]
        if req_env.get("CONTENT_LENGTH", None):
            a_attrs["request.headers.contentLength"] = req_env["CONTENT_LENGTH"]

        resp_props = self._response_properties

        if resp_props.get("STATUS", None):
            a_attrs["response.status"] = resp_props["STATUS"]
        if resp_props.get("CONTENT_LENGTH", None):
            a_attrs["response.contentLength"] = resp_props["CONTENT_LENGTH"]

        if self.read_duration != 0:
            a_attrs["wsgi.input.seconds"] = "%.4f" % self.read_duration
        if self._bytes_read != 0:
            a_attrs["wsgi.input.bytes"] = self._bytes_read
        if self._calls_read != 0:
            a_attrs["wsgi.input.calls.read"] = self._calls_read
        if self._calls_readline != 0:
            a_attrs["wsgi.input.calls.readline"] = self._calls_readline
        if self._calls_readlines != 0:
            a_attrs["wsgi.input.calls.readlines"] = self._calls_readlines

        if self.sent_duration != 0:
            a_attrs["wsgi.output.seconds"] = "%.4f" % self.sent_duration
        if self._bytes_sent != 0:
            a_attrs["wsgi.output.bytes"] = self._bytes_sent
        if self._calls_write != 0:
            a_attrs["wsgi.output.calls.write"] = self._calls_write
        if self._calls_yield != 0:
            a_attrs["wsgi.output.calls.yield"] = self._calls_yield

        if self._thread_utilization_value:
            a_attrs["thread.concurrency"] = self._thread_utilization_value
        if self.queue_wait != 0:
            a_attrs["webfrontend.queue.seconds"] = "%.4f" % self.queue_wait

        agent_attributes = create_agent_attributes(a_attrs, self.attribute_filter)

        # Include request parameters in agent attributes

        agent_attributes.extend(self.request_parameters_attributes)

        return agent_attributes

    @property
    def user_attributes(self):
        return create_user_attributes(self._custom_params, self.attribute_filter)

    def add_profile_sample(self, stack_trace):
        if self._state != self.STATE_RUNNING:
            return

        self._profile_count += 1

        if self._profile_count < self._profile_skip:
            return

        self._profile_count = 0

        with self._transaction_lock:
            new_stack_trace = tuple(self._profile_frames.setdefault(frame, frame) for frame in stack_trace)
            self._profile_samples.append(new_stack_trace)

            agent_limits = self._application.global_settings.agent_limits
            profile_maximum = agent_limits.xray_profile_maximum

            if len(self._profile_samples) >= profile_maximum:
                self._profile_samples = deque(itertools.islice(self._profile_samples, 0, len(self._profile_samples), 2))
                self._profile_skip = 2 * self._profile_skip

    def _freeze_path(self):
        if self._frozen_path is None:
            self._priority = None

            if self._group == "Uri" and self._name != "/":
                # Apply URL normalization rules. We would only have raw
                # URLs where we were not specifically naming the web
                # transactions for a specific web framework to be a code
                # handler or otherwise.

                name, ignore = self._application.normalize_name(self._name, "url")

                if self._name != name:
                    self._group = "NormalizedUri"
                    self._name = name

                self.ignore_transaction = self.ignore_transaction or ignore

            # Apply transaction rules on the full transaction name.

            path, ignore = self._application.normalize_name(self.path, "transaction")

            self.ignore_transaction = self.ignore_transaction or ignore

            # Apply segment whitelist rule to the segments on the full
            # transaction name. The path is frozen at this point and cannot be
            # further changed.

            self._frozen_path, ignore = self._application.normalize_name(path, "segment")

            self.ignore_transaction = self.ignore_transaction or ignore

            # Look up the apdex from the table of key transactions. If
            # current transaction is not a key transaction then use the
            # default apdex from settings. The path used at this point
            # is the frozen path.

            self.apdex = self._settings.web_transactions_apdex.get(self.path) or self._settings.apdex_t

    def set_transaction_name(self, name, group=None, priority=None):

        # Always perform this operation even if the transaction
        # is not active at the time, as this will be called from
        # the constructor. If the path has been frozen do not allow
        # name/group to be overridden. A new priority must be the
        # same as or greater than the existing priority. If no
        # priority is given, always override the existing name/group
        # if not frozen.

        if self._priority is None:
            return

        if priority is not None and priority < self._priority:
            return

        if priority is not None:
            self._priority = priority

        # The name can be a URL for the default case. URLs are
        # supposed to be ASCII but can get a URL with illegal
        # non ASCII characters. As the rule patterns and
        # replacements are Unicode then can get Unicode
        # conversion warnings or errors when URL is converted to
        # Unicode and default encoding is ASCII. Thus need to
        # convert URL to Unicode as Latin-1 explicitly to avoid
        # problems with illegal characters.

        if isinstance(name, bytes):
            name = name.decode("Latin-1")

        # Deal with users who use group wrongly and add a leading
        # slash on it. This will cause an empty segment which we
        # want to avoid. In that case insert back in Function as
        # the leading segment.

        group = group or "Function"

        if group.startswith("/"):
            group = "Function" + group

        self._group = group
        self._name = name

    def name_transaction(self, name, group=None, priority=None):
        # warnings.warn('Internal API change. Use set_transaction_name() '
        #        'instead of name_transaction().', DeprecationWarning,
        #        stacklevel=2)

        return self.set_transaction_name(name, group, priority)

    def record_exception(self, exc=None, value=None, tb=None, params={}, ignore_errors=[]):

        # Bail out if the transaction is not active or
        # collection of errors not enabled.

        if not self._settings:
            return

        settings = self._settings
        error_collector = settings.error_collector

        if not error_collector.enabled or not settings.collect_errors:
            return

        # If no exception details provided, use current exception.

        if exc is None and value is None and tb is None:
            exc, value, tb = sys.exc_info()

        # Has to be an error to be logged.

        if exc is None or value is None or tb is None:
            return

        # Where ignore_errors is a callable it should return a
        # tri-state variable with the following behavior.
        #
        #   True - Ignore the error.
        #   False - Record the error.
        #   None - Use the default ignore rules.

        should_ignore = None

        if callable(ignore_errors):
            should_ignore = ignore_errors(exc, value, tb)
            if should_ignore:
                return

        module = value.__class__.__module__
        name = value.__class__.__name__

        if should_ignore is None:
            # We need to check for module.name and module:name.
            # Originally we used module.class but that was
            # inconsistent with everything else which used
            # module:name. So changed to use ':' as separator, but
            # for backward compatibility need to support '.' as
            # separator for time being. Check that with the ':'
            # last as we will use that name as the exception type.

            if module:
                fullname = "%s.%s" % (module, name)
            else:
                fullname = name

            if not callable(ignore_errors) and fullname in ignore_errors:
                return

            if fullname in error_collector.ignore_errors:
                return

            if module:
                fullname = "%s:%s" % (module, name)
            else:
                fullname = name

            if not callable(ignore_errors) and fullname in ignore_errors:
                return

            if fullname in error_collector.ignore_errors:
                return

        else:
            if module:
                fullname = "%s:%s" % (module, name)
            else:
                fullname = name

        # Only remember up to limit of what can be caught for a
        # single transaction. This could be trimmed further
        # later if there are already recorded errors and would
        # go over the harvest limit.

        if len(self._errors) >= settings.agent_limits.errors_per_transaction:
            return

        # Only add params if High Security Mode is off.

        custom_params = {}

        if settings.high_security:
            if params:
                _logger.debug("Cannot add custom parameters in " "High Security Mode.")
        else:
            # Use separate variables for the processed attribute so that
            # the original exception value is not shadowed before the
            # error message is built from it below.
            for k, v in params.items():
                attr_name, attr_value = process_user_attribute(k, v)
                if attr_name:
                    custom_params[attr_name] = attr_value

        # Check to see if we need to strip the message before recording it.

        if settings.strip_exception_messages.enabled and fullname not in settings.strip_exception_messages.whitelist:
            message = STRIP_EXCEPTION_MESSAGE
        else:
            try:
                message = str(value)
            except Exception:
                try:
                    # Assume JSON encoding can handle unicode.
                    message = six.text_type(value)
                except Exception:
                    message = "<unprintable %s object>" % type(value).__name__

        # Check that we have not recorded this exception
        # previously for this transaction due to multiple
        # error traces triggering. This is not going to be
        # exact but the UI hides exceptions of same type
        # anyway. Better that we under count exceptions of
        # same type and message rather than count same one
        # multiple times.

        for error in self._errors:
            if error.type == fullname and error.message == message:
                return

        node = newrelic.core.error_node.ErrorNode(
            timestamp=time.time(),
            type=fullname,
            message=message,
            stack_trace=exception_stack(tb),
            custom_params=custom_params,
            file_name=None,
            line_number=None,
            source=None,
        )

        # TODO Errors are recorded in time order. If
        # there are two exceptions of same type and
        # different message, the UI displays the first
        # one. In the PHP agent it was recording the
        # errors in reverse time order and so the UI
        # displayed the last one. What is the
        # official order in which they should be sent?

        self._errors.append(node)

    def notice_error(self, exc, value, tb, params={}, ignore_errors=[]):
        warnings.warn(
            "Internal API change. Use record_exception() " "instead of notice_error().",
            DeprecationWarning,
            stacklevel=2,
        )

        self.record_exception(exc, value, tb, params, ignore_errors)

    def record_custom_metric(self, name, value):
        self._custom_metrics.record_custom_metric(name, value)

    def record_custom_metrics(self, metrics):
        for name, value in metrics:
            self._custom_metrics.record_custom_metric(name, value)

    def record_metric(self, name, value):
        warnings.warn(
            "Internal API change. Use record_custom_metric() " "instead of record_metric().",
            DeprecationWarning,
            stacklevel=2,
        )

        return self.record_custom_metric(name, value)

    def active_node(self):
        if self._node_stack:
            return self._node_stack[-1]

    def _intern_string(self, value):
        return self._string_cache.setdefault(value, value)

    def _push_current(self, node):
        self._node_stack.append(node)

    def _pop_current(self, node):
        last = self._node_stack.pop()

        try:
            assert last == node

        except Exception:
            _logger.error(
                "Runtime instrumentation error. Object on "
                "transaction node stack when removing last value is "
                "not the expected one, found %r, expected %r. Report "
                "this issue to New Relic support.\n%s",
                last,
                node,
                "".join(traceback.format_stack()[:-1]),
            )

            raise

        parent = self._node_stack[-1]

        return parent

    def _process_node(self, node):
        self._trace_node_count += 1
        node.node_count = self._trace_node_count

        if type(node) is newrelic.core.database_node.DatabaseNode:
            settings = self._settings
            if not settings.collect_traces:
                return
            if not settings.slow_sql.enabled and not settings.transaction_tracer.explain_enabled:
                return
            if settings.transaction_tracer.record_sql == "off":
                return
            if node.duration < settings.transaction_tracer.explain_threshold:
                return
            self._slow_sql.append(node)

    def stop_recording(self):
        if not self.enabled:
            return

        if self.stopped:
            return

        if self.end_time:
            return

        self.end_time = time.time()
        self.stopped = True

        if self._utilization_tracker:
            if self._thread_utilization_start:
                if not self._thread_utilization_end:
                    self._thread_utilization_end = self._utilization_tracker.utilization_count()

        self._cpu_user_time_end = os.times()[0]

    def add_custom_parameter(self, name, value):
        if not self._settings:
            return False

        if self._settings.high_security:
            _logger.debug("Cannot add custom parameter in High Security Mode.")
            return False

        if len(self._custom_params) >= MAX_NUM_USER_ATTRIBUTES:
            _logger.debug(
                "Maximum number of custom attributes already " "added. Dropping attribute: %r=%r", name, value
            )
            return False

        key, val = process_user_attribute(name, value)

        if key is None:
            return False
        else:
            self._custom_params[key] = val
            return True

    def add_custom_parameters(self, items):
        # items is a list of (name, value) tuples.
        for name, value in items:
            self.add_custom_parameter(name, value)

    def add_user_attribute(self, name, value):
        # warnings.warn('Internal API change. Use add_custom_parameter() '
        #        'instead of add_user_attribute().', DeprecationWarning,
        #        stacklevel=2)
        self.add_custom_parameter(name, value)

    def add_user_attributes(self, items):
        # warnings.warn('Internal API change. Use add_custom_parameters() '
        #        'instead of add_user_attributes().', DeprecationWarning,
        #        stacklevel=2)
        self.add_custom_parameters(items)

    def add_framework_info(self, name, version=None):
        if name:
            self._frameworks.add((name, version))

    def dump(self, file):
        """Dumps details about the transaction to the file object."""

        print >> file, "Application: %s" % (self.application.name)
        print >> file, "Time Started: %s" % (time.asctime(time.localtime(self.start_time)))
        print >> file, "Thread Id: %r" % (self.thread_id,)
        print >> file, "Current Status: %d" % (self._state)
        print >> file, "Recording Enabled: %s" % (self.enabled)
        print >> file, "Ignore Transaction: %s" % (self.ignore_transaction)
        print >> file, "Transaction Dead: %s" % (self._dead)
        print >> file, "Transaction Stopped: %s" % (self.stopped)
        print >> file, "Background Task: %s" % (self.background_task)
        print >> file, "Request URI: %s" % (self._request_uri)
        print >> file, "Transaction Group: %s" % (self._group)
        print >> file, "Transaction Name: %s" % (self._name)
        print >> file, "Name Priority: %r" % (self._priority)
        print >> file, "Frozen Path: %s" % (self._frozen_path)
        print >> file, "AutoRUM Disabled: %s" % (self.autorum_disabled)
        print >> file, "Supress Apdex: %s" % (self.suppress_apdex)

        print >> file, "Node Stack:"
        for node in self._node_stack[1:]:
            print >> file, node
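
The path property above combines the transaction type, group and name into the metric path, with a special case that drops the separating slash for 'Uri'/'NormalizedUri' names that already start with one (the PHP-agent compatibility noted in the source comments). A small standalone illustration of that formatting follows; it mirrors the logic shown above and is not the agent's own code path.

def build_path(background_task, group, name):
    # Mirrors the formatting logic of the Transaction.path property above.
    transaction_type = "OtherTransaction" if background_task else "WebTransaction"
    group = group or ("Python" if background_task else "Uri")
    name = name if name is not None else "<undefined>"
    if group in ("Uri", "NormalizedUri") and name[:1] == "/":
        # Leading slash already present; avoid a double slash in the path.
        return "%s/%s%s" % (transaction_type, group, name)
    return "%s/%s/%s" % (transaction_type, group, name)


print(build_path(False, None, "/index"))    # WebTransaction/Uri/index
print(build_path(True, "Task", "cleanup"))  # OtherTransaction/Task/cleanup
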