Code example #1
    def generate_request_headers(transaction):
        """
        Return a list of New Relic-specific headers as tuples
        [(HEADER_NAME0, HEADER_VALUE0), (HEADER_NAME1, HEADER_VALUE1)]

        """

        if transaction is None:
            return []

        settings = transaction.settings

        if not settings.cross_application_tracer.enabled:
            return []

        encoded_cross_process_id = obfuscate(settings.cross_process_id, settings.encoding_key)

        transaction_data = [transaction.guid, transaction.record_tt]
        encoded_transaction = obfuscate(
            simplejson.dumps(transaction_data, ensure_ascii=True, encoding="Latin-1"), settings.encoding_key
        )

        nr_headers = [("X-NewRelic-ID", encoded_cross_process_id), ("X-NewRelic-Transaction", encoded_transaction)]

        return nr_headers
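
Example #1 assumes an obfuscate helper that is not shown. A minimal sketch, assuming the usual scheme of XOR-ing the text against the repeating encoding key and Base64-encoding the result (Python 2, to match the code above):

    import base64
    from itertools import cycle

    def obfuscate(text, key):
        # Sketch only: XOR each byte of the text against the repeating
        # encoding key, then Base64 encode so the value is safe to carry
        # in an HTTP header.
        if not (text and key):
            return ''
        xored = ''.join(chr(ord(c) ^ ord(k))
                for c, k in zip(text, cycle(key)))
        return base64.standard_b64encode(xored)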
Code example #2
    def profile_data(self):
        """Returns the profile data once the thread profiling session has
        finished otherwise returns None. The data structure returned is
        in a form suitable for sending back to the data collector.

        """

        # Profiling session not finished.

        if self._profiler_thread.isAlive() and not self._xray_txns:
            return None

        call_data = {}
        thread_count = 0

        # We prune the number of nodes sent if we are over the specified
        # limit. This is just to avoid having the response be too large
        # and get rejected by the data collector.

        settings = global_settings()

        self._prune_call_trees(settings.agent_limits.thread_profiler_nodes)

        for thread_category, bucket in self._call_buckets.items():
            if bucket:
                call_data[thread_category] = bucket.values()
                thread_count += len(bucket)

        # If no profile data was captured, return None instead of sending
        # an encoded empty data structure.

        if thread_count == 0:
            return None

        # Construct the final data for sending. The call data is turned
        # into JSON, compressed and then base64 encoded at this point to
        # cut its size.

        json_data = simplejson.dumps(call_data, ensure_ascii=True,
                encoding='Latin-1', default=lambda o: o.jsonable(),
                namedtuple_as_object=False)
        encoded_data = base64.standard_b64encode(zlib.compress(json_data))

        if self._xray_txns:
            xray_obj = self._xray_txns.values()[0]
            xray_id = xray_obj.xray_id
        else:
            xray_id = None

        profile = [[self.profile_id, self._start_time*1000,
                self._stop_time*1000, self._sample_count, encoded_data,
                thread_count, 0, xray_id]]

        # If an X-Ray session is running, send the partial call tree and
        # clear the data structures.
        if self._xray_txns:
            self._reset_call_buckets()

        return profile
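
For reference, the receiving side reverses this pipeline. A minimal sketch (the decode_profile_data name is ours, not part of the agent):

    import base64
    import zlib

    import simplejson

    def decode_profile_data(encoded_data):
        # Undo the encoding steps above: Base64 decode, zlib decompress,
        # then parse the JSON back into Python objects.
        return simplejson.loads(
                zlib.decompress(base64.standard_b64decode(encoded_data)))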
Code example #3
    def slow_transaction_data(self):
        """Returns a list containing any slow transaction data collected
        during the reporting period.

        NOTE Currently only the slowest transaction for the reporting
        period is retained.

        """

        if not self.__settings:
            return []

        if not self.__slow_transaction:
            return []

        maximum = self.__settings.agent_limits.transaction_traces_nodes

        transaction_trace = self.__slow_transaction.transaction_trace(
                self, maximum)

        internal_metric('Supportability/StatsEngine/Counts/'
                'transaction_sample_data',
                self.__slow_transaction.trace_node_count)

        data = [transaction_trace,
                self.__slow_transaction.string_table.values()]

        if self.__settings.debug.log_transaction_trace_payload:
            _logger.debug('Encoding slow transaction data where '
                    'payload=%r.', data)

        with InternalTrace('Supportability/StatsEngine/JSON/Encode/'
                'transaction_sample_data'):

            json_data = simplejson.dumps(data, ensure_ascii=True,
                    encoding='Latin-1', namedtuple_as_object=False,
                    default=lambda o: list(iter(o)))

        internal_metric('Supportability/StatsEngine/ZLIB/Bytes/'
                'transaction_sample_data', len(json_data))

        with InternalTrace('Supportability/StatsEngine/ZLIB/Compress/'
                'transaction_sample_data'):
            zlib_data = zlib.compress(json_data)

        with InternalTrace('Supportability/StatsEngine/BASE64/Encode/'
                'transaction_sample_data'):
            pack_data = base64.standard_b64encode(zlib_data)

        root = transaction_trace.root

        trace_data = [[root.start_time,
                root.end_time - root.start_time,
                self.__slow_transaction.path,
                self.__slow_transaction.request_uri,
                pack_data]]

        return trace_data
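
The JSON, zlib and Base64 steps above recur across several of these examples; they could be factored into a small helper along these lines (a sketch, not part of the original code):

    import base64
    import zlib

    import simplejson

    def pack_payload(data):
        # JSON encode (ASCII-safe, byte strings read as Latin-1),
        # compress with zlib, then Base64 encode for transport.
        json_data = simplejson.dumps(data, ensure_ascii=True,
                encoding='Latin-1', namedtuple_as_object=False,
                default=lambda o: list(iter(o)))
        return base64.standard_b64encode(zlib.compress(json_data))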
Code example #4
def send_request(url, license_key, payload=()):
    """Constructs and sends a request to the data collector."""

    headers = {}
    config = {}

    start = time.time()

    # Validate that the license key was actually set and if not replace
    # it with a string which makes it more obvious it was not set.

    if not license_key:
        license_key = "NO LICENSE KEY WAS SET IN AGENT CONFIGURATION"

    headers["User-Agent"] = USER_AGENT
    headers["Content-Encoding"] = "identity"
    headers["X-License-Key"] = license_key

    # Set up definitions for proxy server in case that has been set.

    proxies = proxy_server()

    # At this time we use JSON content encoding for the data being
    # sent. Ensure that normal byte strings are interpreted as Latin-1
    # and that the final result is ASCII so that we don't need to worry
    # about converting back to bytes again. We set the default fallback
    # encoder to treat any iterable as a list. Unfortunately the JSON
    # library can't consume it lazily as an iterable, which means that
    # a generator will be consumed up front and everything collected in
    # memory as a list before being converted to JSON.
    #
    # If an error does occur when encoding the JSON, then it isn't
    # likely to work later in a subsequent request with the same data,
    # even if aggregated with other data, so we need to log the details
    # and then flag that the data should be thrown away. We don't mind
    # being noisy in the log in this situation as it would indicate a
    # problem with the implementation of the agent.

    try:
        data = json.dumps(payload, ensure_ascii=True, encoding="Latin-1",
                default=lambda o: list(iter(o)))

    except Exception, exc:
        _logger.error(
            "Error encoding data for JSON payload "
            "with payload of %r. Exception which occurred was %r. "
            "Please report this problem to New Relic support.",
            payload,
            exc,
        )

        raise DiscardDataForRequest(str(exc))
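
The point about generators in the comment is easy to demonstrate: the fallback encoder materialises an otherwise unserialisable iterable as a list, so a generator is consumed up front. A standalone illustration (not agent code):

    import json

    def numbers():
        for i in range(3):
            yield i

    gen = numbers()

    # The default hook converts the generator to a list, consuming it
    # fully in memory before the JSON is produced.
    print json.dumps(gen, default=lambda o: list(iter(o)))  # [0, 1, 2]
    print json.dumps(gen, default=lambda o: list(iter(o)))  # [] (exhausted)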
Code example #5
    def slow_sql_data(self):
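        """Returns a list containing any slow SQL data collected
        during the reporting period.

        """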

        if not self.__settings:
            return []

        if not self.__sql_stats_table:
            return []

        maximum = self.__settings.agent_limits.slow_sql_data

        slow_sql_nodes = list(sorted(self.__sql_stats_table.values(),
                key=lambda x: x.max_call_time))[-maximum:]

        result = []

        for node in slow_sql_nodes:

            params = {}

            if node.slow_sql_node.stack_trace:
                params['backtrace'] = node.slow_sql_node.stack_trace

            explain_plan = node.slow_sql_node.explain_plan

            if explain_plan:
                params['explain_plan'] = explain_plan

            json_data = simplejson.dumps(params, ensure_ascii=True,
                    encoding='Latin-1', namedtuple_as_object=False,
                    default=lambda o: list(iter(o)))

            params_data = base64.standard_b64encode(zlib.compress(json_data))

            data = [node.slow_sql_node.path,
                    node.slow_sql_node.request_uri,
                    node.slow_sql_node.identifier,
                    node.slow_sql_node.formatted,
                    node.slow_sql_node.metric,
                    node.call_count,
                    node.total_call_time * 1000,
                    node.min_call_time * 1000,
                    node.max_call_time * 1000,
                    params_data]

            result.append(data)

        return result
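
Sorting the whole table and slicing keeps only the maximum slowest entries; heapq.nlargest expresses the same selection more directly. A standalone illustration with hypothetical node values:

    import heapq
    from collections import namedtuple

    Node = namedtuple('Node', ['name', 'max_call_time'])

    table = [Node('a', 0.2), Node('b', 1.5), Node('c', 0.7)]

    # Picks the two slowest nodes, like sorted(...)[-2:] above, but in
    # descending order and without sorting the whole table.
    print heapq.nlargest(2, table, key=lambda x: x.max_call_time)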
Code example #6
    def transaction_trace_data(self):
        """Returns a list of slow transaction data collected
        during the reporting period.

        """
        if not self.__settings:
            return []

        if (not self.__slow_transaction) and (not self.__saved_transactions):
            return []

        trace_data = []
        maximum = self.__settings.agent_limits.transaction_traces_nodes
        traces = list(self.__saved_transactions)

        if (self.__slow_transaction is not None and
                self.__slow_transaction not in traces):
            traces.append(self.__slow_transaction)

        for trace in traces:
            transaction_trace = trace.transaction_trace(self, maximum)

            internal_metric('Supportability/StatsEngine/Counts/'
                            'transaction_sample_data',
                            trace.trace_node_count)

            data = [transaction_trace,
                    trace.string_table.values()]

            if self.__settings.debug.log_transaction_trace_payload:
                _logger.debug('Encoding slow transaction data where '
                              'payload=%r.', data)

            with InternalTrace('Supportability/StatsEngine/JSON/Encode/'
                               'transaction_sample_data'):

                json_data = simplejson.dumps(data, ensure_ascii=True,
                        encoding='Latin-1', namedtuple_as_object=False,
                        default=lambda o: list(iter(o)))

            internal_metric('Supportability/StatsEngine/ZLIB/Bytes/'
                            'transaction_sample_data', len(json_data))

            with InternalTrace('Supportability/StatsEngine/ZLIB/Compress/'
                               'transaction_sample_data'):
                zlib_data = zlib.compress(json_data)

            with InternalTrace('Supportability/StatsEngine/BASE64/Encode/'
                               'transaction_sample_data'):
                pack_data = base64.standard_b64encode(zlib_data)

            root = transaction_trace.root
            force_persist = trace.guid is not None

            trace_data.append([root.start_time,
                    root.end_time - root.start_time,
                    trace.path,
                    trace.request_uri,
                    pack_data,
                    trace.guid,
                    None,
                    force_persist])

        return trace_data
Code example #7
    def profile_data(self):

        # Generic profiling sessions have to wait for completion before
        # reporting data.
        #
        # X-Ray profile sessions can send partial profile data on every
        # harvest.

        if ((self.profiler_type == SessionType.GENERIC) and 
                (self.state == SessionState.RUNNING)):
            return None

        # We prune the number of nodes sent if we are over the specified
        # limit. This is just to avoid having the response be too large
        # and get rejected by the data collector.

        settings = global_settings()
        self._prune_call_trees(settings.agent_limits.thread_profiler_nodes)

        flat_tree = {}
        thread_count = 0

        for category, bucket in self.call_buckets.items():

            # Only flatten buckets that have data in them. No need to send
            # empty buckets.

            if bucket:
                flat_tree[category] = [x.flatten() for x in bucket.values()]
                thread_count += len(bucket)

        # If no profile data was captured, return None instead of sending
        # an encoded empty data structure.

        if thread_count == 0:
            return None

        # Construct the final data for sending. The call data is turned
        # into JSON, compressed and then base64 encoded at this point to
        # cut its size.

        _logger.debug('Returning partial thread profiling data '
                'for %d transactions with name %r and xray ID of '
                '%r over a period of %.2f seconds and %d samples.',
                self.transaction_count, self.key_txn, self.xray_id,
                time.time()-self.start_time_s, self.sample_count)

        if settings.debug.log_thread_profile_payload:
            _logger.debug('Encoding thread profile data where '
                    'payload=%r.', flat_tree)

        json_call_tree = simplejson.dumps(flat_tree, ensure_ascii=True,
                encoding='Latin-1', namedtuple_as_object=False)
        encoded_tree = base64.standard_b64encode(zlib.compress(json_call_tree))

        profile = [[self.profile_id, self.start_time_s*1000,
            (self.actual_stop_time_s or time.time()) * 1000, self.sample_count,
            encoded_tree, thread_count, 0, self.xray_id]]

        # Reset the data structures to their defaults. For X-Ray profile
        # sessions we report the partial call tree at every harvest
        # cycle, so the data structures must be reset to avoid
        # aggregating call trees across harvest cycles.

        self.reset_profile_data()
        return profile
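
reset_profile_data is not shown. A hypothetical sketch of what it presumably does, assuming the session tracks only the buckets and counters used above:

    def reset_profile_data(self):
        # Hypothetical sketch: clear the aggregation structures so the
        # next harvest cycle starts from scratch and partial X-Ray call
        # trees are not double-counted across harvests.
        self.call_buckets = dict((category, {})
                for category in self.call_buckets)
        self.sample_count = 0
        self.transaction_count = 0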
Code example #8
def send_request(session, url, method, license_key, agent_run_id=None,
            payload=()):
    """Constructs and sends a request to the data collector."""

    params = {}
    headers = {}
    config = {}

    settings = global_settings()

    start = time.time()

    # Validate that the license key was actually set and if not replace
    # it with a string which makes it more obvious it was not set.

    if not license_key:
        license_key = 'NO LICENSE KEY WAS SET IN AGENT CONFIGURATION'

    # The agent formats requests and is able to handle responses for
    # protocol version 12.

    params['method'] = method
    params['license_key'] = license_key
    params['protocol_version'] = '12'
    params['marshal_format'] = 'json'

    if agent_run_id:
        params['run_id'] = str(agent_run_id)

    headers['User-Agent'] = USER_AGENT
    headers['Content-Encoding'] = 'identity'

    # Set up definitions for proxy server in case that has been set.

    proxies = proxy_server()

    # At this time we use JSON content encoding for the data being
    # sent. Ensure that normal byte strings are interpreted as Latin-1
    # and that the final result is ASCII so that we don't need to worry
    # about converting back to bytes again. We set the default fallback
    # encoder to treat any iterable as a list. Unfortunately the JSON
    # library can't consume it lazily as an iterable, which means that
    # a generator will be consumed up front and everything collected in
    # memory as a list before being converted to JSON.
    #
    # If an error does occur when encoding the JSON, then it isn't
    # likely to work later in a subsequent request with the same data,
    # even if aggregated with other data, so we need to log the details
    # and then flag that the data should be thrown away. We don't mind
    # being noisy in the log in this situation as it would indicate a
    # problem with the implementation of the agent.

    try:
        with InternalTrace('Supportability/Collector/JSON/Encode/%s' % method):
            data = simplejson.dumps(payload, ensure_ascii=True,
                    encoding='Latin-1', namedtuple_as_object=False,
                    default=lambda o: list(iter(o)))

    except Exception, exc:
        _logger.error('Error encoding data for JSON payload for method %r '
                'with payload of %r. Exception which occurred was %r. '
                'Please report this problem to New Relic support.', method,
                payload, exc)

        raise DiscardDataForRequest(str(exc))
Code example #9
    def process_response(self, status, response_headers, *args):
        """Processes response status and headers, extracting any
        details required and returning a set of additional headers
        to merge into that being returned for the web transaction.

        """

        additional_headers = []

        # Extract the HTTP status response code.

        try:
            self.response_code = int(status.split(' ')[0])
        except Exception:
            pass

        # Extract response content length for inclusion in custom
        # parameters returned for slow transactions and errors.

        try:
            header = filter(lambda x: x[0].lower() == 'content-length',
                    response_headers)[-1:]

            if header:
                self._response_properties['CONTENT_LENGTH'] = header[0][1]
        except Exception:
            pass

        # Generate metrics and response headers for inbound cross
        # process web external calls.

        if self.client_cross_process_id is not None:
            # Need to work out queueing time and duration up to this
            # point for inclusion in metrics and response header. If the
            # recording of the transaction had been prematurely stopped
            # via an API call, only return time up until that call was
            # made so it will match what is reported as duration for the
            # transaction.

            if self.queue_start:
                queue_time = self.start_time - self.queue_start
            else:
                queue_time = 0

            if self.end_time:
                duration = self.end_time - self.start_time
            else:
                duration = time.time() - self.start_time

            # Generate the metric identifying the caller.

            metric_name = 'ClientApplication/%s/all' % (
                    self.client_cross_process_id)
            self.record_custom_metric(metric_name, duration)

            # Generate the additional response headers which provide
            # information back to the caller. We need to freeze the
            # transaction name before adding to the header.

            self._freeze_path()

            payload = (self._settings.cross_process_id, self.path, queue_time,
                    duration, self._read_length, self.guid, self.record_tt)
            app_data = simplejson.dumps(payload, ensure_ascii=True,
                    encoding='Latin-1')

            additional_headers.append(('X-NewRelic-App-Data', obfuscate(
                    app_data, self._settings.encoding_key)))

        # The additional headers returned need to be merged into the
        # original response headers passed back by the application.

        return additional_headers
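
In WSGI terms, status is the raw status line and response_headers is the list of header tuples, so the two extractions at the top are plain string and list operations. A standalone illustration with hypothetical values (Python 2, where filter() returns a list):

    status = '200 OK'
    response_headers = [('Content-Type', 'text/html'),
            ('Content-Length', '1024')]

    # Leading integer of the WSGI status line.
    print int(status.split(' ')[0])  # 200

    # The last Content-Length header wins, matching filter(...)[-1:]
    # in the code above.
    header = filter(lambda x: x[0].lower() == 'content-length',
            response_headers)[-1:]
    print header[0][1]  # '1024'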
Code example #10
    def transaction_trace_data(self):
        """Returns a list of slow transaction data collected
        during the reporting period.

        """
        if not self.__settings:
            return []

        # Create a set 'traces' that is a union of the slow transaction,
        # browser_transactions and xray_transactions. This ensures we
        # don't send duplicates of a transaction.

        traces = set()
        if self.__slow_transaction:
            traces.add(self.__slow_transaction)
        traces.update(self.__browser_transactions)
        traces.update(self.__xray_transactions)

        # Return an empty list if no transactions were captured.

        if not traces:
            return []

        trace_data = []
        maximum = self.__settings.agent_limits.transaction_traces_nodes

        for trace in traces:
            transaction_trace = trace.transaction_trace(self, maximum)

            internal_metric('Supportability/StatsEngine/Counts/'
                            'transaction_sample_data',
                            trace.trace_node_count)

            data = [transaction_trace,
                    trace.string_table.values()]

            if self.__settings.debug.log_transaction_trace_payload:
                _logger.debug('Encoding slow transaction data where '
                              'payload=%r.', data)

            with InternalTrace('Supportability/StatsEngine/JSON/Encode/'
                               'transaction_sample_data'):

                json_data = simplejson.dumps(data, ensure_ascii=True,
                        encoding='Latin-1', namedtuple_as_object=False,
                        default=lambda o: list(iter(o)))

            internal_metric('Supportability/StatsEngine/ZLIB/Bytes/'
                            'transaction_sample_data', len(json_data))

            with InternalTrace('Supportability/StatsEngine/ZLIB/Compress/'
                               'transaction_sample_data'):
                zlib_data = zlib.compress(json_data)

            with InternalTrace('Supportability/StatsEngine/BASE64/Encode/'
                               'transaction_sample_data'):
                pack_data = base64.standard_b64encode(zlib_data)

            root = transaction_trace.root
            xray_id = getattr(trace, 'xray_id', None)

            force_persist = bool(xray_id or trace.rum_trace or
                    trace.record_tt)

            trace_data.append([root.start_time,
                    root.end_time - root.start_time,
                    trace.path,
                    trace.request_uri,
                    pack_data,
                    trace.guid,
                    None,
                    force_persist,
                    xray_id,])

        return trace_data
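
The set union is what prevents a transaction that is, say, both the slow transaction and an X-Ray transaction from being sent twice: adding the same object again is a no-op. A tiny standalone illustration:

    slow = object()
    browser_transactions = [slow, object()]
    xray_transactions = [slow]

    traces = set()
    traces.add(slow)
    traces.update(browser_transactions)
    traces.update(xray_transactions)

    # slow appears in two sources but only once in the union.
    print len(traces)  # 2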
Code example #11
def send_request(session,
                 url,
                 method,
                 license_key,
                 agent_run_id=None,
                 payload=()):
    """Constructs and sends a request to the data collector."""

    params = {}
    headers = {}
    config = {}

    settings = global_settings()

    start = time.time()

    # Validate that the license key was actually set and if not replace
    # it with a string which makes it more obvious it was not set.

    if not license_key:
        license_key = 'NO LICENSE KEY WAS SET IN AGENT CONFIGURATION'

    # The agent formats requests and is able to handle responses for
    # protocol version 12.

    params['method'] = method
    params['license_key'] = license_key
    params['protocol_version'] = '12'
    params['marshal_format'] = 'json'

    if agent_run_id:
        params['run_id'] = str(agent_run_id)

    headers['User-Agent'] = USER_AGENT
    headers['Content-Encoding'] = 'identity'

    # Set up definitions for proxy server in case that has been set.

    proxies = proxy_server()

    # At this time we use JSON content encoding for the data being
    # sent. Ensure that normal byte strings are interpreted as Latin-1
    # and that the final result is ASCII so that we don't need to worry
    # about converting back to bytes again. We set the default fallback
    # encoder to treat any iterable as a list. Unfortunately the JSON
    # library can't consume it lazily as an iterable, which means that
    # a generator will be consumed up front and everything collected in
    # memory as a list before being converted to JSON.
    #
    # If an error does occur when encoding the JSON, then it isn't
    # likely to work later in a subsequent request with the same data,
    # even if aggregated with other data, so we need to log the details
    # and then flag that the data should be thrown away. We don't mind
    # being noisy in the log in this situation as it would indicate a
    # problem with the implementation of the agent.

    try:
        with InternalTrace('Supportability/Collector/JSON/Encode/%s' % method):
            data = simplejson.dumps(payload,
                                    ensure_ascii=True,
                                    encoding='Latin-1',
                                    namedtuple_as_object=False,
                                    default=lambda o: list(iter(o)))

    except Exception, exc:
        _logger.error(
            'Error encoding data for JSON payload for method %r '
            'with payload of %r. Exception which occurred was %r. '
            'Please report this problem to New Relic support.', method,
            payload, exc)

        raise DiscardDataForRequest(str(exc))
Code example #12
    def slow_transaction_data(self):
        """Returns a list containing any slow transaction data collected
        during the reporting period.

        NOTE Currently only the slowest transaction for the reporting
        period is retained.

        """

        if not self.__settings:
            return []

        if not self.__slow_transaction:
            return []

        maximum = self.__settings.agent_limits.transaction_traces_nodes

        transaction_trace = self.__slow_transaction.transaction_trace(
            self, maximum)

        internal_metric(
            'Supportability/StatsEngine/Counts/'
            'transaction_sample_data',
            self.__slow_transaction.trace_node_count)

        data = [
            transaction_trace,
            self.__slow_transaction.string_table.values()
        ]

        if self.__settings.debug.log_transaction_trace_payload:
            _logger.debug(
                'Encoding slow transaction data where '
                'payload=%r.', data)

        with InternalTrace('Supportability/StatsEngine/JSON/Encode/'
                           'transaction_sample_data'):

            json_data = simplejson.dumps(data,
                                         ensure_ascii=True,
                                         encoding='Latin-1',
                                         namedtuple_as_object=False,
                                         default=lambda o: list(iter(o)))

        internal_metric(
            'Supportability/StatsEngine/ZLIB/Bytes/'
            'transaction_sample_data', len(json_data))

        with InternalTrace('Supportability/StatsEngine/ZLIB/Compress/'
                           'transaction_sample_data'):
            zlib_data = zlib.compress(json_data)

        with InternalTrace('Supportability/StatsEngine/BASE64/Encode/'
                           'transaction_sample_data'):
            pack_data = base64.standard_b64encode(zlib_data)

        root = transaction_trace.root

        trace_data = [[
            root.start_time, root.end_time - root.start_time,
            self.__slow_transaction.path, self.__slow_transaction.request_uri,
            pack_data
        ]]

        return trace_data
Code example #13
    def transaction_trace_data(self):
        """Returns a list of slow transaction data collected
        during the reporting period.

        """
        if not self.__settings:
            return []

        # Create a set 'traces' that is a union of the slow transaction,
        # browser_transactions and xray_transactions. This ensures we
        # don't send duplicates of a transaction.

        traces = set()
        if self.__slow_transaction:
            traces.add(self.__slow_transaction)
        traces.update(self.__browser_transactions)
        traces.update(self.__xray_transactions)

        # Return an empty list if no transactions were captured.

        if not traces:
            return []

        trace_data = []
        maximum = self.__settings.agent_limits.transaction_traces_nodes

        for trace in traces:
            transaction_trace = trace.transaction_trace(self, maximum)

            internal_metric(
                'Supportability/StatsEngine/Counts/'
                'transaction_sample_data', trace.trace_node_count)

            data = [transaction_trace, trace.string_table.values()]

            if self.__settings.debug.log_transaction_trace_payload:
                _logger.debug(
                    'Encoding slow transaction data where '
                    'payload=%r.', data)

            with InternalTrace('Supportability/StatsEngine/JSON/Encode/'
                               'transaction_sample_data'):

                json_data = simplejson.dumps(data,
                                             ensure_ascii=True,
                                             encoding='Latin-1',
                                             namedtuple_as_object=False,
                                             default=lambda o: list(iter(o)))

            internal_metric(
                'Supportability/StatsEngine/ZLIB/Bytes/'
                'transaction_sample_data', len(json_data))

            with InternalTrace('Supportability/StatsEngine/ZLIB/Compress/'
                               'transaction_sample_data'):
                zlib_data = zlib.compress(json_data)

            with InternalTrace('Supportability/StatsEngine/BASE64/Encode/'
                               'transaction_sample_data'):
                pack_data = base64.standard_b64encode(zlib_data)

            root = transaction_trace.root
            xray_id = getattr(trace, 'xray_id', None)
            force_persist = (trace.guid is not None) or (xray_id is not None)

            trace_data.append([
                root.start_time,
                root.end_time - root.start_time,
                trace.path,
                trace.request_uri,
                pack_data,
                trace.guid,
                None,
                force_persist,
                xray_id,
            ])

        return trace_data