def get_idempotence(self, request, session):
    """
    Check whether an idempotence key is present on the request and whether
    that key has already been processed for the authenticated user.  If it
    has, replay the previously stored response instead of reprocessing.

    :param request: incoming HTTP request; response headers and the
        response code are mutated in place when a cached result is replayed.
    :param session: authenticated session; ``session.auth_id`` scopes the
        key so different users cannot collide on the same client key.
    :return: the cached response data when the key was already processed,
        otherwise ``False``.
    """
    request.setHeader("Content-Type", CONTENT_TYPE_JSON)

    # The key may arrive either as a header or as a query argument.
    idempotence = request.getHeader("x-idempotence")
    if idempotence is None:
        arguments = args_to_dict(request.args)
        idempotence = arguments.get("_idempotence", None)

    if idempotence is not None:
        request.idempotence = idempotence
        # Derive a per-user cache key from the raw client-supplied key.
        idempotence = sha256_compact(f"{session.auth_id}:{idempotence}")
        if idempotence in self.idempotence:
            # BUG FIX: zlib has no 'uncompress' function -- the stored
            # blob is zlib-compressed msgpack and must be 'decompress'ed.
            results = msgpack.loads(
                zlib.decompress(self.idempotence[idempotence]))
            # BUG FIX: a leftover debug 'print' and stray 'return False'
            # made the replay below unreachable; removed so the cached
            # response is actually returned.
            request.setHeader("Idempotence", "true")
            request.setResponseCode(results["response_code"])
            return results["data"]
    return False
def send_request(self, url, proxies, session, payload=()):
    """Constructs and sends a request to the data collector.

    :param url: data collector endpoint to POST to.
    :param proxies: proxy mapping passed through to the requests library.
    :param session: accepted for backward compatibility; a fresh requests
        session is created per call (see NOTE below).
    :param payload: JSON-serializable data to report.
    :return: the collector's ``status`` value on success.
    :raises DiscardDataForRequest: when the payload cannot be encoded, the
        collector rejects the request, or the response is malformed.
    :raises RetryDataForRequest: when the collector is not contactable.
    :raises ServerIsUnavailable: on a 503/504 response.
    """
    headers = {}

    start = time.time()

    # Validate that the license key was actually set and if not replace
    # it with a string which makes it more obvious it was not set.
    license_key = self.license_key
    if not self.license_key:
        license_key = 'NO LICENSE KEY WAS SET IN AGENT CONFIGURATION'

    headers['User-Agent'] = USER_AGENT
    headers['Content-Encoding'] = 'identity'
    headers['X-License-Key'] = license_key

    # JSON content encoding is used for the data being sent. Ensure the
    # final result is ASCII so we don't need to worry about converting
    # back to bytes again.
    #
    # If an error does occur when encoding the JSON, then it isn't
    # likely going to work later on in a subsequent request with same
    # data, even if aggregated with other data, so we need to log the
    # details and then flag that data should be thrown away.
    try:
        # BUG FIX: json.dumps() has no 'encoding' argument on Python 3
        # (this file already uses f-strings, so it runs on Python 3);
        # ensure_ascii=True already yields pure-ASCII output.
        data = json.dumps(payload, ensure_ascii=True)
    except Exception as exc:
        _logger.error('Error encoding data for JSON payload '
                'with payload of %r. Exception which occurred was %r. '
                'Please report this problem to New Relic support.',
                payload, exc)
        raise DiscardDataForRequest(str(exc))

    # Log details of call and/or payload for debugging. Use the JSON
    # encoded value so know that what is encoded is correct.
    _logger.debug('Calling data collector to report custom metrics '
            'with payload=%r.', data)

    # Compress the serialized JSON being sent as content if over 64KiB
    # in size. If less than 2MB in size compress for speed. If over
    # 2MB then compress for smallest size. This parallels what the Ruby
    # agent does.
    if len(data) > 64 * 1024:
        headers['Content-Encoding'] = 'deflate'
        level = 1 if len(data) < 2000000 else 9
        # BUG FIX: zlib.compress() requires bytes on Python 3; the JSON
        # string is pure ASCII so a Latin-1 encode is lossless.
        data = zlib.compress(data.encode('latin-1'), level)

    # The 'requests' library can raise a number of exception derived
    # from 'RequestException' before we even manage to get a connection
    # to the data collector. The data collector can the generate a
    # number of different types of HTTP errors for requests.
    try:
        # NOTE(review): the passed-in 'session' argument is immediately
        # replaced with a fresh session, so the parameter is effectively
        # unused; kept in the signature for caller compatibility.
        session = requests.session()
        r = session.post(url, headers=headers, proxies=proxies,
                timeout=self.timeout, data=data)
        # Read the content now so we can force close the socket
        # connection if this is a transient session as quickly
        # as possible.
        content = r.content
    except requests.RequestException as exc:
        if not self.proxy_host or not self.proxy_port:
            _logger.warning('Data collector is not contactable. This can '
                    'be because of a network issue or because of the data '
                    'collector being restarted. In the event that contact '
                    'cannot be made after a period of time then please '
                    'report this problem to New Relic support for further '
                    'investigation. The error raised was %r.', exc)
        else:
            _logger.warning('Data collector is not contactable via the '
                    'proxy host %r on port %r with proxy user of %r. This '
                    'can be because of a network issue or because of the '
                    'data collector being restarted. In the event that '
                    'contact cannot be made after a period of time then '
                    'please report this problem to New Relic support for '
                    'further investigation. The error raised was %r.',
                    self.proxy_host, self.proxy_port, self.proxy_user, exc)
        raise RetryDataForRequest(str(exc))
    finally:
        session.close()

    if r.status_code != 200:
        _logger.debug('Received a non 200 HTTP response from the data '
                'collector where url=%r, license_key=%r, headers=%r, '
                'status_code=%r and content=%r.', url, license_key,
                headers, r.status_code, r.content)

    if r.status_code == 400:
        if headers['Content-Encoding'] == 'deflate':
            # BUG FIX: zlib has no 'uncompress' function; use decompress
            # to recover the payload for logging.
            data = zlib.decompress(data)
        _logger.error('Data collector is indicating that a bad '
                'request has been submitted for url %r, headers of %r '
                'and payload of %r with response of %r. Please report '
                'this problem to New Relic support.', url, headers,
                data, r.content)
        raise DiscardDataForRequest()
    elif r.status_code == 403:
        _logger.error('Data collector is indicating that the license '
                'key %r is not valid.', license_key)
        raise DiscardDataForRequest()
    elif r.status_code == 413:
        _logger.warning('Data collector is indicating that a request '
                'was received where the request content size '
                'was over the maximum allowed size limit. The length of '
                'the request content was %d. If this keeps occurring on a '
                'regular basis, please report this problem to New Relic '
                'support for further investigation.', len(data))
        raise DiscardDataForRequest()
    elif r.status_code in (503, 504):
        _logger.warning('Data collector is unavailable. This can be a '
                'transient issue because of the data collector or our '
                'core application being restarted. If the issue persists '
                'it can also be indicative of a problem with our servers. '
                'In the event that availability of our servers is not '
                'restored after a period of time then please report this '
                'problem to New Relic support for further investigation.')
        raise ServerIsUnavailable()
    elif r.status_code != 200:
        if not self.proxy_host or not self.proxy_port:
            _logger.warning('An unexpected HTTP response was received '
                    'from the data collector of %r. The payload for '
                    'the request was %r. If this issue persists then '
                    'please report this problem to New Relic support '
                    'for further investigation.', r.status_code, payload)
        else:
            _logger.warning('An unexpected HTTP response was received '
                    'from the data collector of %r while connecting '
                    'via proxy host %r on port %r with proxy user of %r. '
                    'The payload for the request was %r. If this issue '
                    'persists then please report this problem to New '
                    'Relic support for further investigation.',
                    r.status_code, self.proxy_host, self.proxy_port,
                    self.proxy_user, payload)
        raise DiscardDataForRequest()

    # Log details of response payload for debugging. Use the JSON
    # encoded value so know that what original encoded value was.
    duration = time.time() - start
    _logger.debug('Valid response from data collector after %.2f '
            'seconds with content=%r.', duration, r.content)

    # If we got this far we should have a legitimate response from the
    # data collector. The response is JSON so need to decode it.
    try:
        # BUG FIX: json.loads() no longer accepts an 'encoding' argument
        # on Python 3; bytes content is decoded as UTF-8 automatically.
        result = json.loads(r.content)
    except Exception as exc:
        _logger.error('Error decoding data for JSON payload '
                'with payload of %r. Exception which occurred was %r. '
                'Please report this problem to New Relic support.',
                r.content, exc)
        raise DiscardDataForRequest(str(exc))

    # The decoded JSON can be either for a successful response or an
    # error. A successful response has a 'status' element and an
    # error an 'error' element.  (Comment corrected: the previous text
    # referred to 'return_value'/'exception', which this code never uses.)
    if 'status' in result:
        return result['status']

    error_message = result['error']

    # Now need to check for server side exceptions. The following
    # exceptions can occur for abnormal events.
    _logger.debug('Received an exception from the data collector where '
            'url=%r, license_key=%r, headers=%r and error_message=%r. ',
            url, license_key, headers, error_message)

    raise DiscardDataForRequest(error_message)
if r.status_code != 200: _logger.debug( "Received a non 200 HTTP response from the data " "collector where url=%r, license_key=%r, headers=%r, " "status_code=%r and content=%r.", url, license_key, headers, r.status_code, r.content, ) if r.status_code == 400: if headers["Content-Encoding"] == "deflate": data = zlib.uncompress(data) _logger.error( "Data collector is indicating that a bad " "request has been submitted for url %r, headers of %r " "and payload of %r with response of %r. Please report " "this problem to New Relic support.", url, headers, data, r.content, ) raise DiscardDataForRequest() elif r.status_code == 403:
def send_request(session, url, method, license_key, agent_run_id=None,
        payload=()):
    """Constructs and sends a request to the data collector.

    :param session: optional requests session to reuse; a transient one
        is created (and closed) when not supplied.
    :param url: data collector endpoint to POST to.
    :param method: collector RPC method name, sent as a query parameter.
    :param license_key: agent license key; a placeholder is substituted
        when empty so misconfiguration is obvious in logs.
    :param agent_run_id: current agent run identifier, if connected.
    :param payload: JSON-serializable data to report.
    :return: the collector's ``return_value`` on success.
    :raises DiscardDataForRequest: data should be thrown away.
    :raises RetryDataForRequest: transient failure; retry later.
    :raises ServerIsUnavailable: collector returned 503.
    :raises ForceAgentRestart: collector requested an agent restart.
    :raises ForceAgentDisconnect: collector requested disconnection.
    """

    params = {}
    headers = {}

    settings = global_settings()

    start = time.time()

    # Validate that the license key was actually set and if not replace
    # it with a string which makes it more obvious it was not set.
    if not license_key:
        license_key = 'NO LICENSE KEY WAS SET IN AGENT CONFIGURATION'

    # The agent formats requests and is able to handle responses for
    # protocol version 12.
    params['method'] = method
    params['license_key'] = license_key
    params['protocol_version'] = '12'
    params['marshal_format'] = 'json'

    if agent_run_id:
        params['run_id'] = str(agent_run_id)

    headers['User-Agent'] = USER_AGENT
    headers['Content-Encoding'] = 'identity'

    # Set up definitions for proxy server in case that has been set.
    proxies = proxy_server()

    # At this time we use JSON content encoding for the data being
    # sent. Ensure that normal byte strings are interpreted as Latin-1
    # and that the final result is ASCII so that don't need to worry
    # about converting back to bytes again. We set the default fallback
    # encoder to treat any iterable as a list. Unfortunately the JSON
    # library can't use it as an iterable and so means that generator
    # will be consumed up front and everything collected in memory as a
    # list before then converting to JSON.
    #
    # If an error does occur when encoding the JSON, then it isn't
    # likely going to work later on in a subsequent request with same
    # data, even if aggregated with other data, so we need to log the
    # details and then flag that data should be thrown away.
    try:
        with InternalTrace('Supportability/Collector/JSON/Encode/%s' % method):
            data = simplejson.dumps(payload, ensure_ascii=True,
                    encoding='Latin-1', namedtuple_as_object=False,
                    default=lambda o: list(iter(o)))
    except Exception:
        _logger.exception('Error encoding data for JSON payload for '
                'method %r with payload of %r. Please report this problem '
                'to New Relic support.', method, payload)
        raise DiscardDataForRequest(str(sys.exc_info()[1]))

    # Log details of call and/or payload for debugging. Use the JSON
    # encoded value so know that what is encoded is correct.
    if settings.debug.log_data_collector_payloads:
        _logger.debug('Calling data collector with url=%r, method=%r and '
                'payload=%r.', url, method, data)
    elif settings.debug.log_data_collector_calls:
        _logger.debug('Calling data collector with url=%r and method=%r.',
                url, method)

    # Compress the serialized JSON being sent as content if over 64KiB
    # in size. If less than 2MB in size compress for speed. If over
    # 2MB then compress for smallest size. This parallels what the Ruby
    # agent does.
    if len(data) > 64*1024:
        headers['Content-Encoding'] = 'deflate'
        level = (len(data) < 2000000) and 1 or 9
        internal_metric('Supportability/Collector/ZLIB/Bytes/%s' % method,
                len(data))
        with InternalTrace('Supportability/Collector/ZLIB/Compress/'
                '%s' % method):
            data = zlib.compress(six.b(data), level)

    # If there is no requests session object provided for making
    # requests create one now. We want to close this as soon as we
    # are done with it.
    auto_close_session = False
    if not session:
        session = requests.session()
        auto_close_session = True

    # The 'requests' library can raise a number of exception derived
    # from 'RequestException' before we even manage to get a connection
    # to the data collector.
    #
    # The data collector can the generate a number of different types of
    # HTTP errors for requests. These are:
    #
    # 400 Bad Request - For incorrect method type or incorrectly
    # construct parameters. We should not get this and if we do it would
    # likely indicate a problem with the implementation of the agent.
    #
    # 413 Request Entity Too Large - Where the request content was too
    # large. Same data is not going to work later on in a subsequent
    # request, even if aggregated with other data, so we need to log the
    # details and then flag that data should be thrown away.
    #
    # 415 Unsupported Media Type - This occurs when the JSON which was
    # sent can't be decoded by the data collector. Sending again, even
    # if aggregated with other data, may not work, so we need to log the
    # details and then flag that data should be thrown away.
    #
    # 503 Service Unavailable - This occurs when data collector, or core
    # application is being restarted and not in state to be able to
    # accept requests. It should be a transient issue so should be able
    # to retain data and try again.
    internal_metric('Supportability/Collector/Output/Bytes/%s' % method,
            len(data))

    try:
        # The timeout value in the requests module is only on
        # the initial connection and doesn't apply to how long
        # it takes to get back a response.
        timeout = settings.agent_limits.data_collector_timeout
        r = session.post(url, params=params, headers=headers,
                proxies=proxies, timeout=timeout, data=data)

        # Read the content now so we can force close the socket
        # connection if this is a transient session as quickly
        # as possible.
        content = r.content
    except requests.RequestException:
        if not settings.proxy_host or not settings.proxy_port:
            _logger.warning('Data collector is not contactable. This can be '
                    'because of a network issue or because of the data '
                    'collector being restarted. In the event that contact '
                    'cannot be made after a period of time then please '
                    'report this problem to New Relic support for further '
                    'investigation. The error raised was %r.',
                    sys.exc_info()[1])
        else:
            _logger.warning('Data collector is not contactable via the proxy '
                    'host %r on port %r with proxy user of %r. This can be '
                    'because of a network issue or because of the data '
                    'collector being restarted. In the event that contact '
                    'cannot be made after a period of time then please '
                    'report this problem to New Relic support for further '
                    'investigation. The error raised was %r.',
                    settings.proxy_host, settings.proxy_port,
                    settings.proxy_user, sys.exc_info()[1])
        raise RetryDataForRequest(str(sys.exc_info()[1]))
    finally:
        if auto_close_session:
            session.close()
            session = None

    if r.status_code != 200:
        _logger.debug('Received a non 200 HTTP response from the data '
                'collector where url=%r, method=%r, license_key=%r, '
                'agent_run_id=%r, params=%r, headers=%r, status_code=%r '
                'and content=%r.', url, method, license_key, agent_run_id,
                params, headers, r.status_code, content)

    if r.status_code == 400:
        _logger.error('Data collector is indicating that a bad '
                'request has been submitted for url %r, headers of %r, '
                'params of %r and payload of %r. Please report this '
                'problem to New Relic support.', url, headers, params,
                payload)
        raise DiscardDataForRequest()
    elif r.status_code == 413:
        _logger.warning('Data collector is indicating that a request for '
                'method %r was received where the request content size '
                'was over the maximum allowed size limit. The length of '
                'the request content was %d. If this keeps occurring on a '
                'regular basis, please report this problem to New Relic '
                'support for further investigation.', method, len(data))
        raise DiscardDataForRequest()
    elif r.status_code == 415:
        _logger.warning('Data collector is indicating that it was sent '
                'malformed JSON data for method %r. If this keeps occurring '
                'on a regular basis, please report this problem to New '
                'Relic support for further investigation.', method)
        if settings.debug.log_malformed_json_data:
            if headers['Content-Encoding'] == 'deflate':
                # BUG FIX: zlib has no 'uncompress' function; use
                # decompress to recover the payload for logging.
                data = zlib.decompress(data)
            _logger.info('JSON data which was rejected by the data '
                    'collector was %r.', data)
        raise DiscardDataForRequest(content)
    elif r.status_code == 503:
        _logger.warning('Data collector is unavailable. This can be a '
                'transient issue because of the data collector or our '
                'core application being restarted. If the issue persists '
                'it can also be indicative of a problem with our servers. '
                'In the event that availability of our servers is not '
                'restored after a period of time then please report this '
                'problem to New Relic support for further investigation.')
        raise ServerIsUnavailable()
    elif r.status_code != 200:
        if not settings.proxy_host or not settings.proxy_port:
            _logger.warning('An unexpected HTTP response was received from '
                    'the data collector of %r for method %r. The payload for '
                    'the request was %r. If this issue persists then please '
                    'report this problem to New Relic support for further '
                    'investigation.', r.status_code, method, payload)
        else:
            _logger.warning('An unexpected HTTP response was received from '
                    'the data collector of %r for method %r while connecting '
                    'via proxy host %r on port %r with proxy user of %r. '
                    'The payload for the request was %r. If this issue '
                    'persists then please report this problem to New Relic '
                    'support for further investigation.', r.status_code,
                    method, settings.proxy_host, settings.proxy_port,
                    settings.proxy_user, payload)
        raise DiscardDataForRequest()

    # Log details of response payload for debugging. Use the JSON
    # encoded value so know that what original encoded value was.
    duration = time.time() - start

    if settings.debug.log_data_collector_payloads:
        _logger.debug('Valid response from data collector after %.2f '
                'seconds with content=%r.', duration, content)
    elif settings.debug.log_data_collector_calls:
        _logger.debug('Valid response from data collector after %.2f '
                'seconds.', duration)

    # If we got this far we should have a legitimate response from the
    # data collector. The response is JSON so need to decode it.
    # Everything will come back as Unicode. Make sure all strings are
    # decoded as 'UTF-8'.
    internal_metric('Supportability/Collector/Input/Bytes/%s' % method,
            len(content))

    try:
        with InternalTrace('Supportability/Collector/JSON/Decode/%s' % method):
            result = simplejson.loads(content, encoding='UTF-8')
    except Exception:
        _logger.exception('Error decoding data for JSON payload for '
                'method %r with payload of %r. Please report this problem '
                'to New Relic support.', method, content)
        if settings.debug.log_malformed_json_data:
            _logger.info('JSON data received from data collector which '
                    'could not be decoded was %r.', content)
        raise DiscardDataForRequest(str(sys.exc_info()[1]))

    # The decoded JSON can be either for a successful response or an
    # error. A successful response has a 'return_value' element and an
    # error an 'exception' element.
    if 'return_value' in result:
        return result['return_value']

    error_type = result['exception']['error_type']
    message = result['exception']['message']

    # Now need to check for server side exceptions. The following
    # exceptions can occur for abnormal events.
    _logger.debug('Received an exception from the data collector where '
            'url=%r, method=%r, license_key=%r, agent_run_id=%r, params=%r, '
            'headers=%r, error_type=%r and message=%r', url, method,
            license_key, agent_run_id, params, headers, error_type,
            message)

    if error_type == 'NewRelic::Agent::LicenseException':
        _logger.error('Data collector is indicating that an incorrect '
                'license key has been supplied by the agent. The value '
                'which was used by the agent is %r. Please correct any '
                'problem with the license key or report this problem to '
                'New Relic support.', license_key)
        raise DiscardDataForRequest(message)
    elif error_type == 'NewRelic::Agent::PostTooBigException':
        _logger.warning('Core application is indicating that a request for '
                'method %r was received where the request content size '
                'was over the maximum allowed size limit. The length of '
                'the request content was %d. If this keeps occurring on a '
                'regular basis, please report this problem to New Relic '
                'support for further investigation.', method, len(data))
        raise DiscardDataForRequest(message)

    # Server side exceptions are also used to inform the agent to
    # perform certain actions such as restart when server side
    # configuration has changed for this application or when agent is
    # being disabled remotely for some reason.
    if error_type == 'NewRelic::Agent::ForceRestartException':
        _logger.info('An automatic internal agent restart has been '
                'requested by the data collector for the application '
                'where the agent run was %r. The reason given for the '
                'forced restart is %r.', agent_run_id, message)
        raise ForceAgentRestart(message)
    elif error_type == 'NewRelic::Agent::ForceDisconnectException':
        _logger.critical('Disconnection of the agent has been requested by '
                'the data collector for the application where the '
                'agent run was %r. The reason given for the forced '
                'disconnection is %r. Please contact New Relic support '
                'for further information.', agent_run_id, message)
        raise ForceAgentDisconnect(message)

    # We received an unexpected server side error we don't know what
    # to do with.
    _logger.warning('An unexpected server error was received from the '
            'data collector for method %r with payload of %r. The error '
            'was of type %r with message %r. If this issue persists '
            'then please report this problem to New Relic support for '
            'further investigation.', method, payload, error_type,
            message)

    raise DiscardDataForRequest(message)
def send_request(self, url, proxies, session, payload=()):
    """Constructs and sends a request to the data collector.

    Duplicate of the earlier method-form ``send_request`` in this file,
    reformatted; fixed here with the same corrections.

    :param url: data collector endpoint to POST to.
    :param proxies: proxy mapping passed through to the requests library.
    :param session: accepted for backward compatibility; a fresh requests
        session is created per call (see NOTE below).
    :param payload: JSON-serializable data to report.
    :return: the collector's ``status`` value on success.
    :raises DiscardDataForRequest: when the payload cannot be encoded, the
        collector rejects the request, or the response is malformed.
    :raises RetryDataForRequest: when the collector is not contactable.
    :raises ServerIsUnavailable: on a 503/504 response.
    """
    headers = {}

    start = time.time()

    # Validate that the license key was actually set and if not replace
    # it with a string which makes it more obvious it was not set.
    license_key = self.license_key
    if not self.license_key:
        license_key = 'NO LICENSE KEY WAS SET IN AGENT CONFIGURATION'

    headers['User-Agent'] = USER_AGENT
    headers['Content-Encoding'] = 'identity'
    headers['X-License-Key'] = license_key

    # JSON content encoding is used for the data being sent. Ensure the
    # final result is ASCII.  If an error occurs when encoding the JSON
    # it isn't likely to work in a later request either, so log the
    # details and flag that the data should be thrown away.
    try:
        # BUG FIX: json.dumps() has no 'encoding' argument on Python 3;
        # ensure_ascii=True already yields pure-ASCII output.
        data = json.dumps(payload, ensure_ascii=True)
    except Exception as exc:
        _logger.error(
            'Error encoding data for JSON payload '
            'with payload of %r. Exception which occurred was %r. '
            'Please report this problem to New Relic support.',
            payload, exc)
        raise DiscardDataForRequest(str(exc))

    # Log details of call and/or payload for debugging. Use the JSON
    # encoded value so know that what is encoded is correct.
    _logger.debug(
        'Calling data collector to report custom metrics '
        'with payload=%r.', data)

    # Compress the serialized JSON being sent as content if over 64KiB
    # in size. If less than 2MB in size compress for speed. If over
    # 2MB then compress for smallest size. This parallels what the Ruby
    # agent does.
    if len(data) > 64 * 1024:
        headers['Content-Encoding'] = 'deflate'
        level = 1 if len(data) < 2000000 else 9
        # BUG FIX: zlib.compress() requires bytes on Python 3; the JSON
        # string is pure ASCII so a Latin-1 encode is lossless.
        data = zlib.compress(data.encode('latin-1'), level)

    # The 'requests' library can raise a number of exception derived
    # from 'RequestException' before we even manage to get a connection
    # to the data collector. The data collector can the generate a
    # number of different types of HTTP errors for requests.
    try:
        # NOTE(review): the passed-in 'session' argument is immediately
        # replaced with a fresh session, so the parameter is effectively
        # unused; kept in the signature for caller compatibility.
        session = requests.session()
        r = session.post(url, headers=headers, proxies=proxies,
                timeout=self.timeout, data=data)
        # Read the content now so we can force close the socket
        # connection if this is a transient session as quickly
        # as possible.
        content = r.content
    except requests.RequestException as exc:
        if not self.proxy_host or not self.proxy_port:
            _logger.warning(
                'Data collector is not contactable. This can '
                'be because of a network issue or because of the data '
                'collector being restarted. In the event that contact '
                'cannot be made after a period of time then please '
                'report this problem to New Relic support for further '
                'investigation. The error raised was %r.', exc)
        else:
            _logger.warning(
                'Data collector is not contactable via the '
                'proxy host %r on port %r with proxy user of %r. This '
                'can be because of a network issue or because of the '
                'data collector being restarted. In the event that '
                'contact cannot be made after a period of time then '
                'please report this problem to New Relic support for '
                'further investigation. The error raised was %r.',
                self.proxy_host, self.proxy_port, self.proxy_user, exc)
        raise RetryDataForRequest(str(exc))
    finally:
        session.close()

    if r.status_code != 200:
        _logger.debug(
            'Received a non 200 HTTP response from the data '
            'collector where url=%r, license_key=%r, headers=%r, '
            'status_code=%r and content=%r.', url, license_key,
            headers, r.status_code, r.content)

    if r.status_code == 400:
        if headers['Content-Encoding'] == 'deflate':
            # BUG FIX: zlib has no 'uncompress' function; use decompress
            # to recover the payload for logging.
            data = zlib.decompress(data)
        _logger.error(
            'Data collector is indicating that a bad '
            'request has been submitted for url %r, headers of %r '
            'and payload of %r with response of %r. Please report '
            'this problem to New Relic support.', url, headers,
            data, r.content)
        raise DiscardDataForRequest()
    elif r.status_code == 403:
        _logger.error(
            'Data collector is indicating that the license '
            'key %r is not valid.', license_key)
        raise DiscardDataForRequest()
    elif r.status_code == 413:
        _logger.warning(
            'Data collector is indicating that a request '
            'was received where the request content size '
            'was over the maximum allowed size limit. The length of '
            'the request content was %d. If this keeps occurring on a '
            'regular basis, please report this problem to New Relic '
            'support for further investigation.', len(data))
        raise DiscardDataForRequest()
    elif r.status_code in (503, 504):
        _logger.warning(
            'Data collector is unavailable. This can be a '
            'transient issue because of the data collector or our '
            'core application being restarted. If the issue persists '
            'it can also be indicative of a problem with our servers. '
            'In the event that availability of our servers is not '
            'restored after a period of time then please report this '
            'problem to New Relic support for further investigation.')
        raise ServerIsUnavailable()
    elif r.status_code != 200:
        if not self.proxy_host or not self.proxy_port:
            _logger.warning(
                'An unexpected HTTP response was received '
                'from the data collector of %r. The payload for '
                'the request was %r. If this issue persists then '
                'please report this problem to New Relic support '
                'for further investigation.', r.status_code, payload)
        else:
            _logger.warning(
                'An unexpected HTTP response was received '
                'from the data collector of %r while connecting '
                'via proxy host %r on port %r with proxy user of %r. '
                'The payload for the request was %r. If this issue '
                'persists then please report this problem to New '
                'Relic support for further investigation.',
                r.status_code, self.proxy_host, self.proxy_port,
                self.proxy_user, payload)
        raise DiscardDataForRequest()

    # Log details of response payload for debugging. Use the JSON
    # encoded value so know that what original encoded value was.
    duration = time.time() - start
    _logger.debug(
        'Valid response from data collector after %.2f '
        'seconds with content=%r.', duration, r.content)

    # If we got this far we should have a legitimate response from the
    # data collector. The response is JSON so need to decode it.
    try:
        # BUG FIX: json.loads() no longer accepts an 'encoding' argument
        # on Python 3; bytes content is decoded as UTF-8 automatically.
        result = json.loads(r.content)
    except Exception as exc:
        _logger.error(
            'Error decoding data for JSON payload '
            'with payload of %r. Exception which occurred was %r. '
            'Please report this problem to New Relic support.',
            r.content, exc)
        raise DiscardDataForRequest(str(exc))

    # The decoded JSON can be either for a successful response or an
    # error. A successful response has a 'status' element and an error
    # an 'error' element.  (Comment corrected: the previous text
    # referred to 'return_value'/'exception', which this code never uses.)
    if 'status' in result:
        return result['status']

    error_message = result['error']

    # Now need to check for server side exceptions. The following
    # exceptions can occur for abnormal events.
    _logger.debug(
        'Received an exception from the data collector where '
        'url=%r, license_key=%r, headers=%r and error_message=%r. ',
        url, license_key, headers, error_message)

    raise DiscardDataForRequest(error_message)
'the request content was %d. If this keeps occurring on a ' 'regular basis, please report this problem to New Relic ' 'support for further investigation.', method, len(data)) raise DiscardDataForRequest() elif r.status_code == 415: _logger.warning( 'Data collector is indicating that it was sent ' 'malformed JSON data for method %r. If this keeps occurring ' 'on a regular basis, please report this problem to New ' 'Relic support for further investigation.', method) if settings.debug.log_malformed_json_data: if headers['Content-Encoding'] == 'deflate': data = zlib.uncompress(data) _logger.info( 'JSON data which was rejected by the data ' 'collector was %r.', data) raise DiscardDataForRequest(content) elif r.status_code == 503: _logger.warning( 'Data collector is unavailable. This can be a ' 'transient issue because of the data collector or our ' 'core application being restarted. If the issue persists ' 'it can also be indicative of a problem with our servers. ' 'In the event that availability of our servers is not ' 'restored after a period of time then please report this '