def normalize(self):
    # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
    # First we pull out our top-level (non-data attr) kwargs
    data = self.data

    if not isinstance(data.get('level'), (six.string_types, int)):
        data['level'] = logging.ERROR
    elif data['level'] not in LOG_LEVELS:
        data['level'] = logging.ERROR

    if not data.get('logger') or not isinstance(data.get('logger'), six.string_types):
        data['logger'] = DEFAULT_LOGGER_NAME
    else:
        logger = trim(data['logger'].strip(), 64)
        if tagstore.is_valid_key(logger):
            data['logger'] = logger
        else:
            data['logger'] = DEFAULT_LOGGER_NAME

    if data.get('platform'):
        data['platform'] = trim(data['platform'], 64)

    current_timestamp = timezone.now()
    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = current_timestamp

    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    if not data.get('event_id'):
        data['event_id'] = uuid4().hex

    data.setdefault('culprit', None)
    data.setdefault('transaction', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('checksum', None)
    data.setdefault('fingerprint', None)
    data.setdefault('platform', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('errors', [])

    tags = data.get('tags')
    if not tags:
        tags = []
    # full support for dict syntax
    elif isinstance(tags, dict):
        tags = list(tags.items())
    # prevent [tag, tag, tag] (invalid) syntax
    elif not all(len(t) == 2 for t in tags):
        tags = []
    else:
        tags = list(tags)

    data['tags'] = []
    for key, value in tags:
        key = six.text_type(key).strip()
        value = six.text_type(value).strip()
        if not (key and value):
            continue

        # XXX(dcramer): many legacy apps are using the environment tag
        # rather than the key itself
        if key == 'environment' and not data.get('environment'):
            data['environment'] = value
        else:
            data['tags'].append((key, value))

    if not isinstance(data['extra'], dict):
        # throw it away
        data['extra'] = {}

    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    # TODO(dcramer): more of validate data needs stuffed into the manager
    for key in list(iter(data)):
        if key in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(key)

        try:
            interface = get_interface(key)()
        except ValueError:
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception:
            # XXX: we should consider logging this.
            pass

    # TODO(dcramer): this logic is duplicated in ``validate_data`` from
    # coreapi

    # message is coerced to an interface, as its used for pure
    # index of searchable strings
    # See GH-3248
    message = data.pop('message', None)
    if message:
        if 'sentry.interfaces.Message' not in data:
            interface = get_interface('sentry.interfaces.Message')
            try:
                inst = interface.to_python({
                    'message': message,
                })
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass
        elif not data['sentry.interfaces.Message'].get('formatted'):
            interface = get_interface('sentry.interfaces.Message')
            try:
                inst = interface.to_python(
                    dict(
                        data['sentry.interfaces.Message'],
                        formatted=message,
                    )
                )
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    # TODO(dcramer): find a better place for this logic
    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    if 'sentry.interfaces.Http' in data:
        try:
            ip_address = validate_ip(
                data['sentry.interfaces.Http'].get('env', {}).get('REMOTE_ADDR'),
                required=False,
            )
        except ValueError:
            ip_address = None

        if ip_address:
            data.setdefault('sentry.interfaces.User', {})
            data['sentry.interfaces.User'].setdefault('ip_address', ip_address)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    return data
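
# A minimal, runnable sketch of the tag coercion performed above (illustrative
# only; `coerce_tags` is a hypothetical helper name, and only `six` is assumed).
import six


def coerce_tags(data):
    tags = data.get('tags')
    if not tags:
        tags = []
    elif isinstance(tags, dict):
        # dict syntax is accepted and converted to (key, value) pairs
        tags = list(tags.items())
    elif not all(len(t) == 2 for t in tags):
        # a flat [tag, tag, tag] list is invalid and is dropped
        tags = []
    else:
        tags = list(tags)

    data['tags'] = []
    for key, value in tags:
        key = six.text_type(key).strip()
        value = six.text_type(value).strip()
        if not (key and value):
            continue
        # legacy 'environment' tags are promoted to the top-level key
        if key == 'environment' and not data.get('environment'):
            data['environment'] = value
        else:
            data['tags'].append((key, value))
    return data


# 'environment' becomes a top-level key; 'browser' survives as a (key, value) pair.
print(coerce_tags({'tags': {'environment': 'prod', 'browser': 'Chrome 61'}}))
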
def normalize(self):
    # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
    # First we pull out our top-level (non-data attr) kwargs
    data = self.data

    if not isinstance(data.get('level'), (six.string_types, int)):
        data['level'] = logging.ERROR
    elif data['level'] not in LOG_LEVELS:
        data['level'] = logging.ERROR

    if not data.get('logger'):
        data['logger'] = DEFAULT_LOGGER_NAME
    else:
        logger = trim(data['logger'].strip(), 64)
        if TagKey.is_valid_key(logger):
            data['logger'] = logger
        else:
            data['logger'] = DEFAULT_LOGGER_NAME

    if data.get('platform'):
        data['platform'] = trim(data['platform'], 64)

    current_timestamp = timezone.now()
    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = current_timestamp

    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    if not data.get('event_id'):
        data['event_id'] = uuid4().hex

    data.setdefault('culprit', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('checksum', None)
    data.setdefault('fingerprint', None)
    data.setdefault('platform', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('errors', [])

    tags = data.get('tags')
    if not tags:
        tags = []
    # full support for dict syntax
    elif isinstance(tags, dict):
        tags = list(tags.items())
    # prevent [tag, tag, tag] (invalid) syntax
    elif not all(len(t) == 2 for t in tags):
        tags = []
    else:
        tags = list(tags)

    data['tags'] = []
    for key, value in tags:
        key = six.text_type(key).strip()
        value = six.text_type(value).strip()
        if not (key and value):
            continue

        # XXX(dcramer): many legacy apps are using the environment tag
        # rather than the key itself
        if key == 'environment' and not data.get('environment'):
            data['environment'] = value
        else:
            data['tags'].append((key, value))

    if not isinstance(data['extra'], dict):
        # throw it away
        data['extra'] = {}

    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    # TODO(dcramer): more of validate data needs stuffed into the manager
    for key in list(iter(data)):
        if key in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(key)

        try:
            interface = get_interface(key)()
        except ValueError:
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception:
            # XXX: we should consider logging this.
            pass

    # TODO(dcramer): this logic is duplicated in ``validate_data`` from
    # coreapi

    # message is coerced to an interface, as its used for pure
    # index of searchable strings
    # See GH-3248
    message = data.pop('message', None)
    if message:
        if 'sentry.interfaces.Message' not in data:
            interface = get_interface('sentry.interfaces.Message')
            try:
                inst = interface.to_python({
                    'message': message,
                })
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass
        elif not data['sentry.interfaces.Message'].get('formatted'):
            interface = get_interface('sentry.interfaces.Message')
            try:
                inst = interface.to_python(dict(
                    data['sentry.interfaces.Message'],
                    formatted=message,
                ))
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    # TODO(dcramer): find a better place for this logic
    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    if 'sentry.interfaces.Http' in data:
        try:
            ip_address = validate_ip(
                data['sentry.interfaces.Http'].get('env', {}).get('REMOTE_ADDR'),
                required=False,
            )
        except ValueError:
            ip_address = None

        if ip_address:
            data.setdefault('sentry.interfaces.User', {})
            data['sentry.interfaces.User'].setdefault('ip_address', ip_address)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    return data
def normalize(self, request_env=None):
    request_env = request_env or {}
    data = self.data
    errors = data['errors'] = []

    # Ignore event meta data for now.
    data.pop('_meta', None)

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    def stringify(f):
        if isinstance(f, float):
            return text(int(f)) if abs(f) < (1 << 53) else None
        return text(f)

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: list(x for x in map(stringify, v) if x is not None)
        if isinstance(v, list) and all(isinstance(f, fp_types) for f in v) else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                           for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
        'logentry': lambda v: v if isinstance(v, dict) else {'message': v},

        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'breadcrumbs': to_values,
        'threads': to_values,
    }

    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except InvalidTimestamp as it:
                errors.append({'type': it.args[0], 'name': c, 'value': data[c]})
                del data[c]
            except Exception as e:
                errors.append({'type': EventError.INVALID_DATA, 'name': c, 'value': data[c]})
                del data[c]

    # raw 'message' is coerced to the Message interface, as its used for pure index of
    # searchable strings. If both a raw 'message' and a Message interface exist, try and
    # add the former as the 'formatted' attribute of the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.get('logentry')
        msg_meta = data.get('_meta', {}).get('message')

        if not msg_if:
            msg_if = data['logentry'] = {'message': msg_str}
            if msg_meta:
                data.setdefault('_meta', {}).setdefault('logentry', {})['message'] = msg_meta

        if msg_if.get('message') != msg_str:
            if not msg_if.get('formatted'):
                msg_if['formatted'] = msg_str
                if msg_meta:
                    data.setdefault('_meta', {}).setdefault(
                        'logentry', {})['formatted'] = msg_meta

    # Fill in ip addresses marked as {{auto}}
    client_ip = request_env.get('client_ip')
    if client_ip:
        if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['request']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
            data['sentry.interfaces.User']['ip_address'] = client_ip

        if get_path(data, ['user', 'ip_address']) == '{{auto}}':
            data['user']['ip_address'] = client_ip

    # Validate main event body and tags against schema.
    # XXX(ja): jsonschema does not like CanonicalKeyDict, so we need to pass
    # in the inner data dict.
    is_valid, event_errors = validate_and_default_interface(data.data, 'event')
    errors.extend(event_errors)

    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({'type': EventError.INVALID_ATTRIBUTE, 'name': k})
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({'type': EventError.INVALID_DATA, 'name': k, 'value': value})

    # Additional data coercion and defaulting
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('checksum', None)
    data.setdefault('culprit', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('fingerprint', None)
    data.setdefault('logger', DEFAULT_LOGGER_NAME)
    data.setdefault('platform', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('tags', [])
    data.setdefault('transaction', None)

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    # Exception mechanism needs SDK information to resolve proper names in
    # exception meta (such as signal names). "SDK Information" really means
    # the operating system version the event was generated on. Some
    # normalization still works without sdk_info, such as mach_exception
    # names (they can only occur on macOS).
    if exception:
        sdk_info = get_sdk_from_event(data)
        for ex in exception['values']:
            if 'mechanism' in ex:
                normalize_mechanism_meta(ex['mechanism'], sdk_info)

    # If there is no User ip_address, update it either from the Http interface
    # or the client_ip of the request.
    auth = request_env.get('auth')
    is_public = auth and auth.is_public
    add_ip_platforms = ('javascript', 'cocoa', 'objc')

    http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
    if http_ip:
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
    elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

    # Trim values
    data['logger'] = trim(data['logger'].strip(), 64)
    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    return data
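
# A self-contained sketch of the pre-schema casts used above (illustrative only;
# these are copies of the inline helpers, not the module's public API, and only
# `six` is assumed). List-style payloads are wrapped as {'values': [...]} and
# fingerprint floats are stringified, dropping values at or beyond 2**53.
import six

text = six.text_type


def to_values(v):
    # wrap bare lists/tuples so the schema always sees {'values': [...]}
    return {'values': v} if v and isinstance(v, (tuple, list)) else v


def stringify(f):
    # floats within the 53-bit integer range become their integer string; larger ones are dropped
    if isinstance(f, float):
        return text(int(f)) if abs(f) < (1 << 53) else None
    return text(f)


print(to_values([{'type': 'ValueError'}]))  # {'values': [{'type': 'ValueError'}]}
print([x for x in map(stringify, [1.0, 2.0 ** 60]) if x is not None])  # ['1']
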
def normalize(self, request_env=None):
    request_env = request_env or {}
    data = self.data
    errors = data['errors'] = []

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    def convert_fingerprint(values):
        rv = values[:]
        bad_float = False
        for idx, item in enumerate(rv):
            if isinstance(item, float) and \
               (abs(item) >= (1 << 53) or int(item) != item):
                bad_float = True
                rv[idx] = text(item)
        if bad_float:
            metrics.incr(
                'events.bad_float_fingerprint',
                skip_internal=True,
                tags={
                    'project_id': data.get('project'),
                },
            )
        return rv

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: convert_fingerprint(v)
        if isinstance(v, list) and all(isinstance(f, fp_types) for f in v) else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                           for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
        'sentry.interfaces.Message': lambda v: v if isinstance(v, dict) else {'message': v},

        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'sentry.interfaces.Exception': to_values,
        'breadcrumbs': to_values,
        'sentry.interfaces.Breadcrumbs': to_values,
        'threads': to_values,
        'sentry.interfaces.Threads': to_values,
    }

    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except InvalidTimestamp as it:
                errors.append({'type': it.args[0], 'name': c, 'value': data[c]})
                del data[c]
            except Exception as e:
                errors.append({'type': EventError.INVALID_DATA, 'name': c, 'value': data[c]})
                del data[c]

    # raw 'message' is coerced to the Message interface, as its used for pure index of
    # searchable strings. If both a raw 'message' and a Message interface exist, try and
    # add the former as the 'formatted' attribute of the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.setdefault('sentry.interfaces.Message', {'message': msg_str})
        if msg_if.get('message') != msg_str:
            msg_if.setdefault('formatted', msg_str)

    # Fill in ip addresses marked as {{auto}}
    client_ip = request_env.get('client_ip')
    if client_ip:
        if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['request']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
            data['sentry.interfaces.User']['ip_address'] = client_ip

        if get_path(data, ['user', 'ip_address']) == '{{auto}}':
            data['user']['ip_address'] = client_ip

    # Validate main event body and tags against schema
    is_valid, event_errors = validate_and_default_interface(data, 'event')
    errors.extend(event_errors)

    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({'type': EventError.INVALID_ATTRIBUTE, 'name': k})
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({'type': EventError.INVALID_DATA, 'name': k, 'value': value})

    # Additional data coercion and defaulting
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('checksum', None)
    data.setdefault('culprit', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('fingerprint', None)
    data.setdefault('logger', DEFAULT_LOGGER_NAME)
    data.setdefault('platform', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('tags', [])
    data.setdefault('transaction', None)

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    # Exception mechanism needs SDK information to resolve proper names in
    # exception meta (such as signal names). "SDK Information" really means
    # the operating system version the event was generated on. Some
    # normalization still works without sdk_info, such as mach_exception
    # names (they can only occur on macOS).
    if exception:
        sdk_info = get_sdk_from_event(data)
        for ex in exception['values']:
            if 'mechanism' in ex:
                normalize_mechanism_meta(ex['mechanism'], sdk_info)

    # If there is no User ip_address, update it either from the Http interface
    # or the client_ip of the request.
    auth = request_env.get('auth')
    is_public = auth and auth.is_public
    add_ip_platforms = ('javascript', 'cocoa', 'objc')

    http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
    if http_ip:
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
    elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

    # Trim values
    data['logger'] = trim(data['logger'].strip(), 64)
    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    return data
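
# An illustrative stand-in for the '{{auto}}' IP substitution above. `get_path`
# here is a simplified lookup written for this sketch, not the helper imported by
# the real module, and `fill_auto_ip` is likewise a hypothetical name.
def get_path(data, path):
    for key in path:
        if not isinstance(data, dict):
            return None
        data = data.get(key)
    return data


def fill_auto_ip(data, client_ip):
    # request environments and user payloads may carry the literal '{{auto}}'
    # placeholder, which is replaced with the IP seen on the connection
    for iface in ('sentry.interfaces.Http', 'request'):
        if get_path(data, [iface, 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data[iface]['env']['REMOTE_ADDR'] = client_ip
    for iface in ('sentry.interfaces.User', 'user'):
        if get_path(data, [iface, 'ip_address']) == '{{auto}}':
            data[iface]['ip_address'] = client_ip
    return data


print(fill_auto_ip({'user': {'ip_address': '{{auto}}'}}, '203.0.113.7'))
# {'user': {'ip_address': '203.0.113.7'}}
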
def normalize(self):
    # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
    # First we pull out our top-level (non-data attr) kwargs
    data = self.data

    if not isinstance(data.get("level"), (six.string_types, int)):
        data["level"] = logging.ERROR
    elif data["level"] not in LOG_LEVELS:
        data["level"] = logging.ERROR

    if not data.get("logger"):
        data["logger"] = DEFAULT_LOGGER_NAME
    else:
        logger = trim(data["logger"].strip(), 64)
        if TagKey.is_valid_key(logger):
            data["logger"] = logger
        else:
            data["logger"] = DEFAULT_LOGGER_NAME

    if data.get("platform"):
        data["platform"] = trim(data["platform"], 64)

    current_timestamp = timezone.now()
    timestamp = data.get("timestamp")
    if not timestamp:
        timestamp = current_timestamp

    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime("%s"))

    data["timestamp"] = timestamp
    data["received"] = float(timezone.now().strftime("%s"))

    if not data.get("event_id"):
        data["event_id"] = uuid4().hex

    data.setdefault("culprit", None)
    data.setdefault("server_name", None)
    data.setdefault("site", None)
    data.setdefault("checksum", None)
    data.setdefault("fingerprint", None)
    data.setdefault("platform", None)
    data.setdefault("environment", None)
    data.setdefault("extra", {})
    data.setdefault("errors", [])

    tags = data.get("tags")
    if not tags:
        tags = []
    # full support for dict syntax
    elif isinstance(tags, dict):
        tags = tags.items()
    # prevent [tag, tag, tag] (invalid) syntax
    elif not all(len(t) == 2 for t in tags):
        tags = []
    else:
        tags = list(tags)

    data["tags"] = []
    for key, value in tags:
        key = six.text_type(key).strip()
        value = six.text_type(value).strip()
        if not (key and value):
            continue

        data["tags"].append((key, value))

        # XXX(dcramer): many legacy apps are using the environment tag
        # rather than the key itself
        if key == "environment" and not data.get("environment"):
            data["environment"] = value

    if not isinstance(data["extra"], dict):
        # throw it away
        data["extra"] = {}

    trim_dict(data["extra"], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    # TODO(dcramer): more of validate data needs stuffed into the manager
    for key in data.keys():
        if key in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(key)

        try:
            interface = get_interface(key)()
        except ValueError:
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception:
            # XXX: we should consider logging this.
            pass

    # TODO(dcramer): this logic is duplicated in ``validate_data`` from
    # coreapi

    # message is coerced to an interface, as its used for pure
    # index of searchable strings
    # See GH-3248
    message = data.pop("message", None)
    if message:
        if "sentry.interfaces.Message" not in data:
            interface = get_interface("sentry.interfaces.Message")
            try:
                inst = interface.to_python({"message": message})
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass
        elif not data["sentry.interfaces.Message"].get("formatted"):
            interface = get_interface("sentry.interfaces.Message")
            try:
                inst = interface.to_python(dict(data["sentry.interfaces.Message"], formatted=message))
                data[inst.get_path()] = inst.to_json()
            except Exception:
                pass

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data["type"] = eventtypes.infer(data).key
    data["version"] = self.version

    # TODO(dcramer): find a better place for this logic
    exception = data.get("sentry.interfaces.Exception")
    stacktrace = data.get("sentry.interfaces.Stacktrace")
    if exception and len(exception["values"]) == 1 and stacktrace:
        exception["values"][0]["stacktrace"] = stacktrace
        del data["sentry.interfaces.Stacktrace"]

    if "sentry.interfaces.Http" in data:
        try:
            ip_address = validate_ip(
                data["sentry.interfaces.Http"].get("env", {}).get("REMOTE_ADDR"), required=False
            )
        except ValueError:
            ip_address = None

        if ip_address:
            data.setdefault("sentry.interfaces.User", {})
            data["sentry.interfaces.User"].setdefault("ip_address", ip_address)

    if data["culprit"]:
        data["culprit"] = trim(data["culprit"], MAX_CULPRIT_LENGTH)

    return data
def normalize(self):
    data = self.data
    errors = data.get('errors', [])

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: list(map(text, v))
        if isinstance(v, list) and all(isinstance(f, fp_types) for f in v) else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip())
                           for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',

        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'sentry.interfaces.Exception': to_values,
        'breadcrumbs': to_values,
        'sentry.interfaces.Breadcrumbs': to_values,
        'threads': to_values,
        'sentry.interfaces.Threads': to_values,
    }

    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except Exception as e:
                errors.append({'type': EventError.INVALID_DATA, 'name': c, 'value': data[c]})
                del data[c]

    # raw 'message' is coerced to the Message interface, as its used for pure index of
    # searchable strings. If both a raw 'message' and a Message interface exist, try and
    # add the former as the 'formatted' attribute of the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.setdefault('sentry.interfaces.Message', {'message': msg_str})
        if msg_if.get('message') != msg_str:
            msg_if.setdefault('formatted', msg_str)

    # Validate main event body and tags against schema
    is_valid, event_errors = validate_and_default_interface(data, 'event')
    errors.extend(event_errors)

    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({'type': EventError.INVALID_ATTRIBUTE, 'name': k})
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({'type': EventError.INVALID_DATA, 'name': k, 'value': value})

    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('culprit', None)
    data.setdefault('transaction', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('checksum', None)
    data.setdefault('fingerprint', None)
    data.setdefault('platform', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('tags', [])

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    if 'sentry.interfaces.Http' in data:
        try:
            ip_address = validate_ip(
                data['sentry.interfaces.Http'].get('env', {}).get('REMOTE_ADDR'),
                required=False,
            )
            if ip_address:
                data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', ip_address)
        except ValueError:
            pass

    # Trim values
    logger = data.get('logger', DEFAULT_LOGGER_NAME)
    data['logger'] = trim(logger.strip(), 64)

    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    data['errors'] = errors

    return data
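
# A self-contained sketch of the level coercion above. LOG_LEVELS and
# LOG_LEVELS_MAP below are illustrative stand-ins for the constants the module
# imports; the real values may differ.
import logging

import six

DEFAULT_LOG_LEVEL = 'error'
LOG_LEVELS = {
    logging.DEBUG: 'debug',
    logging.INFO: 'info',
    logging.WARNING: 'warning',
    logging.ERROR: 'error',
    logging.FATAL: 'fatal',
}
LOG_LEVELS_MAP = {v: k for k, v in LOG_LEVELS.items()}


def coerce_level(level):
    # numeric levels (ints or digit strings) are mapped to a name first,
    # then every name is mapped back to the numeric value stored on the event
    level = level or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    return LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])


print(coerce_level(40))         # 40 (logging.ERROR)
print(coerce_level('warning'))  # 30 (logging.WARNING)
print(coerce_level(None))       # 40 (default level)
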