def _merge_system_info(data, system_info):
    """Copy OS/device facts from a symbolicator system_info into the event contexts."""
    # "type" is required by get_sdk_from_event, so always force it.
    set_path(data, "contexts", "os", "type", value="os")

    name = system_info.get("os_name")
    version = system_info.get("os_version")
    build = system_info.get("os_build")

    if version:
        setdefault_path(data, "contexts", "os", "version", value=version)
    if build:
        setdefault_path(data, "contexts", "os", "build", value=build)

    if name:
        # With neither version nor build, the name is only good enough as a
        # raw description; otherwise it is a proper OS name.
        if version or build:
            setdefault_path(data, "contexts", "os", "name", value=name)
        else:
            setdefault_path(data, "contexts", "os", "raw_description", value=name)

    set_path(data, "contexts", "device", "type", value="device")
    setdefault_path(data, "contexts", "device", "arch", value=system_info.get("cpu_arch"))

    model = system_info.get("device_model")
    if model:
        setdefault_path(data, "contexts", "device", "model", value=model)
def _merge_system_info(data, system_info):
    """Copy OS and CPU information from ``system_info`` into the event contexts."""
    # "type" is required by get_sdk_from_event, so always force it.
    set_path(data, "contexts", "os", "type", value="os")
    for field in ("name", "version", "build"):
        setdefault_path(data, "contexts", "os", field, value=system_info.get("os_%s" % field))
    set_path(data, "contexts", "device", "type", value="device")
    setdefault_path(data, "contexts", "device", "arch", value=system_info.get("cpu_arch"))
def merge_symbolicator_minidump_system_info(data, system_info):
    """Merge minidump-derived OS and CPU info into the event's contexts."""
    # "type" is required by get_sdk_from_event, so always force it.
    set_path(data, 'contexts', 'os', 'type', value='os')
    for field in ('name', 'version', 'build'):
        setdefault_path(data, 'contexts', 'os', field, value=system_info.get('os_%s' % field))
    set_path(data, 'contexts', 'device', 'type', value='device')
    setdefault_path(data, 'contexts', 'device', 'arch', value=system_info.get('cpu_arch'))
def merge_unreal_context_event(unreal_context, event, project):
    """Merges the context from an Unreal Engine 4 crash with the given event.

    Mutates ``event`` in place. Recognized keys are popped off
    ``runtime_properties`` (mutating ``unreal_context``); whatever remains is
    dumped into an "unreal" context. May create a UserReport row as a side
    effect when the crash carries a user description.
    """
    runtime_prop = unreal_context.get("runtime_properties")
    if runtime_prop is None:
        # Nothing to merge without runtime properties.
        return

    message = runtime_prop.pop("error_message", None)
    if message is not None:
        event["message"] = message

    username = runtime_prop.pop("username", None)
    if username is not None:
        set_path(event, "user", "username", value=username)

    memory_physical = runtime_prop.pop("memory_stats_total_physical", None)
    if memory_physical is not None:
        set_path(event, "contexts", "device", "memory_size", value=memory_physical)

    # Likely overwritten by minidump processing
    os_major = runtime_prop.pop("misc_os_version_major", None)
    if os_major is not None:  # i.e: Windows 10
        set_path(event, "contexts", "os", "name", value=os_major)

    # NOTE(review): the source key says "cpu_brand" but it is written into the
    # gpu context -- confirm against the Unreal crash payload schema.
    gpu_brand = runtime_prop.pop("misc_primary_cpu_brand", None)
    if gpu_brand is not None:
        set_path(event, "contexts", "gpu", "name", value=gpu_brand)

    user_desc = runtime_prop.pop("user_description", None)
    if user_desc is not None:
        # Anonymous placeholder unless the crash carried a username.
        feedback_user = "******"
        if username is not None:
            feedback_user = username

        UserReport.objects.create(
            project=project,
            event_id=event["event_id"],
            name=feedback_user,
            email="",
            comments=user_desc,
        )

    # drop modules. minidump processing adds 'images loaded'
    runtime_prop.pop("modules", None)

    # add everything else as extra
    set_path(event, "contexts", "unreal", "type", value="unreal")
    event["contexts"]["unreal"].update(**runtime_prop)

    # add sdk info
    # NOTE(review): the version is popped only after the context update above,
    # so crash_reporter_client_version also lands in the unreal context --
    # confirm whether that duplication is intended.
    event["sdk"] = {
        "name": "sentry.unreal.crashreporter",
        "version": runtime_prop.pop("crash_reporter_client_version", "0.0.0"),
    }
def merge_unreal_context_event(unreal_context, event, project):
    """Merges the context from an Unreal Engine 4 crash with the given event.

    Mutates ``event`` in place and pops recognized keys off
    ``runtime_properties``; whatever remains goes into an "unreal" context.
    May create a UserReport row as a side effect.
    """
    runtime_prop = unreal_context.get('runtime_properties')
    if runtime_prop is None:
        # Nothing to merge without runtime properties.
        return

    message = runtime_prop.pop('error_message', None)
    if message is not None:
        event['message'] = message

    username = runtime_prop.pop('username', None)
    if username is not None:
        set_path(event, 'user', 'username', value=username)

    memory_physical = runtime_prop.pop('memory_stats_total_physical', None)
    if memory_physical is not None:
        set_path(event, 'contexts', 'device', 'memory_size', value=memory_physical)

    # Likely overwritten by minidump processing
    os_major = runtime_prop.pop('misc_os_version_major', None)
    if os_major is not None:  # i.e: Windows 10
        set_path(event, 'contexts', 'os', 'name', value=os_major)

    # NOTE(review): source key says "cpu_brand" but is written to the gpu
    # context -- confirm against the Unreal crash payload schema.
    gpu_brand = runtime_prop.pop('misc_primary_cpu_brand', None)
    if gpu_brand is not None:
        set_path(event, 'contexts', 'gpu', 'name', value=gpu_brand)

    user_desc = runtime_prop.pop('user_description', None)
    if user_desc is not None:
        # Anonymous placeholder unless the crash carried a username.
        feedback_user = '******'
        if username is not None:
            feedback_user = username

        UserReport.objects.create(
            project=project,
            event_id=event['event_id'],
            name=feedback_user,
            email='',
            comments=user_desc,
        )

    # drop modules. minidump processing adds 'images loaded'
    runtime_prop.pop('modules', None)

    # add everything else as extra
    set_path(event, 'contexts', 'unreal', 'type', value='unreal')
    event['contexts']['unreal'].update(**runtime_prop)

    # add sdk info
    # NOTE(review): popped after the context update above, so the version also
    # appears in the unreal context -- confirm this duplication is intended.
    event['sdk'] = {
        'name': 'sentry.unreal.crashreporter',
        'version': runtime_prop.pop('crash_reporter_client_version', '0.0.0')
    }
def _merge_minidump_response(data, response):
    """Merge a symbolicator minidump response into the event payload ``data``.

    Fills in platform/level/timestamp, OS/device contexts, debug images, the
    crash exception and the full thread list. Mutates ``data`` in place.
    """
    data["platform"] = "native"
    if response.get("crashed") is not None:
        data["level"] = "fatal" if response["crashed"] else "info"

    validate_and_set_timestamp(data, response.get("timestamp"))

    if response.get("system_info"):
        _merge_system_info(data, response["system_info"])

    sdk_info = get_sdk_from_event(data)

    images = []
    set_path(data, "debug_meta", "images", value=images)

    for complete_image in response["modules"]:
        image = {}
        # Image-level symbolication problems are recorded on the event rather
        # than aborting the merge.
        _merge_image(image, complete_image, sdk_info, lambda e: write_error(e, data))
        images.append(image)

    # Extract the crash reason and infos
    # NOTE(review): assumes an exception placeholder already exists at
    # exception.values[0]; a missing one would make the writes below fail.
    data_exception = get_path(data, "exception", "values", 0)
    exc_value = ("Assertion Error: %s" % response.get("assertion")
                 if response.get("assertion")
                 else "Fatal Error: %s" % response.get("crash_reason"))
    data_exception["value"] = exc_value
    data_exception["type"] = response.get("crash_reason")

    data_threads = []
    if response["stacktraces"]:
        data["threads"] = {"values": data_threads}
    else:
        error = SymbolicationFailed(message="minidump has no thread list",
                                    type=EventError.NATIVE_SYMBOLICATOR_FAILED)
        write_error(error, data)

    for complete_stacktrace in response["stacktraces"]:
        is_requesting = complete_stacktrace.get("is_requesting")
        thread_id = complete_stacktrace.get("thread_id")

        data_thread = {"id": thread_id, "crashed": is_requesting}
        data_threads.append(data_thread)

        if is_requesting:
            # The crashing thread's stacktrace lives on the exception, not on
            # the thread entry; reset its frames before refilling below.
            data_exception["thread_id"] = thread_id
            data_stacktrace = data_exception.setdefault("stacktrace", {})
            data_stacktrace["frames"] = []
        else:
            data_thread["stacktrace"] = data_stacktrace = {"frames": []}

        if complete_stacktrace.get("registers"):
            data_stacktrace["registers"] = complete_stacktrace["registers"]

        # Symbolicator returns frames innermost-first; the event stores them
        # outermost-first, hence the reversal.
        for complete_frame in reversed(complete_stacktrace["frames"]):
            new_frame = {}
            _merge_frame(new_frame, complete_frame)
            data_stacktrace["frames"].append(new_frame)
def merge_apple_crash_report(apple_crash_report, event):
    """Fold a parsed Apple crash report into ``event`` in place."""
    event['platform'] = 'native'

    timestamp = apple_crash_report.get('timestamp')
    if timestamp:
        event['timestamp'] = timestamp

    threads = []
    event['threads'] = threads
    for thread in apple_crash_report['threads']:
        crashed = thread.get('crashed')
        frames = [
            {
                'instruction_addr': frame.get('instruction_addr'),
                'package': frame.get('module'),
                'lineno': frame.get('lineno'),
                'filename': frame.get('filename'),
            }
            for frame in reversed(thread.get('frames', []))
        ]
        # We don't create an exception because an apple crash report can have
        # multiple crashed threads.
        threads.append({
            'id': thread.get('id'),
            'name': thread.get('name'),
            'crashed': crashed,
            'stacktrace': {
                'frames': frames,
                'registers': thread.get('registers') or None,
            },
        })
        if crashed:
            event['level'] = 'fatal'

    if event.get('level') is None:
        event['level'] = 'info'

    metadata = apple_crash_report.get('metadata')
    if metadata:
        set_path(event, 'contexts', 'os', 'raw_description', value=metadata.get('OS Version'))
        set_path(event, 'contexts', 'device', 'model', value=metadata.get('Hardware Model'))

    # Extract referenced (not all loaded) images
    images = [
        {
            'type': 'macho',
            'code_file': module.get('path'),
            'debug_id': module.get('uuid'),
            'image_addr': module.get('addr'),
            'image_size': module.get('size'),
            'arch': module.get('arch'),
        }
        for module in apple_crash_report.get('binary_images')
    ]
    event.setdefault('debug_meta', {})['images'] = images
def _get_message_with_bad_extension(self):
    """Build a javascript message whose exception value is a widdit URL."""
    msg = self._get_message()
    set_path(msg, "platform", value="javascript")
    exc = {"type": "Error", "value": "http://loading.retry.widdit.com/"}
    set_path(msg, "exception", value={"values": [exc]})
    return msg
def merge_apple_crash_report(apple_crash_report, event):
    """Fold a parsed Apple crash report into ``event`` in place.

    Sets platform/level/timestamp, one thread entry per report thread, the
    OS/device contexts (when metadata is present), and the referenced debug
    images.
    """
    event["platform"] = "native"

    timestamp = apple_crash_report.get("timestamp")
    if timestamp:
        event["timestamp"] = timestamp

    event["threads"] = []
    for thread in apple_crash_report["threads"]:
        crashed = thread.get("crashed")
        # We don't create an exception because an apple crash report can have
        # multiple crashed threads.
        event["threads"].append(
            {
                "id": thread.get("id"),
                "name": thread.get("name"),
                "crashed": crashed,
                "stacktrace": {
                    # Report frames are innermost-first; the event wants the
                    # opposite order.
                    "frames": [
                        {
                            "instruction_addr": frame.get("instruction_addr"),
                            "package": frame.get("module"),
                            "lineno": frame.get("lineno"),
                            "filename": frame.get("filename"),
                        }
                        for frame in reversed(thread.get("frames", []))
                    ],
                    "registers": thread.get("registers") or None,
                },
            }
        )
        if crashed:
            event["level"] = "fatal"

    if event.get("level") is None:
        event["level"] = "info"

    metadata = apple_crash_report.get("metadata")
    if metadata:
        set_path(event, "contexts", "os", "raw_description", value=metadata.get("OS Version"))
        set_path(event, "contexts", "device", "model", value=metadata.get("Hardware Model"))

    # Extract referenced (not all loaded) images.
    # Fix: reports without a "binary_images" key made .get() return None and
    # the comprehension raise TypeError; treat a missing list as empty.
    images = [
        {
            "type": "macho",
            "code_file": module.get("path"),
            "debug_id": module.get("uuid"),
            "image_addr": module.get("addr"),
            "image_size": module.get("size"),
            "arch": module.get("arch"),
        }
        for module in apple_crash_report.get("binary_images") or []
    ]
    event.setdefault("debug_meta", {})["images"] = images
def apply_modifications_to_frame(self, frames, match_frames, idx, rule=None):
    """Stamp this action's category onto the frame at ``idx``."""
    if self.var != "category":
        return
    # Write the human-readable value on the real frame and the encoded form
    # on the matching-frame mirror.
    set_path(frames[idx], "data", "category", value=self.value)
    match_frames[idx]["category"] = self._encoded_value
def _get_message_from_webcrawler(self):
    """Build a message whose request carries a crawler User-Agent."""
    msg = self._get_message()
    request = {
        'url': 'http://example.com',
        'method': 'GET',
        'headers': [
            ['User-Agent', 'Mediapartners-Google'],
        ],
    }
    set_path(msg, 'request', value=request)
    return msg
def _get_message_from_webcrawler(self):
    """Build a message whose request carries a crawler User-Agent."""
    msg = self._get_message()
    request = {
        "url": "http://example.com",
        "method": "GET",
        "headers": [["User-Agent", "Mediapartners-Google"]],
    }
    set_path(msg, "request", value=request)
    return msg
def _get_message_with_bad_extension(self):
    """Build a javascript message whose exception value is a widdit URL."""
    msg = self._get_message()
    set_path(msg, 'platform', value='javascript')
    exc = {
        'type': 'Error',
        'value': 'http://loading.retry.widdit.com/',
    }
    set_path(msg, 'exception', value={'values': [exc]})
    return msg
def merge_symbolicator_minidump_response(data, response): sdk_info = get_sdk_from_event(data) # TODO(markus): Add OS context here when `merge_process_state_event` is no # longer called for symbolicator projects images = [] set_path(data, 'debug_meta', 'images', value=images) for complete_image in response['modules']: image = {} merge_symbolicator_image( image, complete_image, sdk_info, lambda e: handle_symbolication_failed(e, data=data) ) images.append(image) data_threads = [] data['threads'] = {'values': data_threads} data_exception = get_path(data, 'exception', 'values', 0) for complete_stacktrace in response['stacktraces']: is_requesting = complete_stacktrace.get('is_requesting') thread_id = complete_stacktrace.get('thread_id') data_thread = { 'id': thread_id, 'crashed': is_requesting, } data_threads.append(data_thread) if is_requesting: data_stacktrace = get_path(data_exception, 'stacktrace') assert isinstance(data_stacktrace, dict), data_stacktrace # Make exemption specifically for unreal portable callstacks # TODO(markus): Allow overriding stacktrace more generically # (without looking into unreal context) once we no longer parse # minidump in the endpoint (right now we can't distinguish that # from user json). if data_stacktrace['frames'] and is_unreal_exception_stacktrace(data): continue del data_stacktrace['frames'][:] else: data_thread['stacktrace'] = data_stacktrace = {'frames': []} if complete_stacktrace.get('registers'): data_stacktrace['registers'] = complete_stacktrace['registers'] for complete_frame in reversed(complete_stacktrace['frames']): new_frame = {} merge_symbolicated_frame(new_frame, complete_frame) data_stacktrace['frames'].append(new_frame)
def merge_symbolicator_minidump_response(data, response): sdk_info = get_sdk_from_event(data) # TODO(markus): Add OS context here when `merge_process_state_event` is no # longer called for symbolicator projects images = [] set_path(data, 'debug_meta', 'images', value=images) for complete_image in response['modules']: image = {} merge_symbolicator_image( image, complete_image, sdk_info, lambda e: handle_symbolication_failed(e, data=data)) images.append(image) data_threads = [] data['threads'] = {'values': data_threads} data_exception = get_path(data, 'exception', 'values', 0) for complete_stacktrace in response['stacktraces']: is_requesting = complete_stacktrace.get('is_requesting') thread_id = complete_stacktrace.get('thread_id') data_thread = { 'id': thread_id, 'crashed': is_requesting, } data_threads.append(data_thread) if is_requesting: data_stacktrace = get_path(data_exception, 'stacktrace') assert isinstance(data_stacktrace, dict), data_stacktrace # Make exemption specifically for unreal portable callstacks # TODO(markus): Allow overriding stacktrace more generically # (without looking into unreal context) once we no longer parse # minidump in the endpoint (right now we can't distinguish that # from user json). if data_stacktrace['frames'] and is_unreal_exception_stacktrace( data): continue data_stacktrace['frames'] = [] else: data_thread['stacktrace'] = data_stacktrace = {'frames': []} if complete_stacktrace.get('registers'): data_stacktrace['registers'] = complete_stacktrace['registers'] for complete_frame in reversed(complete_stacktrace['frames']): new_frame = {} merge_symbolicated_frame(new_frame, complete_frame) data_stacktrace['frames'].append(new_frame)
def merge_unreal_user(event, user_id):
    """
    Merges user information from the unreal "UserId" into the event payload.
    """
    # Format is "login|epic_account|machine", any trailing part may be absent:
    # https://github.com/EpicGames/UnrealEngine/blob/f509bb2d6c62806882d9a10476f3654cf1ee0634/Engine/Source/Programs/CrashReportClient/Private/CrashUpload.cpp#L769
    pieces = user_id.split("|", 2)
    while len(pieces) < 3:
        pieces.append("")
    login_id, epic_account_id, machine_id = pieces

    # Fall back to the raw id when there is no login component.
    event["user"] = {"id": login_id or user_id}

    for tag, value in (("epic_account_id", epic_account_id), ("machine_id", machine_id)):
        if value:
            set_path(event, "tags", tag, value=value)
def _get_message_from_legacy_browser(self):
    """Build a javascript message whose request carries an IE 5.5 User-Agent."""
    ie_5_user_agent = 'Mozilla/4.0 (compatible; MSIE 5.50; Windows NT; SiteKiosk 4.9; SiteCoach 1.0)'
    msg = self._get_message()
    set_path(msg, 'platform', value='javascript')
    request = {
        'url': 'http://example.com',
        'method': 'GET',
        'headers': [
            ['User-Agent', ie_5_user_agent],
        ],
    }
    set_path(msg, 'request', value=request)
    return msg
def process_unreal_crash(payload, user_id, environment, event):
    """Initial processing of the event from the Unreal Crash Reporter data.
    Processes the raw bytes of the unreal crash by returning a Unreal4Crash"""
    event["environment"] = environment

    if user_id:
        # Format is "login|epic_account|machine", trailing parts may be absent:
        # https://github.com/EpicGames/UnrealEngine/blob/f509bb2d6c62806882d9a10476f3654cf1ee0634/Engine/Source/Programs/CrashReportClient/Private/CrashUpload.cpp#L769
        pieces = user_id.split("|", 2)
        while len(pieces) < 3:
            pieces.append("")
        login_id, epic_account_id, machine_id = pieces

        event["user"] = {"id": login_id or user_id}
        for tag, value in (("epic_account_id", epic_account_id), ("machine_id", machine_id)):
            if value:
                set_path(event, "tags", tag, value=value)

    return Unreal4Crash.from_bytes(payload)
def test_set_dict(self):
    """set_path writes to fresh keys, overwrites, nests, and handles canonical dicts."""
    fresh = {}
    assert set_path(fresh, 'a', value=42)
    assert fresh == {'a': 42}

    existing = {'a': 2}
    assert set_path(existing, 'a', value=42)
    assert existing == {'a': 42}

    nested = {}
    assert set_path(nested, 'a', 'b', value=42)
    assert nested == {'a': {'b': 42}}

    canonical = CanonicalKeyDict({})
    assert set_path(canonical, 'a', value=42)
    assert canonical == {'a': 42}
def test_set_dict(self):
    """set_path writes to fresh keys, overwrites, nests, and handles canonical dicts."""
    fresh = {}
    assert set_path(fresh, "a", value=42)
    assert fresh == {"a": 42}

    existing = {"a": 2}
    assert set_path(existing, "a", value=42)
    assert existing == {"a": 42}

    nested = {}
    assert set_path(nested, "a", "b", value=42)
    assert nested == {"a": {"b": 42}}

    canonical = CanonicalKeyDict({})
    assert set_path(canonical, "a", value=42)
    assert canonical == {"a": 42}
def test_set_dict(self):
    """set_path writes to fresh keys, overwrites, nests, and handles canonical dicts."""
    plain = {}
    assert set_path(plain, 'a', value=42)
    assert plain == {'a': 42}

    overwritten = {'a': 2}
    assert set_path(overwritten, 'a', value=42)
    assert overwritten == {'a': 42}

    deep = {}
    assert set_path(deep, 'a', 'b', value=42)
    assert deep == {'a': {'b': 42}}

    canonical = CanonicalKeyDict({})
    assert set_path(canonical, 'a', value=42)
    assert canonical == {'a': 42}
def _get_message_from_legacy_browser(self):
    """Build a javascript message whose request carries an IE 5.5 User-Agent."""
    ie_5_user_agent = (
        "Mozilla/4.0 (compatible; MSIE 5.50; Windows NT; SiteKiosk 4.9; SiteCoach 1.0)"
    )
    msg = self._get_message()
    set_path(msg, "platform", value="javascript")
    request = {
        "url": "http://example.com",
        "method": "GET",
        "headers": [["User-Agent", ie_5_user_agent]],
    }
    set_path(msg, "request", value=request)
    return msg
def _write_tree_labels(tree_labels: Sequence[Optional[TreeLabel]], event_data: EventData) -> None:
    """Write hierarchical-grouping tree labels onto the event and its frames.

    Stores a stripped copy of each label under
    ``event_data["hierarchical_tree_labels"]`` (``None`` entries preserved)
    and annotates each referenced frame with sentinel/prefix flags and the
    lowest grouping level that touches it.

    Raises:
        ValueError: if a label part's datapath does not resolve to a frame.
    """
    event_labels: List[Optional[StrippedTreeLabel]] = []
    event_data["hierarchical_tree_labels"] = event_labels

    for level, tree_label in enumerate(tree_labels):
        if tree_label is None:
            # Keep positional alignment between levels and labels.
            event_labels.append(None)
            continue

        event_labels.append(_strip_tree_label(tree_label))

        for part in tree_label:
            datapath = part["datapath"]
            frame = get_path(event_data, *datapath)
            if not frame:
                raise ValueError("datapath not found in event")

            if part.get("is_sentinel"):
                set_path(frame, "data", "is_sentinel", value=True)

            if part.get("is_prefix"):
                set_path(frame, "data", "is_prefix", value=True)

            # Record the smallest level seen for this frame; a non-int
            # existing value is treated as unset.
            prev_level = get_path(frame, "data", "min_grouping_level")
            if not isinstance(prev_level, int) or level < prev_level:
                set_path(frame, "data", "min_grouping_level", value=level)
def merge_symbolicator_minidump_response(data, response):
    """Merge a full symbolicator minidump response into the event ``data``.

    Fills in platform/level/timestamp, OS/device contexts, debug images, the
    crash exception's value/type, and the complete thread list. Mutates
    ``data`` in place.
    """
    sdk_info = get_sdk_from_event(data)
    data['platform'] = 'native'
    if response.get('crashed') is not None:
        data['level'] = 'fatal' if response['crashed'] else 'info'
    if response.get('timestamp'):
        data['timestamp'] = float(response['timestamp'])
    if response.get('system_info'):
        merge_symbolicator_minidump_system_info(data, response['system_info'])

    images = []
    set_path(data, 'debug_meta', 'images', value=images)

    for complete_image in response['modules']:
        image = {}
        # Per-image symbolication problems are recorded on the event rather
        # than aborting the merge.
        merge_symbolicator_image(
            image, complete_image, sdk_info,
            lambda e: handle_symbolication_failed(e, data=data))
        images.append(image)

    # Extract the crash reason and infos
    # NOTE(review): assumes an exception placeholder already exists at
    # exception.values[0]; a missing one would make the writes below fail.
    data_exception = get_path(data, 'exception', 'values', 0)
    exc_value = ('Assertion Error: %s' % response.get('assertion')
                 if response.get('assertion')
                 else 'Fatal Error: %s' % response.get('crash_reason'))
    data_exception['value'] = exc_value
    data_exception['type'] = response.get('crash_reason')

    data_threads = []
    if response['stacktraces']:
        data['threads'] = {'values': data_threads}
    else:
        error = SymbolicationFailed(message='minidump has no thread list',
                                    type=EventError.NATIVE_SYMBOLICATOR_FAILED)
        handle_symbolication_failed(error, data=data)

    for complete_stacktrace in response['stacktraces']:
        is_requesting = complete_stacktrace.get('is_requesting')
        thread_id = complete_stacktrace.get('thread_id')

        data_thread = {
            'id': thread_id,
            'crashed': is_requesting,
        }
        data_threads.append(data_thread)

        if is_requesting:
            # The crashing thread's frames live on the exception stacktrace.
            data_exception['thread_id'] = thread_id
            data_stacktrace = data_exception.setdefault('stacktrace', {})
            # Make exemption specifically for unreal portable callstacks
            # TODO(markus): Allow overriding stacktrace more generically
            # (without looking into unreal context) once we no longer parse
            # minidump in the endpoint (right now we can't distinguish that
            # from user json).
            if data_stacktrace.get('frames') and is_unreal_exception_stacktrace(data):
                continue
            data_stacktrace['frames'] = []
        else:
            data_thread['stacktrace'] = data_stacktrace = {'frames': []}

        if complete_stacktrace.get('registers'):
            data_stacktrace['registers'] = complete_stacktrace['registers']

        # Symbolicator frames are innermost-first; the event stores them
        # outermost-first.
        for complete_frame in reversed(complete_stacktrace['frames']):
            new_frame = {}
            merge_symbolicated_frame(new_frame, complete_frame)
            data_stacktrace['frames'].append(new_frame)
def test_set_none(self):
    """set_path refuses to write into anything that is not a mapping."""
    for target in (None, 'foo', 42, ValueError(), True):
        assert not set_path(target, 'foo', value=42)
def _get_message_with_bad_ip(self):
    """Build a message whose user IP is the loopback address."""
    msg = self._get_message()
    set_path(msg, "user", "ip_address", value="127.0.0.1")
    return msg
def test_kwargs(self):
    """set_path requires value= and rejects any other keyword."""
    # Missing value= ...
    with pytest.raises(TypeError):
        set_path({}, 'foo')
    # ... and unknown keywords are both TypeErrors.
    with pytest.raises(TypeError):
        set_path({}, 'foo', value=1, unknown=True)
def reprocess_event(project_id, event_id, start_time):
    """Kick off reprocessing of a stored event.

    Loads the unprocessed payload (or falls back to the processed one),
    stamps the original issue id onto it, copies the event's attachments
    into the attachment cache, and re-enqueues the event through the
    reprocessing preprocess task. Logs and returns early when the event
    cannot be found.
    """
    from sentry.tasks.store import preprocess_event_from_reprocessing
    from sentry.ingest.ingest_consumer import CACHE_TIMEOUT

    # Take unprocessed data from old event and save it as unprocessed data
    # under a new event ID. The second step happens in pre-process. We could
    # save the "original event ID" instead and get away with writing less to
    # nodestore, but doing it this way makes the logic slightly simpler.
    node_id = _generate_unprocessed_event_node_id(project_id=project_id, event_id=event_id)

    with sentry_sdk.start_span(op="reprocess_events.nodestore.get"):
        data = nodestore.get(node_id)

    with sentry_sdk.start_span(op="reprocess_events.eventstore.get"):
        event = eventstore.get_event_by_id(project_id, event_id)

    if event is None:
        logger.error("reprocessing2.event.not_found", extra={
            "project_id": project_id,
            "event_id": event_id
        })
        return

    if data is None:
        logger.error(
            "reprocessing2.reprocessing_nodestore.not_found",
            extra={
                "project_id": project_id,
                "event_id": event_id
            },
        )
        # We have no real data for reprocessing. We assume this event goes
        # straight to save_event, and hope that the event data can be
        # reingested like that. It's better than data loss.
        #
        # XXX: Ideally we would run a "save-lite" for this that only updates
        # the group ID in-place. Like a snuba merge message.
        data = dict(event.data)

    # Step 1: Fix up the event payload for reprocessing and put it in event
    # cache/event_processing_store
    set_path(data, "contexts", "reprocessing", "original_issue_id", value=event.group_id)
    cache_key = event_processing_store.store(data)

    # Step 2: Copy attachments into attachment cache
    queryset = models.EventAttachment.objects.filter(project_id=project_id, event_id=event_id)
    # Prefetch all backing File rows in one query, keyed by id.
    files = {
        f.id: f
        for f in models.File.objects.filter(id__in=[ea.file_id for ea in queryset])
    }

    attachment_objects = []

    for attachment_id, attachment in enumerate(queryset):
        with sentry_sdk.start_span(op="reprocess_event._copy_attachment_into_cache") as span:
            span.set_data("attachment_id", attachment.id)
            attachment_objects.append(
                _copy_attachment_into_cache(
                    attachment_id=attachment_id,
                    attachment=attachment,
                    file=files[attachment.file_id],
                    cache_key=cache_key,
                    cache_timeout=CACHE_TIMEOUT,
                ))

    if attachment_objects:
        with sentry_sdk.start_span(op="reprocess_event.set_attachment_meta"):
            attachment_cache.set(cache_key, attachments=attachment_objects, timeout=CACHE_TIMEOUT)

    preprocess_event_from_reprocessing(cache_key=cache_key, start_time=start_time,
                                       event_id=event_id)
def merge_unreal_context_event(unreal_context, event, project):
    """Merges the context from an Unreal Engine 4 crash with the given event.

    Mutates ``event`` in place. Recognized keys are popped off
    ``runtime_properties``; whatever remains is written to ``event['extra']``.
    May create a UserReport row as a side effect when the crash carries a
    user description.
    """
    runtime_prop = unreal_context.get('runtime_properties')
    if runtime_prop is None:
        # Nothing to merge without runtime properties.
        return

    message = runtime_prop.pop('error_message', None)
    if message is not None:
        event['message'] = message

    username = runtime_prop.pop('username', None)
    if username is not None:
        set_path(event, 'user', 'username', value=username)

    memory_physical = runtime_prop.pop('memory_stats_total_physical', None)
    if memory_physical is not None:
        set_path(event, 'contexts', 'device', 'memory_size', value=memory_physical)

    # Likely overwritten by minidump processing
    os_major = runtime_prop.pop('misc_os_version_major', None)
    if os_major is not None:  # i.e: Windows 10
        set_path(event, 'contexts', 'os', 'name', value=os_major)

    gpu_brand = runtime_prop.pop('misc_primary_cpu_brand', None)
    if gpu_brand is not None:
        set_path(event, 'contexts', 'gpu', 'name', value=gpu_brand)

    user_desc = runtime_prop.pop('user_description', None)
    if user_desc is not None:
        event_id = event.setdefault('event_id', uuid.uuid4().hex)
        # Fix: previously a never-assigned local `user` was consulted here, so
        # the report name always fell back to the anonymous placeholder even
        # when a username was present. Use the username popped above.
        feedback_user = username if username is not None else '******'
        UserReport.objects.create(
            project=project,
            event_id=event_id,
            name=feedback_user,
            email='',
            comments=user_desc,
        )

    # Absolute addresses from the portable callstack, used to give the legacy
    # frames their instruction addresses.
    portable_callstack_list = []
    portable_callstack = runtime_prop.pop('portable_call_stack', None)
    if portable_callstack is not None:
        for match in _portable_callstack_regexp.finditer(portable_callstack):
            addr = hex(int(match.group('baseaddr'), 16) + int(match.group('offset'), 16))
            portable_callstack_list.append(addr)

    legacy_callstack = runtime_prop.pop('legacy_call_stack', None)
    if legacy_callstack is not None:
        traces = legacy_callstack.split('\n')
        frames = []
        for i, trace in enumerate(traces):
            match = _frame_regexp.match(trace)
            if not match:
                continue
            frames.append({
                'package': match.group('package'),
                'lineno': match.group('lineno'),
                'filename': match.group('filename'),
                'function': match.group('function'),
                'in_app': match.group('function') is not None,
                # NOTE(review): indexes the portable list by the legacy frame
                # position; if the two callstacks disagree in length this
                # raises IndexError -- confirm inputs are always parallel.
                'instruction_addr': portable_callstack_list[i],
            })
        # Event frames are stored outermost-first.
        frames.reverse()
        event['stacktrace'] = {'frames': frames}

    # drop modules. minidump processing adds 'images loaded'
    runtime_prop.pop('modules', None)

    # add everything else as extra
    extra = event.setdefault('extra', {})
    extra.update(**runtime_prop)
def _merge_full_response(data, response):
    """Merge a full symbolicator response into the event payload ``data``.

    Unlike the partial merge, this assumes no user-submitted payload existed,
    so a still-present placeholder exception value is removed rather than
    defaulted. Mutates ``data`` in place.
    """
    data["platform"] = "native"
    if response.get("crashed") is not None:
        data["level"] = "fatal" if response["crashed"] else "info"
    if response.get("system_info"):
        _merge_system_info(data, response["system_info"])
    sdk_info = get_sdk_from_event(data)

    images = []
    set_path(data, "debug_meta", "images", value=images)

    for complete_image in response["modules"]:
        image = {}
        _merge_image(image, complete_image, sdk_info, data)
        images.append(image)

    # Extract the crash reason and infos
    # NOTE(review): assumes an exception placeholder exists at
    # exception.values[0]; a missing one would make the writes below fail.
    data_exception = get_path(data, "exception", "values", 0)
    if response.get("assertion"):
        data_exception["value"] = "Assertion Error: {}".format(response["assertion"])
    elif response.get("crash_details"):
        data_exception["value"] = response["crash_details"]
    elif response.get("crash_reason"):
        data_exception["value"] = "Fatal Error: {}".format(response["crash_reason"])
    else:
        # We're merging a full response, so there was no initial payload
        # submitted. Assuming that this still contains the placeholder, remove
        # it rather than showing a default value.
        data_exception.pop("value", None)

    if response.get("crash_reason"):
        data_exception["type"] = response["crash_reason"]

    data_threads = []
    if response["stacktraces"]:
        data["threads"] = {"values": data_threads}
    else:
        error = SymbolicationFailed(message="minidump has no thread list",
                                    type=EventError.NATIVE_SYMBOLICATOR_FAILED)
        write_error(error, data)

    for complete_stacktrace in response["stacktraces"]:
        is_requesting = complete_stacktrace.get("is_requesting")
        thread_id = complete_stacktrace.get("thread_id")

        data_thread = {"id": thread_id, "crashed": is_requesting}
        data_threads.append(data_thread)

        if is_requesting:
            # The crashing thread's frames live on the exception stacktrace;
            # reset them before refilling below.
            data_exception["thread_id"] = thread_id
            data_stacktrace = data_exception.setdefault("stacktrace", {})
            data_stacktrace["frames"] = []
        else:
            data_thread["stacktrace"] = data_stacktrace = {"frames": []}

        if complete_stacktrace.get("registers"):
            data_stacktrace["registers"] = complete_stacktrace["registers"]

        # Symbolicator frames are innermost-first; the event stores them
        # outermost-first.
        for complete_frame in reversed(complete_stacktrace["frames"]):
            new_frame = {}
            _merge_frame(new_frame, complete_frame)
            data_stacktrace["frames"].append(new_frame)
def reprocess_event(project_id, event_id, start_time):
    """Kick off reprocessing of a stored event.

    Loads the unprocessed payload (preferring the nodestore subkey, then the
    legacy node id), verifies all attachments required for native processing
    are still present, stamps reprocessing markers onto the payload, copies
    attachments into the attachment cache, and re-enqueues the event.

    Raises:
        CannotReprocess: when the payload, the event, or a required
            attachment cannot be found.
    """
    from sentry.ingest.ingest_consumer import CACHE_TIMEOUT
    from sentry.lang.native.processing import get_required_attachment_types
    from sentry.tasks.store import preprocess_event_from_reprocessing

    with sentry_sdk.start_span(op="reprocess_events.nodestore.get"):
        node_id = Event.generate_node_id(project_id, event_id)
        data = nodestore.get(node_id, subkey="unprocessed")
        if data is None:
            # Fall back to the legacy storage location.
            node_id = _generate_unprocessed_event_node_id(project_id=project_id, event_id=event_id)
            data = nodestore.get(node_id)

    if data is None:
        raise CannotReprocess("reprocessing_nodestore.not_found")

    with sentry_sdk.start_span(op="reprocess_events.eventstore.get"):
        event = eventstore.get_event_by_id(project_id, event_id)

    if event is None:
        raise CannotReprocess("event.not_found")

    required_attachment_types = get_required_attachment_types(data)
    attachments = list(
        models.EventAttachment.objects.filter(
            project_id=project_id, event_id=event_id, type__in=list(required_attachment_types)
        )
    )
    missing_attachment_types = required_attachment_types - {ea.type for ea in attachments}
    if missing_attachment_types:
        raise CannotReprocess(
            f"attachment.not_found.{'_and_'.join(sorted(missing_attachment_types))}"
        )

    # Step 1: Fix up the event payload for reprocessing and put it in event
    # cache/event_processing_store
    set_path(data, "contexts", "reprocessing", "original_issue_id", value=event.group_id)
    set_path(
        data, "contexts", "reprocessing", "original_primary_hash", value=event.get_primary_hash()
    )
    cache_key = event_processing_store.store(data)

    # Step 2: Copy attachments into attachment cache. Note that we can only
    # consider minidumps because filestore just stays as-is after reprocessing
    # (we simply update group_id on the EventAttachment models in post_process)
    attachment_objects = []

    # Prefetch all backing File rows in one query, keyed by id.
    files = {f.id: f for f in models.File.objects.filter(id__in=[ea.file_id for ea in attachments])}

    for attachment_id, attachment in enumerate(attachments):
        with sentry_sdk.start_span(op="reprocess_event._copy_attachment_into_cache") as span:
            span.set_data("attachment_id", attachment.id)
            attachment_objects.append(
                _copy_attachment_into_cache(
                    attachment_id=attachment_id,
                    attachment=attachment,
                    file=files[attachment.file_id],
                    cache_key=cache_key,
                    cache_timeout=CACHE_TIMEOUT,
                )
            )

    if attachment_objects:
        with sentry_sdk.start_span(op="reprocess_event.set_attachment_meta"):
            attachment_cache.set(cache_key, attachments=attachment_objects, timeout=CACHE_TIMEOUT)

    preprocess_event_from_reprocessing(
        cache_key=cache_key,
        start_time=start_time,
        event_id=event_id,
        data=data,
    )
def test_set_none(self):
    """set_path refuses to write into anything that is not a mapping."""
    for target in (None, "foo", 42, ValueError(), True):
        assert not set_path(target, "foo", value=42)
def test_kwargs(self):
    """set_path requires value= and rejects any other keyword."""
    # Missing value= ...
    with pytest.raises(TypeError):
        set_path({}, "foo")
    # ... and unknown keywords are both TypeErrors.
    with pytest.raises(TypeError):
        set_path({}, "foo", value=1, unknown=True)
def merge_unreal_context_event(unreal_context, event, project):
    """Merges the context from an Unreal Engine 4 crash with the given event."""
    props = unreal_context.get('runtime_properties')
    if props is None:
        return

    message = props.pop('error_message', None)
    if message is not None:
        event['message'] = message

    username = props.pop('username', None)
    if username is not None:
        set_path(event, 'user', 'username', value=username)

    physical_memory = props.pop('memory_stats_total_physical', None)
    if physical_memory is not None:
        set_path(event, 'contexts', 'device', 'memory_size', value=physical_memory)

    # Likely overwritten by minidump processing.
    os_name = props.pop('misc_os_version_major', None)
    if os_name is not None:  # e.g. "Windows 10"
        set_path(event, 'contexts', 'os', 'name', value=os_name)

    # NOTE(review): the source key says "cpu_brand" but it is stored in the
    # gpu context — this mirrors the other variants of this function.
    gpu_name = props.pop('misc_primary_cpu_brand', None)
    if gpu_name is not None:
        set_path(event, 'contexts', 'gpu', 'name', value=gpu_name)

    description = props.pop('user_description', None)
    if description is not None:
        # Guarantee an event_id exists so the report can be linked to it.
        event_id = event.setdefault('event_id', uuid.uuid4().hex)
        UserReport.objects.create(
            project=project,
            event_id=event_id,
            name=username if username is not None else '******',
            email='',
            comments=description,
        )

    raw_callstack = props.pop('portable_call_stack', None)
    if raw_callstack is not None:
        parsed_frames = []
        for match in _portable_callstack_regexp.finditer(raw_callstack):
            base = int(match.group('baseaddr'), 16)
            offset = int(match.group('offset'), 16)
            # Crashes without a PDB report 0x00000000ffffffff + ffffffff;
            # those frames carry no usable address, so skip them.
            if base == 0xffffffff and offset == 0xffffffff:
                continue
            parsed_frames.append({
                'package': match.group('package'),
                'instruction_addr': hex(base + offset),
            })
        # Flip the parsed order before attaching (frames presumably arrive
        # newest-first while the event format wants the opposite).
        parsed_frames.reverse()
        if parsed_frames:
            event['stacktrace'] = {'frames': parsed_frames}

    # Drop modules: minidump processing adds 'images loaded' instead.
    props.pop('modules', None)

    # Everything that remains becomes extra data.
    extra = event.setdefault('extra', {})
    extra.update(props)

    # Add SDK info.
    event['sdk'] = {
        'name': 'sentry.unreal.crashreporter',
        'version': props.pop('crash_reporter_client_version', '0.0.0'),
    }
def merge_unreal_context_event(unreal_context, event, project, refactor_enabled=False):
    """Merges the context from an Unreal Engine 4 crash with the given event.

    Pops known keys out of the context's ``runtime_properties`` and maps
    them onto the event (message, user, device/os/gpu contexts, call
    stack); whatever remains is attached under ``extra``.  A bundled user
    description additionally creates a ``UserReport`` row for *project*.

    :param unreal_context: parsed UE4 crash context (dict-like).
    :param event: mutable event payload; modified in place.
    :param project: project the crash belongs to (used for the UserReport).
    :param refactor_enabled: when True, store the raw portable call stack
        in an ``unreal`` context instead of parsing it into a stacktrace.
    """
    runtime_prop = unreal_context.get('runtime_properties')
    if runtime_prop is None:
        return

    message = runtime_prop.pop('error_message', None)
    if message is not None:
        event['message'] = message

    username = runtime_prop.pop('username', None)
    if username is not None:
        set_path(event, 'user', 'username', value=username)

    memory_physical = runtime_prop.pop('memory_stats_total_physical', None)
    if memory_physical is not None:
        set_path(event, 'contexts', 'device', 'memory_size', value=memory_physical)

    # Likely overwritten by minidump processing
    os_major = runtime_prop.pop('misc_os_version_major', None)
    if os_major is not None:  # i.e: Windows 10
        set_path(event, 'contexts', 'os', 'name', value=os_major)

    # NOTE(review): key says "cpu_brand" but the value goes into the gpu
    # context — consistent with the other variants of this function.
    gpu_brand = runtime_prop.pop('misc_primary_cpu_brand', None)
    if gpu_brand is not None:
        set_path(event, 'contexts', 'gpu', 'name', value=gpu_brand)

    user_desc = runtime_prop.pop('user_description', None)
    if user_desc is not None:
        # Mask the reporter's name when no username was provided.
        feedback_user = '******'
        if username is not None:
            feedback_user = username

        # NOTE(review): assumes event['event_id'] is already set by the
        # caller (an older variant used setdefault with a fresh uuid).
        UserReport.objects.create(
            project=project,
            event_id=event['event_id'],
            name=feedback_user,
            email='',
            comments=user_desc,
        )

    portable_callstack = runtime_prop.pop('portable_call_stack', None)
    if portable_callstack is not None:
        if refactor_enabled:
            set_path(event, 'contexts', 'unreal', 'type', value='unreal')
            set_path(event, 'contexts', 'unreal', 'portable_call_stack', value=portable_callstack)
            # TODO(markus): Add other stuff from extra here. Make sure trimming
            # will not be a problem for portable_call_stack then!
        else:
            # TODO(markus): Remove after refactor rolled out
            images = get_path(event, 'debug_meta', 'images', filter=True, default=())
            frames = parse_portable_callstack(portable_callstack, images)
            if len(frames) > 0:
                exception = get_path(event, 'exception', 'values', 0)
                if exception:
                    # This property is required for correct behavior of symbolicator codepath.
                    exception['mechanism'] = {
                        'type': 'unreal',
                        'handled': False,
                        'synthetic': True
                    }
                event['stacktrace'] = {'frames': frames}

    # drop modules. minidump processing adds 'images loaded'
    runtime_prop.pop('modules', None)

    # add everything else as extra
    extra = event.setdefault('extra', {})
    extra.update(**runtime_prop)

    # add sdk info
    # NOTE(review): the pop happens after extra.update, so the version key
    # also lands in extra — this ordering is shared by every variant in
    # this file and looks intentional; confirm before changing.
    event['sdk'] = {
        'name': 'sentry.unreal.crashreporter',
        'version': runtime_prop.pop('crash_reporter_client_version', '0.0.0')
    }