def _do_process_event(cache_key, start_time, event_id):
    """Run stacktrace and plugin preprocessors over the cached event
    payload, persist the (possibly mutated) payload back to the cache,
    and hand it off to ``save_event``.

    Bails out early (with a metric + error log) when the payload has
    already expired from the default cache.
    """
    from sentry.plugins import plugins

    payload = default_cache.get(cache_key)
    if payload is None:
        # The payload expired from the cache before we got to it.
        metrics.incr('events.failed', tags={
            'reason': 'cache',
            'stage': 'process'
        })
        error_logger.error('process.failed.empty', extra={'cache_key': cache_key})
        return

    project = payload['project']
    Raven.tags_context({
        'project': project,
    })

    mutated = False

    # Stacktrace based event processors.  These run before anything else.
    restacked = process_stacktraces(payload)
    if restacked is not None:
        mutated = True
        payload = restacked

    # TODO(dcramer): ideally we would know if data changed by default
    # Default event processors.
    for plugin in plugins.all(version=2):
        preprocessors = safe_execute(
            plugin.get_event_preprocessors, data=payload, _with_transaction=False
        )
        for preprocessor in preprocessors or ():
            outcome = safe_execute(preprocessor, payload)
            if outcome:
                payload = outcome
                mutated = True

    assert payload['project'] == project, 'Project cannot be mutated by preprocessor'

    if mutated:
        issues = payload.get('processing_issues')
        # A truthy return from create_failed_event means the event was
        # diverted into the failed-event flow; stop here.
        if issues and create_failed_event(
            cache_key, project, list(issues.values()),
            event_id=event_id, start_time=start_time
        ):
            return
        default_cache.set(cache_key, payload, 3600)

    save_event.delay(
        cache_key=cache_key,
        data=None,
        start_time=start_time,
        event_id=event_id,
    )
def _do_process_event(cache_key, start_time, event_id):
    """Apply stacktrace processing and v2-plugin preprocessors to the
    cached event, then schedule ``save_event``.

    If processing issues were recorded, the event is routed to
    ``create_failed_event`` instead and saving is skipped.
    """
    from sentry.plugins import plugins

    event = default_cache.get(cache_key)
    if event is None:
        # Nothing left in the cache to work on -- record and bail.
        metrics.incr('events.failed', tags={'reason': 'cache', 'stage': 'process'})
        error_logger.error('process.failed.empty', extra={'cache_key': cache_key})
        return

    project = event['project']
    Raven.tags_context({
        'project': project,
    })

    dirty = False

    # Stacktrace based event processors. These run before anything else.
    processed = process_stacktraces(event)
    if processed is not None:
        dirty = True
        event = processed

    # TODO(dcramer): ideally we would know if data changed by default
    # Default event processors.
    for plugin in plugins.all(version=2):
        hooks = safe_execute(plugin.get_event_preprocessors, data=event, _with_transaction=False)
        for hook in hooks or ():
            updated = safe_execute(hook, event)
            if updated:
                event = updated
                dirty = True

    assert event['project'] == project, 'Project cannot be mutated by preprocessor'

    if dirty:
        issues = event.get('processing_issues')
        if issues:
            # Divert into the failed-event flow and stop; do not save.
            create_failed_event(cache_key, project, list(issues.values()), event_id=event_id)
            return
        default_cache.set(cache_key, event, 3600)

    save_event.delay(cache_key=cache_key, data=None, start_time=start_time, event_id=event_id)
def test_frame_resolution(self):
    """End-to-end test of native (cocoa) frame symbolication.

    Builds an event with two debug images and a mixed native/JS
    stacktrace, runs it through ``process_stacktraces`` with the
    ``NativeStacktraceProcessor``, and asserts that the native frames
    were resolved (``main`` -> ``real_main`` with line/column info)
    while frames outside the symbolicated image are left intact.

    Fix: the second debug image previously listed ``"cpu_type"`` twice
    with the same value; Python keeps only the last duplicate key, so
    the first occurrence was dead and has been removed.
    """
    event_data = {
        "user": {
            "ip_address": "31.172.207.97"
        },
        "extra": {},
        "project": self.project.id,
        "platform": "cocoa",
        "debug_meta": {
            "images": [{
                "type": "apple",
                "cpu_subtype": 0,
                "uuid": "C05B4DDD-69A7-3840-A649-32180D341587",
                "image_vmaddr": 4294967296,
                "image_addr": 4295121760,
                "cpu_type": 16777228,
                "image_size": 32768,
                "name": OBJECT_NAME,
            }, {
                "type": "apple",
                "cpu_subtype": 0,
                "uuid": "B78CB4FB-3A90-4039-9EFD-C58932803AE5",
                "image_vmaddr": 0,
                "image_addr": 6000,
                "cpu_type": 16777228,
                "image_size": 32768,
                'name': '/usr/lib/whatever.dylib',
            }],
            "sdk_info": SDK_INFO,
        },
        "exception": {
            "values": [{
                "stacktrace": {
                    "frames": [{
                        # Frame in a system image: not symbolicated.
                        "function": "<redacted>",
                        "abs_path": None,
                        "package": "/usr/lib/system/libdyld.dylib",
                        "filename": None,
                        "lineno": None,
                        "in_app": False,
                        "instruction_addr": 6010,
                    }, {
                        # Frame inside OBJECT_NAME: should resolve.
                        "function": "main",
                        "instruction_addr": 4295123760
                    }, {
                        # Frame in the second (unsymbolicated) image.
                        "function": "whatever_system",
                        "instruction_addr": 6020,
                        "symbol_addr": 6016,
                    }, {
                        # Non-native frame: must pass through untouched.
                        "platform": "javascript",
                        "function": "merge",
                        "abs_path": "/scripts/views.js",
                        "vars": {},
                        "module": None,
                        "filename": "../../sentry/scripts/views.js",
                        "colno": 16,
                        "in_app": True,
                        "lineno": 268
                    }]
                },
                "type": "NSRangeException",
                "mechanism": {
                    "type": "mach",
                    "meta": {
                        "signal": {
                            "number": 6,
                            "code": 0,
                            "name": "SIGABRT",
                            "code_name": None
                        },
                        "mach_exception": {
                            "subcode": 0,
                            "code": 0,
                            "exception": 10,
                            "name": "EXC_CRASH"
                        }
                    }
                },
                "value": ("*** -[__NSArray0 objectAtIndex:]: index 3 "
                          "beyond bounds for empty NSArray")
            }]
        },
        "contexts": {
            "device": {
                "type": "device",
                "model_id": "N102AP",
                "model": "iPod7,1",
                "arch": "arm64",
                "family": "iPod"
            },
            "os": {
                "type": "os",
                "version": "9.3.2",
                "rooted": False,
                "build": "13F69",
                "name": "iOS"
            }
        }
    }

    def make_processors(data, infos):
        return [NativeStacktraceProcessor(data, infos)]

    event_data = process_stacktraces(event_data, make_processors=make_processors)

    bt = event_data['exception']['values'][0]['stacktrace']
    frames = bt['frames']

    # System-library frame is untouched.
    assert frames[0]['function'] == '<redacted>'
    assert frames[0]['instruction_addr'] == 6010

    # Symbolicated frame: name, source location and package resolved.
    assert frames[1]['function'] == 'real_main'
    assert frames[1]['lineno'] == 42
    assert frames[1]['colno'] == 23
    assert frames[1]['package'] == OBJECT_NAME
    assert frames[1]['instruction_addr'] == 4295123760

    # Frame in the second image keeps its package association.
    assert frames[2]['function'] == 'whatever_system'
    assert frames[2]['package'] == '/usr/lib/whatever.dylib'
    assert frames[2]['instruction_addr'] == 6020
def _do_process_event(cache_key, start_time, event_id, process_task):
    """Pre-process the cached event (stacktraces + v2-plugin hooks),
    honoring the project's reprocessing revision, then enqueue
    ``save_event``.

    On a ``RetryProcessing`` signal (the reprocessing revision moved
    while we worked) the processing task re-enqueues itself.
    """
    from sentry.plugins import plugins

    body = default_cache.get(cache_key)
    if body is None:
        # Payload already evicted from the cache.
        metrics.incr('events.failed', tags={
            'reason': 'cache',
            'stage': 'process'
        })
        error_logger.error('process.failed.empty', extra={'cache_key': cache_key})
        return

    body = CanonicalKeyDict(body)
    project = body['project']
    Raven.tags_context({
        'project': project,
    })

    changed = False

    # Pin the reprocessing revision before we start mutating anything.
    rev = reprocessing.get_reprocessing_revision(project)

    # Stacktrace based event processors.  These run before anything else.
    restacked = process_stacktraces(body)
    if restacked is not None:
        changed = True
        body = restacked

    # TODO(dcramer): ideally we would know if data changed by default
    # Default event processors.
    for plugin in plugins.all(version=2):
        candidates = safe_execute(
            plugin.get_event_preprocessors, data=body, _with_transaction=False
        )
        for candidate in candidates or ():
            replacement = safe_execute(candidate, body)
            if replacement:
                body = replacement
                changed = True

    assert body['project'] == project, 'Project cannot be mutated by preprocessor'

    if changed:
        issues = body.get('processing_issues')
        try:
            if issues and create_failed_event(
                cache_key, project, list(issues.values()),
                event_id=event_id, start_time=start_time,
                reprocessing_rev=rev
            ):
                return
        except RetryProcessing:
            # The reprocessing revision changed while we were working;
            # re-enqueue ourselves and try again.
            process_task.delay(cache_key, start_time=start_time, event_id=event_id)
            return

        # We cannot persist canonical types in the cache, so downgrade
        # to a plain dict before writing it back.
        if isinstance(body, CANONICAL_TYPES):
            body = dict(body.items())
        default_cache.set(cache_key, body, 3600)

    save_event.delay(
        cache_key=cache_key, data=None, start_time=start_time,
        event_id=event_id, project_id=project
    )
def _do_process_event(cache_key, start_time, event_id, process_task):
    """Run stacktrace processing and v2-plugin preprocessors against the
    cached payload, then enqueue ``save_event`` with the project id.

    ``RetryProcessing`` from ``create_failed_event`` (reprocessing
    revision changed underneath us) causes the task to re-enqueue
    itself instead of saving.
    """
    from sentry.plugins import plugins

    evt = default_cache.get(cache_key)
    if evt is None:
        # Cache miss: the payload expired before processing began.
        metrics.incr('events.failed', tags={'reason': 'cache', 'stage': 'process'})
        error_logger.error('process.failed.empty', extra={'cache_key': cache_key})
        return

    project = evt['project']
    Raven.tags_context({
        'project': project,
    })

    modified = False

    # Fetch the reprocessing revision
    rev = reprocessing.get_reprocessing_revision(project)

    # Stacktrace based event processors. These run before anything else.
    stacked = process_stacktraces(evt)
    if stacked is not None:
        modified = True
        evt = stacked

    # TODO(dcramer): ideally we would know if data changed by default
    # Default event processors.
    for plugin in plugins.all(version=2):
        hook_fns = safe_execute(
            plugin.get_event_preprocessors, data=evt, _with_transaction=False
        )
        for hook_fn in hook_fns or ():
            replaced = safe_execute(hook_fn, evt)
            if replaced:
                evt = replaced
                modified = True

    assert evt['project'] == project, 'Project cannot be mutated by preprocessor'

    if modified:
        issues = evt.get('processing_issues')
        try:
            if issues and create_failed_event(
                cache_key, project, list(issues.values()),
                event_id=event_id, start_time=start_time,
                reprocessing_rev=rev
            ):
                return
        except RetryProcessing:
            # Reprocessing revision moved while we worked; re-enqueue
            # this task and start over.
            process_task.delay(cache_key, start_time=start_time, event_id=event_id)
            return
        default_cache.set(cache_key, evt, 3600)

    save_event.delay(
        cache_key=cache_key, data=None, start_time=start_time,
        event_id=event_id, project_id=project
    )
def test_frame_resolution(self):
    """End-to-end test of native frame symbolication on the legacy
    ``sentry.interfaces.*`` event schema.

    Two debug images and a mixed native/JS stacktrace are run through
    ``process_stacktraces`` with the ``NativeStacktraceProcessor``;
    the asserts verify symbol resolution, rewritten hex instruction
    addresses and instruction offsets.

    Fix: the second debug image previously listed ``"cpu_type"`` twice
    with the same value; Python dict literals keep only the last
    duplicate key, so the first occurrence was dead and was removed.
    """
    event_data = {
        "sentry.interfaces.User": {
            "ip_address": "31.172.207.97"
        },
        "extra": {},
        "project": self.project.id,
        "platform": "cocoa",
        "debug_meta": {
            "images": [
                {
                    "type": "apple",
                    "cpu_subtype": 0,
                    "uuid": "C05B4DDD-69A7-3840-A649-32180D341587",
                    "image_vmaddr": 4294967296,
                    "image_addr": 4295098368,
                    "cpu_type": 16777228,
                    "image_size": 32768,
                    "name": OBJECT_NAME,
                }, {
                    "type": "apple",
                    "cpu_subtype": 0,
                    "uuid": "B78CB4FB-3A90-4039-9EFD-C58932803AE5",
                    "image_vmaddr": 0,
                    "image_addr": 4295092368,
                    "cpu_type": 16777228,
                    "image_size": 32768,
                    'name': '/usr/lib/whatever.dylib',
                }
            ],
            "sdk_info": SDK_INFO,
        },
        "sentry.interfaces.Exception": {
            "values": [
                {
                    "stacktrace": {
                        "frames": [
                            {
                                # System-library frame: not symbolicated.
                                "function": "<redacted>",
                                "abs_path": None,
                                "instruction_offset": 4,
                                "package": "/usr/lib/system/libdyld.dylib",
                                "filename": None,
                                "symbol_addr": "0x002ac28b4",
                                "lineno": None,
                                "in_app": False,
                                "instruction_addr": "0x002ac28b8"
                            }, {
                                # Frame inside OBJECT_NAME: should resolve.
                                "function": "main",
                                "instruction_addr": 4295123760,
                                "symbol_addr": 4295123616,
                                "image_addr": 4295098368
                            }, {
                                # Frame inside the second image.
                                "function": "whatever_system",
                                "instruction_addr": 4295123360,
                                "symbol_addr": 4295123216,
                                "image_addr": 4295092368
                            }, {
                                # Non-native frame: passes through untouched.
                                "platform": "javascript",
                                "function": "merge",
                                "abs_path": "/scripts/views.js",
                                "vars": {},
                                "module": None,
                                "filename": "../../sentry/scripts/views.js",
                                "colno": 16,
                                "in_app": True,
                                "lineno": 268
                            }
                        ]
                    },
                    "type": "NSRangeException",
                    "mechanism": {
                        "posix_signal": {
                            "signal": 6,
                            "code": 0,
                            "name": "SIGABRT",
                            "code_name": None
                        },
                        "type": "cocoa",
                        "mach_exception": {
                            "subcode": 0,
                            "code": 0,
                            "exception": 10,
                            "exception_name": "EXC_CRASH"
                        }
                    },
                    "value": (
                        "*** -[__NSArray0 objectAtIndex:]: index 3 "
                        "beyond bounds for empty NSArray"
                    )
                }
            ]
        },
        "contexts": {
            "device": {
                "model_id": "N102AP",
                "model": "iPod7,1",
                "arch": "arm64",
                "family": "iPod"
            },
            "os": {
                "version": "9.3.2",
                "rooted": False,
                "build": "13F69",
                "name": "iOS"
            }
        }
    }

    def make_processors(data, infos):
        return [NativeStacktraceProcessor(data, infos)]

    event_data = process_stacktraces(
        event_data, make_processors=make_processors)

    bt = event_data['sentry.interfaces.Exception']['values'][0]['stacktrace']
    frames = bt['frames']

    # System-library frame is untouched.
    assert frames[0]['function'] == '<redacted>'
    assert frames[0]['instruction_addr'] == '0x002ac28b8'

    # Symbolicated frame: name, source location, package and a
    # normalized hex instruction address.
    assert frames[1]['function'] == 'real_main'
    assert frames[1]['lineno'] == 42
    assert frames[1]['colno'] == 23
    assert frames[1]['package'] == OBJECT_NAME
    assert frames[1]['instruction_addr'] == '0x100026330'
    assert frames[1].get('instruction_offset') is None

    # Second-image frame: package kept, offset computed from symbol_addr.
    assert frames[2]['function'] == 'whatever_system'
    assert frames[2]['package'] == '/usr/lib/whatever.dylib'
    assert frames[2]['instruction_addr'] == '0x1000261a0'
    assert frames[2].get('instruction_offset') == 144
def _do_process_event(cache_key, start_time, event_id, process_task, data=None):
    """Run event enhancers, stacktrace symbolication and v2-plugin
    preprocessors over the event payload, then submit it for saving.

    :param cache_key: key of the payload in ``default_cache``.
    :param start_time: submission timestamp; used to cap symbolication
        retries at one hour.
    :param event_id: the event's id, forwarded to follow-up tasks.
    :param process_task: the celery task currently executing (normal
        processing vs. reprocessing), used when the work must be retried.
    :param data: optional pre-fetched payload; falls back to the cache.
    """
    from sentry.plugins import plugins

    if data is None:
        data = default_cache.get(cache_key)

    if data is None:
        # Payload expired from the cache before we could process it.
        metrics.incr(
            'events.failed', tags={
                'reason': 'cache',
                'stage': 'process'},
            skip_internal=False)
        error_logger.error('process.failed.empty', extra={'cache_key': cache_key})
        return

    data = CanonicalKeyDict(data)
    project_id = data['project']

    with configure_scope() as scope:
        scope.set_tag("project", project_id)

    has_changed = False

    # Fetch the reprocessing revision
    reprocessing_rev = reprocessing.get_reprocessing_revision(project_id)

    # Event enhancers. These run before anything else.
    for plugin in plugins.all(version=2):
        enhancers = safe_execute(plugin.get_event_enhancers, data=data)
        for enhancer in (enhancers or ()):
            enhanced = safe_execute(enhancer, data)
            if enhanced:
                data = enhanced
                has_changed = True

    try:
        # Stacktrace based event processors.
        new_data = process_stacktraces(data)
        if new_data is not None:
            has_changed = True
            data = new_data
    except RetrySymbolication as e:
        # Symbolication asked us to come back later; give up after an
        # hour so events cannot loop in processing forever.
        if start_time and (time() - start_time) > 3600:
            raise RuntimeError('Event spent one hour in processing')
        retry_process_event.apply_async(
            args=(),
            kwargs={
                'process_task_name': process_task.__name__,
                'task_kwargs': {
                    'cache_key': cache_key,
                    'event_id': event_id,
                    'start_time': start_time,
                }
            },
            countdown=e.retry_after
        )
        return

    # TODO(dcramer): ideally we would know if data changed by default
    # Default event processors.
    for plugin in plugins.all(version=2):
        processors = safe_execute(
            plugin.get_event_preprocessors, data=data, _with_transaction=False
        )
        for processor in (processors or ()):
            result = safe_execute(processor, data)
            if result:
                data = result
                has_changed = True

    assert data['project'] == project_id, 'Project cannot be mutated by preprocessor'

    project = Project.objects.get_from_cache(id=project_id)

    # We cannot persist canonical types in the cache, so we need to
    # downgrade this.
    if isinstance(data, CANONICAL_TYPES):
        data = dict(data.items())

    if has_changed:
        issues = data.get('processing_issues')
        try:
            if issues and create_failed_event(
                cache_key, project_id, list(issues.values()), event_id=event_id,
                start_time=start_time, reprocessing_rev=reprocessing_rev
            ):
                return
        except RetryProcessing:
            # If `create_failed_event` indicates that we need to retry we
            # invoke ourselves again. This happens when the reprocessing
            # revision changed while we were processing.
            from_reprocessing = process_task is process_event_from_reprocessing
            # Fix: previously this handler re-queued the work twice --
            # once via submit_process() and again via process_task.delay()
            # -- which made the same event get processed twice.
            # submit_process() alone re-enqueues the correct task.
            submit_process(project, from_reprocessing, cache_key, event_id,
                           start_time, data)
            return

        default_cache.set(cache_key, data, 3600)

    submit_save_event(project, cache_key, event_id, start_time, data)
def _do_process_event(cache_key, start_time, event_id, process_task):
    """Apply event enhancers, stacktrace processing and v2-plugin
    preprocessors to the cached payload, then enqueue ``save_event``.

    A ``RetryProcessing`` signal from ``create_failed_event`` (the
    reprocessing revision changed while we worked) re-enqueues the
    processing task instead of saving.
    """
    from sentry.plugins import plugins

    payload = default_cache.get(cache_key)
    if payload is None:
        # The payload fell out of the cache before processing started.
        metrics.incr(
            'events.failed', tags={
                'reason': 'cache',
                'stage': 'process'},
            skip_internal=False)
        error_logger.error('process.failed.empty', extra={'cache_key': cache_key})
        return

    payload = CanonicalKeyDict(payload)
    project = payload['project']

    with configure_scope() as scope:
        scope.set_tag("project", project)

    dirty = False

    # Pin the reprocessing revision before any mutation happens.
    rev = reprocessing.get_reprocessing_revision(project)

    # Event enhancers run before anything else.
    for plugin in plugins.all(version=2):
        enhancer_fns = safe_execute(plugin.get_event_enhancers, data=payload)
        for enhancer_fn in enhancer_fns or ():
            enhanced = safe_execute(enhancer_fn, payload)
            if enhanced:
                payload = enhanced
                dirty = True

    # Stacktrace based event processors.
    restacked = process_stacktraces(payload)
    if restacked is not None:
        dirty = True
        payload = restacked

    # TODO(dcramer): ideally we would know if data changed by default
    # Default event processors.
    for plugin in plugins.all(version=2):
        preprocessors = safe_execute(
            plugin.get_event_preprocessors, data=payload, _with_transaction=False
        )
        for preprocessor in preprocessors or ():
            replaced = safe_execute(preprocessor, payload)
            if replaced:
                payload = replaced
                dirty = True

    assert payload['project'] == project, 'Project cannot be mutated by preprocessor'

    if dirty:
        issues = payload.get('processing_issues')
        try:
            if issues and create_failed_event(
                cache_key, project, list(issues.values()),
                event_id=event_id, start_time=start_time,
                reprocessing_rev=rev
            ):
                return
        except RetryProcessing:
            # Reprocessing revision moved underneath us; re-enqueue this
            # task and start over.
            process_task.delay(cache_key, start_time=start_time, event_id=event_id)
            return

        # Canonical types cannot be persisted in the cache; downgrade to
        # a plain dict before writing the payload back.
        if isinstance(payload, CANONICAL_TYPES):
            payload = dict(payload.items())
        default_cache.set(cache_key, payload, 3600)

    save_event.delay(
        cache_key=cache_key, data=None, start_time=start_time,
        event_id=event_id, project_id=project
    )