def test_getitem_setitem(self):
    """A write through either alias must be readable under both key forms."""
    for write_key in ("user", "sentry.interfaces.User"):
        d = CanonicalKeyDict({"user": {"id": "DemoUser"}})
        d[write_key] = {"id": "other"}
        # Regardless of which alias was written, both must see the new value.
        assert d["user"] == {"id": "other"}
        assert d["sentry.interfaces.User"] == {"id": "other"}
def test_getitem_setitem(self):
    """Setting through either the canonical or legacy key updates both views."""
    for key_to_set in ('user', 'sentry.interfaces.User'):
        mapping = CanonicalKeyDict({'user': {'id': 'DemoUser'}})
        mapping[key_to_set] = {'id': 'other'}
        # Both aliases resolve to the same underlying entry.
        assert mapping['user'] == {'id': 'other'}
        assert mapping['sentry.interfaces.User'] == {'id': 'other'}
def create_event(group=None, project=None, event_id=None, normalize=True, **kwargs):
    """Legacy test factory: build, normalize, and save an ``Event``.

    XXX: Do not use this method for new tests! Prefer `store_event`.

    ``kwargs`` may carry ``data``, ``tags``, ``stacktrace``, ``user``,
    ``message``, etc.; sensible defaults are filled in from
    ``DEFAULT_EVENT_DATA``. Returns the saved ``Event``.
    """
    if event_id is None:
        event_id = uuid4().hex
    # Fall back to the group's project when no project is given.
    # NOTE(review): this raises AttributeError if both are None — presumably
    # callers always pass one of them; confirm before reuse.
    kwargs.setdefault('project', project if project else group.project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    if kwargs.get('tags'):
        # Tags live inside the event payload, not on the model kwargs.
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace
    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user
    kwargs['data'].setdefault('errors', [{
        'type': EventError.INVALID_DATA,
        'name': 'foobar',
    }])
    # maintain simple event Factories by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs['message'] or '<unlabeled event>',
        }
    if normalize:
        # Run the full normalization pipeline so the stored payload matches
        # what production ingestion would have produced.
        manager = EventManager(CanonicalKeyDict(kwargs['data']))
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['data'].update(manager.materialize_metadata())
        kwargs['message'] = manager.get_search_message()
    # This is needed so that create_event saves the event in nodestore
    # under the correct key. This is usually done in EventManager.save()
    kwargs['data'].setdefault(
        'node_id', Event.generate_node_id(kwargs['project'].id, event_id))
    event = Event(event_id=event_id, group=group, **kwargs)
    if group:
        EventMapping.objects.create(
            project_id=event.project.id,
            event_id=event_id,
            group=group,
        )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def _normalize_impl(self):
    """Normalize ``self._data`` once via the Rust (semaphore) normalizer.

    Raises ``RuntimeError`` if called a second time. Replaces ``self._data``
    with a ``CanonicalKeyDict`` of the normalized payload and then applies
    user-agent normalization on top.
    """
    if self._normalized:
        raise RuntimeError('Already normalized')
    self._normalized = True

    # Imported lazily so the Rust extension is only loaded when needed.
    from semaphore.processing import StoreNormalizer
    rust_normalizer = StoreNormalizer(
        geoip_lookup=rust_geoip,
        project_id=self._project.id if self._project else None,
        client_ip=self._client_ip,
        client=self._auth.client if self._auth else None,
        # The Rust side expects string IDs/versions, hence the text_type casts.
        key_id=six.text_type(self._key.id) if self._key else None,
        grouping_config=self._grouping_config,
        protocol_version=six.text_type(self.version)
        if self.version is not None else None,
        stacktrace_frames_hard_limit=settings.SENTRY_STACKTRACE_FRAMES_HARD_LIMIT,
        max_stacktrace_frames=settings.SENTRY_MAX_STACKTRACE_FRAMES,
        valid_platforms=list(VALID_PLATFORMS),
        max_secs_in_future=MAX_SECS_IN_FUTURE,
        max_secs_in_past=MAX_SECS_IN_PAST,
        enable_trimming=True,
        is_renormalize=self._is_renormalize)

    # normalize_event wants a plain dict, so unwrap the CanonicalKeyDict first.
    self._data = CanonicalKeyDict(
        rust_normalizer.normalize_event(dict(self._data)))
    normalize_user_agent(self._data)
def __setstate__(self, state):
    """Restore pickled state, rejecting the legacy duplicated ``data`` entry."""
    # Older pickles stored the event data twice; drop the duplicate copy.
    state.pop('data', None)
    was_canonical = state.pop('_node_data_CANONICAL', False)
    if was_canonical:
        # Re-wrap the node data so canonical/legacy key aliasing keeps working.
        state['_node_data'] = CanonicalKeyDict(state['_node_data'])
    self.__dict__ = state
def test_get_path_dict(self):
    """get_path with varargs keys works on plain dicts and CanonicalKeyDict."""
    assert get_path({}, 'a') is None
    single = {'a': 2}
    assert get_path(single, 'a') == 2
    assert get_path(single, 'b') is None
    nested = {'a': {'b': []}}
    assert get_path(nested, 'a', 'b') == []
    # Descending into a non-mapping yields None rather than raising.
    assert get_path({'a': []}, 'a', 'b') is None
    canonical = CanonicalKeyDict({'a': 2})
    assert get_path(canonical, 'a') == 2
def _normalize_impl(self, project_id=None):
    """Normalize ``self._data`` once via the Rust (sentry_relay) normalizer.

    ``project_id`` may be supplied by ``save()``; it must agree with the
    project the manager was constructed with, if any. Raises ``RuntimeError``
    on a project mismatch or on a second normalization attempt.
    """
    if self._project and project_id and project_id != self._project.id:
        raise RuntimeError(
            "Initialized EventManager with one project ID and called save() with another one"
        )

    if self._normalized:
        raise RuntimeError("Already normalized")
    self._normalized = True

    # Imported lazily so the Rust extension is only loaded when needed.
    from sentry_relay.processing import StoreNormalizer

    rust_normalizer = StoreNormalizer(
        project_id=self._project.id if self._project else project_id,
        client_ip=self._client_ip,
        client=self._auth.client if self._auth else None,
        # The Rust side expects string IDs/versions, hence the text_type casts.
        key_id=six.text_type(self._key.id) if self._key else None,
        grouping_config=self._grouping_config,
        protocol_version=six.text_type(self.version) if self.version is not None else None,
        is_renormalize=self._is_renormalize,
        remove_other=self._remove_other,
        normalize_user_agent=True,
        sent_at=self.sent_at.isoformat() if self.sent_at is not None else None,
        **DEFAULT_STORE_NORMALIZER_ARGS
    )

    # normalize_event wants a plain dict, so unwrap the CanonicalKeyDict first.
    self._data = CanonicalKeyDict(rust_normalizer.normalize_event(dict(self._data)))
def _do_preprocess_event(cache_key, data, start_time, event_id, process_task, project):
    """Route a stored event payload either into processing or straight to save.

    Loads the payload from the default cache when ``data`` is not provided,
    tags the current scope with the project, and dispatches to
    ``submit_process`` when ``should_process`` says so, otherwise to
    ``submit_save_event``.
    """
    if cache_key and data is None:
        data = default_cache.get(cache_key)

    if data is None:
        # Payload expired or was never written — count and log, nothing to do.
        metrics.incr("events.failed",
                     tags={
                         "reason": "cache",
                         "stage": "pre"
                     },
                     skip_internal=False)
        error_logger.error("preprocess.failed.empty", extra={"cache_key": cache_key})
        return

    # Keep the untouched payload around; downstream tasks receive it verbatim.
    original_data = data
    data = CanonicalKeyDict(data)
    project_id = data["project"]

    with configure_scope() as scope:
        scope.set_tag("project", project_id)

    if project is None:
        project = Project.objects.get_from_cache(id=project_id)
    else:
        # Caller-supplied project must match the one embedded in the payload.
        assert project.id == project_id, (project.id, project_id)

    if should_process(data):
        # Reprocessing runs through a dedicated task; detect it by identity.
        from_reprocessing = process_task is process_event_from_reprocessing
        submit_process(project, from_reprocessing, cache_key, event_id, start_time,
                       original_data)
        return

    submit_save_event(project, cache_key, event_id, start_time, original_data)
def _do_preprocess_event(cache_key, data, start_time, event_id, process_event):
    """Legacy preprocess dispatcher: queue processing or save for an event.

    When a ``cache_key`` is given the payload is (re)loaded from the default
    cache. Dispatches to ``process_event`` if processing is required,
    otherwise schedules ``save_event`` directly.
    """
    if cache_key:
        data = default_cache.get(cache_key)

    if data is None:
        # Payload missing from cache — count the failure and bail out.
        metrics.incr('events.failed', tags={'reason': 'cache', 'stage': 'pre'})
        error_logger.error('preprocess.failed.empty',
                           extra={'cache_key': cache_key})
        return

    data = CanonicalKeyDict(data)
    project = data['project']

    Raven.tags_context({
        'project': project,
    })

    if should_process(data):
        process_event.delay(cache_key=cache_key, start_time=start_time,
                            event_id=event_id)
        return

    # If we get here, that means the event had no preprocessing needed to be done
    # so we can jump directly to save_event
    if cache_key:
        # save_event will reload from cache itself; avoid shipping the payload twice.
        data = None
    save_event.delay(cache_key=cache_key, data=data, start_time=start_time,
                     event_id=event_id, project_id=project)
def _do_preprocess_event(cache_key, data, start_time, event_id, process_task):
    """Preprocess dispatcher: queue full processing or direct save.

    Loads the payload from the default cache when ``data`` is not provided,
    tags the scope with the project, then dispatches to ``submit_process``
    or ``submit_save_event``.
    """
    if cache_key and data is None:
        data = default_cache.get(cache_key)

    if data is None:
        # Payload expired or was never written — count and log, nothing to do.
        metrics.incr('events.failed',
                     tags={
                         'reason': 'cache',
                         'stage': 'pre'
                     },
                     skip_internal=False)
        error_logger.error('preprocess.failed.empty', extra={'cache_key': cache_key})
        return

    # Keep the untouched payload around; downstream tasks receive it verbatim.
    original_data = data
    data = CanonicalKeyDict(data)
    project_id = data['project']

    with configure_scope() as scope:
        scope.set_tag("project", project_id)

    project = Project.objects.get_from_cache(id=project_id)

    if should_process(data):
        # Reprocessing runs through a dedicated task; detect it by identity.
        from_reprocessing = process_task is process_event_from_reprocessing
        submit_process(project, from_reprocessing, cache_key, event_id, start_time,
                       original_data)
        return

    submit_save_event(project, cache_key, event_id, start_time, original_data)
def test_get_path_dict(self):
    """Varargs get_path lookups on plain dicts and CanonicalKeyDict."""
    assert get_path({}, "a") is None
    flat = {"a": 2}
    assert get_path(flat, "a") == 2
    assert get_path(flat, "b") is None
    deep = {"a": {"b": []}}
    assert get_path(deep, "a", "b") == []
    # Traversing into a non-mapping returns None instead of raising.
    assert get_path({"a": []}, "a", "b") is None
    wrapped = CanonicalKeyDict({"a": 2})
    assert get_path(wrapped, "a") == 2
def test_len(self):
    """Legacy/canonical alias pairs collapse to one entry, so len is 3."""
    payload = {
        'release': 'asdf',
        'exception': {'type': 'DemoException'},
        'user': {'id': 'DemoUser'},
        'sentry.interfaces.Exception': {'type': 'INVALID'},
        'sentry.interfaces.User': {'id': 'INVALID'},
    }
    # Five input keys, but only three distinct canonical keys survive.
    assert len(CanonicalKeyDict(payload)) == 3
def test_mixed(self):
    """Canonical keys win over their legacy aliases when both are present."""
    payload = {
        'release': 'asdf',
        'exception': {'type': 'DemoException'},
        'user': {'id': 'DemoUser'},
        'sentry.interfaces.Exception': {'type': 'INVALID'},
        'sentry.interfaces.User': {'id': 'INVALID'},
    }
    assert CanonicalKeyDict(payload) == self.canonical_data
def test_get_path(self):
    """List-based get_path lookups, with and without defaults."""
    assert get_path({}, ['a']) is None
    assert get_path({}, ['a'], 1) == 1
    flat = {'a': 2}
    assert get_path(flat, ['a']) == 2
    assert get_path(flat, ['b']) is None
    # The third positional argument is the fallback value.
    assert get_path(flat, ['b'], 1) == 1
    deep = {'a': {'b': []}}
    assert get_path(deep, ['a', 'b']) == []
    assert get_path({'a': []}, ['a', 'b']) is None
    wrapped = CanonicalKeyDict({'a': 2})
    assert get_path(wrapped, ['a']) == 2
def _do_preprocess_event(cache_key, data, start_time, event_id, process_task, project):
    """Preprocess dispatcher with symbolication and reprocessing support.

    Loads the payload from the processing store when ``data`` is not given,
    primes project/organization caches, then routes to symbolication,
    processing, or directly to save — in that order of preference.
    """
    from sentry.lang.native.processing import should_process_with_symbolicator

    if cache_key and data is None:
        data = event_processing_store.get(cache_key)

    if data is None:
        # Payload expired or was never written — count and log, nothing to do.
        metrics.incr("events.failed",
                     tags={
                         "reason": "cache",
                         "stage": "pre"
                     },
                     skip_internal=False)
        error_logger.error("preprocess.failed.empty", extra={"cache_key": cache_key})
        return

    # Keep the untouched payload around; downstream tasks receive it verbatim.
    original_data = data
    data = CanonicalKeyDict(data)
    project_id = data["project"]
    set_current_project(project_id)

    if project is None:
        project = Project.objects.get_from_cache(id=project_id)
    else:
        # Caller-supplied project must match the one embedded in the payload.
        assert project.id == project_id, (project.id, project_id)

    # Reprocessing runs through a dedicated task; detect it by identity.
    from_reprocessing = process_task is process_event_from_reprocessing

    with metrics.timer(
            "tasks.store.preprocess_event.organization.get_from_cache"):
        # Pre-warm the organization cache so later stages avoid a DB hit.
        project._organization_cache = Organization.objects.get_from_cache(
            id=project.organization_id)

    if should_process_with_symbolicator(data):
        # Back up the raw payload first so reprocessing can start from it.
        reprocessing2.backup_unprocessed_event(project=project, data=original_data)
        submit_symbolicate(project, from_reprocessing, cache_key, event_id,
                           start_time, original_data)
        return

    if should_process(data):
        reprocessing2.backup_unprocessed_event(project=project, data=original_data)
        submit_process(
            project,
            from_reprocessing,
            cache_key,
            event_id,
            start_time,
            data_has_changed=False,
        )
        return

    submit_save_event(project, from_reprocessing, cache_key, event_id, start_time,
                      original_data)
def create_event(self, event_id=None, normalize=True, **kwargs):
    """Fixture helper: build and save an ``Event`` bound to ``self.group``.

    Fills defaults from ``DEFAULT_EVENT_DATA``, moves ``tags``/``stacktrace``/
    ``user`` into the payload, optionally normalizes via ``EventManager``,
    creates the ``EventMapping``, and returns the saved event.
    """
    if event_id is None:
        event_id = uuid4().hex
    if 'group' not in kwargs:
        kwargs['group'] = self.group
    kwargs.setdefault('project', kwargs['group'].project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    if kwargs.get('tags'):
        # Tags live inside the event payload, not on the model kwargs.
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['stacktrace'] = stacktrace

    user = kwargs.pop('user', None)
    if user is not None:
        kwargs['data']['user'] = user

    kwargs['data'].setdefault(
        'errors', [{
            'type': EventError.INVALID_DATA,
            'name': 'foobar',
        }]
    )

    # maintain simple event fixtures by supporting the legacy message
    # parameter just like our API would
    if 'logentry' not in kwargs['data']:
        kwargs['data']['logentry'] = {
            'message': kwargs.get('message') or '<unlabeled event>',
        }

    if normalize:
        manager = EventManager(CanonicalKeyDict(kwargs['data']), for_store=False)
        manager.normalize()
        kwargs['data'] = manager.get_data()
        kwargs['message'] = manager.get_search_message()
    else:
        # NOTE(review): kwargs.setdefault('message', ...) above appears to
        # always populate 'message', which would make this assert trip
        # whenever normalize=False — confirm intended behavior.
        assert 'message' not in kwargs, 'do not pass message this way'

    event = Event(event_id=event_id, **kwargs)
    EventMapping.objects.create(
        project_id=event.project.id,
        event_id=event_id,
        group=event.group,
    )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def test_canonical(self):
    """A dict built purely from canonical keys equals the reference data."""
    payload = {
        'release': 'asdf',
        'exception': {'type': 'DemoException'},
        'user': {'id': 'DemoUser'},
    }
    assert CanonicalKeyDict(payload) == self.canonical_data
def test_canonical(self):
    """Constructing from canonical keys alone matches the reference data."""
    payload = {
        "release": "asdf",
        "exception": {"type": "DemoException"},
        "user": {"id": "DemoUser"},
    }
    assert CanonicalKeyDict(payload) == self.canonical_data
def test_legacy(self):
    """Legacy interface keys are mapped onto their canonical equivalents."""
    payload = {
        "release": "asdf",
        "sentry.interfaces.Exception": {"type": "DemoException"},
        "sentry.interfaces.User": {"id": "DemoUser"},
    }
    assert CanonicalKeyDict(payload) == self.canonical_data
def create_event(group=None, project=None, event_id=None, normalize=True, **kwargs):
    """Legacy test factory: build, normalize, and save an ``Event``.

    XXX: Do not use this method for new tests! Prefer `store_event`.

    Fills defaults from ``DEFAULT_EVENT_DATA``, moves ``tags``/``stacktrace``/
    ``user`` into the payload, optionally normalizes, and persists both the
    event row and its node data. Returns the saved ``Event``.
    """
    if event_id is None:
        event_id = uuid4().hex
    # Fall back to the group's project when no project is given.
    # NOTE(review): this raises AttributeError if both are None — presumably
    # callers always pass one of them; confirm before reuse.
    kwargs.setdefault("project", project if project else group.project)
    kwargs.setdefault("data", copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault("platform", kwargs["data"].get("platform", "python"))
    kwargs.setdefault("message", kwargs["data"].get("message", "message"))
    if kwargs.get("tags"):
        # Tags live inside the event payload, not on the model kwargs.
        tags = kwargs.pop("tags")
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs["data"]["tags"] = tags
    if kwargs.get("stacktrace"):
        stacktrace = kwargs.pop("stacktrace")
        kwargs["data"]["stacktrace"] = stacktrace

    user = kwargs.pop("user", None)
    if user is not None:
        kwargs["data"]["user"] = user

    kwargs["data"].setdefault("errors", [{
        "type": EventError.INVALID_DATA,
        "name": "foobar"
    }])

    # maintain simple event Factories by supporting the legacy message
    # parameter just like our API would
    if "logentry" not in kwargs["data"]:
        kwargs["data"]["logentry"] = {
            "message": kwargs["message"] or "<unlabeled event>"
        }

    if normalize:
        # Run the full normalization pipeline so the stored payload matches
        # what production ingestion would have produced.
        manager = EventManager(CanonicalKeyDict(kwargs["data"]))
        manager.normalize()
        kwargs["data"] = manager.get_data()
        kwargs["data"].update(manager.materialize_metadata())
        kwargs["message"] = manager.get_search_message()

    # This is needed so that create_event saves the event in nodestore
    # under the correct key. This is usually done in EventManager.save()
    kwargs["data"].setdefault(
        "node_id", Event.generate_node_id(kwargs["project"].id, event_id))

    event = Event(event_id=event_id, group=group, **kwargs)
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    # Persist the node data explicitly since EventManager.save() isn't used.
    event.data.save()
    return event
def _do_preprocess_event(cache_key, data, start_time, event_id, process_task, project):
    """Preprocess dispatcher gated by the new-process-behavior option.

    Loads the payload from the default cache when ``data`` is not given,
    then routes to symbolication (only under the new behavior option),
    to processing, or directly to save.
    """
    from sentry.lang.native.processing import should_process_with_symbolicator

    if cache_key and data is None:
        data = default_cache.get(cache_key)

    if data is None:
        # Payload expired or was never written — count and log, nothing to do.
        metrics.incr("events.failed",
                     tags={
                         "reason": "cache",
                         "stage": "pre"
                     },
                     skip_internal=False)
        error_logger.error("preprocess.failed.empty", extra={"cache_key": cache_key})
        return

    # Keep the untouched payload around; downstream tasks receive it verbatim.
    original_data = data
    data = CanonicalKeyDict(data)
    project_id = data["project"]
    set_current_project(project_id)

    if project is None:
        project = Project.objects.get_from_cache(id=project_id)
    else:
        # Caller-supplied project must match the one embedded in the payload.
        assert project.id == project_id, (project.id, project_id)

    # Reprocessing runs through a dedicated task; detect it by identity.
    from_reprocessing = process_task is process_event_from_reprocessing

    # Feature flag controlling the split symbolicate/process pipeline.
    new_process_behavior = bool(
        options.get("sentry:preprocess-use-new-behavior", False))
    metrics.incr("tasks.store.preprocess_event.new_process_behavior",
                 tags={"value": new_process_behavior})

    if new_process_behavior and should_process_with_symbolicator(data):
        submit_symbolicate(project, from_reprocessing, cache_key, event_id,
                           start_time, original_data)
        return

    if should_process(data):
        submit_process(
            project,
            from_reprocessing,
            cache_key,
            event_id,
            start_time,
            original_data,
            data_has_changed=False,
            new_process_behavior=new_process_behavior,
        )
        return

    submit_save_event(project, cache_key, event_id, start_time, original_data)
def create_event(self, event_id=None, **kwargs):
    """Fixture helper using the legacy ``sentry.interfaces.*`` payload keys.

    Builds an ``Event`` bound to ``self.group``, fills defaults from
    ``DEFAULT_EVENT_DATA``, wraps the payload in a ``CanonicalKeyDict``,
    creates the ``EventMapping``, and returns the saved event.
    """
    if event_id is None:
        event_id = uuid4().hex
    if 'group' not in kwargs:
        kwargs['group'] = self.group
    kwargs.setdefault('project', kwargs['group'].project)
    kwargs.setdefault('data', copy.deepcopy(DEFAULT_EVENT_DATA))
    kwargs.setdefault('platform', kwargs['data'].get('platform', 'python'))
    kwargs.setdefault('message', kwargs['data'].get('message', 'message'))
    if kwargs.get('tags'):
        # Tags live inside the event payload, not on the model kwargs.
        tags = kwargs.pop('tags')
        if isinstance(tags, dict):
            tags = list(tags.items())
        kwargs['data']['tags'] = tags
    if kwargs.get('stacktrace'):
        # Legacy payloads use the interface-prefixed stacktrace key.
        stacktrace = kwargs.pop('stacktrace')
        kwargs['data']['sentry.interfaces.Stacktrace'] = stacktrace

    kwargs['data'].setdefault(
        'errors', [{
            'type': EventError.INVALID_DATA,
            'name': 'foobar',
        }]
    )

    # maintain simple event fixtures by supporting the legacy message
    # parameter just like our API would
    if 'sentry.interfaces.Message' not in kwargs['data']:
        kwargs['data']['sentry.interfaces.Message'] = {
            'message': kwargs.get('message') or '<unlabeled event>',
        }

    if 'type' not in kwargs['data']:
        # Default event type with the message as its display title.
        kwargs['data'].update(
            {
                'type': 'default',
                'metadata': {
                    'title': kwargs['data']['sentry.interfaces.Message']['message'],
                },
            }
        )

    # Wrap so legacy and canonical keys alias each other from here on.
    kwargs['data'] = CanonicalKeyDict(kwargs.pop('data'))

    event = Event(event_id=event_id, **kwargs)
    EventMapping.objects.create(
        project_id=event.project.id,
        event_id=event_id,
        group=event.group,
    )
    # emulate EventManager refs
    event.data.bind_ref(event)
    event.save()
    return event
def test_legacy(self):
    """With legacy=True, legacy-keyed input equals the canonical reference."""
    payload = {
        'release': 'asdf',
        'sentry.interfaces.Exception': {'type': 'DemoException'},
        'sentry.interfaces.User': {'id': 'DemoUser'},
    }
    assert CanonicalKeyDict(payload, legacy=True) == self.canonical_data
def _decode_event(data, content_encoding):
    """Decode a raw event payload into a ``CanonicalKeyDict``.

    Handles gzip/deflate content encodings, base64+zlib-packed payloads, and
    plain (possibly unicode-escaped) JSON bytes, then parses the resulting
    JSON text.
    """
    if isinstance(data, six.binary_type):
        if content_encoding == "gzip":
            data = decompress_gzip(data)
        elif content_encoding == "deflate":
            data = decompress_deflate(data)
        elif data[:1] != b"{":
            # BUGFIX: must slice (`data[:1]`), not index (`data[0]`) — on
            # Python 3 indexing bytes yields an int, so `data[0] != b"{"` was
            # always True and plain JSON bytes were wrongly sent through the
            # base64 decode path.
            data = decode_and_decompress_data(data)
        else:
            data = decode_data(data)
    if isinstance(data, six.text_type):
        data = safely_load_json_string(data)

    return CanonicalKeyDict(data)
def test_mixed(self):
    """In legacy mode, legacy-keyed values beat their canonical duplicates."""
    payload = {
        "release": "asdf",
        "sentry.interfaces.Exception": {"type": "DemoException"},
        "sentry.interfaces.User": {"id": "DemoUser"},
        "exception": {"type": "INVALID"},
        "user": {"id": "INVALID"},
    }
    result = CanonicalKeyDict(payload, legacy=True)
    assert result == self.canonical_data
def test_len(self):
    """Alias pairs collapse into single entries: five keys in, three out."""
    payload = {
        "release": "asdf",
        "exception": {"type": "DemoException"},
        "user": {"id": "DemoUser"},
        "sentry.interfaces.Exception": {"type": "INVALID"},
        "sentry.interfaces.User": {"id": "INVALID"},
    }
    assert len(CanonicalKeyDict(payload)) == 3
def test_set_dict(self):
    """set_path writes values into plain dicts and CanonicalKeyDict alike."""
    target = {}
    assert set_path(target, 'a', value=42)
    assert target == {'a': 42}

    target = {'a': 2}
    # Overwriting an existing key still reports success.
    assert set_path(target, 'a', value=42)
    assert target == {'a': 42}

    target = {}
    # Intermediate mappings are created on demand.
    assert set_path(target, 'a', 'b', value=42)
    assert target == {'a': {'b': 42}}

    target = CanonicalKeyDict({})
    assert set_path(target, 'a', value=42)
    assert target == {'a': 42}
def test_set_dict(self):
    """set_path handles flat, overwrite, nested, and canonical-dict cases."""
    mapping = {}
    assert set_path(mapping, "a", value=42)
    assert mapping == {"a": 42}

    mapping = {"a": 2}
    # Overwriting an existing key still reports success.
    assert set_path(mapping, "a", value=42)
    assert mapping == {"a": 42}

    mapping = {}
    # Intermediate mappings are created on demand.
    assert set_path(mapping, "a", "b", value=42)
    assert mapping == {"a": {"b": 42}}

    mapping = CanonicalKeyDict({})
    assert set_path(mapping, "a", value=42)
    assert mapping == {"a": 42}
def _normalize_impl(self):
    """Normalize ``self._data`` once via the Rust (semaphore) normalizer.

    Raises ``RuntimeError`` on a second call. Replaces ``self._data`` with a
    ``CanonicalKeyDict`` of the normalized payload.
    """
    if self._normalized:
        raise RuntimeError("Already normalized")
    self._normalized = True

    # Imported lazily so the Rust extension is only loaded when needed.
    from semaphore.processing import StoreNormalizer

    rust_normalizer = StoreNormalizer(
        project_id=self._project.id if self._project else None,
        client_ip=self._client_ip,
        client=self._auth.client if self._auth else None,
        # The Rust side expects string IDs/versions, hence the text_type casts.
        key_id=six.text_type(self._key.id) if self._key else None,
        grouping_config=self._grouping_config,
        protocol_version=six.text_type(self.version) if self.version is not None else None,
        is_renormalize=self._is_renormalize,
        remove_other=self._remove_other,
        normalize_user_agent=True,
        **DEFAULT_STORE_NORMALIZER_ARGS
    )

    # normalize_event wants a plain dict, so unwrap the CanonicalKeyDict first.
    self._data = CanonicalKeyDict(rust_normalizer.normalize_event(dict(self._data)))
def __init__(
    self,
    data,
    version="5",
    project=None,
    grouping_config=None,
    client_ip=None,
    user_agent=None,
    auth=None,
    key=None,
    content_encoding=None,
    is_renormalize=False,
    remove_other=None,
    project_config=None,
    sent_at=None,
):
    """Initialize the event manager around a raw event payload.

    ``data`` is wrapped in a ``CanonicalKeyDict`` so canonical and legacy
    keys alias each other. ``grouping_config`` is resolved, in order, from
    the explicit argument, the supplied ``project_config``, and finally the
    project itself.
    """
    self._data = CanonicalKeyDict(data)
    self.version = version
    self._project = project
    # if not explicitly specified try to get the grouping from project_config
    if grouping_config is None and project_config is not None:
        config = project_config.config
        grouping_config = config.get("grouping_config")
    # if we still don't have a grouping also try the project
    if grouping_config is None and project is not None:
        grouping_config = get_grouping_config_dict_for_project(self._project)
    self._grouping_config = grouping_config
    self._client_ip = client_ip
    self._user_agent = user_agent
    self._auth = auth
    self._key = key
    self._is_renormalize = is_renormalize
    self._remove_other = remove_other
    # Guards _normalize_impl against running twice.
    self._normalized = False
    self.project_config = project_config
    self.sent_at = sent_at