def test_reaction(self) -> None:
    """Verify that a message dict built from a raw DB row includes reaction
    data: the emoji name plus the reacting user's id/email/full_name
    (exposed both flat and under the nested 'user' key)."""
    sender = self.example_user('othello')
    receiver = self.example_user('hamlet')
    recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
    sending_client = make_client(name="test suite")
    message = Message(
        sender=sender,
        recipient=recipient,
        content='hello **world**',
        date_sent=timezone_now(),
        sending_client=sending_client,
        last_edit_time=timezone_now(),
        edit_history='[]',
    )
    message.set_topic_name('whatever')
    message.save()

    reaction = Reaction.objects.create(
        message=message, user_profile=sender, emoji_name='simple_smile')

    # Go through the raw-row code path, like the real fetch path does.
    row = MessageDict.get_raw_db_rows([message.id])[0]
    msg_dict = MessageDict.build_dict_from_raw_db_row(row)

    self.assertEqual(msg_dict['reactions'][0]['emoji_name'], reaction.emoji_name)
    self.assertEqual(msg_dict['reactions'][0]['user_id'], sender.id)
    self.assertEqual(msg_dict['reactions'][0]['user']['id'], sender.id)
    self.assertEqual(msg_dict['reactions'][0]['user']['email'], sender.email)
    self.assertEqual(msg_dict['reactions'][0]['user']['full_name'], sender.full_name)
def test_reaction(self) -> None:
    """Verify that a message dict built from a raw DB row includes reaction
    data: the emoji name plus the reacting user's id/email/full_name
    (exposed both flat and under the nested 'user' key)."""
    sender = self.example_user("othello")
    receiver = self.example_user("hamlet")
    recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
    sending_client = make_client(name="test suite")
    message = Message(
        sender=sender,
        recipient=recipient,
        content="hello **world**",
        date_sent=timezone_now(),
        sending_client=sending_client,
        last_edit_time=timezone_now(),
        edit_history="[]",
    )
    message.set_topic_name("whatever")
    message.save()

    reaction = Reaction.objects.create(
        message=message, user_profile=sender, emoji_name="simple_smile")

    # Go through the raw-row code path, like the real fetch path does.
    row = MessageDict.get_raw_db_rows([message.id])[0]
    msg_dict = MessageDict.build_dict_from_raw_db_row(row)

    self.assertEqual(msg_dict["reactions"][0]["emoji_name"], reaction.emoji_name)
    self.assertEqual(msg_dict["reactions"][0]["user_id"], sender.id)
    self.assertEqual(msg_dict["reactions"][0]["user"]["id"], sender.id)
    self.assertEqual(msg_dict["reactions"][0]["user"]["email"], sender.email)
    self.assertEqual(msg_dict["reactions"][0]["user"]["full_name"], sender.full_name)
def test_applying_markdown(self) -> None:
    """Fetching a message through get_raw_db_rows should render its Markdown
    on demand and persist the rendered content (with the current
    markdown_version) back to the Message row."""
    sender = self.example_user('othello')
    receiver = self.example_user('hamlet')
    recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
    sending_client = make_client(name="test suite")
    message = Message(
        sender=sender,
        recipient=recipient,
        content='hello **world**',
        date_sent=timezone_now(),
        sending_client=sending_client,
        last_edit_time=timezone_now(),
        edit_history='[]',
    )
    message.set_topic_name('whatever')
    message.save()

    # An important part of this test is to get the message through this exact code path,
    # because there is an ugly hack we need to cover.  So don't just say "row = message".
    row = MessageDict.get_raw_db_rows([message.id])[0]
    dct = MessageDict.build_dict_from_raw_db_row(row)
    expected_content = '<p>hello <strong>world</strong></p>'
    self.assertEqual(dct['rendered_content'], expected_content)

    # The rendering should also have been written back to the database.
    message = Message.objects.get(id=message.id)
    self.assertEqual(message.rendered_content, expected_content)
    self.assertEqual(message.rendered_content_version, markdown_version)
def test_applying_markdown_invalid_format(self, convert_mock: Any) -> None:
    """If the Markdown converter returns an invalid (None) result without
    raising, the message dict should fall back to the canned error
    content rather than crashing."""
    # pretend the converter returned an invalid message without raising an exception
    convert_mock.return_value = None
    sender = self.example_user('othello')
    receiver = self.example_user('hamlet')
    recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
    sending_client = make_client(name="test suite")
    message = Message(
        sender=sender,
        recipient=recipient,
        content='hello **world**',
        date_sent=timezone_now(),
        sending_client=sending_client,
        last_edit_time=timezone_now(),
        edit_history='[]',
    )
    message.set_topic_name('whatever')
    message.save()

    # An important part of this test is to get the message through this exact code path,
    # because there is an ugly hack we need to cover.  So don't just say "row = message".
    row = MessageDict.get_raw_db_rows([message.id])[0]
    dct = MessageDict.build_dict_from_raw_db_row(row)
    error_content = '<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>'
    self.assertEqual(dct['rendered_content'], error_content)
def test_bulk_message_fetching(self) -> None:
    """Bulk-fetch several hundred messages (PMs and stream messages, each
    with a reaction) and verify the raw-row pipeline hydrates all of
    them with a constant number of database queries.

    Note: this test previously also asserted a wall-clock time budget
    (1.5ms per message), which was flaky under parallel/loaded test
    runs; the deterministic query-count assertion is the meaningful
    guard against O(n) query regressions, so the timing check has been
    removed.
    """
    sender = self.example_user('othello')
    receiver = self.example_user('hamlet')
    pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
    stream_name = 'Çiğdem'
    stream = self.make_stream(stream_name)
    stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
    sending_client = make_client(name="test suite")

    ids = []
    for i in range(300):
        for recipient in [pm_recipient, stream_recipient]:
            message = Message(
                sender=sender,
                recipient=recipient,
                content=f'whatever {i}',
                rendered_content='DOES NOT MATTER',
                rendered_content_version=markdown_version,
                date_sent=timezone_now(),
                sending_client=sending_client,
                last_edit_time=timezone_now(),
                edit_history='[]',
            )
            message.set_topic_name('whatever')
            message.save()
            ids.append(message.id)

            Reaction.objects.create(user_profile=sender, message=message,
                                    emoji_name='simple_smile')

    num_ids = len(ids)
    self.assertTrue(num_ids >= 600)

    flush_per_request_caches()
    with queries_captured() as queries:
        rows = list(MessageDict.get_raw_db_rows(ids))

        objs = [
            MessageDict.build_dict_from_raw_db_row(row)
            for row in rows
        ]
        MessageDict.post_process_dicts(objs, apply_markdown=False, client_gravatar=False)

    # The query count must not scale with the number of messages.
    self.assert_length(queries, 7)
    self.assertEqual(len(rows), num_ids)
def get_send_message_payload(msg_id: int, apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
    """Build the narrow payload a client would receive for a sent message,
    starting from the wide dict for the reloaded message."""
    message = reload_message(msg_id)
    return MessageDict.finalize_payload(
        MessageDict.wide_dict(message),
        apply_markdown=apply_markdown,
        client_gravatar=client_gravatar,
    )
def get_fetch_payload(msg_id: int, apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
    """Build the payload a client would receive when fetching this message."""
    message = reload_message(msg_id)
    unhydrated = MessageDict.to_dict_uncached_helper([message])[0]
    # post_process_dicts mutates the dict in place for performance
    # reasons, so the "final" dict is the same object we pass in.
    MessageDict.post_process_dicts(
        [unhydrated],
        apply_markdown=apply_markdown,
        client_gravatar=client_gravatar,
    )
    return unhydrated
def build_bot_request(self, event: Dict[str, Any]) -> Optional[Any]:
    """Serialize the outgoing-webhook request body for this event.

    Because we don't have a place for the recipient of an outgoing
    webhook to indicate whether it wants the raw Markdown or the
    rendered HTML, we leave both the content and rendered_content
    fields in the message payload.
    """
    MessageDict.finalize_payload(
        event['message'],
        apply_markdown=False,
        client_gravatar=False,
        keep_rendered_content=True,
    )
    payload = {
        "data": event['command'],
        "message": event['message'],
        "bot_email": self.user_profile.email,
        "token": self.token,
        "trigger": event['trigger'],
    }
    return json.dumps(payload)
def get_client_payload(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]: dct = copy.deepcopy(wide_dict) # Temporary transitional code: Zulip servers that have message # events in their event queues and upgrade to the new version # that expects sender_delivery_email in these events will # throw errors processing events. We can remove this block # once we don't expect anyone to be directly upgrading from # 2.0.x to the latest Zulip. if 'sender_delivery_email' not in dct: # nocoverage dct['sender_delivery_email'] = dct['sender_email'] MessageDict.finalize_payload(dct, apply_markdown, client_gravatar) return dct
def test_bulk_message_fetching(self) -> None:
    """Bulk-fetch several hundred messages (PMs and stream messages, each
    with a reaction) and verify the raw-row pipeline hydrates all of
    them with a constant number of database queries."""
    sender = self.example_user("othello")
    receiver = self.example_user("hamlet")
    pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
    stream_name = "Çiğdem"
    stream = self.make_stream(stream_name)
    stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
    sending_client = make_client(name="test suite")

    ids = []
    for i in range(300):
        for recipient in [pm_recipient, stream_recipient]:
            message = Message(
                sender=sender,
                recipient=recipient,
                content=f"whatever {i}",
                rendered_content="DOES NOT MATTER",
                rendered_content_version=markdown_version,
                date_sent=timezone_now(),
                sending_client=sending_client,
                last_edit_time=timezone_now(),
                edit_history="[]",
            )
            message.set_topic_name("whatever")
            message.save()
            ids.append(message.id)

            Reaction.objects.create(user_profile=sender, message=message,
                                    emoji_name="simple_smile")

    num_ids = len(ids)
    self.assertTrue(num_ids >= 600)

    flush_per_request_caches()
    with queries_captured() as queries:
        rows = list(MessageDict.get_raw_db_rows(ids))

        objs = [
            MessageDict.build_dict_from_raw_db_row(row)
            for row in rows
        ]
        MessageDict.post_process_dicts(objs, apply_markdown=False, client_gravatar=False)

    # The query count must not scale with the number of messages.
    self.assert_length(queries, 7)
    self.assert_length(rows, num_ids)
def build_bot_request(self, event: Dict[str, Any]) -> Optional[Any]:
    """Serialize the outgoing-webhook request body for this event.

    We send a simple version of the message to outgoing webhooks,
    since most of them really only need `content` and a few other
    fields.  We may eventually allow certain bots to get more
    information, but that's not a high priority.  We do send the
    gravatar info to the clients (so they don't have to compute it
    themselves).
    """
    message_dict = MessageDict.finalize_payload(
        event['message'],
        apply_markdown=False,
        client_gravatar=False,
        keep_rendered_content=True,
    )
    return json.dumps({
        "data": event['command'],
        "message": message_dict,
        "bot_email": self.user_profile.email,
        "token": self.token,
        "trigger": event['trigger'],
    })
def get_client_payload(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
    """Finalize the closed-over wide_dict into a per-client payload."""
    payload = MessageDict.finalize_payload(
        wide_dict,
        apply_markdown=apply_markdown,
        client_gravatar=client_gravatar,
    )
    return payload
def make_request(self, base_url: str, event: Dict[str, Any]) -> Optional[Response]:
    """
    We send a simple version of the message to outgoing
    webhooks, since most of them really only need `content`
    and a few other fields.  We may eventually allow certain
    bots to get more information, but that's not a high
    priority.  We do send the gravatar info to the clients
    (so they don't have to compute it themselves).
    """
    message_dict = MessageDict.finalize_payload(
        event["message"],
        apply_markdown=False,
        client_gravatar=False,
        keep_rendered_content=True,
    )

    request_data = {
        "data": event["command"],
        "message": message_dict,
        "bot_email": self.user_profile.email,
        "bot_full_name": self.user_profile.full_name,
        "token": self.token,
        "trigger": event["trigger"],
    }

    # POST the JSON payload to the bot's configured endpoint.
    return self.session.post(base_url, json=request_data)
def test_build_bot_request(self) -> None:
    """build_bot_request should emit a JSON payload that validates against
    the OpenAPI schema and contains the finalized message dict, without
    mutating the caller's wide message dict."""
    othello = self.example_user("othello")
    stream = get_stream("Denmark", othello.realm)
    message_id = self.send_stream_message(
        othello,
        stream.name,
        content="@**test**",
    )
    message = Message.objects.get(id=message_id)

    gravatar_url = get_gravatar_url(
        othello.delivery_email,
        othello.avatar_version,
    )

    expected_message_data = {
        "avatar_url": gravatar_url,
        "client": "test suite",
        "content": "@**test**",
        "content_type": "text/x-markdown",
        "display_recipient": "Denmark",
        "id": message.id,
        "is_me_message": False,
        "reactions": [],
        "recipient_id": message.recipient_id,
        "rendered_content": "<p>@<strong>test</strong></p>",
        "sender_email": othello.email,
        "sender_full_name": "Othello, the Moor of Venice",
        "sender_id": othello.id,
        "sender_realm_str": "zulip",
        "stream_id": stream.id,
        TOPIC_NAME: "test",
        "submessages": [],
        "timestamp": datetime_to_timestamp(message.date_sent),
        "topic_links": [],
        "type": "stream",
    }

    wide_message_dict = MessageDict.wide_dict(message)

    event = {
        "command": "@**test**",
        "message": wide_message_dict,
        "trigger": "mention",
    }

    request_data = self.handler.build_bot_request(event)
    request_data = json.loads(request_data)
    validate_against_openapi_schema(request_data, "/zulip-outgoing-webhook", "post", "200")
    self.assertEqual(request_data["data"], "@**test**")
    self.assertEqual(request_data["token"], "abcdef")
    self.assertEqual(request_data["message"], expected_message_data)

    # Make sure we didn't accidentally mutate wide_message_dict.
    self.assertEqual(wide_message_dict["sender_realm_id"], othello.realm_id)
def test_build_bot_request(self) -> None:
    """build_bot_request should emit a JSON payload that validates against
    the OpenAPI schema and contains the finalized message dict, without
    mutating the caller's wide message dict."""
    othello = self.example_user('othello')
    stream = get_stream('Denmark', othello.realm)
    message_id = self.send_stream_message(
        othello,
        stream.name,
        content="@**test**",
    )
    message = Message.objects.get(id=message_id)

    gravatar_url = get_gravatar_url(
        othello.delivery_email,
        othello.avatar_version,
    )

    expected_message_data = {
        'avatar_url': gravatar_url,
        'client': 'test suite',
        'content': '@**test**',
        'content_type': 'text/x-markdown',
        'display_recipient': 'Denmark',
        'id': message.id,
        'is_me_message': False,
        'reactions': [],
        'recipient_id': message.recipient_id,
        'rendered_content': '<p>@<strong>test</strong></p>',
        'sender_email': othello.email,
        'sender_full_name': 'Othello, the Moor of Venice',
        'sender_id': othello.id,
        'sender_realm_str': 'zulip',
        'stream_id': stream.id,
        TOPIC_NAME: 'test',
        'submessages': [],
        'timestamp': datetime_to_timestamp(message.date_sent),
        'topic_links': [],
        'type': 'stream',
    }

    wide_message_dict = MessageDict.wide_dict(message)

    event = {
        'command': '@**test**',
        'message': wide_message_dict,
        'trigger': 'mention',
    }

    request_data = self.handler.build_bot_request(event)
    request_data = json.loads(request_data)
    validate_against_openapi_schema(request_data, '/zulip-outgoing-webhook', 'post', '200')
    self.assertEqual(request_data['data'], "@**test**")
    self.assertEqual(request_data['token'], "abcdef")
    self.assertEqual(request_data['message'], expected_message_data)

    # Make sure we didn't accidentally mutate wide_message_dict.
    self.assertEqual(wide_message_dict['sender_realm_id'], othello.realm_id)
def message_cache_items(items_for_remote_cache: Dict[str, Tuple[bytes]], message: Message) -> None:
    """Add this message's serialized dict entry to the remote-cache mapping.

    Note: this code is untested, and the caller has been commented
    out for a while.
    """
    cache_key = to_dict_cache_key_id(message.id)
    cached_value = MessageDict.to_dict_uncached([message])[message.id]
    items_for_remote_cache[cache_key] = (cached_value,)
def message_cache_items(items_for_remote_cache: Dict[Text, Tuple[bytes]], message: Message) -> None:
    '''
    Add this message's serialized dict entry to the remote-cache mapping.

    Note: this code is untested, and the caller has been commented
    out for a while.
    '''
    key = to_dict_cache_key_id(message.id)
    value = MessageDict.to_dict_uncached(message)
    items_for_remote_cache[key] = (value, )
def message_cache_items(items_for_remote_cache: Dict[Text, Tuple[bytes]], message: Message) -> None:
    '''
    Add this message's serialized dict entry to the remote-cache mapping.

    Note: this code is untested, and the caller has been commented
    out for a while.
    '''
    key = to_dict_cache_key_id(message.id)
    value = MessageDict.to_dict_uncached(message)
    items_for_remote_cache[key] = (value,)
def test_hydrate_pm_recipient_info(self) -> None:
    """For personal messages, hydrate_recipient_info should append the
    sender to the display recipient list and set type to 'private'."""
    cordelia = self.example_user('cordelia')
    display_recipient: List[UserDisplayRecipient] = [
        dict(
            email='*****@*****.**',
            full_name='Aaron Smith',
            short_name='Aaron',
            id=999,
            is_mirror_dummy=False,
        ),
    ]

    obj = dict(
        recipient_type=Recipient.PERSONAL,
        recipient_type_id=None,
        sender_is_mirror_dummy=False,
        sender_email=cordelia.email,
        sender_full_name=cordelia.full_name,
        sender_short_name=cordelia.short_name,
        sender_id=cordelia.id,
    )

    MessageDict.hydrate_recipient_info(obj, display_recipient)

    self.assertEqual(
        obj['display_recipient'],
        [
            dict(
                email='*****@*****.**',
                full_name='Aaron Smith',
                short_name='Aaron',
                id=999,
                is_mirror_dummy=False,
            ),
            dict(
                email=cordelia.email,
                full_name=cordelia.full_name,
                id=cordelia.id,
                short_name=cordelia.short_name,
                is_mirror_dummy=False,
            ),
        ],
    )
    self.assertEqual(obj['type'], 'private')
def test_hydrate_stream_recipient_info(self) -> None:
    """For stream messages, hydrate_recipient_info should pass the stream
    name through as display_recipient and set type to 'stream'."""
    realm = get_realm('zulip')
    cordelia = self.example_user('cordelia')
    stream_id = get_stream('Verona', realm).id

    obj = dict(
        recipient_type=Recipient.STREAM,
        recipient_type_id=stream_id,
        sender_is_mirror_dummy=False,
        sender_email=cordelia.email,
        sender_full_name=cordelia.full_name,
        sender_id=cordelia.id,
    )

    MessageDict.hydrate_recipient_info(obj, 'Verona')

    self.assertEqual(obj['display_recipient'], 'Verona')
    self.assertEqual(obj['type'], 'stream')
def test_hydrate_stream_recipient_info(self) -> None:
    """For stream messages, hydrate_recipient_info should pass the stream
    name through as display_recipient and set type to 'stream'."""
    realm = get_realm("zulip")
    cordelia = self.example_user("cordelia")
    stream_id = get_stream("Verona", realm).id

    obj = dict(
        recipient_type=Recipient.STREAM,
        recipient_type_id=stream_id,
        sender_is_mirror_dummy=False,
        sender_email=cordelia.email,
        sender_full_name=cordelia.full_name,
        sender_id=cordelia.id,
    )

    MessageDict.hydrate_recipient_info(obj, "Verona")

    self.assertEqual(obj["display_recipient"], "Verona")
    self.assertEqual(obj["type"], "stream")
def test_hydrate_pm_recipient_info(self) -> None:
    """For personal messages, hydrate_recipient_info should append the
    sender to the display recipient list and set type to 'private'."""
    cordelia = self.example_user("cordelia")
    display_recipient: List[UserDisplayRecipient] = [
        dict(
            email="*****@*****.**",
            full_name="Aaron Smith",
            id=999,
            is_mirror_dummy=False,
        ),
    ]

    obj = dict(
        recipient_type=Recipient.PERSONAL,
        recipient_type_id=None,
        sender_is_mirror_dummy=False,
        sender_email=cordelia.email,
        sender_full_name=cordelia.full_name,
        sender_id=cordelia.id,
    )

    MessageDict.hydrate_recipient_info(obj, display_recipient)

    self.assertEqual(
        obj["display_recipient"],
        [
            dict(
                email="*****@*****.**",
                full_name="Aaron Smith",
                id=999,
                is_mirror_dummy=False,
            ),
            dict(
                email=cordelia.email,
                full_name=cordelia.full_name,
                id=cordelia.id,
                is_mirror_dummy=False,
            ),
        ],
    )
    self.assertEqual(obj["type"], "private")
def setUp(self) -> None: super().setUp() # TODO: Ideally, this test would use the full flow, rather # than making a mock message like this. message_id = self.send_stream_message(self.example_email('othello'), "Denmark", content="@**test**") message = Message.objects.get(id=message_id) wide_message_dict = MessageDict.wide_dict(message) self.event = { u'command': '@**test**', u'message': wide_message_dict, u'trigger': 'mention', } self.bot_user = get_user("*****@*****.**", get_realm("zulip")) service_class = get_service_interface_class('whatever') # GenericOutgoingWebhookService self.handler = service_class(service_name='test-service', token='abcdef', user_profile=self.bot_user)
def test_get_raw_db_rows(self) -> None:
    """SubMessage.get_raw_db_rows should return one dict per submessage,
    and the same data should appear both in wide_dict output and in
    MessageDict.get_raw_db_rows rows."""
    cordelia = self.example_user("cordelia")
    hamlet = self.example_user("hamlet")
    stream_name = "Verona"
    message_id = self.send_stream_message(
        sender=cordelia,
        stream_name=stream_name,
    )

    def get_raw_rows() -> List[Dict[str, Any]]:
        # Materialize the queryset so we can compare it to plain lists.
        query = SubMessage.get_raw_db_rows([message_id])
        rows = list(query)
        return rows

    # No submessages exist yet.
    rows = get_raw_rows()
    self.assertEqual(rows, [])

    sm1 = SubMessage.objects.create(
        msg_type="whatever",
        content="stuff1",
        message_id=message_id,
        sender=cordelia,
    )

    sm2 = SubMessage.objects.create(
        msg_type="whatever",
        content="stuff2",
        message_id=message_id,
        sender=hamlet,
    )

    expected_data = [
        dict(
            id=sm1.id,
            message_id=message_id,
            sender_id=cordelia.id,
            msg_type="whatever",
            content="stuff1",
        ),
        dict(
            id=sm2.id,
            message_id=message_id,
            sender_id=hamlet.id,
            msg_type="whatever",
            content="stuff2",
        ),
    ]

    self.assertEqual(get_raw_rows(), expected_data)

    # The wide dict for the message should carry the same submessages.
    message = Message.objects.get(id=message_id)
    message_json = MessageDict.wide_dict(message)
    rows = message_json["submessages"]
    rows.sort(key=lambda r: r["id"])
    self.assertEqual(rows, expected_data)

    # And so should the raw-row fetch path.
    msg_rows = MessageDict.get_raw_db_rows([message_id])
    rows = msg_rows[0]["submessages"]
    rows.sort(key=lambda r: r["id"])
    self.assertEqual(rows, expected_data)
def get_client_payload(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
    """Produce a per-client payload from the shared wide_dict.

    Works on a deep copy so finalize_payload's in-place edits never
    leak back into the closed-over wide_dict.
    """
    payload = copy.deepcopy(wide_dict)
    MessageDict.finalize_payload(payload, apply_markdown, client_gravatar)
    return payload
def get_client_payload(apply_markdown: bool, client_gravatar: bool) -> Mapping[str, Any]:
    # Deep-copy so finalize_payload's in-place mutation doesn't affect
    # the shared wide_dict from the enclosing scope.
    dct = copy.deepcopy(wide_dict)
    MessageDict.finalize_payload(dct, apply_markdown, client_gravatar)
    return dct
def process_message_event(event_template, users):
    # type: (Mapping[str, Any], Iterable[Mapping[str, Any]]) -> None
    """Fan a new-message event out to every connected client of the target
    users, choosing the HTML or text rendering per client and enqueuing
    mobile/push notifications for recipients who appear idle."""
    send_to_clients = get_client_info_for_message_event(event_template, users)

    presence_idle_user_ids = set(event_template.get('presence_idle_user_ids', []))

    message_dict = event_template['message_dict']  # type: Dict[str, Any]

    sender_id = message_dict['sender_id']  # type: int
    message_id = message_dict['id']  # type: int
    message_type = message_dict['type']  # type: str
    sending_client = message_dict['client']  # type: Text

    # Build both renderings once; each client below picks one based on
    # its apply_markdown setting.
    message_dict_html = copy.deepcopy(message_dict)
    MessageDict.finalize_payload(message_dict_html, apply_markdown=True)

    message_dict_text = copy.deepcopy(message_dict)
    MessageDict.finalize_payload(message_dict_text, apply_markdown=False)

    # Extra user-specific data to include
    extra_user_data = {}  # type: Dict[int, Any]

    for user_data in users:
        user_profile_id = user_data['id']  # type: int
        flags = user_data.get('flags', [])  # type: Iterable[str]

        # If the recipient was offline and the message was a single or group PM to them
        # or they were @-notified potentially notify more immediately
        private_message = message_type == "private" and user_profile_id != sender_id
        mentioned = 'mentioned' in flags and 'read' not in flags
        stream_push_notify = user_data.get('stream_push_notify', False)

        # We first check if a message is potentially mentionable,
        # since receiver_is_off_zulip is somewhat expensive.
        if private_message or mentioned or stream_push_notify:
            idle = receiver_is_off_zulip(user_profile_id) or (user_profile_id in presence_idle_user_ids)
            always_push_notify = user_data.get('always_push_notify', False)
            stream_name = event_template.get('stream_name')
            result = maybe_enqueue_notifications(user_profile_id, message_id, private_message,
                                                 mentioned, stream_push_notify, stream_name,
                                                 always_push_notify, idle, {})
            result['stream_push_notify'] = stream_push_notify
            extra_user_data[user_profile_id] = result

    for client_data in send_to_clients.values():
        client = client_data['client']
        flags = client_data['flags']
        is_sender = client_data.get('is_sender', False)  # type: bool
        extra_data = extra_user_data.get(client.user_profile_id, None)  # type: Optional[Mapping[str, bool]]

        if not client.accepts_messages():
            # The actual check is the accepts_event() check below;
            # this line is just an optimization to avoid copying
            # message data unnecessarily
            continue

        if client.apply_markdown:
            message_dict = message_dict_html
        else:
            message_dict = message_dict_text

        # Make sure Zephyr mirroring bots know whether stream is invite-only
        if "mirror" in client.client_type_name and event_template.get("invite_only"):
            # Copy before annotating so other clients' payloads stay clean.
            message_dict = message_dict.copy()
            message_dict["invite_only_stream"] = True

        user_event = dict(type='message', message=message_dict, flags=flags)  # type: Dict[str, Any]
        if extra_data is not None:
            user_event.update(extra_data)

        if is_sender:
            local_message_id = event_template.get('local_id', None)
            if local_message_id is not None:
                user_event["local_message_id"] = local_message_id

        if not client.accepts_event(user_event):
            continue

        # The below prevents (Zephyr) mirroring loops.
        if ('mirror' in sending_client and
                sending_client.lower() == client.client_type_name.lower()):
            continue
        client.add_event(user_event)
def process_message_event(event_template, users):
    # type: (Mapping[str, Any], Iterable[Mapping[str, Any]]) -> None
    """Fan a new-message event out to every connected client of the target
    users, choosing the HTML or text rendering per client and enqueuing
    mobile/push notifications for recipients who appear idle."""
    send_to_clients = get_client_info_for_message_event(event_template, users)

    presence_idle_user_ids = set(event_template.get('presence_idle_user_ids', []))

    message_dict = event_template['message_dict']  # type: Dict[str, Any]

    sender_id = message_dict['sender_id']  # type: int
    message_id = message_dict['id']  # type: int
    message_type = message_dict['type']  # type: str
    sending_client = message_dict['client']  # type: Text

    # Build both renderings once; each client below picks one based on
    # its apply_markdown setting.
    message_dict_html = copy.deepcopy(message_dict)
    MessageDict.finalize_payload(message_dict_html, apply_markdown=True)

    message_dict_text = copy.deepcopy(message_dict)
    MessageDict.finalize_payload(message_dict_text, apply_markdown=False)

    # Extra user-specific data to include
    extra_user_data = {}  # type: Dict[int, Any]

    for user_data in users:
        user_profile_id = user_data['id']  # type: int
        flags = user_data.get('flags', [])  # type: Iterable[str]

        # If the recipient was offline and the message was a single or group PM to them
        # or they were @-notified potentially notify more immediately
        private_message = message_type == "private" and user_profile_id != sender_id
        mentioned = 'mentioned' in flags and 'read' not in flags
        stream_push_notify = user_data.get('stream_push_notify', False)

        # We first check if a message is potentially mentionable,
        # since receiver_is_off_zulip is somewhat expensive.
        if private_message or mentioned or stream_push_notify:
            idle = receiver_is_off_zulip(user_profile_id) or (user_profile_id in presence_idle_user_ids)
            always_push_notify = user_data.get('always_push_notify', False)
            stream_name = event_template.get('stream_name')
            result = maybe_enqueue_notifications(user_profile_id, message_id, private_message,
                                                 mentioned, stream_push_notify, stream_name,
                                                 always_push_notify, idle, {})
            result['stream_push_notify'] = stream_push_notify
            extra_user_data[user_profile_id] = result

    for client_data in send_to_clients.values():
        client = client_data['client']
        flags = client_data['flags']
        is_sender = client_data.get('is_sender', False)  # type: bool
        extra_data = extra_user_data.get(client.user_profile_id, None)  # type: Optional[Mapping[str, bool]]

        if not client.accepts_messages():
            # The actual check is the accepts_event() check below;
            # this line is just an optimization to avoid copying
            # message data unnecessarily
            continue

        if client.apply_markdown:
            message_dict = message_dict_html
        else:
            message_dict = message_dict_text

        # Make sure Zephyr mirroring bots know whether stream is invite-only
        if "mirror" in client.client_type_name and event_template.get("invite_only"):
            # Copy before annotating so other clients' payloads stay clean.
            message_dict = message_dict.copy()
            message_dict["invite_only_stream"] = True

        user_event = dict(type='message', message=message_dict, flags=flags)  # type: Dict[str, Any]
        if extra_data is not None:
            user_event.update(extra_data)

        if is_sender:
            local_message_id = event_template.get('local_id', None)
            if local_message_id is not None:
                user_event["local_message_id"] = local_message_id

        if not client.accepts_event(user_event):
            continue

        # The below prevents (Zephyr) mirroring loops.
        if ('mirror' in sending_client and
                sending_client.lower() == client.client_type_name.lower()):
            continue
        client.add_event(user_event)
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             use_first_unread_anchor = REQ(default=False, converter=ujson.loads),
                             apply_markdown=REQ(default=True, converter=ujson.loads)):
    # type: (HttpRequest, UserProfile, int, int, int, Optional[List[Dict[str, Any]]], bool, bool) -> HttpResponse
    """Fetch a window of messages around `anchor` (num_before before it,
    num_after after it), optionally filtered by a narrow, and return the
    hydrated message dicts plus per-user flags as a JSON response."""
    include_history = ok_to_include_history(narrow, user_profile.realm)

    # Pick the base table: all messages (history), the user's
    # UserMessage rows, or a join of both when a narrow needs
    # message-table columns.
    if include_history and not use_first_unread_anchor:
        query = select([column("id").label("message_id")], None, table("zerver_message"))
        inner_msg_id_col = literal_column("zerver_message.id")
    elif narrow is None:
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       table("zerver_usermessage"))
        inner_msg_id_col = column("message_id")
    else:
        # TODO: Don't do this join if we're not doing a search
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       join(table("zerver_usermessage"), table("zerver_message"),
                            literal_column("zerver_usermessage.message_id") ==
                            literal_column("zerver_message.id")))
        inner_msg_id_col = column("message_id")

    num_extra_messages = 1
    is_search = False

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term['operator'] == "is":
                verbose_operators.append("is:" + term['operand'])
            else:
                verbose_operators.append(term['operator'])
        request._log_data['extra'] = "[%s]" % (",".join(verbose_operators),)

        # Build the query for the narrow
        num_extra_messages = 0
        builder = NarrowBuilder(user_profile, inner_msg_id_col)
        search_term = None  # type: Optional[Dict[str, Any]]
        for term in narrow:
            if term['operator'] == 'search':
                if not is_search:
                    search_term = term
                    query = query.column(column("subject")).column(column("rendered_content"))
                    is_search = True
                else:
                    # Join the search operators if there are multiple of them
                    search_term['operand'] += ' ' + term['operand']
            else:
                query = builder.add_term(query, term)
        if is_search:
            query = builder.add_term(query, search_term)

    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message.  If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages

    sa_conn = get_sqlalchemy_connection()
    if use_first_unread_anchor:
        # Replace the caller-provided anchor with the id of the first
        # unread, non-muted message in this narrow (or a sentinel past
        # the end when everything is read).
        condition = column("flags").op("&")(UserMessage.flags.read.mask) == 0

        # We exclude messages on muted topics when finding the first unread
        # message in this narrow
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        if muting_conditions:
            condition = and_(condition, *muting_conditions)

        first_unread_query = query.where(condition)
        first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
        first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
        if len(first_unread_result) > 0:
            anchor = first_unread_result[0][0]
        else:
            anchor = LARGER_THAN_MAX_MESSAGE_ID

    before_query = None
    after_query = None
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        before_query = query.where(inner_msg_id_col <= before_anchor) \
                            .order_by(inner_msg_id_col.desc()).limit(num_before)
    if num_after != 0:
        after_query = query.where(inner_msg_id_col >= anchor) \
                           .order_by(inner_msg_id_col.asc()).limit(num_after)

    if anchor == LARGER_THAN_MAX_MESSAGE_ID:
        # There's no need for an after_query if we're targeting just the target message.
        after_query = None

    if before_query is not None:
        if after_query is not None:
            query = union_all(before_query.self_group(), after_query.self_group())
        else:
            query = before_query
    elif after_query is not None:
        query = after_query
    else:
        # This can happen when a narrow is specified.
        query = query.where(inner_msg_id_col == anchor)

    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id").asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_old_messages */")
    query_result = list(sa_conn.execute(query).fetchall())

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    search_fields = dict()  # type: Dict[int, Dict[str, Text]]
    message_ids = []  # type: List[int]
    user_message_flags = {}  # type: Dict[int, List[str]]
    if include_history:
        message_ids = [row[0] for row in query_result]

        # TODO: This could be done with an outer join instead of two queries
        user_message_flags = dict((user_message.message_id, user_message.flags_list()) for user_message in
                                  UserMessage.objects.filter(user_profile=user_profile,
                                                             message__id__in=message_ids))
        for row in query_result:
            message_id = row[0]
            if user_message_flags.get(message_id) is None:
                # Historical messages the user never received get
                # synthetic read/historical flags.
                user_message_flags[message_id] = ["read", "historical"]
            if is_search:
                (_, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)
    else:
        for row in query_result:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = parse_usermessage_flags(flags)

            message_ids.append(message_id)

            if is_search:
                (_, _, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)

    cache_transformer = lambda row: MessageDict.build_dict_from_raw_db_row(row, apply_markdown)
    id_fetcher = lambda row: row['id']

    message_dicts = generic_bulk_cached_fetch(lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
                                              Message.get_raw_db_rows,
                                              message_ids,
                                              id_fetcher=id_fetcher,
                                              cache_transformer=cache_transformer,
                                              extractor=extract_message_dict,
                                              setter=stringify_message_dict)

    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)

    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success", "msg": ""}
    return json_success(ret)
def test_raw_unread_personal_from_self(self) -> None:
    """Verify unread bookkeeping for PMs sent by an API client,
    including the edge case of a PM sent to oneself.
    """
    hamlet = self.example_user("hamlet")

    def send_unread_pm(other_user: UserProfile) -> Message:
        # It is rare for Hamlet to send a PM and have it stay unread
        # for himself, but non-human senders (API clients) legitimately
        # produce exactly that.
        message_id = self.send_personal_message(
            from_user=hamlet,
            to_user=other_user,
            sending_client_name="some_api_program",
        )

        # Sanity-check the setup: the message must not look human-sent...
        sent_message = Message.objects.get(id=message_id)
        self.assertFalse(sent_message.sent_by_human())

        # ...and therefore should remain unread even for the sender.
        sender_row = UserMessage.objects.get(
            user_profile_id=hamlet.id,
            message_id=message_id,
        )
        self.assertFalse(sender_row.flags.read)

        return sent_message

    othello = self.example_user("othello")
    othello_msg = send_unread_pm(other_user=othello)

    # Inspect the unread data structure.
    raw_unread_data = get_raw_unread_data(
        user_profile=hamlet,
    )
    pm_dict = raw_unread_data["pm_dict"]
    self.assertEqual(set(pm_dict.keys()), {othello_msg.id})

    # NOTE: `sender_id` is a legacy misnomer -- it actually holds the
    # *other* user in the conversation, which is Othello here.
    self.assertEqual(
        pm_dict[othello_msg.id],
        {"sender_id": othello.id},
    )

    cordelia = self.example_user("cordelia")
    cordelia_msg = send_unread_pm(other_user=cordelia)

    apply_unread_message_event(
        user_profile=hamlet,
        state=raw_unread_data,
        message=MessageDict.wide_dict(cordelia_msg),
        flags=[],
    )
    self.assertEqual(
        set(pm_dict.keys()),
        {othello_msg.id, cordelia_msg.id},
    )
    # Same legacy misnomer as above.
    self.assertEqual(
        pm_dict[cordelia_msg.id],
        {"sender_id": cordelia.id},
    )

    # A PM from Hamlet to himself goes through the same path.
    hamlet_msg = send_unread_pm(other_user=hamlet)

    apply_unread_message_event(
        user_profile=hamlet,
        state=raw_unread_data,
        message=MessageDict.wide_dict(hamlet_msg),
        flags=[],
    )
    expected_ids = {othello_msg.id, cordelia_msg.id, hamlet_msg.id}
    self.assertEqual(set(pm_dict.keys()), expected_ids)
    self.assertEqual(
        pm_dict[hamlet_msg.id],
        {"sender_id": hamlet.id},
    )

    # A fresh call to get_raw_unread_data should agree with the
    # incrementally maintained state above.
    raw_unread_data = get_raw_unread_data(
        user_profile=hamlet,
    )
    pm_dict = raw_unread_data["pm_dict"]
    self.assertEqual(set(pm_dict.keys()), expected_ids)
    self.assertEqual(
        pm_dict[hamlet_msg.id],
        {"sender_id": hamlet.id},
    )
def test_get_raw_db_rows(self) -> None:
    """SubMessage.get_raw_db_rows should return plain dict rows that
    match what MessageDict embeds under the 'submessages' key.
    """
    cordelia = self.example_user("cordelia")
    hamlet = self.example_user("hamlet")
    stream_name = "Verona"
    message_id = self.send_stream_message(
        sender_email=cordelia.email,
        stream_name=stream_name,
    )

    def fetch_raw_rows() -> List[Dict[str, Any]]:
        # Materialize the query so it can be compared as a list.
        return list(SubMessage.get_raw_db_rows([message_id]))

    # No submessages exist yet.
    self.assertEqual(fetch_raw_rows(), [])

    first_sub = SubMessage.objects.create(
        msg_type="whatever",
        content="stuff1",
        message_id=message_id,
        sender=cordelia,
    )
    second_sub = SubMessage.objects.create(
        msg_type="whatever",
        content="stuff2",
        message_id=message_id,
        sender=hamlet,
    )

    expected_data = [
        {
            "id": first_sub.id,
            "message_id": message_id,
            "sender_id": cordelia.id,
            "msg_type": "whatever",
            "content": "stuff1",
        },
        {
            "id": second_sub.id,
            "message_id": message_id,
            "sender_id": hamlet.id,
            "msg_type": "whatever",
            "content": "stuff2",
        },
    ]
    self.assertEqual(fetch_raw_rows(), expected_data)

    # The same rows should surface in the wide message dict...
    message = Message.objects.get(id=message_id)
    wide_rows = MessageDict.wide_dict(message)["submessages"]
    wide_rows.sort(key=lambda row: row["id"])
    self.assertEqual(wide_rows, expected_data)

    # ...and in MessageDict's raw DB rows.
    raw_rows = MessageDict.get_raw_db_rows([message_id])[0]["submessages"]
    raw_rows.sort(key=lambda row: row["id"])
    self.assertEqual(raw_rows, expected_data)
def assert_topic_links(links: List[str], msg: Message) -> None:
    # Render the message uncached and check its topic-links field
    # against the expected list.
    rendered = MessageDict.to_dict_uncached_helper([msg])
    self.assertEqual(rendered[0][TOPIC_LINKS], links)