def get_user(self, user_profile_id):
    # type: (int) -> Optional[UserProfile]
    """ Get a UserProfile object from the user_profile_id. """
    try:
        return get_user_profile_by_id(user_profile_id)
    except UserProfile.DoesNotExist:
        return None
def consume(self, event):
    # type: (Mapping[str, Any]) -> None
    user_profile = get_user_profile_by_id(event["user_profile_id"])
    client = get_client(event["client"])
    log_time = timestamp_to_datetime(event["time"])
    query = event["query"]
    do_update_user_activity(user_profile, client, query, log_time)
def send_to_missed_message_address(address, message):
    # type: (Text, message.Message) -> None
    token = get_missed_message_token_from_address(address)
    key = missed_message_redis_key(token)
    result = redis_client.hmget(key, 'user_profile_id', 'recipient_id', 'subject')
    if not all(val is not None for val in result):
        raise ZulipEmailForwardError('Missing missed message address data')
    user_profile_id, recipient_id, subject_b = result  # type: (bytes, bytes, bytes)

    user_profile = get_user_profile_by_id(user_profile_id)
    recipient = Recipient.objects.get(id=recipient_id)
    display_recipient = get_display_recipient(recipient)

    # Testing with str so we don't depend on the list return type from
    # get_display_recipient
    if not isinstance(display_recipient, str):
        recipient_str = u','.join([user['email'] for user in display_recipient])
    else:
        recipient_str = display_recipient

    body = construct_zulip_body(message, user_profile.realm)

    if recipient.type == Recipient.STREAM:
        recipient_type_name = 'stream'
    else:
        recipient_type_name = 'private'

    internal_send_message(user_profile.realm, user_profile.email,
                          recipient_type_name, recipient_str,
                          subject_b.decode('utf-8'), body)
    logger.info("Successfully processed email from %s to %s" % (
        user_profile.email, recipient_str))
def consume(self, event):
    logging.info("Received event: %s" % (event,))
    user_profile = get_user_profile_by_id(event["user_profile_id"])
    client = get_client(event["client"])
    log_time = timestamp_to_datetime(event["time"])
    status = event["status"]
    do_update_user_presence(user_profile, client, log_time, status)
def consume(self, data: Mapping[str, Any]) -> None:
    if "email" in data:
        # When upgrading from a version up through 1.7.1, there may be
        # existing items in the queue with `email` instead of `prereg_id`.
        invitee = PreregistrationUser.objects.filter(
            email__iexact=data["email"].strip()).latest("invited_at")
    else:
        invitee = PreregistrationUser.objects.filter(id=data["prereg_id"]).first()
        if invitee is None:
            # The invitation could have been revoked
            return

    referrer = get_user_profile_by_id(data["referrer_id"])
    logger.info("Sending invitation for realm %s to %s" % (referrer.realm.string_id, invitee.email))
    do_send_confirmation_email(invitee, referrer)

    # queue invitation reminder for two days from now.
    link = create_confirmation_link(invitee, referrer.realm.host, Confirmation.INVITATION)
    context = common_context(referrer)
    context.update({
        'activate_url': link,
        'referrer_name': referrer.full_name,
        'referrer_email': referrer.email,
        'referrer_realm_name': referrer.realm.name,
    })
    send_future_email(
        "zerver/emails/invitation_reminder",
        referrer.realm,
        to_emails=[invitee.email],
        from_address=FromAddress.tokenized_no_reply_address(),
        language=referrer.realm.default_language,
        context=context,
        delay=datetime.timedelta(days=2))
def import_uploads_s3(bucket_name, import_dir, avatar_bucket=False):
    # type: (str, Path, bool) -> None
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)

    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    for record in records:
        key = Key(bucket)

        if avatar_bucket:
            # For avatars, we need to rehash the user's email with the
            # new server's avatar salt
            avatar_hash = user_avatar_hash(record['user_profile_email'])
            key.key = avatar_hash
            if record['s3_path'].endswith('.original'):
                key.key += '.original'
        else:
            key.key = record['s3_path']

        user_profile_id = int(record['user_profile_id'])
        # Support email gateway bot and other cross-realm messages
        if user_profile_id in id_maps["user_profile"]:
            logging.info("Uploaded by ID mapped user: %s!" % (user_profile_id,))
            user_profile_id = id_maps["user_profile"][user_profile_id]
        user_profile = get_user_profile_by_id(user_profile_id)
        key.set_metadata("user_profile_id", str(user_profile.id))
        key.set_metadata("realm_id", str(user_profile.realm.id))
        key.set_metadata("orig_last_modified", record['last_modified'])

        # Use the content type recorded in the export; boto Key objects
        # are not subscriptable, so this must come from the record.
        headers = {'Content-Type': record['content_type']}

        key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers)
def consume(self, event: Mapping[str, Any]) -> None:
    user_profile_id = event['user_profile_id']
    user_profile = get_user_profile_by_id(user_profile_id)

    message = cast(Dict[str, Any], event['message'])

    # TODO: Do we actually want to allow multiple Services per bot user?
    services = get_bot_services(user_profile_id)
    for service in services:
        bot_handler = get_bot_handler(str(service.name))
        if bot_handler is None:
            logging.error("Error: User %s has bot with invalid embedded bot service %s" % (
                user_profile_id, service.name))
            continue
        try:
            if hasattr(bot_handler, 'initialize'):
                bot_handler.initialize(self.get_bot_api_client(user_profile))
            if event['trigger'] == 'mention':
                message['content'] = extract_query_without_mention(
                    message=message,
                    client=self.get_bot_api_client(user_profile),
                )
                assert message['content'] is not None
            bot_handler.handle_message(
                message=message,
                bot_handler=self.get_bot_api_client(user_profile)
            )
        except EmbeddedBotQuitException as e:
            logging.warning(str(e))
def fake_message_sender(event):
    # type: (Dict[str, Any]) -> None
    """This function is used only for Casper and backend tests, where
    rabbitmq is disabled"""
    log_data = dict()  # type: Dict[str, Any]
    record_request_start_data(log_data)
    req = event['request']
    try:
        sender = get_user_profile_by_id(event['server_meta']['user_id'])
        client = get_client("website")

        msg_id = check_send_message(sender, client, req['type'],
                                    extract_recipients(req['to']),
                                    req['subject'], req['content'],
                                    local_id=req.get('local_id', None),
                                    sender_queue_id=req.get('queue_id', None))
        resp = {"result": "success", "msg": "", "id": msg_id}
    except JsonableError as e:
        resp = {"result": "error", "msg": str(e)}

    server_meta = event['server_meta']
    server_meta.update({'worker_log_data': log_data,
                        'time_request_finished': time.time()})
    result = {'response': resp, 'req_id': event['req_id'],
              'server_meta': server_meta}
    respond_send_message(result)
def from_dict(cls, d: MutableMapping[str, Any]) -> 'ClientDescriptor':
    if 'user_profile_email' not in d:
        # Temporary migration for the addition of the new user_profile_email field
        from zerver.models import get_user_profile_by_id
        d['user_profile_email'] = get_user_profile_by_id(d['user_profile_id']).email
    if 'client_type' in d:
        # Temporary migration for the rename of client_type to client_type_name
        d['client_type_name'] = d['client_type']
    if 'client_gravatar' not in d:
        # Temporary migration for the addition of the client_gravatar field
        d['client_gravatar'] = False
    ret = cls(
        d['user_profile_id'],
        d['user_profile_email'],
        d['realm_id'],
        EventQueue.from_dict(d['event_queue']),
        d['event_types'],
        d['client_type_name'],
        d['apply_markdown'],
        d['client_gravatar'],
        d['all_public_streams'],
        d['queue_timeout'],
        d.get('narrow', [])
    )
    ret.last_connection_time = d['last_connection_time']
    return ret
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """This should be called when a message that had previously triggered a
    mobile push notification is read.  It sends a push to the mobile
    app, when the message is read on the server, to remove the message
    from the notification.
    """
    user_profile = get_user_profile_by_id(user_profile_id)
    message_ids = bulk_access_messages_expect_usermessage(user_profile_id, message_ids)
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, message_ids)

    if uses_notification_bouncer():
        try:
            send_notifications_to_bouncer(user_profile_id, {}, gcm_payload, gcm_options)
        except requests.ConnectionError:  # nocoverage
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:push_notification" % (
                        event['user_profile_id'],))
    else:
        android_devices = list(PushDeviceToken.objects.filter(
            user=user_profile, kind=PushDeviceToken.GCM))
        if android_devices:
            send_android_push_notification(android_devices, gcm_payload, gcm_options)

    UserMessage.objects.filter(
        user_profile_id=user_profile_id,
        message_id__in=message_ids,
    ).update(
        flags=F('flags').bitand(
            ~UserMessage.flags.active_mobile_push_notification))
def send_to_missed_message_address(address, message):
    # type: (text_type, message.Message) -> None
    token = get_missed_message_token_from_address(address)
    key = missed_message_redis_key(token)
    result = redis_client.hmget(key, 'user_profile_id', 'recipient_id', 'subject')
    if not all(val is not None for val in result):
        raise ZulipEmailForwardError('Missing missed message address data')
    user_profile_id, recipient_id, subject = result

    user_profile = get_user_profile_by_id(user_profile_id)
    recipient = Recipient.objects.get(id=recipient_id)
    display_recipient = get_display_recipient(recipient)

    # Testing with basestring so we don't depend on the list return type from
    # get_display_recipient
    if not isinstance(display_recipient, six.string_types):
        recipient_str = ','.join([user['email'] for user in display_recipient])
    else:
        recipient_str = display_recipient

    body = filter_footer(extract_body(message))
    body += extract_and_upload_attachments(message, user_profile.realm)
    if not body:
        body = '(No email body)'

    if recipient.type == Recipient.STREAM:
        recipient_type_name = 'stream'
    else:
        recipient_type_name = 'private'

    internal_send_message(user_profile.email, recipient_type_name,
                          recipient_str, subject, body)
def avatar(request: HttpRequest, email_or_id: str, medium: bool=False) -> HttpResponse:
    """Accepts an email address or user ID and returns the avatar"""
    is_email = False
    try:
        int(email_or_id)
    except ValueError:
        is_email = True

    try:
        if is_email:
            realm = request.user.realm
            user_profile = get_user_including_cross_realm(email_or_id, realm)
        else:
            user_profile = get_user_profile_by_id(email_or_id)
        # If there is a valid user account passed in, use its avatar
        url = avatar_url(user_profile, medium=medium)
    except UserProfile.DoesNotExist:
        # If there is no such user, treat it as a new gravatar
        email = email_or_id
        avatar_version = 1
        url = get_gravatar_url(email, avatar_version, medium)

    # We can rely on the url already having query parameters. Because
    # our templates depend on being able to use the ampersand to
    # add query parameters to our url, get_avatar_url does '?x=x'
    # hacks to prevent us from having to jump through decode/encode hoops.
    assert '?' in url
    url += '&' + request.META['QUERY_STRING']
    return redirect(url)
def response_listener(error_response):
    # type: (Dict[str, SupportsInt]) -> None
    identifier = error_response['identifier']
    key = get_apns_key(identifier)
    if not redis_client.exists(key):
        logging.warn("APNs key, {}, doesn't exist.".format(key))
        return

    code = error_response['status']
    assert isinstance(code, int)

    errmsg = ERROR_CODES[code]
    data = redis_client.hgetall(key)
    token = data['token']
    user = get_user_profile_by_id(int(data['user_id']))
    b64_token = hex_to_b64(token)

    logging.warn("APNS: Failed to deliver APNS notification to %s, reason: %s" % (b64_token, errmsg))
    if code == 8:
        # Invalid Token, remove from our database
        logging.warn("APNS: Removing token from database due to above failure")
        try:
            PushDeviceToken.objects.get(user=user, token=b64_token).delete()
        except PushDeviceToken.DoesNotExist:
            pass
def test_change_delivery_email_end_to_end_with_admins_visibility(self) -> None:
    user_profile = self.example_user('hamlet')
    do_set_realm_property(user_profile.realm, 'email_address_visibility',
                          Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)

    old_email = user_profile.email
    new_email = '*****@*****.**'
    self.login(self.example_email('hamlet'))
    obj = EmailChangeStatus.objects.create(new_email=new_email,
                                           old_email=old_email,
                                           user_profile=user_profile,
                                           realm=user_profile.realm)
    key = generate_key()
    Confirmation.objects.create(content_object=obj,
                                date_sent=now(),
                                confirmation_key=key,
                                type=Confirmation.EMAIL_CHANGE)
    url = confirmation_url(key, user_profile.realm.host, Confirmation.EMAIL_CHANGE)
    response = self.client_get(url)

    self.assertEqual(response.status_code, 200)
    self.assert_in_success_response(["This confirms that the email address for your Zulip"],
                                    response)

    user_profile = get_user_profile_by_id(user_profile.id)
    self.assertEqual(user_profile.delivery_email, new_email)
    self.assertEqual(user_profile.email, "*****@*****.**")
    obj.refresh_from_db()
    self.assertEqual(obj.status, 1)
    with self.assertRaises(UserProfile.DoesNotExist):
        get_user(old_email, user_profile.realm)
    with self.assertRaises(UserProfile.DoesNotExist):
        get_user_by_delivery_email(old_email, user_profile.realm)
    self.assertEqual(get_user_by_delivery_email(new_email, user_profile.realm), user_profile)
def notify_bot_owner(event: Dict[str, Any],
                     request_data: Dict[str, Any],
                     status_code: Optional[int]=None,
                     response_content: Optional[AnyStr]=None,
                     failure_message: Optional[str]=None,
                     exception: Optional[Exception]=None) -> None:
    message_url = get_message_url(event)
    bot_id = event['user_profile_id']
    bot_owner = get_user_profile_by_id(bot_id).bot_owner

    notification_message = "[A message](%s) triggered an outgoing webhook." % (message_url,)
    if failure_message:
        notification_message += "\n" + failure_message
    if status_code:
        notification_message += "\nThe webhook got a response with status code *%s*." % (status_code,)
    if response_content:
        notification_message += "\nThe response contains the following payload:\n" \
                                "```\n%s\n```" % (response_content,)
    if exception:
        notification_message += "\nWhen trying to send a request to the webhook service, an exception " \
                                "of type %s occurred:\n```\n%s\n```" % (
                                    type(exception).__name__, str(exception))

    message_info = dict(
        type='private',
        display_recipient=[dict(email=bot_owner.email)],
    )
    response_data = dict(content=notification_message)
    send_response_message(bot_id=bot_id, message_info=message_info, response_data=response_data)
def get_user(self, user_profile_id: int) -> Optional[UserProfile]:
    """Override the Django method for getting a UserProfile object from
    the user_profile_id."""
    try:
        return get_user_profile_by_id(user_profile_id)
    except UserProfile.DoesNotExist:
        return None
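Django resolves request.user on every session-authenticated request by calling get_user on the configured authentication backend, so returning None here simply makes the request anonymous instead of raising. A minimal sketch of wiring a backend like this into a project's settings; the dotted path below is an assumption for illustration, not taken from the example above:

# settings.py -- hypothetical dotted path; point this at wherever the
# backend class defining get_user() actually lives in your project.
AUTHENTICATION_BACKENDS = (
    'zproject.backends.EmailAuthBackend',
)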
def send_to_missed_message_address(address, message): # type: (text_type, message.Message) -> None token = get_missed_message_token_from_address(address) key = missed_message_redis_key(token) result = redis_client.hmget(key, "user_profile_id", "recipient_id", "subject") if not all(val is not None for val in result): raise ZulipEmailForwardError("Missing missed message address data") user_profile_id, recipient_id, subject = result user_profile = get_user_profile_by_id(user_profile_id) recipient = Recipient.objects.get(id=recipient_id) display_recipient = get_display_recipient(recipient) # Testing with basestring so we don't depend on the list return type from # get_display_recipient if not isinstance(display_recipient, six.string_types): recipient_str = ",".join([user["email"] for user in display_recipient]) else: recipient_str = display_recipient body = filter_footer(extract_body(message)) body += extract_and_upload_attachments(message, user_profile.realm) if not body: body = "(No email body)" if recipient.type == Recipient.STREAM: recipient_type_name = "stream" else: recipient_type_name = "private" internal_send_message(user_profile.email, recipient_type_name, recipient_str, subject, body) logging.info("Successfully processed email from %s to %s" % (user_profile.email, recipient_str))
def get_realm_for_filename(path: str) -> Optional[int]:
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    key = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path)
    if key is None:
        # This happens if the key does not exist.
        return None
    return get_user_profile_by_id(key.metadata["user_profile_id"]).realm_id
def consume(self, event: Mapping[str, Any]) -> None:
    logging.debug("Received presence event: %s" % (event,))
    user_profile = get_user_profile_by_id(event["user_profile_id"])
    client = get_client(event["client"])
    log_time = timestamp_to_datetime(event["time"])
    status = event["status"]
    do_update_user_presence(user_profile, client, log_time, status)
def handle_missedmessage_emails(user_profile_id: int,
                                missed_email_events: Iterable[Dict[str, Any]]) -> None:
    message_ids = {event.get('message_id'): event.get('trigger') for event in missed_email_events}

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    messages = Message.objects.filter(usermessage__user_profile_id=user_profile,
                                      id__in=message_ids,
                                      usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    messages_by_recipient_subject = defaultdict(list)  # type: Dict[Tuple[int, str], List[Message]]
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PM's group using (recipient, sender).
            messages_by_recipient_subject[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_recipient_subject[(msg.recipient_id, msg.topic_name())].append(msg)

    message_count_by_recipient_subject = {
        recipient_subject: len(msgs)
        for recipient_subject, msgs in messages_by_recipient_subject.items()
    }

    for msg_list in messages_by_recipient_subject.values():
        msg = min(msg_list, key=lambda msg: msg.pub_date)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    recipient_subjects = []  # type: List[Tuple[Tuple[int, str], int]]
    for recipient_subject, msg_list in messages_by_recipient_subject.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        recipient_subjects.append((recipient_subject, max_message_id))

    recipient_subjects = sorted(recipient_subjects, key=lambda x: x[1])

    # Send an email per recipient subject pair
    for recipient_subject, ignored_max_id in recipient_subjects:
        unique_messages = {}
        for m in messages_by_recipient_subject[recipient_subject]:
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_ids.get(m.id)
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_recipient_subject[recipient_subject],
        )
def delete_all_deactivated_user_sessions() -> None:
    for session in Session.objects.all():
        user_profile_id = get_session_user(session)
        if user_profile_id is None:
            continue
        user_profile = get_user_profile_by_id(user_profile_id)
        if not user_profile.is_active or user_profile.realm.deactivated:
            logging.info("Deactivating session for deactivated user %s" % (user_profile.email,))
            delete_session(session)
def get_message_url(event: Dict[str, Any]) -> str:
    bot_user = get_user_profile_by_id(event['user_profile_id'])
    message = event['message']
    realm = bot_user.realm
    return near_message_url(
        realm=realm,
        message=message,
    )
def render_markdown(message, content, realm=None, realm_alert_words=None,
                    user_ids=None, mention_data=None):
    # type: (Message, Text, Optional[Realm], Optional[RealmAlertWords], Optional[Set[int]], Optional[bugdown.MentionData]) -> Text
    """Return HTML for given markdown. Bugdown may add properties to the
    message object such as `mentions_user_ids`, `mentions_user_group_ids`, and
    `mentions_wildcard`.  These are only on this Django object and are not
    saved in the database.
    """
    if user_ids is None:
        message_user_ids = set()  # type: Set[int]
    else:
        message_user_ids = user_ids

    if message is not None:
        message.mentions_wildcard = False
        message.mentions_user_ids = set()
        message.mentions_user_group_ids = set()
        message.alert_words = set()
        message.links_for_preview = set()

        if realm is None:
            realm = message.get_realm()

    possible_words = set()  # type: Set[Text]
    if realm_alert_words is not None:
        for user_id, words in realm_alert_words.items():
            if user_id in message_user_ids:
                possible_words.update(set(words))

    if message is None:
        # If we don't have a message, then we are in the compose preview
        # codepath, so we know we are dealing with a human.
        sent_by_bot = False
    else:
        sent_by_bot = get_user_profile_by_id(message.sender_id).is_bot

    # DO MAIN WORK HERE -- call bugdown to convert
    rendered_content = bugdown.convert(
        content,
        message=message,
        message_realm=realm,
        possible_words=possible_words,
        sent_by_bot=sent_by_bot,
        mention_data=mention_data,
    )

    if message is not None:
        message.user_ids_with_alert_words = set()

        if realm_alert_words is not None:
            for user_id, words in realm_alert_words.items():
                if user_id in message_user_ids:
                    if set(words).intersection(message.alert_words):
                        message.user_ids_with_alert_words.add(user_id)

    return rendered_content
def render_markdown(message: Message,
                    content: str,
                    realm: Optional[Realm]=None,
                    realm_alert_words: Optional[RealmAlertWords]=None,
                    user_ids: Optional[Set[int]]=None,
                    mention_data: Optional[bugdown.MentionData]=None,
                    email_gateway: Optional[bool]=False) -> str:
    """Return HTML for given markdown. Bugdown may add properties to the
    message object such as `mentions_user_ids`, `mentions_user_group_ids`, and
    `mentions_wildcard`.  These are only on this Django object and are not
    saved in the database.
    """
    if user_ids is None:
        message_user_ids = set()  # type: Set[int]
    else:
        message_user_ids = user_ids

    message.mentions_wildcard = False
    message.mentions_user_ids = set()
    message.mentions_user_group_ids = set()
    message.alert_words = set()
    message.links_for_preview = set()

    if realm is None:
        realm = message.get_realm()

    possible_words = set()  # type: Set[str]
    if realm_alert_words is not None:
        for user_id, words in realm_alert_words.items():
            if user_id in message_user_ids:
                possible_words.update(set(words))

    sent_by_bot = get_user_profile_by_id(message.sender_id).is_bot

    # DO MAIN WORK HERE -- call bugdown to convert
    rendered_content = bugdown.convert(
        content,
        message=message,
        message_realm=realm,
        possible_words=possible_words,
        sent_by_bot=sent_by_bot,
        mention_data=mention_data,
        email_gateway=email_gateway
    )

    if message is not None:
        message.user_ids_with_alert_words = set()

        if realm_alert_words is not None:
            for user_id, words in realm_alert_words.items():
                if user_id in message_user_ids:
                    if set(words).intersection(message.alert_words):
                        message.user_ids_with_alert_words.add(user_id)

    return rendered_content
def request_retry(event, failure_message):
    # type: (Dict[str, Any], Text) -> None
    event['failed_tries'] += 1
    if event['failed_tries'] > MAX_REQUEST_RETRIES:
        bot_user = get_user_profile_by_id(event['user_profile_id'])
        failure_message = "Maximum retries exceeded! " + failure_message
        fail_with_message(event, failure_message)
        logging.warning("Maximum retries exceeded for trigger:%s event:%s" % (
            bot_user.email, event['command']))
    else:
        queue_json_publish("outgoing_webhooks", event, lambda x: None)
def failure_processor(event: Dict[str, Any]) -> None:
    """
    The name of the argument is 'event' on purpose. This argument will hide
    the 'event' argument of the request_retry function. Keeping the same name
    results in a smaller diff.
    """
    bot_user = get_user_profile_by_id(event['user_profile_id'])
    fail_with_message(event, "Maximum retries exceeded! " + failure_message)
    notify_bot_owner(event, request_data, exception=exception)
    logging.warning("Maximum retries exceeded for trigger:%s event:%s" % (
        bot_user.email, event['command']))
def consume(self, event: Mapping[str, Any]) -> None:
    if event['type'] == 'mark_stream_messages_as_read':
        user_profile = get_user_profile_by_id(event['user_profile_id'])

        for stream_id in event['stream_ids']:
            # Since the user just unsubscribed, we don't require
            # an active Subscription object (otherwise, private
            # streams would never be accessible)
            (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id,
                                                           require_active=False)
            do_mark_stream_messages_as_read(user_profile, stream)
def from_dict(cls, d):
    if 'user_profile_email' not in d:
        # Temporary migration for the addition of the new user_profile_email field
        from zerver.models import get_user_profile_by_id
        d['user_profile_email'] = get_user_profile_by_id(d['user_profile_id']).email
    ret = cls(d['user_profile_id'], d['user_profile_email'], d['realm_id'],
              EventQueue.from_dict(d['event_queue']), d['event_types'],
              d['client_type'], d['apply_markdown'], d['all_public_streams'],
              d['queue_timeout'], d.get('narrow', []))
    ret.last_connection_time = d['last_connection_time']
    return ret
def build_email(template_prefix: str, to_user_id: Optional[int]=None,
                to_email: Optional[Text]=None, from_name: Optional[Text]=None,
                from_address: Optional[Text]=None, reply_to_email: Optional[Text]=None,
                context: Optional[Dict[str, Any]]=None) -> EmailMultiAlternatives:
    # Callers should pass exactly one of to_user_id and to_email.
    assert (to_user_id is None) ^ (to_email is None)
    if to_user_id is not None:
        to_user = get_user_profile_by_id(to_user_id)
        # Change to formataddr((to_user.full_name, to_user.email)) once
        # https://github.com/zulip/zulip/issues/4676 is resolved
        to_email = to_user.email

    if context is None:
        context = {}

    context.update({
        'realm_name_in_notifications': False,
        'support_email': FromAddress.SUPPORT,
        'email_images_base_uri': settings.ROOT_DOMAIN_URI + '/static/images/emails',
        'physical_address': settings.PHYSICAL_ADDRESS,
    })
    subject = loader.render_to_string(template_prefix + '.subject',
                                      context=context,
                                      using='Jinja2_plaintext').strip().replace('\n', '')
    message = loader.render_to_string(template_prefix + '.txt',
                                      context=context, using='Jinja2_plaintext')

    try:
        html_message = loader.render_to_string(template_prefix + '.html', context)
    except TemplateDoesNotExist:
        emails_dir = os.path.dirname(template_prefix)
        template = os.path.basename(template_prefix)
        compiled_template_prefix = os.path.join(emails_dir, "compiled", template)
        html_message = loader.render_to_string(compiled_template_prefix + '.html', context)

    if from_name is None:
        from_name = "Zulip"
    if from_address is None:
        from_address = FromAddress.NOREPLY
    from_email = formataddr((from_name, from_address))
    reply_to = None
    if reply_to_email is not None:
        reply_to = [reply_to_email]
    # Remove the from_name in the reply-to for noreply emails, so that users
    # see "noreply@..." rather than "Zulip" or whatever the from_name is
    # when they reply in their email client.
    elif from_address == FromAddress.NOREPLY:
        reply_to = [FromAddress.NOREPLY]

    mail = EmailMultiAlternatives(subject, message, from_email, [to_email], reply_to=reply_to)
    if html_message is not None:
        mail.attach_alternative(html_message, 'text/html')
    return mail
def consume(self, event):
    # type: (Mapping[str, Any]) -> None
    server_meta = event['server_meta']

    environ = {
        'REQUEST_METHOD': 'SOCKET',
        'SCRIPT_NAME': '',
        'PATH_INFO': '/json/messages',
        'SERVER_NAME': '127.0.0.1',
        'SERVER_PORT': 9993,
        'SERVER_PROTOCOL': 'ZULIP_SOCKET/1.0',
        'wsgi.version': (1, 0),
        'wsgi.input': StringIO(),
        'wsgi.errors': sys.stderr,
        'wsgi.multithread': False,
        'wsgi.multiprocess': True,
        'wsgi.run_once': False,
        'zulip.emulated_method': 'POST'
    }

    if 'socket_user_agent' in event['request']:
        environ['HTTP_USER_AGENT'] = event['request']['socket_user_agent']
        del event['request']['socket_user_agent']

    # We're mostly using a WSGIRequest for convenience
    environ.update(server_meta['request_environ'])
    request = WSGIRequest(environ)
    # Note: If we ever support non-POST methods, we'll need to change this.
    request._post = event['request']
    request.csrf_processing_done = True

    user_profile = get_user_profile_by_id(server_meta['user_id'])
    request._cached_user = user_profile

    resp = self.handler.get_response(request)
    server_meta['time_request_finished'] = time.time()
    server_meta['worker_log_data'] = request._log_data

    resp_content = resp.content.decode('utf-8')
    response_data = ujson.loads(resp_content)
    if response_data['result'] == 'error':
        check_and_send_restart_signal()

    result = {'response': response_data, 'req_id': event['req_id'],
              'server_meta': server_meta}

    redis_key = req_redis_key(event['req_id'])
    self.redis_client.hmset(redis_key, {'status': 'complete',
                                        'response': resp_content})

    # Since this sends back to Tornado, we can't use
    # call_consume_in_tests here.
    queue_json_publish(server_meta['return_queue'], result, lambda e: None)
def consume(self, event: Dict[str, Any]) -> None:
    start = time.time()
    if event["type"] == "mark_stream_messages_as_read":
        user_profile = get_user_profile_by_id(event["user_profile_id"])

        for recipient_id in event["stream_recipient_ids"]:
            count = do_mark_stream_messages_as_read(user_profile, recipient_id)
            logger.info(
                "Marked %s messages as read for user %s, stream_recipient_id %s",
                count,
                user_profile.id,
                recipient_id,
            )
    elif event["type"] == "mark_stream_messages_as_read_for_everyone":
        # This event is generated by the stream deactivation code path.
        batch_size = 100
        offset = 0
        while True:
            messages = Message.objects.filter(
                recipient_id=event["stream_recipient_id"]).order_by(
                    "id")[offset:offset + batch_size]
            UserMessage.objects.filter(message__in=messages).extra(
                where=[UserMessage.where_unread()]).update(
                    flags=F("flags").bitor(UserMessage.flags.read))
            offset += len(messages)
            if len(messages) < batch_size:
                break
        logger.info(
            "Marked %s messages as read for all users, stream_recipient_id %s",
            offset,
            event["stream_recipient_id"],
        )
    elif event["type"] == "clear_push_device_tokens":
        try:
            clear_push_device_tokens(event["user_profile_id"])
        except PushNotificationBouncerRetryLaterError:
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                    event["user_profile_id"],
                )

            retry_event(self.queue_name, event, failure_processor)
    elif event["type"] == "realm_export":
        realm = Realm.objects.get(id=event["realm_id"])
        output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        export_event = RealmAuditLog.objects.get(id=event["id"])
        user_profile = get_user_profile_by_id(event["user_profile_id"])

        try:
            public_url = export_realm_wrapper(
                realm=realm,
                output_dir=output_dir,
                threads=6,
                upload=True,
                public_only=True,
            )
        except Exception:
            export_event.extra_data = orjson.dumps(
                dict(failed_timestamp=timezone_now().timestamp())).decode()
            export_event.save(update_fields=["extra_data"])
            logging.error(
                "Data export for %s failed after %s",
                user_profile.realm.string_id,
                time.time() - start,
            )
            notify_realm_export(user_profile)
            return

        assert public_url is not None

        # Update the extra_data field now that the export is complete.
        export_event.extra_data = orjson.dumps(
            dict(export_path=urllib.parse.urlparse(public_url).path)).decode()
        export_event.save(update_fields=["extra_data"])

        # Send a private message notification letting the user who
        # triggered the export know the export finished.
        with override_language(user_profile.default_language):
            content = _(
                "Your data export is complete and has been uploaded here:\n\n{public_url}"
            ).format(public_url=public_url)
        internal_send_private_message(
            sender=get_system_bot(settings.NOTIFICATION_BOT, realm.id),
            recipient_user=user_profile,
            content=content,
        )

        # For future frontend use, also notify administrator
        # clients that the export happened.
        notify_realm_export(user_profile)
        logging.info(
            "Completed data export for %s in %s",
            user_profile.realm.string_id,
            time.time() - start,
        )
    elif event["type"] == "reupload_realm_emoji":
        # This is a special event queued by the migration for reuploading emojis.
        # We don't want to run the necessary code in the actual migration, so it simply
        # queues the necessary event, and the actual work is done here in the queue worker.
        realm = Realm.objects.get(id=event["realm_id"])
        logger.info("Processing reupload_realm_emoji event for realm %s", realm.id)
        handle_reupload_emojis_event(realm, logger)

    end = time.time()
    logger.info("deferred_work processed %s event (%dms)", event["type"], (end - start) * 1000)
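This worker consumes from a queue, so callers never invoke consume directly; they publish an event and let the worker pick it up. A hedged producer-side sketch for one of the event types handled above, assuming the queue_json_publish helper seen in the other examples and a queue named 'deferred_work':

# Hypothetical producer snippet; the queue name and helper signature are
# assumptions based on the surrounding examples (older versions of
# queue_json_publish take a third processor argument).
queue_json_publish('deferred_work', {
    'type': 'clear_push_device_tokens',
    'user_profile_id': user_profile.id,
})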
def apply_event(state: Dict[str, Any],
                event: Dict[str, Any],
                user_profile: UserProfile,
                client_gravatar: bool,
                include_subscribers: bool) -> None:
    if event['type'] == "message":
        state['max_message_id'] = max(state['max_message_id'], event['message']['id'])
        if 'raw_unread_msgs' in state:
            apply_unread_message_event(
                user_profile,
                state['raw_unread_msgs'],
                event['message'],
                event['flags'],
            )
    elif event['type'] == "hotspots":
        state['hotspots'] = event['hotspots']
    elif event['type'] == "custom_profile_fields":
        state['custom_profile_fields'] = event['fields']
    elif event['type'] == "pointer":
        state['pointer'] = max(state['pointer'], event['pointer'])
    elif event['type'] == "realm_user":
        person = event['person']
        person_user_id = person['user_id']

        if event['op'] == "add":
            person = copy.deepcopy(person)
            if client_gravatar:
                if 'gravatar.com' in person['avatar_url']:
                    person['avatar_url'] = None
            person['is_active'] = True
            if not person['is_bot']:
                person['profile_data'] = {}
            state['raw_users'][person_user_id] = person
        elif event['op'] == "remove":
            state['raw_users'][person_user_id]['is_active'] = False
        elif event['op'] == 'update':
            is_me = (person_user_id == user_profile.id)

            if is_me:
                if ('avatar_url' in person and 'avatar_url' in state):
                    state['avatar_source'] = person['avatar_source']
                    state['avatar_url'] = person['avatar_url']
                    state['avatar_url_medium'] = person['avatar_url_medium']

                for field in ['is_admin', 'email', 'full_name']:
                    if field in person and field in state:
                        state[field] = person[field]

                # In the unlikely event that the current user
                # just changed to/from being an admin, we need
                # to add/remove the data on all bots in the
                # realm.  This is ugly and probably better
                # solved by removing the all-realm-bots data
                # given to admin users from this flow.
                if ('is_admin' in person and 'realm_bots' in state):
                    prev_state = state['raw_users'][user_profile.id]
                    was_admin = prev_state['is_admin']
                    now_admin = person['is_admin']

                    if was_admin and not now_admin:
                        state['realm_bots'] = []
                    if not was_admin and now_admin:
                        state['realm_bots'] = get_owned_bot_dicts(user_profile)

            if client_gravatar and 'avatar_url' in person:
                # Respect the client_gravatar setting in the `users` data.
                if 'gravatar.com' in person['avatar_url']:
                    person['avatar_url'] = None
                    person['avatar_url_medium'] = None

            if person_user_id in state['raw_users']:
                p = state['raw_users'][person_user_id]
                for field in p:
                    if field in person:
                        p[field] = person[field]
    elif event['type'] == 'realm_bot':
        if event['op'] == 'add':
            state['realm_bots'].append(event['bot'])

        if event['op'] == 'remove':
            email = event['bot']['email']
            for bot in state['realm_bots']:
                if bot['email'] == email:
                    bot['is_active'] = False

        if event['op'] == 'delete':
            state['realm_bots'] = [item for item in state['realm_bots']
                                   if item['email'] != event['bot']['email']]

        if event['op'] == 'update':
            for bot in state['realm_bots']:
                if bot['email'] == event['bot']['email']:
                    if 'owner_id' in event['bot']:
                        bot['owner'] = get_user_profile_by_id(
                            event['bot']['owner_id']).email
                    else:
                        bot.update(event['bot'])
    elif event['type'] == 'stream':
        if event['op'] == 'create':
            for stream in event['streams']:
                if not stream['invite_only']:
                    stream_data = copy.deepcopy(stream)
                    if include_subscribers:
                        stream_data['subscribers'] = []
                    stream_data['stream_weekly_traffic'] = 0
                    stream_data['is_old_stream'] = False
                    # Add stream to never_subscribed (if not invite_only)
                    state['never_subscribed'].append(stream_data)
                state['streams'].append(stream)
            state['streams'].sort(key=lambda elt: elt["name"])

        if event['op'] == 'delete':
            deleted_stream_ids = {stream['stream_id'] for stream in event['streams']}
            state['streams'] = [s for s in state['streams']
                                if s['stream_id'] not in deleted_stream_ids]
            state['never_subscribed'] = [stream for stream in state['never_subscribed']
                                         if stream['stream_id'] not in deleted_stream_ids]

        if event['op'] == 'update':
            # For legacy reasons, we call stream data 'subscriptions' in
            # the state var here, for the benefit of the JS code.
            for obj in state['subscriptions']:
                if obj['name'].lower() == event['name'].lower():
                    obj[event['property']] = event['value']
            # Also update the pure streams data
            for stream in state['streams']:
                if stream['name'].lower() == event['name'].lower():
                    prop = event['property']
                    if prop in stream:
                        stream[prop] = event['value']
        elif event['op'] == "occupy":
            state['streams'] += event['streams']
        elif event['op'] == "vacate":
            stream_ids = [s["stream_id"] for s in event['streams']]
            state['streams'] = [s for s in state['streams']
                                if s["stream_id"] not in stream_ids]
    elif event['type'] == 'default_streams':
        state['realm_default_streams'] = event['default_streams']
    elif event['type'] == 'default_stream_groups':
        state['realm_default_stream_groups'] = event['default_stream_groups']
    elif event['type'] == 'realm':
        if event['op'] == "update":
            field = 'realm_' + event['property']
            state[field] = event['value']

            # Tricky interaction: Whether we can create streams can get changed here.
            if (field in ['realm_create_stream_by_admins_only',
                          'realm_waiting_period_threshold']) and 'can_create_streams' in state:
                state['can_create_streams'] = user_profile.can_create_streams()
        elif event['op'] == "update_dict":
            for key, value in event['data'].items():
                state['realm_' + key] = value
                # It's a bit messy, but this is where we need to
                # update the state for whether password authentication
                # is enabled on this server.
                if key == 'authentication_methods':
                    state['realm_password_auth_enabled'] = (value['Email'] or value['LDAP'])
                    state['realm_email_auth_enabled'] = value['Email']
    elif event['type'] == "subscription":
        if not include_subscribers and event['op'] in ['peer_add', 'peer_remove']:
            return

        if event['op'] in ["add"]:
            if not include_subscribers:
                # Avoid letting 'subscribers' entries end up in the list
                for i, sub in enumerate(event['subscriptions']):
                    event['subscriptions'][i] = copy.deepcopy(event['subscriptions'][i])
                    del event['subscriptions'][i]['subscribers']

        def name(sub: Dict[str, Any]) -> Text:
            return sub['name'].lower()

        if event['op'] == "add":
            added_names = set(map(name, event["subscriptions"]))
            was_added = lambda s: name(s) in added_names

            # add the new subscriptions
            state['subscriptions'] += event['subscriptions']

            # remove them from unsubscribed if they had been there
            state['unsubscribed'] = [s for s in state['unsubscribed'] if not was_added(s)]

            # remove them from never_subscribed if they had been there
            state['never_subscribed'] = [s for s in state['never_subscribed'] if not was_added(s)]

        elif event['op'] == "remove":
            removed_names = set(map(name, event["subscriptions"]))
            was_removed = lambda s: name(s) in removed_names

            # Find the subs we are affecting.
            removed_subs = list(filter(was_removed, state['subscriptions']))

            # Remove our user from the subscribers of the removed subscriptions.
            if include_subscribers:
                for sub in removed_subs:
                    sub['subscribers'] = [id for id in sub['subscribers']
                                          if id != user_profile.id]

            # We must effectively copy the removed subscriptions from subscriptions to
            # unsubscribed, since we only have the name in our data structure.
            state['unsubscribed'] += removed_subs

            # Now filter out the removed subscriptions from subscriptions.
            state['subscriptions'] = [s for s in state['subscriptions'] if not was_removed(s)]

        elif event['op'] == 'update':
            for sub in state['subscriptions']:
                if sub['name'].lower() == event['name'].lower():
                    sub[event['property']] = event['value']
        elif event['op'] == 'peer_add':
            user_id = event['user_id']
            for sub in state['subscriptions']:
                if (sub['name'] in event['subscriptions'] and
                        user_id not in sub['subscribers']):
                    sub['subscribers'].append(user_id)
            for sub in state['never_subscribed']:
                if (sub['name'] in event['subscriptions'] and
                        user_id not in sub['subscribers']):
                    sub['subscribers'].append(user_id)
        elif event['op'] == 'peer_remove':
            user_id = event['user_id']
            for sub in state['subscriptions']:
                if (sub['name'] in event['subscriptions'] and
                        user_id in sub['subscribers']):
                    sub['subscribers'].remove(user_id)
    elif event['type'] == "presence":
        # TODO: Add user_id to presence update events / state format!
        presence_user_profile = get_user(event['email'], user_profile.realm)
        state['presences'][event['email']] = UserPresence.get_status_dict_by_user(
            presence_user_profile)[event['email']]
    elif event['type'] == "update_message":
        # We don't return messages in /register, so we don't need to
        # do anything for content updates, but we may need to update
        # the unread_msgs data if the topic of an unread message changed.
        if 'subject' in event:
            stream_dict = state['raw_unread_msgs']['stream_dict']
            topic = event['subject']
            for message_id in event['message_ids']:
                if message_id in stream_dict:
                    stream_dict[message_id]['topic'] = topic
    elif event['type'] == "delete_message":
        max_message = Message.objects.filter(
            usermessage__user_profile=user_profile).order_by('-id').first()
        if max_message:
            state['max_message_id'] = max_message.id
        else:
            state['max_message_id'] = -1

        remove_id = event['message_id']
        remove_message_id_from_unread_mgs(state, remove_id)
    elif event['type'] == "reaction":
        # The client will get the message with the reactions directly
        pass
    elif event['type'] == 'typing':
        # Typing notification events are transient and thus ignored
        pass
    elif event['type'] == "update_message_flags":
        # We don't return messages in `/register`, so most flags we
        # can ignore, but we do need to update the unread_msgs data if
        # unread state is changed.
        if event['flag'] == 'read' and event['operation'] == 'add':
            for remove_id in event['messages']:
                remove_message_id_from_unread_mgs(state, remove_id)
    elif event['type'] == "realm_domains":
        if event['op'] == 'add':
            state['realm_domains'].append(event['realm_domain'])
        elif event['op'] == 'change':
            for realm_domain in state['realm_domains']:
                if realm_domain['domain'] == event['realm_domain']['domain']:
                    realm_domain['allow_subdomains'] = event['realm_domain']['allow_subdomains']
        elif event['op'] == 'remove':
            state['realm_domains'] = [realm_domain for realm_domain in state['realm_domains']
                                      if realm_domain['domain'] != event['domain']]
    elif event['type'] == "realm_emoji":
        state['realm_emoji'] = event['realm_emoji']
    elif event['type'] == "alert_words":
        state['alert_words'] = event['alert_words']
    elif event['type'] == "muted_topics":
        state['muted_topics'] = event["muted_topics"]
    elif event['type'] == "realm_filters":
        state['realm_filters'] = event["realm_filters"]
    elif event['type'] == "update_display_settings":
        assert event['setting_name'] in UserProfile.property_types
        state[event['setting_name']] = event['setting']
    elif event['type'] == "update_global_notifications":
        assert event['notification_name'] in UserProfile.notification_setting_types
        state[event['notification_name']] = event['setting']
    elif event['type'] == "user_group":
        if event['op'] == 'add':
            state['realm_user_groups'].append(event['group'])
            state['realm_user_groups'].sort(key=lambda group: group['id'])
        elif event['op'] == 'update':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    user_group.update(event['data'])
        elif event['op'] == 'add_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    user_group['members'].extend(event['user_ids'])
                    user_group['members'].sort()
        elif event['op'] == 'remove_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    members = set(user_group['members'])
                    user_group['members'] = list(members - set(event['user_ids']))
                    user_group['members'].sort()
        elif event['op'] == 'remove':
            state['realm_user_groups'] = [ug for ug in state['realm_user_groups']
                                          if ug['id'] != event['group_id']]
    else:
        raise AssertionError("Unexpected event type %s" % (event['type'],))
def handle_digest_email(user_profile_id: int, cutoff: float,
                        render_to_web: bool = False) -> Union[None, Dict[str, Any]]:
    user_profile = get_user_profile_by_id(user_profile_id)

    # Convert from epoch seconds to a datetime object.
    cutoff_date = datetime.datetime.fromtimestamp(int(cutoff), tz=datetime.timezone.utc)

    context = common_context(user_profile)

    # Start building email template data.
    context.update({
        'unsubscribe_link': one_click_unsubscribe_link(user_profile, "digest"),
    })

    home_view_streams = Subscription.objects.filter(
        user_profile=user_profile,
        recipient__type=Recipient.STREAM,
        active=True,
        is_muted=False).values_list('recipient__type_id', flat=True)

    if not user_profile.long_term_idle:
        stream_ids = home_view_streams
    else:
        stream_ids = exclude_subscription_modified_streams(
            user_profile, home_view_streams, cutoff_date)

    # Fetch list of all messages sent after cutoff_date where the user is subscribed
    messages = Message.objects.filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id__in=stream_ids,
        date_sent__gt=cutoff_date).select_related('recipient', 'sender', 'sending_client')

    # Gather hot conversations.
    context["hot_conversations"] = gather_hot_conversations(user_profile, messages)

    # Gather new streams.
    new_streams_count, new_streams = gather_new_streams(user_profile, cutoff_date)
    context["new_streams"] = new_streams
    context["new_streams_count"] = new_streams_count

    # TODO: Set has_preheader if we want to include a preheader.

    if render_to_web:
        return context

    # We don't want to send emails containing almost no information.
    if enough_traffic(context["hot_conversations"], new_streams_count):
        logger.info("Sending digest email for user %s", user_profile.id)
        # Send now, as a ScheduledEmail
        send_future_email('zerver/emails/digest', user_profile.realm,
                          to_user_ids=[user_profile.id],
                          from_name="Zulip Digest",
                          from_address=FromAddress.no_reply_placeholder,
                          context=context)
    return None
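Because render_to_web=True returns the template context before any email is built or sent, the same function can be used to preview a digest. A hedged usage sketch with a hypothetical user ID and a cutoff five days in the past:

import time

# Returns the digest template context instead of sending an email;
# user ID 42 is hypothetical, for illustration only.
five_days_ago = time.time() - 5 * 24 * 60 * 60
context = handle_digest_email(user_profile_id=42, cutoff=five_days_ago,
                              render_to_web=True)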
def import_uploads_local(import_dir: Path, processing_avatars: bool = False,
                         processing_emojis: bool = False) -> None:
    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    re_map_foreign_keys_internal(records, 'records', 'realm_id',
                                 related_table="realm", id_field=True)
    if not processing_emojis:
        re_map_foreign_keys_internal(records, 'records', 'user_profile_id',
                                     related_table="user_profile", id_field=True)
    for record in records:
        if processing_avatars:
            # For avatars, we need to rehash the user ID with the
            # new server's avatar salt
            avatar_path = user_avatar_path_from_ids(record['user_profile_id'], record['realm_id'])
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", avatar_path)
            if record['s3_path'].endswith('.original'):
                file_path += '.original'
            else:
                file_path += '.png'
        elif processing_emojis:
            # For emojis we follow the function 'upload_emoji_image'
            emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
                realm_id=record['realm_id'],
                emoji_file_name=record['file_name'])
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_path)
        else:
            # Should be kept in sync with its equivalent in zerver/lib/uploads in the
            # function 'upload_message_image'
            s3_file_name = "/".join([
                str(record['realm_id']),
                random_name(18),
                sanitize_name(os.path.basename(record['path']))
            ])
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "files", s3_file_name)
            path_maps['attachment_path'][record['path']] = s3_file_name

        orig_file_path = os.path.join(import_dir, record['path'])
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        shutil.copy(orig_file_path, file_path)

    if processing_avatars:
        # Ensure that we have medium-size avatar images for every
        # avatar.  TODO: This implementation is hacky, both in that it
        # does get_user_profile_by_id for each user, and in that it
        # might be better to require the export to just have these.
        upload_backend = LocalUploadBackend()
        for record in records:
            if record['s3_path'].endswith('.original'):
                user_profile = get_user_profile_by_id(record['user_profile_id'])
                avatar_path = user_avatar_path_from_ids(user_profile.id, record['realm_id'])
                medium_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
                                                avatar_path) + '-medium.png'
                if os.path.exists(medium_file_path):
                    # We remove the image here primarily to deal with
                    # issues when running the import script multiple
                    # times in development (where one might reuse the
                    # same realm ID from a previous iteration).
                    os.remove(medium_file_path)
                upload_backend.ensure_medium_avatar_image(user_profile=user_profile)
def send_messages(data: Tuple[int, Sequence[Sequence[int]], Mapping[str, Any],
                              Callable[[str], Any], int]) -> int:
    (tot_messages, personals_pairs, options, output, random_seed) = data
    random.seed(random_seed)

    with open("var/test_messages.json", "r") as infile:
        dialog = ujson.load(infile)
    random.shuffle(dialog)
    texts = itertools.cycle(dialog)

    recipient_streams = [klass.id for klass in
                         Recipient.objects.filter(type=Recipient.STREAM)]  # type: List[int]
    recipient_huddles = [h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)]  # type: List[int]

    huddle_members = {}  # type: Dict[int, List[int]]
    for h in recipient_huddles:
        huddle_members[h] = [s.user_profile.id for s in
                             Subscription.objects.filter(recipient_id=h)]

    num_messages = 0
    random_max = 1000000
    recipients = {}  # type: Dict[int, Tuple[int, int, Dict[str, Any]]]
    while num_messages < tot_messages:
        saved_data = {}  # type: Dict[str, Any]
        message = Message()
        message.sending_client = get_client('populate_db')

        message.content = next(texts)

        randkey = random.randint(1, random_max)
        if (num_messages > 0 and
                random.randint(1, random_max) * 100. / random_max < options["stickyness"]):
            # Use an old recipient
            message_type, recipient_id, saved_data = recipients[num_messages - 1]
            if message_type == Recipient.PERSONAL:
                personals_pair = saved_data['personals_pair']
                random.shuffle(personals_pair)
            elif message_type == Recipient.STREAM:
                message.subject = saved_data['subject']
                message.recipient = get_recipient_by_id(recipient_id)
            elif message_type == Recipient.HUDDLE:
                message.recipient = get_recipient_by_id(recipient_id)
        elif (randkey <= random_max * options["percent_huddles"] / 100.):
            message_type = Recipient.HUDDLE
            message.recipient = get_recipient_by_id(random.choice(recipient_huddles))
        elif (randkey <= random_max * (options["percent_huddles"] + options["percent_personals"]) / 100.):
            message_type = Recipient.PERSONAL
            personals_pair = random.choice(personals_pairs)
            random.shuffle(personals_pair)
        elif (randkey <= random_max * 1.0):
            message_type = Recipient.STREAM
            message.recipient = get_recipient_by_id(random.choice(recipient_streams))

    if message_type == Recipient.HUDDLE:
            sender_id = random.choice(huddle_members[message.recipient.id])
            message.sender = get_user_profile_by_id(sender_id)
    elif message_type == Recipient.PERSONAL:
            message.recipient = Recipient.objects.get(type=Recipient.PERSONAL,
                                                      type_id=personals_pair[0])
            message.sender = get_user_profile_by_id(personals_pair[1])
            saved_data['personals_pair'] = personals_pair
    elif message_type == Recipient.STREAM:
            stream = Stream.objects.get(id=message.recipient.type_id)
            # Pick a random subscriber to the stream
            message.sender = random.choice(Subscription.objects.filter(
                recipient=message.recipient)).user_profile
            message.subject = stream.name + Text(random.randint(1, 3))
            saved_data['subject'] = message.subject

        # Spoofing time not supported with threading
        if options['threads'] != 1:
            message.pub_date = timezone_now()
        else:
            # Distributes 80% of messages starting from 5 days ago, over a period
            # of 3 days. Then, distributes remaining messages over past 24 hours.
            spoofed_date = timezone_now() - timezone_timedelta(days=5)
            if (num_messages < tot_messages * 0.8):
                # Maximum of 3 days ahead, convert to minutes
                time_ahead = 3 * 24 * 60
                time_ahead //= int(tot_messages * 0.8)
            else:
                time_ahead = 24 * 60
                time_ahead //= int(tot_messages * 0.2)

            spoofed_minute = random.randint(time_ahead * num_messages, time_ahead * (num_messages + 1))
            spoofed_date += timezone_timedelta(minutes=spoofed_minute)
            message.pub_date = spoofed_date

        # We disable USING_RABBITMQ here, so that deferred work is
        # executed in do_send_message_messages, rather than being
        # queued.  This is important, because otherwise, if run-dev.py
        # wasn't running when populate_db was run, a developer can end
        # up with queued events that reference objects from a previous
        # life of the database, which naturally throws exceptions.
        settings.USING_RABBITMQ = False
        do_send_messages([{'message': message}])
        settings.USING_RABBITMQ = True

        recipients[num_messages] = (message_type, message.recipient.id, saved_data)
        num_messages += 1
    return tot_messages
def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
                         processing_avatars: bool=False) -> None:
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)
    records = []

    logging.info("Downloading uploaded files from %s" % (bucket_name,))

    avatar_hash_values = set()
    user_ids = set()
    if processing_avatars:
        bucket_list = bucket.list()
        for user_profile in UserProfile.objects.filter(realm=realm):
            avatar_path = user_avatar_path_from_ids(user_profile.id, realm.id)
            avatar_hash_values.add(avatar_path)
            avatar_hash_values.add(avatar_path + ".original")
            user_ids.add(user_profile.id)
    else:
        bucket_list = bucket.list(prefix="%s/" % (realm.id,))

    if settings.EMAIL_GATEWAY_BOT is not None:
        email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT)  # type: Optional[UserProfile]
    else:
        email_gateway_bot = None

    count = 0
    for bkey in bucket_list:
        if processing_avatars and bkey.name not in avatar_hash_values:
            continue
        key = bucket.get_key(bkey.name)

        # This can happen if an email address has moved realms
        if 'realm_id' in key.metadata and key.metadata['realm_id'] != str(realm.id):
            if email_gateway_bot is None or key.metadata['user_profile_id'] != str(email_gateway_bot.id):
                raise AssertionError("Key metadata problem: %s %s / %s" % (key.name, key.metadata, realm.id))
            # Email gateway bot sends messages, potentially including attachments, cross-realm.
            print("File uploaded by email gateway bot: %s / %s" % (key.name, key.metadata))
        elif processing_avatars:
            if 'user_profile_id' not in key.metadata:
                raise AssertionError("Missing user_profile_id in key metadata: %s" % (key.metadata,))
            if int(key.metadata['user_profile_id']) not in user_ids:
                raise AssertionError("Wrong user_profile_id in key metadata: %s" % (key.metadata,))
        elif 'realm_id' not in key.metadata:
            raise AssertionError("Missing realm_id in key metadata: %s" % (key.metadata,))

        record = dict(s3_path=key.name, bucket=bucket_name,
                      size=key.size, last_modified=key.last_modified,
                      content_type=key.content_type, md5=key.md5)
        record.update(key.metadata)

        # A few early avatars don't have 'realm_id' on the object; fix their metadata
        user_profile = get_user_profile_by_id(record['user_profile_id'])
        if 'realm_id' not in record:
            record['realm_id'] = user_profile.realm_id
        record['user_profile_email'] = user_profile.email

        if processing_avatars:
            filename = os.path.join(output_dir, key.name)
            record['path'] = key.name
        else:
            fields = key.name.split('/')
            if len(fields) != 3:
                raise AssertionError("Suspicious key with invalid format %s" % (key.name,))
            filename = os.path.join(output_dir, fields[1], fields[2])
            record['path'] = os.path.join(fields[1], fields[2])

        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        key.get_contents_to_filename(filename)

        records.append(record)
        count += 1

        if (count % 100 == 0):
            logging.info("Finished %s" % (count,))

    with open(os.path.join(output_dir, "records.json"), "w") as records_file:
        ujson.dump(records, records_file, indent=4)
def get_user(self, user_profile_id: int) -> Optional[UserProfile]:
    """ Get a UserProfile object from the user_profile_id. """
    try:
        return get_user_profile_by_id(user_profile_id)
    except UserProfile.DoesNotExist:
        return None
def generate_and_send_messages(data: Tuple[int, Sequence[Sequence[int]], Mapping[str, Any],
                                           Callable[[str], Any], int]) -> int:
    (tot_messages, personals_pairs, options, output, random_seed) = data
    random.seed(random_seed)

    with open(os.path.join(get_or_create_dev_uuid_var_path('test-backend'),
                           "test_messages.json"), "r") as infile:
        dialog = ujson.load(infile)
    random.shuffle(dialog)
    texts = itertools.cycle(dialog)

    recipient_streams = [klass.id for klass in
                         Recipient.objects.filter(type=Recipient.STREAM)]  # type: List[int]
    recipient_huddles = [h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)]  # type: List[int]

    huddle_members = {}  # type: Dict[int, List[int]]
    for h in recipient_huddles:
        huddle_members[h] = [s.user_profile.id for s in
                             Subscription.objects.filter(recipient_id=h)]

    message_batch_size = options['batch_size']
    num_messages = 0
    random_max = 1000000
    recipients = {}  # type: Dict[int, Tuple[int, int, Dict[str, Any]]]
    messages = []

    while num_messages < tot_messages:
        saved_data = {}  # type: Dict[str, Any]
        message = Message()
        message.sending_client = get_client('populate_db')

        message.content = next(texts)

        randkey = random.randint(1, random_max)
        if (num_messages > 0 and
                random.randint(1, random_max) * 100. / random_max < options["stickyness"]):
            # Use an old recipient
            message_type, recipient_id, saved_data = recipients[num_messages - 1]
            if message_type == Recipient.PERSONAL:
                personals_pair = saved_data['personals_pair']
                random.shuffle(personals_pair)
            elif message_type == Recipient.STREAM:
                message.subject = saved_data['subject']
                message.recipient = get_recipient_by_id(recipient_id)
            elif message_type == Recipient.HUDDLE:
                message.recipient = get_recipient_by_id(recipient_id)
        elif (randkey <= random_max * options["percent_huddles"] / 100.):
            message_type = Recipient.HUDDLE
            message.recipient = get_recipient_by_id(random.choice(recipient_huddles))
        elif (randkey <= random_max * (options["percent_huddles"] + options["percent_personals"]) / 100.):
            message_type = Recipient.PERSONAL
            personals_pair = random.choice(personals_pairs)
            random.shuffle(personals_pair)
        elif (randkey <= random_max * 1.0):
            message_type = Recipient.STREAM
            message.recipient = get_recipient_by_id(random.choice(recipient_streams))

        if message_type == Recipient.HUDDLE:
            sender_id = random.choice(huddle_members[message.recipient.id])
            message.sender = get_user_profile_by_id(sender_id)
        elif message_type == Recipient.PERSONAL:
            message.recipient = Recipient.objects.get(type=Recipient.PERSONAL,
                                                      type_id=personals_pair[0])
            message.sender = get_user_profile_by_id(personals_pair[1])
            saved_data['personals_pair'] = personals_pair
        elif message_type == Recipient.STREAM:
            stream = Stream.objects.get(id=message.recipient.type_id)
            # Pick a random subscriber to the stream
            message.sender = random.choice(Subscription.objects.filter(
                recipient=message.recipient)).user_profile
            message.subject = stream.name + str(random.randint(1, 3))
            saved_data['subject'] = message.subject

        message.date_sent = choose_date_sent(num_messages, tot_messages, options['threads'])
        messages.append(message)

        recipients[num_messages] = (message_type, message.recipient.id, saved_data)
        num_messages += 1

        if (num_messages % message_batch_size) == 0:
            # Send the batch and empty the list:
            send_messages(messages)
            messages = []

    if len(messages) > 0:
        # If there are unsent messages after exiting the loop, send them:
        send_messages(messages)

    return tot_messages
def handle_missedmessage_emails(user_profile_id: int,
                                missed_email_events: Iterable[Dict[str, Any]]) -> None:
    message_ids = {event.get('message_id'): event.get('trigger') for event in missed_email_events}

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    # Note: This query structure automatically filters out any
    # messages that were permanently deleted, since those would now be
    # in the ArchivedMessage table, not the Message table.
    messages = Message.objects.filter(usermessage__user_profile_id=user_profile.id,
                                      id__in=message_ids,
                                      usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [message for message in messages if message.content != "(deleted)"]

    if not messages:
        return

    # We bucket messages by tuples that identify similar messages.
    # For streams it's recipient_id and topic.
    # For PMs it's recipient id and sender.
    messages_by_bucket = defaultdict(list)  # type: Dict[Tuple[int, str], List[Message]]
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PMs, group using (recipient, sender).
            messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_bucket[(msg.recipient_id, msg.topic_name())].append(msg)

    message_count_by_bucket = {
        bucket_tup: len(msgs)
        for bucket_tup, msgs in messages_by_bucket.items()
    }

    for msg_list in messages_by_bucket.values():
        msg = min(msg_list, key=lambda msg: msg.date_sent)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    bucket_tups = []  # type: List[Tuple[Tuple[int, str], int]]
    for bucket_tup, msg_list in messages_by_bucket.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        bucket_tups.append((bucket_tup, max_message_id))

    bucket_tups = sorted(bucket_tups, key=lambda x: x[1])

    # Send an email per bucket.
    for bucket_tup, ignored_max_id in bucket_tups:
        unique_messages = {}
        for m in messages_by_bucket[bucket_tup]:
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_ids.get(m.id)
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_bucket[bucket_tup],
        )
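# A minimal, self-contained sketch of the bucketing strategy used above:
# stream messages group by (recipient_id, topic), PMs by (recipient_id,
# sender_id). The Msg tuple is a stand-in for the Message model.
from collections import defaultdict
from typing import Dict, List, NamedTuple, Tuple

class Msg(NamedTuple):
    recipient_id: int
    sender_id: int
    is_private: bool
    topic: str

def bucket_messages(msgs: List[Msg]) -> Dict[Tuple[int, object], List[Msg]]:
    buckets: Dict[Tuple[int, object], List[Msg]] = defaultdict(list)
    for m in msgs:
        key = (m.recipient_id, m.sender_id) if m.is_private else (m.recipient_id, m.topic)
        buckets[key].append(m)
    return buckets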
def build_email(template_prefix: str, to_user_ids: Optional[List[int]]=None,
                to_emails: Optional[List[str]]=None, from_name: Optional[str]=None,
                from_address: Optional[str]=None, reply_to_email: Optional[str]=None,
                language: Optional[str]=None, context: Optional[Dict[str, Any]]=None
                ) -> EmailMultiAlternatives:
    # Callers should pass exactly one of to_user_ids and to_emails.
    assert (to_user_ids is None) ^ (to_emails is None)
    if to_user_ids is not None:
        to_users = [get_user_profile_by_id(to_user_id) for to_user_id in to_user_ids]
        to_emails = [formataddr((to_user.full_name, to_user.delivery_email)) for to_user in to_users]

    if context is None:
        context = {}

    context.update({
        'support_email': FromAddress.SUPPORT,
        'email_images_base_uri': settings.ROOT_DOMAIN_URI + '/static/images/emails',
        'physical_address': settings.PHYSICAL_ADDRESS,
    })

    def render_templates() -> Tuple[str, str, str]:
        email_subject = loader.render_to_string(template_prefix + '.subject.txt',
                                                context=context,
                                                using='Jinja2_plaintext').strip().replace('\n', '')
        message = loader.render_to_string(template_prefix + '.txt',
                                          context=context, using='Jinja2_plaintext')

        try:
            html_message = loader.render_to_string(template_prefix + '.html', context)
        except TemplateDoesNotExist:
            emails_dir = os.path.dirname(template_prefix)
            template = os.path.basename(template_prefix)
            compiled_template_prefix = os.path.join(emails_dir, "compiled", template)
            html_message = loader.render_to_string(compiled_template_prefix + '.html', context)
        return (html_message, message, email_subject)

    if not language and to_user_ids is not None:
        language = to_users[0].default_language
    if language:
        with override_language(language):
            # Make sure that we render the email using the target's native language
            (html_message, message, email_subject) = render_templates()
    else:
        (html_message, message, email_subject) = render_templates()
        logger.warning("Missing language for email template '{}'".format(template_prefix))

    if from_name is None:
        from_name = "Zulip"
    if from_address is None:
        from_address = FromAddress.NOREPLY
    if from_address == FromAddress.tokenized_no_reply_placeholder:
        from_address = FromAddress.tokenized_no_reply_address()
    if from_address == FromAddress.no_reply_placeholder:
        from_address = FromAddress.NOREPLY
    if from_address == FromAddress.support_placeholder:
        from_address = FromAddress.SUPPORT

    from_email = formataddr((from_name, from_address))
    reply_to = None
    if reply_to_email is not None:
        reply_to = [reply_to_email]
    # Remove the from_name in the reply-to for noreply emails, so that users
    # see "noreply@..." rather than "Zulip" or whatever the from_name is
    # when they reply in their email client.
    elif from_address == FromAddress.NOREPLY:
        reply_to = [FromAddress.NOREPLY]

    mail = EmailMultiAlternatives(email_subject, message, from_email, to_emails,
                                  reply_to=reply_to)
    if html_message is not None:
        mail.attach_alternative(html_message, 'text/html')
    return mail
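# Hedged usage sketch for build_email() above. The template prefix and
# context key are illustrative assumptions, not taken from the original;
# EmailMultiAlternatives.send() is the standard Django send method.
def send_example_email(user_profile):
    mail = build_email(
        "zerver/emails/example_template",  # hypothetical template prefix
        to_user_ids=[user_profile.id],
        language=user_profile.default_language,
        context={"example_key": "example value"},  # illustrative context
    )
    mail.send()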
def apply_event(state, event, user_profile, include_subscribers): # type: (Dict[str, Any], Dict[str, Any], UserProfile, bool) -> None if event['type'] == "message": state['max_message_id'] = max(state['max_message_id'], event['message']['id']) elif event['type'] == "pointer": state['pointer'] = max(state['pointer'], event['pointer']) elif event['type'] == "realm_user": person = event['person'] def our_person(p): # type: (Dict[str, Any]) -> bool return p['user_id'] == person['user_id'] if event['op'] == "add": state['realm_users'].append(person) elif event['op'] == "remove": state['realm_users'] = [ user for user in state['realm_users'] if not our_person(user) ] elif event['op'] == 'update': for p in state['realm_users']: if our_person(p): # In the unlikely event that the current user # just changed to/from being an admin, we need # to add/remove the data on all bots in the # realm. This is ugly and probably better # solved by removing the all-realm-bots data # given to admin users from this flow. if ('is_admin' in person and 'realm_bots' in state and user_profile.email == person['email']): if p['is_admin'] and not person['is_admin']: state['realm_bots'] = [] if not p['is_admin'] and person['is_admin']: state['realm_bots'] = get_owned_bot_dicts( user_profile) # Now update the person p.update(person) elif event['type'] == 'realm_bot': if event['op'] == 'add': state['realm_bots'].append(event['bot']) if event['op'] == 'remove': email = event['bot']['email'] for bot in state['realm_bots']: if bot['email'] == email: bot['is_active'] = False if event['op'] == 'update': for bot in state['realm_bots']: if bot['email'] == event['bot']['email']: if 'owner_id' in event['bot']: bot['owner'] = get_user_profile_by_id( event['bot']['owner_id']).email else: bot.update(event['bot']) elif event['type'] == 'stream': if event['op'] == 'create': for stream in event['streams']: if not stream['invite_only']: stream_data = copy.deepcopy(stream) if include_subscribers: stream_data['subscribers'] = [] # Add stream to never_subscribed (if not invite_only) state['never_subscribed'].append(stream_data) if event['op'] == 'delete': deleted_stream_ids = { stream['stream_id'] for stream in event['streams'] } state['streams'] = [ s for s in state['streams'] if s['stream_id'] not in deleted_stream_ids ] state['never_subscribed'] = [ stream for stream in state['never_subscribed'] if stream['stream_id'] not in deleted_stream_ids ] if event['op'] == 'update': # For legacy reasons, we call stream data 'subscriptions' in # the state var here, for the benefit of the JS code. 
for obj in state['subscriptions']: if obj['name'].lower() == event['name'].lower(): obj[event['property']] = event['value'] # Also update the pure streams data for stream in state['streams']: if stream['name'].lower() == event['name'].lower(): prop = event['property'] if prop in stream: stream[prop] = event['value'] elif event['op'] == "occupy": state['streams'] += event['streams'] elif event['op'] == "vacate": stream_ids = [s["stream_id"] for s in event['streams']] state['streams'] = [ s for s in state['streams'] if s["stream_id"] not in stream_ids ] elif event['type'] == 'default_streams': state['realm_default_streams'] = event['default_streams'] elif event['type'] == 'realm': if event['op'] == "update": field = 'realm_' + event['property'] state[field] = event['value'] elif event['op'] == "update_dict": for key, value in event['data'].items(): state['realm_' + key] = value elif event['type'] == "subscription": if not include_subscribers and event['op'] in [ 'peer_add', 'peer_remove' ]: return if event['op'] in ["add"]: if include_subscribers: # Convert the emails to user_profile IDs since that's what register() returns # TODO: Clean up this situation by making the event also have IDs for item in event["subscriptions"]: item["subscribers"] = [ get_user_profile_by_email(email).id for email in item["subscribers"] ] else: # Avoid letting 'subscribers' entries end up in the list for i, sub in enumerate(event['subscriptions']): event['subscriptions'][i] = copy.deepcopy( event['subscriptions'][i]) del event['subscriptions'][i]['subscribers'] def name(sub): # type: (Dict[str, Any]) -> Text return sub['name'].lower() if event['op'] == "add": added_names = set(map(name, event["subscriptions"])) was_added = lambda s: name(s) in added_names # add the new subscriptions state['subscriptions'] += event['subscriptions'] # remove them from unsubscribed if they had been there state['unsubscribed'] = [ s for s in state['unsubscribed'] if not was_added(s) ] # remove them from never_subscribed if they had been there state['never_subscribed'] = [ s for s in state['never_subscribed'] if not was_added(s) ] elif event['op'] == "remove": removed_names = set(map(name, event["subscriptions"])) was_removed = lambda s: name(s) in removed_names # Find the subs we are affecting. removed_subs = list(filter(was_removed, state['subscriptions'])) # Remove our user from the subscribers of the removed subscriptions. if include_subscribers: for sub in removed_subs: sub['subscribers'] = [ id for id in sub['subscribers'] if id != user_profile.id ] # We must effectively copy the removed subscriptions from subscriptions to # unsubscribe, since we only have the name in our data structure. state['unsubscribed'] += removed_subs # Now filter out the removed subscriptions from subscriptions. 
state['subscriptions'] = [ s for s in state['subscriptions'] if not was_removed(s) ] elif event['op'] == 'update': for sub in state['subscriptions']: if sub['name'].lower() == event['name'].lower(): sub[event['property']] = event['value'] elif event['op'] == 'peer_add': user_id = event['user_id'] for sub in state['subscriptions']: if (sub['name'] in event['subscriptions'] and user_id not in sub['subscribers']): sub['subscribers'].append(user_id) for sub in state['never_subscribed']: if (sub['name'] in event['subscriptions'] and user_id not in sub['subscribers']): sub['subscribers'].append(user_id) elif event['op'] == 'peer_remove': user_id = event['user_id'] for sub in state['subscriptions']: if (sub['name'] in event['subscriptions'] and user_id in sub['subscribers']): sub['subscribers'].remove(user_id) elif event['type'] == "presence": state['presences'][event['email']] = event['presence'] elif event['type'] == "update_message": # The client will get the updated message directly pass elif event['type'] == "reaction": # The client will get the message with the reactions directly pass elif event['type'] == "referral": state['referrals'] = event['referrals'] elif event['type'] == "update_message_flags": # The client will get the message with the updated flags directly pass elif event['type'] == "realm_domains": if event['op'] == 'add': state['realm_domains'].append(event['alias']) elif event['op'] == 'change': for realm_domain in state['realm_domains']: if realm_domain['domain'] == event['alias']['domain']: realm_domain['allow_subdomains'] = event['alias'][ 'allow_subdomains'] elif event['op'] == 'remove': state['realm_domains'] = [ alias for alias in state['realm_domains'] if alias['domain'] != event['domain'] ] elif event['type'] == "realm_emoji": state['realm_emoji'] = event['realm_emoji'] elif event['type'] == "alert_words": state['alert_words'] = event['alert_words'] elif event['type'] == "muted_topics": state['muted_topics'] = event["muted_topics"] elif event['type'] == "realm_filters": state['realm_filters'] = event["realm_filters"] elif event['type'] == "update_display_settings": if event['setting_name'] == "twenty_four_hour_time": state['twenty_four_hour_time'] = event["setting"] if event['setting_name'] == 'left_side_userlist': state['left_side_userlist'] = event["setting"] if event['setting_name'] == 'emoji_alt_code': state['emoji_alt_code'] = event["setting"] elif event['type'] == "update_global_notifications": if event['notification_name'] == "enable_stream_desktop_notifications": state['enable_stream_desktop_notifications'] = event['setting'] elif event['notification_name'] == "enable_stream_sounds": state['enable_stream_sounds'] = event['setting'] elif event['notification_name'] == "enable_desktop_notifications": state['enable_desktop_notifications'] = event['setting'] elif event['notification_name'] == "enable_sounds": state['enable_sounds'] = event['setting'] elif event[ 'notification_name'] == "enable_offline_email_notifications": state['enable_offline_email_notifications'] = event['setting'] elif event['notification_name'] == "enable_offline_push_notifications": state['enable_offline_push_notifications'] = event['setting'] elif event['notification_name'] == "enable_online_push_notifications": state['enable_online_push_notifications'] = event['setting'] elif event['notification_name'] == "enable_digest_emails": state['enable_digest_emails'] = event['setting'] else: raise ValueError("Unexpected event type %s" % (event['type'], ))
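# Sketch of the driver loop that apply_event() is written for: an initial
# register() payload produces `state`, then queued events are folded in one
# at a time. `events` is assumed to come from the user's event queue.
def apply_events_example(state, events, user_profile):
    for event in events:
        apply_event(state, event, user_profile, include_subscribers=True)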
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """
    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)
    if not (receives_offline_push_notifications(user_profile) or
            receives_online_notifications(user_profile)):
        return

    try:
        (message, user_message) = access_message(user_profile, missed_message['message_id'])
    except JsonableError:
        if ArchivedMessage.objects.filter(id=missed_message['message_id']).exists():
            # If the cause is a race with the message being deleted,
            # that's normal and we have no need to log an error.
            return
        logger.error(
            "Unexpected message access failure handling push notifications: %s %s" % (
                user_profile.id, missed_message['message_id']))
        return

    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        #
        # TODO: It feels like this is already handled when things are
        # put in the queue; maybe we should centralize this logic with
        # the `zerver/tornado/event_queue.py` logic?
        if user_message.flags.read:
            return
        # Otherwise, we mark the message as having an active mobile
        # push notification, so that we can send revocation messages
        # later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logger.error(
                "Could not find UserMessage with message_id %s and user_id %s" % (
                    missed_message['message_id'], user_profile_id))
            return

    message.trigger = missed_message['trigger']

    apns_payload = get_message_payload_apns(user_profile, message)
    gcm_payload, gcm_options = get_message_payload_gcm(user_profile, message)
    logger.info("Sending push notifications to mobile clients for user %s" % (user_profile_id,))

    if uses_notification_bouncer():
        try:
            send_notifications_to_bouncer(user_profile_id,
                                          apns_payload,
                                          gcm_payload,
                                          gcm_options)
        except requests.ConnectionError:
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:push_notification" % (
                        event['user_profile_id'],))
            retry_event('missedmessage_mobile_notifications', missed_message,
                        failure_processor)
        return

    android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                          kind=PushDeviceToken.GCM))
    apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                        kind=PushDeviceToken.APNS))

    if apple_devices:
        send_apple_push_notification(user_profile.id, apple_devices, apns_payload)

    if android_devices:
        send_android_push_notification(android_devices, gcm_payload, gcm_options)
def apply_event(state, event, user_profile, include_subscribers): # type: (Dict[str, Any], Dict[str, Any], UserProfile, bool) -> None if event['type'] == "message": state['max_message_id'] = max(state['max_message_id'], event['message']['id']) elif event['type'] == "hotspots": state['hotspots'] = event['hotspots'] elif event['type'] == "custom_profile_fields": state['custom_profile_fields'] = event['fields'] elif event['type'] == "pointer": state['pointer'] = max(state['pointer'], event['pointer']) elif event['type'] == "realm_user": person = event['person'] def our_person(p): # type: (Dict[str, Any]) -> bool return p['user_id'] == person['user_id'] if event['op'] == "add": state['realm_users'].append(person) elif event['op'] == "remove": state['realm_users'] = [user for user in state['realm_users'] if not our_person(user)] elif event['op'] == 'update': if (person['user_id'] == user_profile.id and 'avatar_url' in person and 'avatar_url' in state): state['avatar_source'] = person['avatar_source'] state['avatar_url'] = person['avatar_url'] state['avatar_url_medium'] = person['avatar_url_medium'] if 'avatar_source' in person: # Drop these so that they don't modify the # `realm_user` structure in the `p.update()` line # later; they're only used in the above lines del person['avatar_source'] del person['avatar_url_medium'] for field in ['is_admin', 'email', 'full_name']: if person['user_id'] == user_profile.id and field in person and field in state: state[field] = person[field] for p in state['realm_users']: if our_person(p): # In the unlikely event that the current user # just changed to/from being an admin, we need # to add/remove the data on all bots in the # realm. This is ugly and probably better # solved by removing the all-realm-bots data # given to admin users from this flow. if ('is_admin' in person and 'realm_bots' in state and user_profile.email == person['email']): if p['is_admin'] and not person['is_admin']: state['realm_bots'] = [] if not p['is_admin'] and person['is_admin']: state['realm_bots'] = get_owned_bot_dicts(user_profile) # Now update the person p.update(person) elif event['type'] == 'realm_bot': if event['op'] == 'add': state['realm_bots'].append(event['bot']) if event['op'] == 'remove': email = event['bot']['email'] for bot in state['realm_bots']: if bot['email'] == email: bot['is_active'] = False if event['op'] == 'update': for bot in state['realm_bots']: if bot['email'] == event['bot']['email']: if 'owner_id' in event['bot']: bot['owner'] = get_user_profile_by_id(event['bot']['owner_id']).email else: bot.update(event['bot']) elif event['type'] == 'stream': if event['op'] == 'create': for stream in event['streams']: if not stream['invite_only']: stream_data = copy.deepcopy(stream) if include_subscribers: stream_data['subscribers'] = [] # Add stream to never_subscribed (if not invite_only) state['never_subscribed'].append(stream_data) state['streams'].append(stream) state['streams'].sort(key=lambda elt: elt["name"]) if event['op'] == 'delete': deleted_stream_ids = {stream['stream_id'] for stream in event['streams']} state['streams'] = [s for s in state['streams'] if s['stream_id'] not in deleted_stream_ids] state['never_subscribed'] = [stream for stream in state['never_subscribed'] if stream['stream_id'] not in deleted_stream_ids] if event['op'] == 'update': # For legacy reasons, we call stream data 'subscriptions' in # the state var here, for the benefit of the JS code. 
for obj in state['subscriptions']: if obj['name'].lower() == event['name'].lower(): obj[event['property']] = event['value'] # Also update the pure streams data for stream in state['streams']: if stream['name'].lower() == event['name'].lower(): prop = event['property'] if prop in stream: stream[prop] = event['value'] elif event['op'] == "occupy": state['streams'] += event['streams'] elif event['op'] == "vacate": stream_ids = [s["stream_id"] for s in event['streams']] state['streams'] = [s for s in state['streams'] if s["stream_id"] not in stream_ids] elif event['type'] == 'default_streams': state['realm_default_streams'] = event['default_streams'] elif event['type'] == 'realm': if event['op'] == "update": field = 'realm_' + event['property'] state[field] = event['value'] # Tricky interaction: Whether we can create streams can get changed here. if (field in ['realm_create_stream_by_admins_only', 'realm_waiting_period_threshold']) and 'can_create_streams' in state: state['can_create_streams'] = user_profile.can_create_streams() elif event['op'] == "update_dict": for key, value in event['data'].items(): state['realm_' + key] = value # It's a bit messy, but this is where we need to # update the state for whether password authentication # is enabled on this server. if key == 'authentication_methods': state['realm_password_auth_enabled'] = (value['Email'] or value['LDAP']) elif event['type'] == "subscription": if not include_subscribers and event['op'] in ['peer_add', 'peer_remove']: return if event['op'] in ["add"]: if include_subscribers: # Convert the emails to user_profile IDs since that's what register() returns # TODO: Clean up this situation by making the event also have IDs for item in event["subscriptions"]: item["subscribers"] = [ get_user(email, user_profile.realm).id for email in item["subscribers"] ] else: # Avoid letting 'subscribers' entries end up in the list for i, sub in enumerate(event['subscriptions']): event['subscriptions'][i] = copy.deepcopy(event['subscriptions'][i]) del event['subscriptions'][i]['subscribers'] def name(sub): # type: (Dict[str, Any]) -> Text return sub['name'].lower() if event['op'] == "add": added_names = set(map(name, event["subscriptions"])) was_added = lambda s: name(s) in added_names # add the new subscriptions state['subscriptions'] += event['subscriptions'] # remove them from unsubscribed if they had been there state['unsubscribed'] = [s for s in state['unsubscribed'] if not was_added(s)] # remove them from never_subscribed if they had been there state['never_subscribed'] = [s for s in state['never_subscribed'] if not was_added(s)] elif event['op'] == "remove": removed_names = set(map(name, event["subscriptions"])) was_removed = lambda s: name(s) in removed_names # Find the subs we are affecting. removed_subs = list(filter(was_removed, state['subscriptions'])) # Remove our user from the subscribers of the removed subscriptions. if include_subscribers: for sub in removed_subs: sub['subscribers'] = [id for id in sub['subscribers'] if id != user_profile.id] # We must effectively copy the removed subscriptions from subscriptions to # unsubscribe, since we only have the name in our data structure. state['unsubscribed'] += removed_subs # Now filter out the removed subscriptions from subscriptions. 
state['subscriptions'] = [s for s in state['subscriptions'] if not was_removed(s)] elif event['op'] == 'update': for sub in state['subscriptions']: if sub['name'].lower() == event['name'].lower(): sub[event['property']] = event['value'] elif event['op'] == 'peer_add': user_id = event['user_id'] for sub in state['subscriptions']: if (sub['name'] in event['subscriptions'] and user_id not in sub['subscribers']): sub['subscribers'].append(user_id) for sub in state['never_subscribed']: if (sub['name'] in event['subscriptions'] and user_id not in sub['subscribers']): sub['subscribers'].append(user_id) elif event['op'] == 'peer_remove': user_id = event['user_id'] for sub in state['subscriptions']: if (sub['name'] in event['subscriptions'] and user_id in sub['subscribers']): sub['subscribers'].remove(user_id) elif event['type'] == "presence": # TODO: Add user_id to presence update events / state format! presence_user_profile = get_user(event['email'], user_profile.realm) state['presences'][event['email']] = UserPresence.get_status_dict_by_user(presence_user_profile)[event['email']] elif event['type'] == "update_message": # The client will get the updated message directly pass elif event['type'] == "delete_message": max_message = Message.objects.filter( usermessage__user_profile=user_profile).order_by('-id').first() if max_message: state['max_message_id'] = max_message.id else: state['max_message_id'] = -1 elif event['type'] == "reaction": # The client will get the message with the reactions directly pass elif event['type'] == 'typing': # Typing notification events are transient and thus ignored pass elif event['type'] == "update_message_flags": # The client will get the message with the updated flags directly pass elif event['type'] == "realm_domains": if event['op'] == 'add': state['realm_domains'].append(event['realm_domain']) elif event['op'] == 'change': for realm_domain in state['realm_domains']: if realm_domain['domain'] == event['realm_domain']['domain']: realm_domain['allow_subdomains'] = event['realm_domain']['allow_subdomains'] elif event['op'] == 'remove': state['realm_domains'] = [realm_domain for realm_domain in state['realm_domains'] if realm_domain['domain'] != event['domain']] elif event['type'] == "realm_emoji": state['realm_emoji'] = event['realm_emoji'] elif event['type'] == "alert_words": state['alert_words'] = event['alert_words'] elif event['type'] == "muted_topics": state['muted_topics'] = event["muted_topics"] elif event['type'] == "realm_filters": state['realm_filters'] = event["realm_filters"] elif event['type'] == "update_display_settings": assert event['setting_name'] in UserProfile.property_types state[event['setting_name']] = event['setting'] elif event['type'] == "update_global_notifications": assert event['notification_name'] in UserProfile.notification_setting_types state[event['notification_name']] = event['setting'] else: raise AssertionError("Unexpected event type %s" % (event['type'],))
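# Illustrative event shapes for the two assertion-guarded branches at the
# end of apply_event() above (setting names and values are examples):
display_event = {
    "type": "update_display_settings",
    "setting_name": "twenty_four_hour_time",
    "setting": True,
}
notification_event = {
    "type": "update_global_notifications",
    "notification_name": "enable_sounds",
    "setting": False,
}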
def consume(self, event): # type: (Mapping[str, Any]) -> None user_profile = get_user_profile_by_id(event["user_profile_id"]) log_time = timestamp_to_datetime(event["time"]) do_update_user_activity_interval(user_profile, log_time)
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """
    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.
    """
    user_profile = get_user_profile_by_id(user_profile_id)
    if not (receives_offline_push_notifications(user_profile) or
            receives_online_notifications(user_profile)):
        return

    (message, user_message) = access_message(user_profile, missed_message['message_id'])
    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        #
        # TODO: It feels like this is already handled when things are
        # put in the queue; maybe we should centralize this logic with
        # the `zerver/tornado/event_queue.py` logic?
        if user_message.flags.read:
            return
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logging.error(
                "Could not find UserMessage with message_id %s and user_id %s" % (
                    missed_message['message_id'], user_profile_id))
            return

    message.trigger = missed_message['trigger']
    message.stream_name = missed_message.get('stream_name', None)

    apns_payload = get_apns_payload(message)
    gcm_payload = get_gcm_payload(user_profile, message)
    logging.info("Sending push notification to user %s" % (user_profile_id,))

    if uses_notification_bouncer():
        try:
            send_notifications_to_bouncer(user_profile_id,
                                          apns_payload,
                                          gcm_payload)
        except requests.ConnectionError:
            def failure_processor(event: Dict[str, Any]) -> None:
                logging.warning(
                    "Maximum retries exceeded for trigger:%s event:push_notification" % (
                        event['user_profile_id']))
            retry_event('missedmessage_mobile_notifications', missed_message,
                        failure_processor)
        return

    android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                          kind=PushDeviceToken.GCM))
    apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                        kind=PushDeviceToken.APNS))

    if apple_devices:
        send_apple_push_notification(user_profile.id, apple_devices, apns_payload)

    if android_devices:
        send_android_push_notification(android_devices, gcm_payload)
def consume(self, event: Dict[str, Any]) -> None: if event['type'] == 'mark_stream_messages_as_read': user_profile = get_user_profile_by_id(event['user_profile_id']) for recipient_id in event['stream_recipient_ids']: do_mark_stream_messages_as_read(user_profile, recipient_id) elif event["type"] == 'mark_stream_messages_as_read_for_everyone': # This event is generated by the stream deactivation code path. batch_size = 100 offset = 0 while True: messages = Message.objects.filter(recipient_id=event["stream_recipient_id"]) \ .order_by("id")[offset:offset + batch_size] UserMessage.objects.filter(message__in=messages).extra(where=[UserMessage.where_unread()]) \ .update(flags=F('flags').bitor(UserMessage.flags.read)) offset += len(messages) if len(messages) < batch_size: break elif event['type'] == 'clear_push_device_tokens': try: clear_push_device_tokens(event["user_profile_id"]) except PushNotificationBouncerRetryLaterError: def failure_processor(event: Dict[str, Any]) -> None: logger.warning( "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens", event['user_profile_id']) retry_event(self.queue_name, event, failure_processor) elif event['type'] == 'realm_export': start = time.time() realm = Realm.objects.get(id=event['realm_id']) output_dir = tempfile.mkdtemp(prefix="zulip-export-") export_event = RealmAuditLog.objects.get(id=event['id']) user_profile = get_user_profile_by_id(event['user_profile_id']) try: public_url = export_realm_wrapper(realm=realm, output_dir=output_dir, threads=6, upload=True, public_only=True, delete_after_upload=True) except Exception: export_event.extra_data = orjson.dumps( dict(failed_timestamp=timezone_now().timestamp(), )).decode() export_event.save(update_fields=['extra_data']) logging.error( "Data export for %s failed after %s", user_profile.realm.string_id, time.time() - start, ) notify_realm_export(user_profile) return assert public_url is not None # Update the extra_data field now that the export is complete. export_event.extra_data = orjson.dumps( dict(export_path=urllib.parse.urlparse(public_url).path, )).decode() export_event.save(update_fields=['extra_data']) # Send a private message notification letting the user who # triggered the export know the export finished. with override_language(user_profile.default_language): content = _( "Your data export is complete and has been uploaded here:\n\n{public_url}" ).format(public_url=public_url) internal_send_private_message( realm=user_profile.realm, sender=get_system_bot(settings.NOTIFICATION_BOT), recipient_user=user_profile, content=content, ) # For future frontend use, also notify administrator # clients that the export happened. notify_realm_export(user_profile) logging.info( "Completed data export for %s in %s", user_profile.realm.string_id, time.time() - start, )
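# Illustrative payloads for the event types this consume() dispatches on;
# all ids are invented for the example.
example_events = [
    {"type": "mark_stream_messages_as_read", "user_profile_id": 10,
     "stream_recipient_ids": [55, 56]},
    {"type": "mark_stream_messages_as_read_for_everyone", "stream_recipient_id": 55},
    {"type": "clear_push_device_tokens", "user_profile_id": 10},
    {"type": "realm_export", "realm_id": 2, "id": 99, "user_profile_id": 10},
]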
def update_message_backend(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int),
                           subject=REQ(default=None),
                           propagate_mode=REQ(default="change_one"),
                           content=REQ(default=None)):
    # type: (HttpRequest, UserProfile, int, Optional[text_type], Optional[str], Optional[text_type]) -> HttpResponse
    if not user_profile.realm.allow_message_editing:
        return json_error(_("Your organization has turned off message editing."))

    try:
        message = Message.objects.select_related().get(id=message_id)
    except Message.DoesNotExist:
        raise JsonableError(_("Unknown message id"))

    # You only have permission to edit a message if:
    # 1. You sent it, OR:
    # 2. This is a topic-only edit for a (no topic) message, OR:
    # 3. This is a topic-only edit and you are an admin.
    if message.sender == user_profile:
        pass
    elif (content is None) and ((message.topic_name() == "(no topic)") or
                                user_profile.is_realm_admin):
        pass
    else:
        raise JsonableError(_("You don't have permission to edit this message"))

    # If there is a change to the content, check that the edit deadline has
    # not passed. Allow an extra 20 seconds since we potentially allow editing
    # 15 seconds past the limit, and in case there are network issues, etc.
    # The 15 comes from (min_seconds_to_edit + seconds_left_buffer) in
    # message_edit.js; if you change this value also change those two
    # parameters in message_edit.js.
    edit_limit_buffer = 20
    if content is not None and user_profile.realm.message_content_edit_limit_seconds > 0:
        deadline_seconds = user_profile.realm.message_content_edit_limit_seconds + edit_limit_buffer
        if (now() - message.pub_date) > datetime.timedelta(seconds=deadline_seconds):
            raise JsonableError(_("The time limit for editing this message has passed"))

    if subject is None and content is None:
        return json_error(_("Nothing to change"))
    if subject is not None:
        subject = subject.strip()
        if subject == "":
            raise JsonableError(_("Topic can't be empty"))
    rendered_content = None
    links_for_embed = set()  # type: Set[text_type]
    if content is not None:
        content = content.strip()
        if content == "":
            content = "(deleted)"
        content = truncate_body(content)

        # We exclude UserMessage.flags.historical rows since those
        # users did not receive the message originally, and thus
        # probably are not relevant for reprocessed alert_words,
        # mentions and similar rendering features.  This may be a
        # decision we change in the future.
        ums = UserMessage.objects.filter(message=message.id,
                                         flags=~UserMessage.flags.historical)
        message_users = {get_user_profile_by_id(um.user_profile_id) for um in ums}
        # If rendering fails, the called code will raise a JsonableError.
        rendered_content = render_incoming_message(message,
                                                   content=content,
                                                   message_users=message_users)
        links_for_embed |= message.links_for_preview

    do_update_message(user_profile, message, subject, propagate_mode, content, rendered_content)
    if links_for_embed and getattr(settings, 'INLINE_URL_EMBED_PREVIEW', None):
        event_data = {'message_id': message.id, 'message_content': message.content,
                      'urls': links_for_embed}
        queue_json_publish('embed_links', event_data, lambda x: None)
    return json_success()
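# The edit-deadline check above reduces to a small predicate; a standalone
# sketch (naive datetimes, parameter names invented for clarity):
import datetime

def edit_window_open(sent_at: datetime.datetime, now: datetime.datetime,
                     limit_seconds: int, buffer_seconds: int = 20) -> bool:
    if limit_seconds == 0:
        return True  # a zero limit means editing is not time-restricted
    return (now - sent_at) <= datetime.timedelta(seconds=limit_seconds + buffer_seconds)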
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """This should be called when a message that previously had a
    mobile push notification executed is read.  This triggers a push to the
    mobile app, when the message is read on the server, to remove the
    message from the notification.
    """
    if not push_notifications_enabled():
        return

    user_profile = get_user_profile_by_id(user_profile_id)

    # We may no longer have access to the message here; for example,
    # the user (1) got a message, (2) read the message in the web UI,
    # and then (3) it was deleted.  When trying to send the push
    # notification for (2), after (3) has happened, there is no
    # message to fetch -- but we nonetheless want to remove the mobile
    # notification.  Because of this, the usual access control of
    # `bulk_access_messages_expect_usermessage` is skipped here.
    # Because of this, no access to the Message objects should be
    # done; they are treated as a list of opaque ints.

    # APNs has a 4KB limit on the maximum size of messages, which
    # translated to several hundred message IDs in one of these
    # notifications. In rare cases, it's possible for someone to mark
    # thousands of push notification eligible messages as read at
    # once. We could handle this situation with a loop, but we choose
    # to truncate instead to avoid extra network traffic, because it's
    # very likely the user has manually cleared the notifications in
    # their mobile device's UI anyway.
    #
    # When truncating, we keep only the newest N messages in this
    # remove event. This is optimal because older messages are the
    # ones most likely to have already been manually cleared at some
    # point in the past.
    #
    # We choose 200 here because a 10-digit message ID plus a comma and
    # space consume 12 bytes, and 12 x 200 = 2400 bytes is still well
    # below the 4KB limit (leaving plenty of space for metadata).
    MAX_APNS_MESSAGE_IDS = 200
    truncated_message_ids = sorted(message_ids)[-MAX_APNS_MESSAGE_IDS:]
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, truncated_message_ids)
    apns_payload = get_remove_payload_apns(user_profile, truncated_message_ids)

    if uses_notification_bouncer():
        send_notifications_to_bouncer(user_profile_id, apns_payload, gcm_payload, gcm_options)
    else:
        user_identity = UserPushIdentityCompat(user_id=user_profile_id)
        android_devices = list(
            PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.GCM))
        apple_devices = list(
            PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.APNS))
        if android_devices:
            send_android_push_notification(user_identity, android_devices,
                                           gcm_payload, gcm_options)
        if apple_devices:
            send_apple_push_notification(user_identity, apple_devices, apns_payload)

    # We intentionally use the non-truncated message_ids here.  We are
    # assuming in this very rare case that the user has manually
    # dismissed these notifications on the device side, and the server
    # should no longer track them as outstanding notifications.
    UserMessage.objects.filter(
        user_profile_id=user_profile_id,
        message_id__in=message_ids,
    ).update(flags=F("flags").bitand(~UserMessage.flags.active_mobile_push_notification))
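# The truncation rule above, in isolation: keep only the newest ids so the
# APNs payload stays under the 4KB limit. A small worked example:
message_ids_example = [9, 3, 7, 1]
MAX_IDS = 2  # 200 in the function above; 2 here for brevity
truncated = sorted(message_ids_example)[-MAX_IDS:]
assert truncated == [7, 9]  # the two newest ids survive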
def build_email(template_prefix, to_user_id=None, to_email=None, from_name=None, from_address=None, reply_to_email=None, context=None): # type: (str, Optional[int], Optional[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Dict[str, Any]]) -> EmailMultiAlternatives # Callers should pass exactly one of to_user_id and to_email. assert (to_user_id is None) ^ (to_email is None) if to_user_id is not None: to_user = get_user_profile_by_id(to_user_id) # Change to formataddr((to_user.full_name, to_user.email)) once # https://github.com/zulip/zulip/issues/4676 is resolved to_email = to_user.email if context is None: context = {} context.update({ 'realm_name_in_notifications': False, 'support_email': FromAddress.SUPPORT, 'email_images_base_uri': settings.ROOT_DOMAIN_URI + '/static/images/emails', 'physical_address': settings.PHYSICAL_ADDRESS, }) subject = loader.render_to_string( template_prefix + '.subject', context=context, using='Jinja2_plaintext').strip().replace('\n', '') message = loader.render_to_string(template_prefix + '.txt', context=context, using='Jinja2_plaintext') try: html_message = loader.render_to_string(template_prefix + '.html', context) except TemplateDoesNotExist: emails_dir = os.path.dirname(template_prefix) template = os.path.basename(template_prefix) compiled_template_prefix = os.path.join(emails_dir, "compiled", template) html_message = loader.render_to_string( compiled_template_prefix + '.html', context) if from_name is None: from_name = "Zulip" if from_address is None: from_address = FromAddress.NOREPLY from_email = formataddr((from_name, from_address)) reply_to = None if reply_to_email is not None: reply_to = [reply_to_email] # Remove the from_name in the reply-to for noreply emails, so that users # see "noreply@..." rather than "Zulip" or whatever the from_name is # when they reply in their email client. elif from_address == FromAddress.NOREPLY: reply_to = [FromAddress.NOREPLY] mail = EmailMultiAlternatives(subject, message, from_email, [to_email], reply_to=reply_to) if html_message is not None: mail.attach_alternative(html_message, 'text/html') return mail
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """
    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)

    if user_profile.is_bot:  # nocoverage
        # We don't expect to reach here for bot users. However, this code exists
        # to find and throw away any pre-existing events in the queue while
        # upgrading from versions before our notifiability logic was implemented.
        # TODO/compatibility: This block can be removed when one can no longer
        # upgrade from versions <= 4.0 to versions >= 5.0
        logger.warning("Send-push-notification event found for bot user %s. Skipping.",
                       user_profile_id)
        return

    if not (user_profile.enable_offline_push_notifications or
            user_profile.enable_online_push_notifications):
        # BUG: Investigate why it's possible to get here.
        return  # nocoverage

    try:
        (message, user_message) = access_message(user_profile, missed_message["message_id"])
    except JsonableError:
        if ArchivedMessage.objects.filter(id=missed_message["message_id"]).exists():
            # If the cause is a race with the message being deleted,
            # that's normal and we have no need to log an error.
            return
        logger.info(
            "Unexpected message access failure handling push notifications: %s %s",
            user_profile.id,
            missed_message["message_id"],
        )
        return

    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        if user_message.flags.read or user_message.flags.active_mobile_push_notification:
            return

        # Otherwise, we mark the message as having an active mobile
        # push notification, so that we can send revocation messages
        # later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logger.error(
                "Could not find UserMessage with message_id %s and user_id %s",
                missed_message["message_id"],
                user_profile_id,
                exc_info=True,
            )
            return

    trigger = missed_message["trigger"]
    mentioned_user_group_name = None
    # mentioned_user_group_id will be None if the user is personally mentioned,
    # regardless of whether they are a member of the mentioned user group in
    # the message or not.
    mentioned_user_group_id = missed_message.get("mentioned_user_group_id")
    if mentioned_user_group_id is not None:
        user_group = UserGroup.objects.get(id=mentioned_user_group_id, realm=user_profile.realm)
        mentioned_user_group_name = user_group.name

    # Soft reactivate if pushing to a long_term_idle user that is personally mentioned
    soft_reactivate_if_personal_notification(user_profile, {trigger}, mentioned_user_group_name)

    apns_payload = get_message_payload_apns(user_profile, message, trigger,
                                            mentioned_user_group_id, mentioned_user_group_name)
    gcm_payload, gcm_options = get_message_payload_gcm(
        user_profile, message, trigger, mentioned_user_group_id, mentioned_user_group_name)
    logger.info("Sending push notifications to mobile clients for user %s", user_profile_id)

    if uses_notification_bouncer():
        total_android_devices, total_apple_devices = send_notifications_to_bouncer(
            user_profile_id, apns_payload, gcm_payload, gcm_options)
        logger.info(
            "Sent mobile push notifications for user %s through bouncer: %s via FCM devices, %s via APNs devices",
            user_profile_id,
            total_android_devices,
            total_apple_devices,
        )
        return

    android_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.GCM))
    apple_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.APNS))

    logger.info(
        "Sending mobile push notifications for local user %s: %s via FCM devices, %s via APNs devices",
        user_profile_id,
        len(android_devices),
        len(apple_devices),
    )
    user_identity = UserPushIdentityCompat(user_id=user_profile.id)

    send_apple_push_notification(user_identity, apple_devices, apns_payload)
    send_android_push_notification(user_identity, android_devices, gcm_payload, gcm_options)
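# Hedged sketch of the queue-worker entry point named in the docstring above.
# The "add"/"remove" dispatch shape is an assumption for illustration, not a
# verbatim copy of the worker.
from typing import Any, Dict

class PushNotificationWorker:
    def consume(self, event: Dict[str, Any]) -> None:
        if event.get("type", "add") == "remove":
            handle_remove_push_notification(event["user_profile_id"],
                                            event["message_ids"])
        else:
            handle_push_notification(event["user_profile_id"], event)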
def import_uploads_s3(bucket_name: str, import_dir: Path, processing_avatars: bool = False,
                      processing_emojis: bool = False) -> None:
    upload_backend = S3UploadBackend()
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)

    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    re_map_foreign_keys_internal(records, 'records', 'realm_id', related_table="realm",
                                 id_field=True)
    timestamp = datetime_to_timestamp(timezone_now())
    if not processing_emojis:
        re_map_foreign_keys_internal(records, 'records', 'user_profile_id',
                                     related_table="user_profile", id_field=True)
    for record in records:
        key = Key(bucket)

        if processing_avatars:
            # For avatars, we need to rehash the user's email with the
            # new server's avatar salt
            avatar_path = user_avatar_path_from_ids(record['user_profile_id'], record['realm_id'])
            key.key = avatar_path
            if record['s3_path'].endswith('.original'):
                key.key += '.original'
        elif processing_emojis:
            # For emojis we follow the function 'upload_emoji_image'
            emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
                realm_id=record['realm_id'],
                emoji_file_name=record['file_name'])
            key.key = emoji_path
            record['last_modified'] = timestamp
        else:
            # Should be kept in sync with its equivalent in zerver/lib/uploads in the
            # function 'upload_message_image'
            s3_file_name = "/".join([
                str(record['realm_id']),
                random_name(18),
                sanitize_name(os.path.basename(record['path']))
            ])
            key.key = s3_file_name
            path_maps['attachment_path'][record['s3_path']] = s3_file_name

        # Exported custom emoji from tools like Slack don't have
        # the data for what user uploaded them in `user_profile_id`.
        if not processing_emojis:
            user_profile_id = int(record['user_profile_id'])
            # Support email gateway bot and other cross-realm messages
            if user_profile_id in id_maps["user_profile"]:
                logging.info("Uploaded by ID mapped user: %s!" % (user_profile_id,))
                user_profile_id = id_maps["user_profile"][user_profile_id]
            user_profile = get_user_profile_by_id(user_profile_id)
            key.set_metadata("user_profile_id", str(user_profile.id))

        key.set_metadata("orig_last_modified", record['last_modified'])
        key.set_metadata("realm_id", str(record['realm_id']))

        # Zulip exports will always have a content-type, but third-party exports might not.
        content_type = record.get("content_type", guess_type(record['s3_path'])[0])
        headers = {'Content-Type': content_type}

        key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers)

    if processing_avatars:
        # Ensure that we have medium-size avatar images for every
        # avatar.  TODO: This implementation is hacky, both in that it
        # does get_user_profile_by_id for each user, and in that it
        # might be better to require the export to just have these.
        for record in records:
            if record['s3_path'].endswith('.original'):
                user_profile = get_user_profile_by_id(record['user_profile_id'])
                upload_backend.ensure_medium_avatar_image(user_profile=user_profile)
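# Illustrative shape of one records.json entry consumed above; every value
# is invented, and real exports may carry additional fields.
example_record = {
    "s3_path": "2/emoji/images/smile.png",
    "path": "2/emoji/images/smile.png",
    "realm_id": 2,
    "user_profile_id": 7,
    "last_modified": 1554990000.0,
    "content_type": "image/png",
    "file_name": "smile.png",
}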
def consume(self, event):
    # type: (Mapping[str, Any]) -> None
    user_profile = get_user_profile_by_id(event["user_profile_id"])
    log_time = timestamp_to_datetime(event["time"])
    do_update_user_activity_interval(user_profile, log_time)
def send_messages(data): # type: (Tuple[int, Sequence[Sequence[int]], Mapping[str, Any], Callable[[str], Any], int]) -> int (tot_messages, personals_pairs, options, output, random_seed) = data random.seed(random_seed) with open("var/test_messages.json", "r") as infile: dialog = ujson.load(infile) random.shuffle(dialog) texts = itertools.cycle(dialog) recipient_streams = [ klass.id for klass in Recipient.objects.filter(type=Recipient.STREAM) ] # type: List[int] recipient_huddles = [ h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE) ] # type: List[int] huddle_members = {} # type: Dict[int, List[int]] for h in recipient_huddles: huddle_members[h] = [ s.user_profile.id for s in Subscription.objects.filter(recipient_id=h) ] num_messages = 0 random_max = 1000000 recipients = {} # type: Dict[int, Tuple[int, int, Dict[str, Any]]] while num_messages < tot_messages: saved_data = {} # type: Dict[str, Any] message = Message() message.sending_client = get_client('populate_db') message.content = next(texts) randkey = random.randint(1, random_max) if (num_messages > 0 and random.randint(1, random_max) * 100. / random_max < options["stickyness"]): # Use an old recipient message_type, recipient_id, saved_data = recipients[num_messages - 1] if message_type == Recipient.PERSONAL: personals_pair = saved_data['personals_pair'] random.shuffle(personals_pair) elif message_type == Recipient.STREAM: message.subject = saved_data['subject'] message.recipient = get_recipient_by_id(recipient_id) elif message_type == Recipient.HUDDLE: message.recipient = get_recipient_by_id(recipient_id) elif (randkey <= random_max * options["percent_huddles"] / 100.): message_type = Recipient.HUDDLE message.recipient = get_recipient_by_id( random.choice(recipient_huddles)) elif (randkey <= random_max * (options["percent_huddles"] + options["percent_personals"]) / 100.): message_type = Recipient.PERSONAL personals_pair = random.choice(personals_pairs) random.shuffle(personals_pair) elif (randkey <= random_max * 1.0): message_type = Recipient.STREAM message.recipient = get_recipient_by_id( random.choice(recipient_streams)) if message_type == Recipient.HUDDLE: sender_id = random.choice(huddle_members[message.recipient.id]) message.sender = get_user_profile_by_id(sender_id) elif message_type == Recipient.PERSONAL: message.recipient = Recipient.objects.get( type=Recipient.PERSONAL, type_id=personals_pair[0]) message.sender = get_user_profile_by_id(personals_pair[1]) saved_data['personals_pair'] = personals_pair elif message_type == Recipient.STREAM: stream = Stream.objects.get(id=message.recipient.type_id) # Pick a random subscriber to the stream message.sender = random.choice( Subscription.objects.filter( recipient=message.recipient)).user_profile message.subject = stream.name + Text(random.randint(1, 3)) saved_data['subject'] = message.subject message.pub_date = timezone_now() do_send_messages([{'message': message}]) recipients[num_messages] = (message_type, message.recipient.id, saved_data) num_messages += 1 return tot_messages
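# The recipient-type selection above is a cumulative-threshold draw over a
# single random key; distilled here with invented percentages:
import random

def pick_recipient_type(percent_huddles: float = 15.0,
                        percent_personals: float = 15.0,
                        random_max: int = 1000000) -> str:
    randkey = random.randint(1, random_max)
    if randkey <= random_max * percent_huddles / 100.0:
        return "huddle"
    if randkey <= random_max * (percent_huddles + percent_personals) / 100.0:
        return "personal"
    return "stream"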
def consume(self, event):
    # type: (Mapping[str, Any]) -> None
    user_profile = get_user_profile_by_id(event["user_profile_id"])
    client = get_client(event["client"])
    log_time = timestamp_to_datetime(event["time"])
    query = event["query"]
    do_update_user_activity(user_profile, client, query, log_time)
def consume(self, event: Dict[str, Any]) -> None: if event['type'] == 'mark_stream_messages_as_read': user_profile = get_user_profile_by_id(event['user_profile_id']) client = Client.objects.get(id=event['client_id']) for stream_id in event['stream_ids']: # Since the user just unsubscribed, we don't require # an active Subscription object (otherwise, private # streams would never be accessible) (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id, require_active=False) do_mark_stream_messages_as_read(user_profile, client, stream) elif event['type'] == 'clear_push_device_tokens': try: clear_push_device_tokens(event["user_profile_id"]) except PushNotificationBouncerRetryLaterError: def failure_processor(event: Dict[str, Any]) -> None: logger.warning( "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens", event['user_profile_id']) retry_event(self.queue_name, event, failure_processor) elif event['type'] == 'realm_export': start = time.time() realm = Realm.objects.get(id=event['realm_id']) output_dir = tempfile.mkdtemp(prefix="zulip-export-") export_event = RealmAuditLog.objects.get(id=event['id']) user_profile = get_user_profile_by_id(event['user_profile_id']) try: public_url = export_realm_wrapper(realm=realm, output_dir=output_dir, threads=6, upload=True, public_only=True, delete_after_upload=True) except Exception: export_event.extra_data = ujson.dumps(dict( failed_timestamp=timezone_now().timestamp(), )) export_event.save(update_fields=['extra_data']) logging.error( "Data export for %s failed after %s", user_profile.realm.string_id, time.time() - start, ) notify_realm_export(user_profile) return assert public_url is not None # Update the extra_data field now that the export is complete. export_event.extra_data = ujson.dumps(dict( export_path=urllib.parse.urlparse(public_url).path, )) export_event.save(update_fields=['extra_data']) # Send a private message notification letting the user who # triggered the export know the export finished. content = f"Your data export is complete and has been uploaded here:\n\n{public_url}" internal_send_private_message( realm=user_profile.realm, sender=get_system_bot(settings.NOTIFICATION_BOT), recipient_user=user_profile, content=content, ) # For future frontend use, also notify administrator # clients that the export happened. notify_realm_export(user_profile) logging.info( "Completed data export for %s in %s", user_profile.realm.string_id, time.time() - start, )
def generate_and_send_messages(
    data: Tuple[int, Sequence[Sequence[int]], Mapping[str, Any], Callable[[str], Any], int]
) -> int:
    (tot_messages, personals_pairs, options, output, random_seed) = data
    random.seed(random_seed)

    with open(
        os.path.join(get_or_create_dev_uuid_var_path("test-backend"), "test_messages.json"),
        "rb") as infile:
        dialog = orjson.loads(infile.read())
    random.shuffle(dialog)
    texts = itertools.cycle(dialog)

    # We need to filter out streams from the analytics realm as we don't want to generate
    # messages to its streams - and they might also have no subscribers, which would break
    # our message generation mechanism below.
    stream_ids = Stream.objects.filter(realm=get_realm("zulip")).values_list("id", flat=True)
    recipient_streams: List[int] = [
        recipient.id
        for recipient in Recipient.objects.filter(type=Recipient.STREAM, type_id__in=stream_ids)
    ]
    recipient_huddles: List[int] = [h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)]

    huddle_members: Dict[int, List[int]] = {}
    for h in recipient_huddles:
        huddle_members[h] = [s.user_profile.id for s in
                             Subscription.objects.filter(recipient_id=h)]

    # Generate different topics for each stream
    possible_topics = {}
    for stream_id in recipient_streams:
        possible_topics[stream_id] = generate_topics(options["max_topics"])

    message_batch_size = options["batch_size"]
    num_messages = 0
    random_max = 1000000
    recipients: Dict[int, Tuple[int, int, Dict[str, Any]]] = {}
    messages: List[Message] = []

    while num_messages < tot_messages:
        saved_data: Dict[str, Any] = {}
        message = Message()
        message.sending_client = get_client("populate_db")

        message.content = next(texts)

        randkey = random.randint(1, random_max)
        if (num_messages > 0 and
                random.randint(1, random_max) * 100.0 / random_max < options["stickyness"]):
            # Use an old recipient
            message_type, recipient_id, saved_data = recipients[num_messages - 1]
            if message_type == Recipient.PERSONAL:
                personals_pair = saved_data["personals_pair"]
                random.shuffle(personals_pair)
            elif message_type == Recipient.STREAM:
                message.subject = saved_data["subject"]
                message.recipient = get_recipient_by_id(recipient_id)
            elif message_type == Recipient.HUDDLE:
                message.recipient = get_recipient_by_id(recipient_id)
        elif randkey <= random_max * options["percent_huddles"] / 100.0:
            message_type = Recipient.HUDDLE
            message.recipient = get_recipient_by_id(random.choice(recipient_huddles))
        elif (randkey <= random_max *
                (options["percent_huddles"] + options["percent_personals"]) / 100.0):
            message_type = Recipient.PERSONAL
            personals_pair = random.choice(personals_pairs)
            random.shuffle(personals_pair)
        elif randkey <= random_max * 1.0:
            message_type = Recipient.STREAM
            message.recipient = get_recipient_by_id(random.choice(recipient_streams))

        if message_type == Recipient.HUDDLE:
            sender_id = random.choice(huddle_members[message.recipient.id])
            message.sender = get_user_profile_by_id(sender_id)
        elif message_type == Recipient.PERSONAL:
            message.recipient = Recipient.objects.get(type=Recipient.PERSONAL,
                                                      type_id=personals_pair[0])
            message.sender = get_user_profile_by_id(personals_pair[1])
            saved_data["personals_pair"] = personals_pair
        elif message_type == Recipient.STREAM:
            # Pick a random subscriber to the stream
            message.sender = random.choice(
                Subscription.objects.filter(recipient=message.recipient)).user_profile
            message.subject = random.choice(possible_topics[message.recipient.id])
            saved_data["subject"] = message.subject

        message.date_sent = choose_date_sent(num_messages, tot_messages, options["threads"])
        messages.append(message)

        recipients[num_messages] = (message_type, message.recipient.id, saved_data)
        num_messages += 1

        if (num_messages % message_batch_size) == 0:
            # Send the batch and empty the list:
            send_messages(messages)
            messages = []

    if len(messages) > 0:
        # If there are unsent messages after exiting the loop, send them:
        send_messages(messages)

    return tot_messages
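# A minimal usage sketch, assuming a populated dev database. The option
# values and user-ID pairs below are illustrative placeholders, not the
# tool's real defaults; in practice this function is designed to be mapped
# over a worker pool.
options = {
    "stickyness": 20,        # percent chance of reusing the previous recipient
    "percent_huddles": 15,
    "percent_personals": 15,
    "max_topics": 10,
    "batch_size": 100,
    "threads": 10,
}
personals_pairs = [[1, 2], [2, 3]]  # hypothetical user-ID pairs
generate_and_send_messages((500, personals_pairs, options, print, 42))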
def import_uploads_s3(bucket_name: str, import_dir: Path, processing_avatars: bool = False,
                      processing_emojis: bool = False) -> None:
    upload_backend = S3UploadBackend()
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)

    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    re_map_foreign_keys_internal(records, 'records', 'realm_id', related_table="realm",
                                 id_field=True)
    re_map_foreign_keys_internal(records, 'records', 'user_profile_id',
                                 related_table="user_profile", id_field=True)
    for record in records:
        key = Key(bucket)

        if processing_avatars:
            # For avatars, we need to rehash the user's email with the
            # new server's avatar salt
            avatar_path = user_avatar_path_from_ids(record['user_profile_id'],
                                                    record['realm_id'])
            key.key = avatar_path
            if record['s3_path'].endswith('.original'):
                key.key += '.original'
        elif processing_emojis:
            # For emojis we follow the function 'upload_emoji_image'
            emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
                realm_id=record['realm_id'],
                emoji_file_name=record['file_name'])
            key.key = emoji_path
        else:
            # Should be kept in sync with its equivalent in zerver/lib/uploads in the
            # function 'upload_message_image'
            s3_file_name = "/".join([
                str(record['realm_id']),
                random_name(18),
                sanitize_name(os.path.basename(record['path'])),
            ])
            key.key = s3_file_name
            path_maps['attachment_path'][record['path']] = s3_file_name

        user_profile_id = int(record['user_profile_id'])
        # Support email gateway bot and other cross-realm messages
        if user_profile_id in id_maps["user_profile"]:
            logging.info("Uploaded by ID mapped user: %s!" % (user_profile_id,))
            user_profile_id = id_maps["user_profile"][user_profile_id]
        user_profile = get_user_profile_by_id(user_profile_id)
        key.set_metadata("user_profile_id", str(user_profile.id))
        key.set_metadata("realm_id", str(user_profile.realm_id))
        key.set_metadata("orig_last_modified", record['last_modified'])

        headers = {'Content-Type': record['content_type']}

        key.set_contents_from_filename(os.path.join(import_dir, record['path']),
                                       headers=headers)

        if processing_avatars:
            # TODO: Ideally, we'd do this in a separate pass, after
            # all the avatars have been uploaded, since we may end up
            # unnecessarily resizing images just before the medium-size
            # image in the export is uploaded. See the local uploads
            # code path for more notes.
            upload_backend.ensure_medium_avatar_image(user_profile=user_profile)
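# Usage sketch for the avatar code path; the bucket name and import
# directory are placeholders, not values from a real import.
import_uploads_s3("example-zulip-avatars", "/tmp/zulip-import/avatars",
                  processing_avatars=True)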
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """
    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)
    if user_profile.is_bot:
        # BUG: Investigate why it's possible to get here.
        return  # nocoverage

    if not (user_profile.enable_offline_push_notifications
            or user_profile.enable_online_push_notifications):
        # BUG: Investigate why it's possible to get here.
        return  # nocoverage

    try:
        (message, user_message) = access_message(user_profile, missed_message["message_id"])
    except JsonableError:
        if ArchivedMessage.objects.filter(id=missed_message["message_id"]).exists():
            # If the cause is a race with the message being deleted,
            # that's normal and we have no need to log an error.
            return
        logging.info(
            "Unexpected message access failure handling push notifications: %s %s",
            user_profile.id, missed_message["message_id"],
        )
        return

    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        if user_message.flags.read or user_message.flags.active_mobile_push_notification:
            return

        # Otherwise, we mark the message as having an active mobile
        # push notification, so that we can send revocation messages
        # later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logger.error(
                "Could not find UserMessage with message_id %s and user_id %s",
                missed_message["message_id"], user_profile_id,
                exc_info=True,
            )
            return

    trigger = missed_message["trigger"]
    mentioned_user_group_name = None
    mentioned_user_group_id = missed_message.get("mentioned_user_group_id")

    if mentioned_user_group_id is not None:
        user_group = access_user_group_by_id(mentioned_user_group_id, user_profile,
                                             for_mention=True)
        mentioned_user_group_name = user_group.name

    apns_payload = get_message_payload_apns(user_profile, message, trigger,
                                            mentioned_user_group_id,
                                            mentioned_user_group_name)
    gcm_payload, gcm_options = get_message_payload_gcm(user_profile, message, trigger,
                                                       mentioned_user_group_id,
                                                       mentioned_user_group_name)
    logger.info("Sending push notifications to mobile clients for user %s", user_profile_id)

    if uses_notification_bouncer():
        total_android_devices, total_apple_devices = send_notifications_to_bouncer(
            user_profile_id, apns_payload, gcm_payload, gcm_options)
        logger.info(
            "Sent mobile push notifications for user %s through bouncer: "
            "%s via FCM devices, %s via APNs devices",
            user_profile_id, total_android_devices, total_apple_devices,
        )
        return

    android_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.GCM))
    apple_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.APNS))

    logger.info(
        "Sending mobile push notifications for local user %s: "
        "%s via FCM devices, %s via APNs devices",
        user_profile_id, len(android_devices), len(apple_devices),
    )
    send_apple_push_notification(user_profile.id, apple_devices, apns_payload)
    send_android_push_notification(user_profile.id, android_devices, gcm_payload, gcm_options)
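# A hypothetical sketch of the missed_message event dict read above; only
# the keys this function accesses are shown, and the values (including the
# trigger string) are illustrative.
example_missed_message = {
    'message_id': 12345,
    'trigger': 'private_message',
    'mentioned_user_group_id': None,  # optional; set for group mentions
}
handle_push_notification(user_profile_id=7, missed_message=example_missed_message)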
def build_email(
    template_prefix: str,
    to_user_ids: Optional[List[int]] = None,
    to_emails: Optional[List[str]] = None,
    from_name: Optional[str] = None,
    from_address: Optional[str] = None,
    reply_to_email: Optional[str] = None,
    language: Optional[str] = None,
    context: Mapping[str, Any] = {},
    realm: Optional[Realm] = None,
) -> EmailMultiAlternatives:
    # Callers should pass exactly one of to_user_ids and to_emails.
    assert (to_user_ids is None) ^ (to_emails is None)
    if to_user_ids is not None:
        to_users = [get_user_profile_by_id(to_user_id) for to_user_id in to_user_ids]
        if realm is None:
            assert len({to_user.realm_id for to_user in to_users}) == 1
            realm = to_users[0].realm
        to_emails = [
            str(Address(display_name=to_user.full_name, addr_spec=to_user.delivery_email))
            for to_user in to_users
        ]

    extra_headers = {}
    if realm is not None:
        # formataddr is meant for formatting (display_name, email_address) pairs for
        # headers like "To", but we can use it for formatting the List-Id header, which
        # follows the same format, except with just a domain instead of an email address.
        extra_headers["List-Id"] = formataddr((realm.name, realm.host))

    context = {
        **context,
        "support_email": FromAddress.SUPPORT,
        "email_images_base_uri": settings.ROOT_DOMAIN_URI + "/static/images/emails",
        "physical_address": settings.PHYSICAL_ADDRESS,
    }

    def render_templates() -> Tuple[str, str, str]:
        email_subject = (
            loader.render_to_string(
                template_prefix + ".subject.txt", context=context, using="Jinja2_plaintext"
            )
            .strip()
            .replace("\n", "")
        )
        message = loader.render_to_string(
            template_prefix + ".txt", context=context, using="Jinja2_plaintext"
        )

        try:
            html_message = loader.render_to_string(template_prefix + ".html", context)
        except TemplateDoesNotExist:
            emails_dir = os.path.dirname(template_prefix)
            template = os.path.basename(template_prefix)
            compiled_template_prefix = os.path.join(emails_dir, "compiled", template)
            html_message = loader.render_to_string(compiled_template_prefix + ".html", context)
        return (html_message, message, email_subject)

    # The i18n story for emails is a bit complicated.  For emails
    # going to a single user, we want to use the language that user
    # has configured for their Zulip account.  For emails going to
    # multiple users or to email addresses without a known Zulip
    # account (e.g. invitations), we want to use the default language
    # configured for the Zulip organization.
    #
    # See our i18n documentation for some high-level details:
    # https://zulip.readthedocs.io/en/latest/translating/internationalization.html
    if not language and to_user_ids is not None:
        language = to_users[0].default_language
    if language:
        with override_language(language):
            # Make sure that we render the email using the target's native language
            (html_message, message, email_subject) = render_templates()
    else:
        (html_message, message, email_subject) = render_templates()
        logger.warning("Missing language for email template '%s'", template_prefix)

    if from_name is None:
        from_name = "Zulip"
    if from_address is None:
        from_address = FromAddress.NOREPLY
    if from_address == FromAddress.tokenized_no_reply_placeholder:
        from_address = FromAddress.tokenized_no_reply_address()
    if from_address == FromAddress.no_reply_placeholder:
        from_address = FromAddress.NOREPLY
    if from_address == FromAddress.support_placeholder:
        from_address = FromAddress.SUPPORT

    # Set the "From" that is displayed separately from the envelope-from.
    extra_headers["From"] = str(Address(display_name=from_name, addr_spec=from_address))
    # Check ASCII encoding length.  Amazon SES rejects emails with
    # From names longer than 320 characters (which appears to be a
    # misinterpretation of the RFC); in that case we drop the name
    # from the From line, under the theory that it's better to send
    # the email with a simplified From field than not.
    if len(sanitize_address(extra_headers["From"], "utf-8")) > 320:
        extra_headers["From"] = str(Address(addr_spec=from_address))

    # If we have an unsubscribe link for this email, configure it for
    # "Unsubscribe" buttons in email clients via the List-Unsubscribe header.
    #
    # Note that Microsoft ignores URLs in List-Unsubscribe headers, as
    # they only support the alternative `mailto:` format, which we
    # have not implemented.
    if "unsubscribe_link" in context:
        extra_headers["List-Unsubscribe"] = f"<{context['unsubscribe_link']}>"
        extra_headers["List-Unsubscribe-Post"] = "List-Unsubscribe=One-Click"

    reply_to = None
    if reply_to_email is not None:
        reply_to = [reply_to_email]
    # Remove the from_name in the reply-to for noreply emails, so that users
    # see "noreply@..." rather than "Zulip" or whatever the from_name is
    # when they reply in their email client.
    elif from_address == FromAddress.NOREPLY:
        reply_to = [FromAddress.NOREPLY]

    envelope_from = FromAddress.NOREPLY
    mail = EmailMultiAlternatives(
        email_subject, message, envelope_from, to_emails,
        reply_to=reply_to, headers=extra_headers
    )
    if html_message is not None:
        mail.attach_alternative(html_message, "text/html")
    return mail
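# Usage sketch, assuming a configured Django environment; the template
# prefix matches one used elsewhere in this codebase, while the user ID and
# unsubscribe URL are hypothetical.
mail = build_email(
    "zerver/emails/invitation_reminder",
    to_user_ids=[7],
    context={"unsubscribe_link": "https://example.com/unsubscribe"},
)
mail.send()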
def apply_event(state: Dict[str, Any],
                event: Dict[str, Any],
                user_profile: UserProfile,
                client_gravatar: bool,
                include_subscribers: bool) -> None:
    if event['type'] == "message":
        state['max_message_id'] = max(state['max_message_id'], event['message']['id'])
        if 'raw_unread_msgs' in state:
            apply_unread_message_event(
                user_profile,
                state['raw_unread_msgs'],
                event['message'],
                event['flags'],
            )

        if event['message']['type'] != "stream":
            if 'raw_recent_private_conversations' in state:
                # Handle maintaining the recent_private_conversations data structure.
                conversations = state['raw_recent_private_conversations']
                recipient_id = get_recent_conversations_recipient_id(
                    user_profile, event['message']['recipient_id'],
                    event['message']["sender_id"])

                if recipient_id not in conversations:
                    conversations[recipient_id] = dict(
                        user_ids=[user_dict['id'] for user_dict in
                                  event['message']['display_recipient']
                                  if user_dict['id'] != user_profile.id]
                    )
                conversations[recipient_id]['max_message_id'] = event['message']['id']
            return

        # Below, we handle maintaining first_message_id.
        for sub_dict in state.get('subscriptions', []):
            if event['message']['stream_id'] == sub_dict['stream_id']:
                if sub_dict['first_message_id'] is None:
                    sub_dict['first_message_id'] = event['message']['id']
        for stream_dict in state.get('streams', []):
            if event['message']['stream_id'] == stream_dict['stream_id']:
                if stream_dict['first_message_id'] is None:
                    stream_dict['first_message_id'] = event['message']['id']

    elif event['type'] == "hotspots":
        state['hotspots'] = event['hotspots']
    elif event['type'] == "custom_profile_fields":
        state['custom_profile_fields'] = event['fields']
    elif event['type'] == "pointer":
        state['pointer'] = max(state['pointer'], event['pointer'])
    elif event['type'] == "realm_user":
        person = event['person']
        person_user_id = person['user_id']

        if event['op'] == "add":
            person = copy.deepcopy(person)
            if client_gravatar:
                if 'gravatar.com' in person['avatar_url']:
                    person['avatar_url'] = None
            person['is_active'] = True
            if not person['is_bot']:
                person['profile_data'] = {}
            state['raw_users'][person_user_id] = person
        elif event['op'] == "remove":
            state['raw_users'][person_user_id]['is_active'] = False
        elif event['op'] == 'update':
            is_me = (person_user_id == user_profile.id)

            if is_me:
                if ('avatar_url' in person and 'avatar_url' in state):
                    state['avatar_source'] = person['avatar_source']
                    state['avatar_url'] = person['avatar_url']
                    state['avatar_url_medium'] = person['avatar_url_medium']

                for field in ['is_admin', 'delivery_email', 'email', 'full_name']:
                    if field in person and field in state:
                        state[field] = person[field]

                # In the unlikely event that the current user
                # just changed to/from being an admin, we need
                # to add/remove the data on all bots in the
                # realm.  This is ugly and probably better
                # solved by removing the all-realm-bots data
                # given to admin users from this flow.
                if ('is_admin' in person and 'realm_bots' in state):
                    prev_state = state['raw_users'][user_profile.id]
                    was_admin = prev_state['is_admin']
                    now_admin = person['is_admin']

                    if was_admin and not now_admin:
                        state['realm_bots'] = []
                    if not was_admin and now_admin:
                        state['realm_bots'] = get_owned_bot_dicts(user_profile)

            if client_gravatar and 'avatar_url' in person:
                # Respect the client_gravatar setting in the `users` data.
                if 'gravatar.com' in person['avatar_url']:
                    person['avatar_url'] = None
                    person['avatar_url_medium'] = None

            if person_user_id in state['raw_users']:
                p = state['raw_users'][person_user_id]
                for field in p:
                    if field in person:
                        p[field] = person[field]

                if 'custom_profile_field' in person:
                    custom_field_id = person['custom_profile_field']['id']
                    custom_field_new_value = person['custom_profile_field']['value']
                    if 'rendered_value' in person['custom_profile_field']:
                        p['profile_data'][custom_field_id] = {
                            'value': custom_field_new_value,
                            'rendered_value': person['custom_profile_field']['rendered_value'],
                        }
                    else:
                        p['profile_data'][custom_field_id] = {
                            'value': custom_field_new_value,
                        }

    elif event['type'] == 'realm_bot':
        if event['op'] == 'add':
            state['realm_bots'].append(event['bot'])

        if event['op'] == 'remove':
            email = event['bot']['email']
            for bot in state['realm_bots']:
                if bot['email'] == email:
                    bot['is_active'] = False

        if event['op'] == 'delete':
            state['realm_bots'] = [item for item in state['realm_bots']
                                   if item['email'] != event['bot']['email']]

        if event['op'] == 'update':
            for bot in state['realm_bots']:
                if bot['email'] == event['bot']['email']:
                    if 'owner_id' in event['bot']:
                        bot['owner'] = get_user_profile_by_id(event['bot']['owner_id']).email
                    else:
                        bot.update(event['bot'])

    elif event['type'] == 'stream':
        if event['op'] == 'create':
            for stream in event['streams']:
                if not stream['invite_only']:
                    stream_data = copy.deepcopy(stream)
                    if include_subscribers:
                        stream_data['subscribers'] = []
                    stream_data['stream_weekly_traffic'] = None
                    stream_data['is_old_stream'] = False
                    stream_data['is_announcement_only'] = False
                    # Add stream to never_subscribed (if not invite_only)
                    state['never_subscribed'].append(stream_data)
                state['streams'].append(stream)
            state['streams'].sort(key=lambda elt: elt["name"])

        if event['op'] == 'delete':
            deleted_stream_ids = {stream['stream_id'] for stream in event['streams']}
            state['streams'] = [s for s in state['streams']
                                if s['stream_id'] not in deleted_stream_ids]
            state['never_subscribed'] = [stream for stream in state['never_subscribed']
                                         if stream['stream_id'] not in deleted_stream_ids]

        if event['op'] == 'update':
            # For legacy reasons, we call stream data 'subscriptions' in
            # the state var here, for the benefit of the JS code.
            for obj in state['subscriptions']:
                if obj['name'].lower() == event['name'].lower():
                    obj[event['property']] = event['value']
                    if event['property'] == "description":
                        obj['rendered_description'] = event['rendered_description']
            # Also update the pure streams data
            for stream in state['streams']:
                if stream['name'].lower() == event['name'].lower():
                    prop = event['property']
                    if prop in stream:
                        stream[prop] = event['value']
                        if prop == 'description':
                            stream['rendered_description'] = event['rendered_description']
        elif event['op'] == "occupy":
            state['streams'] += event['streams']
        elif event['op'] == "vacate":
            stream_ids = [s["stream_id"] for s in event['streams']]
            state['streams'] = [s for s in state['streams']
                                if s["stream_id"] not in stream_ids]
    elif event['type'] == 'default_streams':
        state['realm_default_streams'] = event['default_streams']
    elif event['type'] == 'default_stream_groups':
        state['realm_default_stream_groups'] = event['default_stream_groups']
    elif event['type'] == 'realm':
        if event['op'] == "update":
            field = 'realm_' + event['property']
            state[field] = event['value']

            if event['property'] == 'plan_type':
                # Then there are some extra fields that also need to be set.
                state['plan_includes_wide_organization_logo'] = \
                    event['value'] != Realm.LIMITED
                state['realm_upload_quota'] = event['extra_data']['upload_quota']

            # Tricky interaction: Whether we can create streams can get changed here.
            if (field in ['realm_create_stream_policy',
                          'realm_waiting_period_threshold']) and \
                    'can_create_streams' in state:
                state['can_create_streams'] = user_profile.can_create_streams()
            if (field in ['realm_invite_to_stream_policy',
                          'realm_waiting_period_threshold']) and \
                    'can_subscribe_other_users' in state:
                state['can_subscribe_other_users'] = user_profile.can_subscribe_other_users()
        elif event['op'] == "update_dict":
            for key, value in event['data'].items():
                state['realm_' + key] = value
                # It's a bit messy, but this is where we need to
                # update the state for whether password authentication
                # is enabled on this server.
                if key == 'authentication_methods':
                    state['realm_password_auth_enabled'] = (value['Email'] or value['LDAP'])
                    state['realm_email_auth_enabled'] = value['Email']
    elif event['type'] == "subscription":
        if not include_subscribers and event['op'] in ['peer_add', 'peer_remove']:
            return

        if event['op'] in ["add"]:
            if not include_subscribers:
                # Avoid letting 'subscribers' entries end up in the list
                for i, sub in enumerate(event['subscriptions']):
                    event['subscriptions'][i] = copy.deepcopy(event['subscriptions'][i])
                    del event['subscriptions'][i]['subscribers']

        def name(sub: Dict[str, Any]) -> str:
            return sub['name'].lower()

        if event['op'] == "add":
            added_names = set(map(name, event["subscriptions"]))
            was_added = lambda s: name(s) in added_names

            # add the new subscriptions
            state['subscriptions'] += event['subscriptions']

            # remove them from unsubscribed if they had been there
            state['unsubscribed'] = [s for s in state['unsubscribed'] if not was_added(s)]

            # remove them from never_subscribed if they had been there
            state['never_subscribed'] = [s for s in state['never_subscribed']
                                         if not was_added(s)]

        elif event['op'] == "remove":
            removed_names = set(map(name, event["subscriptions"]))
            was_removed = lambda s: name(s) in removed_names

            # Find the subs we are affecting.
            removed_subs = list(filter(was_removed, state['subscriptions']))

            # Remove our user from the subscribers of the removed subscriptions.
            if include_subscribers:
                for sub in removed_subs:
                    sub['subscribers'] = [id for id in sub['subscribers']
                                          if id != user_profile.id]

            # We must effectively copy the removed subscriptions from subscriptions to
            # unsubscribed, since we only have the name in our data structure.
            state['unsubscribed'] += removed_subs

            # Now filter out the removed subscriptions from subscriptions.
            state['subscriptions'] = [s for s in state['subscriptions']
                                      if not was_removed(s)]

        elif event['op'] == 'update':
            for sub in state['subscriptions']:
                if sub['name'].lower() == event['name'].lower():
                    sub[event['property']] = event['value']
        elif event['op'] == 'peer_add':
            user_id = event['user_id']
            for sub in state['subscriptions']:
                if (sub['name'] in event['subscriptions'] and
                        user_id not in sub['subscribers']):
                    sub['subscribers'].append(user_id)
            for sub in state['never_subscribed']:
                if (sub['name'] in event['subscriptions'] and
                        user_id not in sub['subscribers']):
                    sub['subscribers'].append(user_id)
        elif event['op'] == 'peer_remove':
            user_id = event['user_id']
            for sub in state['subscriptions']:
                if (sub['name'] in event['subscriptions'] and
                        user_id in sub['subscribers']):
                    sub['subscribers'].remove(user_id)
    elif event['type'] == "presence":
        # TODO: Add user_id to presence update events / state format!
        presence_user_profile = get_user(event['email'], user_profile.realm)
        state['presences'][event['email']] = UserPresence.get_status_dict_by_user(
            presence_user_profile)[event['email']]
    elif event['type'] == "update_message":
        # We don't return messages in /register, so we don't need to
        # do anything for content updates, but we may need to update
        # the unread_msgs data if the topic of an unread message changed.
        if TOPIC_NAME in event:
            stream_dict = state['raw_unread_msgs']['stream_dict']
            topic = event[TOPIC_NAME]
            for message_id in event['message_ids']:
                if message_id in stream_dict:
                    stream_dict[message_id]['topic'] = topic
    elif event['type'] == "delete_message":
        max_message = Message.objects.filter(
            usermessage__user_profile=user_profile).order_by('-id').first()
        if max_message:
            state['max_message_id'] = max_message.id
        else:
            state['max_message_id'] = -1

        if 'raw_unread_msgs' in state:
            remove_id = event['message_id']
            remove_message_id_from_unread_mgs(state['raw_unread_msgs'], remove_id)

        # The remainder of this block is about maintaining recent_private_conversations
        if 'raw_recent_private_conversations' not in state or \
                event['message_type'] != 'private':
            return

        recipient_id = get_recent_conversations_recipient_id(user_profile,
                                                             event['recipient_id'],
                                                             event['sender_id'])

        # Ideally, we'd have test coverage for these two blocks.  To
        # do that, we'll need a test where we delete not-the-latest
        # messages or delete a private message not in
        # recent_private_conversations.
        if recipient_id not in state['raw_recent_private_conversations']:  # nocoverage
            return

        old_max_message_id = \
            state['raw_recent_private_conversations'][recipient_id]['max_message_id']
        if old_max_message_id != event['message_id']:  # nocoverage
            return

        # OK, we just deleted what had been the max_message_id for
        # this recent conversation; we need to recompute that value
        # from scratch.  Definitely don't need to re-query everything,
        # but this case is likely rare enough that it's reasonable to do so.
        state['raw_recent_private_conversations'] = \
            get_recent_private_conversations(user_profile)
    elif event['type'] == "reaction":
        # The client will get the message with the reactions directly
        pass
    elif event['type'] == "submessage":
        # The client will get submessages with their messages
        pass
    elif event['type'] == 'typing':
        # Typing notification events are transient and thus ignored
        pass
    elif event['type'] == "attachment":
        # Attachment events are just for updating the "uploads" UI;
        # they are not sent directly.
        pass
    elif event['type'] == "update_message_flags":
        # We don't return messages in `/register`, so most flags we
        # can ignore, but we do need to update the unread_msgs data if
        # unread state is changed.
        if 'raw_unread_msgs' in state and event['flag'] == 'read' and \
                event['operation'] == 'add':
            for remove_id in event['messages']:
                remove_message_id_from_unread_mgs(state['raw_unread_msgs'], remove_id)
        if event['flag'] == 'starred' and event['operation'] == 'add':
            state['starred_messages'] += event['messages']
        if event['flag'] == 'starred' and event['operation'] == 'remove':
            state['starred_messages'] = [message for message in state['starred_messages']
                                         if message not in event['messages']]
    elif event['type'] == "realm_domains":
        if event['op'] == 'add':
            state['realm_domains'].append(event['realm_domain'])
        elif event['op'] == 'change':
            for realm_domain in state['realm_domains']:
                if realm_domain['domain'] == event['realm_domain']['domain']:
                    realm_domain['allow_subdomains'] = \
                        event['realm_domain']['allow_subdomains']
        elif event['op'] == 'remove':
            state['realm_domains'] = [realm_domain for realm_domain in state['realm_domains']
                                      if realm_domain['domain'] != event['domain']]
    elif event['type'] == "realm_emoji":
        state['realm_emoji'] = event['realm_emoji']
    elif event['type'] == 'realm_export':
        # These realm export events are only available to
        # administrators, and aren't included in page_params.
        pass
    elif event['type'] == "alert_words":
        state['alert_words'] = event['alert_words']
    elif event['type'] == "muted_topics":
        state['muted_topics'] = event["muted_topics"]
    elif event['type'] == "realm_filters":
        state['realm_filters'] = event["realm_filters"]
    elif event['type'] == "update_display_settings":
        assert event['setting_name'] in UserProfile.property_types
        state[event['setting_name']] = event['setting']
    elif event['type'] == "update_global_notifications":
        assert event['notification_name'] in UserProfile.notification_setting_types
        state[event['notification_name']] = event['setting']
    elif event['type'] == "invites_changed":
        pass
    elif event['type'] == "user_group":
        if event['op'] == 'add':
            state['realm_user_groups'].append(event['group'])
            state['realm_user_groups'].sort(key=lambda group: group['id'])
        elif event['op'] == 'update':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    user_group.update(event['data'])
        elif event['op'] == 'add_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    user_group['members'].extend(event['user_ids'])
                    user_group['members'].sort()
        elif event['op'] == 'remove_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    members = set(user_group['members'])
                    user_group['members'] = list(members - set(event['user_ids']))
                    user_group['members'].sort()
        elif event['op'] == 'remove':
            state['realm_user_groups'] = [ug for ug in state['realm_user_groups']
                                          if ug['id'] != event['group_id']]
    elif event['type'] == 'user_status':
        user_id = event['user_id']
        user_status = state['user_status']
        away = event.get('away')
        status_text = event.get('status_text')

        if user_id not in user_status:
            user_status[user_id] = dict()

        if away is not None:
            if away:
                user_status[user_id]['away'] = True
            else:
                user_status[user_id].pop('away', None)

        if status_text is not None:
            if status_text == '':
                user_status[user_id].pop('status_text', None)
            else:
                user_status[user_id]['status_text'] = status_text

        if not user_status[user_id]:
            user_status.pop(user_id, None)

        state['user_status'] = user_status
    else:
        raise AssertionError("Unexpected event type %s" % (event['type'],))
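# Minimal sketch of applying a single event to a /register-style state
# dict; only the field this event touches is shown, and the user ID is
# hypothetical.
user_profile = get_user_profile_by_id(7)  # hypothetical user ID
state = {'pointer': 100}
apply_event(state,
            {'type': 'pointer', 'pointer': 150},
            user_profile=user_profile,
            client_gravatar=False,
            include_subscribers=True)
assert state['pointer'] == 150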