def check_upload_within_quota(realm: Realm, uploaded_file_size: int) -> None:
    upload_quota = realm.upload_quota_bytes()
    if upload_quota is None:
        return
    used_space = realm.currently_used_upload_space_bytes()
    if (used_space + uploaded_file_size) > upload_quota:
        raise RealmUploadQuotaError(_("Upload would exceed your organization's upload quota."))
def set_http_host(self, kwargs: Dict[str, Any]) -> None:
    if 'subdomain' in kwargs:
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(kwargs['subdomain'])
        del kwargs['subdomain']
    elif 'HTTP_HOST' not in kwargs:
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(self.DEFAULT_SUBDOMAIN)
def do_set_realm_signup_notifications_stream(
    realm: Realm, stream: Optional[Stream], stream_id: int, *, acting_user: Optional[UserProfile]
) -> None:
    old_value = realm.signup_notifications_stream_id
    realm.signup_notifications_stream = stream
    with transaction.atomic():
        realm.save(update_fields=["signup_notifications_stream"])
        event_time = timezone_now()
        RealmAuditLog.objects.create(
            realm=realm,
            event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
            event_time=event_time,
            acting_user=acting_user,
            extra_data=orjson.dumps({
                RealmAuditLog.OLD_VALUE: old_value,
                RealmAuditLog.NEW_VALUE: stream_id,
                "property": "signup_notifications_stream",
            }).decode(),
        )
    event = dict(
        type="realm",
        op="update",
        property="signup_notifications_stream_id",
        value=stream_id,
    )
    send_event(realm, event, active_user_ids(realm.id))
def do_set_realm_authentication_methods(
    realm: Realm, authentication_methods: Dict[str, bool], *, acting_user: Optional[UserProfile]
) -> None:
    old_value = realm.authentication_methods_dict()
    with transaction.atomic():
        for key, value in list(authentication_methods.items()):
            index = getattr(realm.authentication_methods, key).number
            realm.authentication_methods.set_bit(index, int(value))
        realm.save(update_fields=["authentication_methods"])
        updated_value = realm.authentication_methods_dict()
        RealmAuditLog.objects.create(
            realm=realm,
            event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
            event_time=timezone_now(),
            acting_user=acting_user,
            extra_data=orjson.dumps({
                RealmAuditLog.OLD_VALUE: old_value,
                RealmAuditLog.NEW_VALUE: updated_value,
                "property": "authentication_methods",
            }).decode(),
        )
    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data=dict(authentication_methods=updated_value),
    )
    send_event(realm, event, active_user_ids(realm.id))
def do_change_realm_org_type(
    realm: Realm,
    org_type: int,
    acting_user: Optional[UserProfile],
) -> None:
    old_value = realm.org_type
    realm.org_type = org_type
    realm.save(update_fields=["org_type"])
    RealmAuditLog.objects.create(
        event_type=RealmAuditLog.REALM_ORG_TYPE_CHANGED,
        realm=realm,
        event_time=timezone_now(),
        acting_user=acting_user,
        extra_data={"old_value": old_value, "new_value": org_type},
    )
    event = dict(type="realm", op="update", property="org_type", value=org_type)
    transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
def set_http_host(self, kwargs):
    # type: (Dict[str, Any]) -> None
    if 'subdomain' in kwargs:
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(kwargs['subdomain'])
        del kwargs['subdomain']
    elif 'HTTP_HOST' not in kwargs:
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(self.DEFAULT_SUBDOMAIN)
def do_change_icon_source(
    realm: Realm, icon_source: str, *, acting_user: Optional[UserProfile]
) -> None:
    realm.icon_source = icon_source
    realm.icon_version += 1
    realm.save(update_fields=["icon_source", "icon_version"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=RealmAuditLog.REALM_ICON_SOURCE_CHANGED,
        extra_data={"icon_source": icon_source, "icon_version": realm.icon_version},
        event_time=event_time,
        acting_user=acting_user,
    )
    event = dict(
        type="realm",
        op="update_dict",
        property="icon",
        data=dict(icon_source=realm.icon_source, icon_url=realm_icon_url(realm)),
    )
    transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
def do_set_realm_property(
    realm: Realm, name: str, value: Any, *, acting_user: Optional[UserProfile]
) -> None:
    """Takes in a realm object, the name of an attribute to update, the
    value to update it to, and the user who initiated the update.
    """
    property_type = Realm.property_types[name]
    assert isinstance(
        value, property_type
    ), f"Cannot update {name}: {value} is not an instance of {property_type}"

    old_value = getattr(realm, name)
    setattr(realm, name, value)
    realm.save(update_fields=[name])

    event = dict(
        type="realm",
        op="update",
        property=name,
        value=value,
    )
    transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data=orjson.dumps({
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": name,
        }).decode(),
    )

    if name == "email_address_visibility":
        if Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE not in [old_value, value]:
            # We use real email addresses on UserProfile.email only if
            # EMAIL_ADDRESS_VISIBILITY_EVERYONE is configured, so
            # changes between the other values do not require updating
            # that field, and we can save work by returning early here.
            return

        user_profiles = UserProfile.objects.filter(realm=realm, is_bot=False)
        for user_profile in user_profiles:
            user_profile.email = get_display_email_address(user_profile)
        UserProfile.objects.bulk_update(user_profiles, ["email"])

        for user_profile in user_profiles:
            # Bind user_profile as a default argument so each deferred
            # callback flushes its own profile rather than only the last
            # one iterated over.
            transaction.on_commit(
                lambda user_profile=user_profile: flush_user_profile(
                    sender=UserProfile, instance=user_profile
                )
            )
            # TODO: Design a bulk event for this or force-reload all clients
            send_user_email_update_event(user_profile)

    if name == "waiting_period_threshold":
        update_users_in_full_members_system_group(realm)
def do_set_realm_message_editing(
    realm: Realm,
    allow_message_editing: bool,
    message_content_edit_limit_seconds: int,
    edit_topic_policy: int,
    *,
    acting_user: Optional[UserProfile],
) -> None:
    old_values = dict(
        allow_message_editing=realm.allow_message_editing,
        message_content_edit_limit_seconds=realm.message_content_edit_limit_seconds,
        edit_topic_policy=realm.edit_topic_policy,
    )

    realm.allow_message_editing = allow_message_editing
    realm.message_content_edit_limit_seconds = message_content_edit_limit_seconds
    realm.edit_topic_policy = edit_topic_policy

    event_time = timezone_now()
    updated_properties = dict(
        allow_message_editing=allow_message_editing,
        message_content_edit_limit_seconds=message_content_edit_limit_seconds,
        edit_topic_policy=edit_topic_policy,
    )

    with transaction.atomic():
        for updated_property, updated_value in updated_properties.items():
            if updated_value == old_values[updated_property]:
                continue
            RealmAuditLog.objects.create(
                realm=realm,
                event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
                event_time=event_time,
                acting_user=acting_user,
                extra_data=orjson.dumps({
                    RealmAuditLog.OLD_VALUE: old_values[updated_property],
                    RealmAuditLog.NEW_VALUE: updated_value,
                    "property": updated_property,
                }).decode(),
            )
        realm.save(update_fields=list(updated_properties.keys()))

    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data=updated_properties,
    )
    send_event(realm, event, active_user_ids(realm.id))
def update_first_visible_message_id(realm: Realm) -> None:
    if realm.message_visibility_limit is None:
        realm.first_visible_message_id = 0
    else:
        try:
            first_visible_message_id = Message.objects.filter(sender__realm=realm).values('id').\
                order_by('-id')[realm.message_visibility_limit - 1]["id"]
        except IndexError:
            first_visible_message_id = 0
        realm.first_visible_message_id = first_visible_message_id
    realm.save(update_fields=["first_visible_message_id"])
def do_change_realm_subdomain(
    realm: Realm,
    new_subdomain: str,
    *,
    acting_user: Optional[UserProfile],
    add_deactivated_redirect: bool = True,
) -> None:
    """Changing a realm's subdomain is a highly disruptive operation,
    because all existing clients will need to be updated to point to
    the new URL.  Further, requests to fetch data from existing event
    queues will fail with an authentication error when this change
    happens (because the old subdomain is no longer associated with
    the realm), making it hard for us to provide a graceful update
    experience for clients.
    """
    old_subdomain = realm.subdomain
    old_uri = realm.uri
    # If the realm had been a demo organization scheduled for
    # deletion, clear that state.
    realm.demo_organization_scheduled_deletion_date = None
    realm.string_id = new_subdomain
    with transaction.atomic():
        realm.save(update_fields=["string_id", "demo_organization_scheduled_deletion_date"])
        RealmAuditLog.objects.create(
            realm=realm,
            event_type=RealmAuditLog.REALM_SUBDOMAIN_CHANGED,
            event_time=timezone_now(),
            acting_user=acting_user,
            extra_data={"old_subdomain": old_subdomain, "new_subdomain": new_subdomain},
        )

        # If a realm is being renamed multiple times, we should find all the placeholder
        # realms and reset their deactivated_redirect field to point to the new realm URI.
        placeholder_realms = Realm.objects.filter(deactivated_redirect=old_uri, deactivated=True)
        for placeholder_realm in placeholder_realms:
            do_add_deactivated_redirect(placeholder_realm, realm.uri)

    # The below block isn't executed in a transaction with the earlier code due to
    # the functions called below being complex and potentially sending events,
    # which we don't want to do in atomic blocks.
    # When we change a realm's subdomain, the realm with the old subdomain is effectively
    # deactivated.  We create a deactivated realm using the old subdomain and set
    # its deactivated_redirect to new_subdomain so that we can tell users that
    # the realm has been moved to a new subdomain.
    if add_deactivated_redirect:
        placeholder_realm = do_create_realm(old_subdomain, realm.name)
        do_deactivate_realm(placeholder_realm, acting_user=None)
        do_add_deactivated_redirect(placeholder_realm, realm.uri)
def send_email_to_admins(template_prefix: str, realm: Realm, from_name: Optional[str]=None,
                         from_address: Optional[str]=None, language: Optional[str]=None,
                         context: Dict[str, Any]={}) -> None:
    admins = realm.get_human_admin_users()
    admin_user_ids = [admin.id for admin in admins]
    send_email(template_prefix, to_user_ids=admin_user_ids, from_name=from_name,
               from_address=from_address, language=language, context=context)
def bulk_get_peers(
    realm: Realm,
    streams: List[Stream],
) -> Dict[int, Set[int]]:
    # This is almost a subset of bulk_get_subscriber_peer_info,
    # with the nuance that we don't have to query subscribers
    # for public streams.  (The other function tries to save
    # a query hop.)
    peer_ids = {}

    private_stream_ids = {stream.id for stream in streams if stream.invite_only}
    public_stream_ids = {stream.id for stream in streams if not stream.invite_only}

    if private_stream_ids:
        realm_admin_ids = {user.id for user in realm.get_admin_users_and_bots()}
        stream_user_ids = get_user_ids_for_streams(private_stream_ids)

        for stream_id in private_stream_ids:
            subscribed_user_ids = stream_user_ids.get(stream_id, set())
            peer_ids[stream_id] = subscribed_user_ids | realm_admin_ids

    if public_stream_ids:
        non_guests = active_non_guest_user_ids(realm.id)
        for stream_id in public_stream_ids:
            peer_ids[stream_id] = set(non_guests)

    return peer_ids
def check_emoji_request(realm: Realm, emoji_name: str, emoji_code: str, emoji_type: str) -> None:
    # For a given realm and emoji type, checks whether an emoji
    # code is valid for new reactions, or not.
    if emoji_type == "realm_emoji":
        realm_emojis = realm.get_emoji()
        realm_emoji = realm_emojis.get(emoji_code)
        if realm_emoji is None:
            raise JsonableError(_("No such realm emoji found."))
        if realm_emoji["deactivated"]:
            raise JsonableError(_("This realm emoji has been deactivated."))
        if emoji_name != emoji_code:
            raise JsonableError(_("Invalid emoji name."))
    elif emoji_type == "zulip_extra_emoji":
        if emoji_code not in ["zulip"]:
            raise JsonableError(_("No such extra emoji found."))
        if emoji_name != emoji_code:
            raise JsonableError(_("Invalid emoji name."))
    elif emoji_type == "unicode_emoji":
        if emoji_code not in codepoint_to_name:
            raise JsonableError(_("No unicode emoji with this emoji code found."))
        if name_to_codepoint.get(emoji_name) != emoji_code:
            raise JsonableError(_("Invalid emoji name."))
    else:
        # The above are the only valid emoji types
        raise JsonableError(_("Invalid emoji type."))
def check_emoji_request(realm: Realm, emoji_name: str, emoji_code: str, emoji_type: str) -> None:
    # For a given realm and emoji type, checks whether an emoji
    # code is valid for new reactions, or not.
    if emoji_type == "realm_emoji":
        realm_emojis = realm.get_emoji()
        if emoji_code not in realm_emojis:
            raise JsonableError(_("No such realm emoji found."))
        if realm_emojis[emoji_code]["deactivated"]:
            raise JsonableError(_("This realm emoji has been deactivated."))
        if emoji_name != emoji_code:
            raise JsonableError(_("Invalid emoji name."))
    elif emoji_type == "zulip_extra_emoji":
        if emoji_code not in ["zulip"]:
            raise JsonableError(_("No such extra emoji found."))
        if emoji_name != emoji_code:
            raise JsonableError(_("Invalid emoji name."))
    elif emoji_type == "unicode_emoji":
        if emoji_code not in codepoint_to_name:
            raise JsonableError(_("No unicode emoji with this emoji code found."))
        if name_to_codepoint.get(emoji_name) != emoji_code:
            raise JsonableError(_("Invalid emoji name."))
    else:
        # The above are the only valid emoji types
        raise JsonableError(_("Invalid emoji type."))
def approve_sponsorship(realm: Realm, *, acting_user: Optional[UserProfile]) -> None:
    from zerver.lib.actions import do_change_realm_plan_type, internal_send_private_message

    do_change_realm_plan_type(realm, Realm.PLAN_TYPE_STANDARD_FREE, acting_user=acting_user)
    customer = get_customer_by_realm(realm)
    if customer is not None and customer.sponsorship_pending:
        customer.sponsorship_pending = False
        customer.save(update_fields=["sponsorship_pending"])
        RealmAuditLog.objects.create(
            realm=realm,
            acting_user=acting_user,
            event_type=RealmAuditLog.REALM_SPONSORSHIP_APPROVED,
            event_time=timezone_now(),
        )
    notification_bot = get_system_bot(settings.NOTIFICATION_BOT, realm.id)
    for user in realm.get_human_billing_admin_and_realm_owner_users():
        with override_language(user.default_language):
            # Using variable to make life easier for translators if these details change.
            plan_name = "Zulip Cloud Standard"
            emoji = ":tada:"
            message = _(
                f"Your organization's request for sponsored hosting has been approved! {emoji}.\n"
                f"You have been upgraded to {plan_name}, free of charge."
            )
            internal_send_private_message(notification_bot, user, message)
def add_api_uri_context(context: Dict[str, Any], request: HttpRequest) -> None:
    context.update(zulip_default_context(request))

    subdomain = get_subdomain(request)
    if (subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
            or not settings.ROOT_DOMAIN_LANDING_PAGE):
        display_subdomain = subdomain
        html_settings_links = True
    else:
        display_subdomain = 'yourZulipDomain'
        html_settings_links = False

    display_host = Realm.host_for_subdomain(display_subdomain)
    api_url_scheme_relative = display_host + "/api"
    api_url = settings.EXTERNAL_URI_SCHEME + api_url_scheme_relative
    zulip_url = settings.EXTERNAL_URI_SCHEME + display_host

    context['external_uri_scheme'] = settings.EXTERNAL_URI_SCHEME
    context['api_url'] = api_url
    context['api_url_scheme_relative'] = api_url_scheme_relative
    context['zulip_url'] = zulip_url

    context["html_settings_links"] = html_settings_links
    if html_settings_links:
        settings_html = '<a href="/#settings">Zulip settings page</a>'
        subscriptions_html = '<a target="_blank" href="/#streams">streams page</a>'
    else:
        settings_html = 'Zulip settings page'
        subscriptions_html = 'streams page'
    context['settings_html'] = settings_html
    context['subscriptions_html'] = subscriptions_html
def add_api_uri_context(context: Dict[str, Any], request: HttpRequest) -> None:
    context.update(zulip_default_context(request))

    subdomain = get_subdomain(request)
    if (subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
            or not settings.ROOT_DOMAIN_LANDING_PAGE):
        display_subdomain = subdomain
        html_settings_links = True
    else:
        display_subdomain = 'yourZulipDomain'
        html_settings_links = False

    display_host = Realm.host_for_subdomain(display_subdomain)
    api_url_scheme_relative = display_host + "/api"
    api_url = settings.EXTERNAL_URI_SCHEME + api_url_scheme_relative

    context['external_uri_scheme'] = settings.EXTERNAL_URI_SCHEME
    context['api_url'] = api_url
    context['api_url_scheme_relative'] = api_url_scheme_relative

    context["html_settings_links"] = html_settings_links
    if html_settings_links:
        settings_html = '<a href="/#settings">Zulip settings page</a>'
        subscriptions_html = '<a target="_blank" href="/#streams">streams page</a>'
    else:
        settings_html = 'Zulip settings page'
        subscriptions_html = 'streams page'
    context['settings_html'] = settings_html
    context['subscriptions_html'] = subscriptions_html
def bulk_get_private_peers(
    realm: Realm,
    private_streams: List[Stream],
) -> Dict[int, Set[int]]:
    if not private_streams:
        return {}

    for stream in private_streams:
        # Our caller should only pass us private streams.
        assert stream.invite_only

    peer_ids: Dict[int, Set[int]] = {}

    realm_admin_ids = {user.id for user in realm.get_admin_users_and_bots()}

    stream_ids = {stream.id for stream in private_streams}
    stream_user_ids = get_user_ids_for_streams(stream_ids)

    for stream in private_streams:
        # This is the same business rule as we use in
        # bulk_get_subscriber_peer_info.  Realm admins can see all private
        # stream subscribers.
        subscribed_user_ids = stream_user_ids.get(stream.id, set())
        peer_ids[stream.id] = subscribed_user_ids | realm_admin_ids

    return peer_ids
def build_zerver_realm(realm_id: int, realm_subdomain: str, time: float) -> List[ZerverFieldsT]:
    realm = Realm(id=realm_id, date_created=time,
                  name=realm_subdomain, string_id=realm_subdomain,
                  description="Organization imported from Gitter!")
    auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods]
    realm_dict = model_to_dict(realm, exclude='authentication_methods')
    realm_dict['authentication_methods'] = auth_methods
    return [realm_dict]
def bulk_create_realms(realm_list):
    existing_realms = set(r.domain for r in Realm.objects.select_related().all())

    realms_to_create = []
    for domain in realm_list:
        if domain not in existing_realms:
            realms_to_create.append(Realm(domain=domain, name=domain))
            existing_realms.add(domain)
    Realm.objects.bulk_create(realms_to_create)
def do_deactivate_realm(realm: Realm, *, acting_user: Optional[UserProfile]) -> None:
    """
    Deactivate this realm. Do NOT deactivate the users -- we need to be able
    to tell the difference between users that were intentionally deactivated,
    e.g. by a realm admin, and users who can't currently use Zulip because their
    realm has been deactivated.
    """
    if realm.deactivated:
        return

    realm.deactivated = True
    realm.save(update_fields=["deactivated"])

    if settings.BILLING_ENABLED:
        downgrade_now_without_creating_additional_invoices(realm)

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=RealmAuditLog.REALM_DEACTIVATED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data=orjson.dumps({
            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
        }).decode(),
    )

    ScheduledEmail.objects.filter(realm=realm).delete()
    for user in active_humans_in_realm(realm):
        # Don't deactivate the users, but do delete their sessions so they get
        # bumped to the login screen, where they'll get a realm deactivation
        # notice when they try to log in.
        delete_user_sessions(user)

    # This event will only ever be received by clients with an active
    # longpoll connection, because by this point clients will be
    # unable to authenticate again to their event queue (triggering an
    # immediate reload into the page explaining the realm was
    # deactivated). So the purpose of sending this is to flush all
    # active longpoll connections for the realm.
    event = dict(type="realm", op="deactivated", realm_id=realm.id)
    send_event(realm, event, active_user_ids(realm.id))
def do_set_realm_stream(
    realm: Realm,
    field: Literal["notifications_stream", "signup_notifications_stream"],
    stream: Optional[Stream],
    stream_id: int,
    *,
    acting_user: Optional[UserProfile],
) -> None:
    # We could calculate more of these variables from `field`, but
    # it's probably more readable to not do so.
    if field == "notifications_stream":
        old_value = realm.notifications_stream_id
        realm.notifications_stream = stream
        property = "notifications_stream_id"
    elif field == "signup_notifications_stream":
        old_value = realm.signup_notifications_stream_id
        realm.signup_notifications_stream = stream
        property = "signup_notifications_stream_id"
    else:
        raise AssertionError("Invalid realm stream field.")

    with transaction.atomic():
        realm.save(update_fields=[field])
        event_time = timezone_now()
        RealmAuditLog.objects.create(
            realm=realm,
            event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
            event_time=event_time,
            acting_user=acting_user,
            extra_data=orjson.dumps({
                RealmAuditLog.OLD_VALUE: old_value,
                RealmAuditLog.NEW_VALUE: stream_id,
                "property": field,
            }).decode(),
        )
    event = dict(
        type="realm",
        op="update",
        property=property,
        value=stream_id,
    )
    send_event(realm, event, active_user_ids(realm.id))
def emoji_name_to_emoji_code(realm: Realm, emoji_name: Text) -> Tuple[Text, Text]:
    realm_emojis = realm.get_emoji()
    realm_emoji = realm_emojis.get(emoji_name)
    if realm_emoji is not None and not realm_emoji['deactivated']:
        return emoji_name, Reaction.REALM_EMOJI
    if emoji_name == 'zulip':
        return emoji_name, Reaction.ZULIP_EXTRA_EMOJI
    if emoji_name in name_to_codepoint:
        return name_to_codepoint[emoji_name], Reaction.UNICODE_EMOJI
    raise JsonableError(_("Emoji '%s' does not exist" % (emoji_name,)))
def bulk_create_realms(realm_list):
    # type: (Iterable[text_type]) -> None
    existing_realms = set(r.domain for r in Realm.objects.select_related().all())

    realms_to_create = []  # type: List[Realm]
    for domain in realm_list:
        if domain not in existing_realms:
            realms_to_create.append(Realm(domain=domain, name=domain))
            existing_realms.add(domain)
    Realm.objects.bulk_create(realms_to_create)
def do_reactivate_realm(realm: Realm) -> None:
    realm.deactivated = False
    with transaction.atomic():
        realm.save(update_fields=["deactivated"])

        event_time = timezone_now()
        RealmAuditLog.objects.create(
            # We hardcode acting_user=None, since realm reactivation
            # uses an email authentication mechanism that will never
            # know which user initiated the change.
            acting_user=None,
            realm=realm,
            event_type=RealmAuditLog.REALM_REACTIVATED,
            event_time=event_time,
            extra_data=orjson.dumps({
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
            }).decode(),
        )
def emoji_name_to_emoji_code(realm: Realm, emoji_name: str) -> Tuple[str, str]:
    realm_emojis = realm.get_active_emoji()
    realm_emoji = realm_emojis.get(emoji_name)
    if realm_emoji is not None:
        return str(realm_emojis[emoji_name]['id']), Reaction.REALM_EMOJI
    if emoji_name == 'zulip':
        return emoji_name, Reaction.ZULIP_EXTRA_EMOJI
    if emoji_name in name_to_codepoint:
        return name_to_codepoint[emoji_name], Reaction.UNICODE_EMOJI
    raise JsonableError(_("Emoji '{}' does not exist").format(emoji_name))
def emoji_name_to_emoji_code(realm: Realm, emoji_name: str) -> Tuple[str, str]:
    realm_emojis = realm.get_active_emoji()
    realm_emoji = realm_emojis.get(emoji_name)
    if realm_emoji is not None:
        return str(realm_emojis[emoji_name]['id']), Reaction.REALM_EMOJI
    if emoji_name == 'zulip':
        return emoji_name, Reaction.ZULIP_EXTRA_EMOJI
    if emoji_name in name_to_codepoint:
        return name_to_codepoint[emoji_name], Reaction.UNICODE_EMOJI
    raise JsonableError(_("Emoji '%s' does not exist" % (emoji_name,)))
def emoji_name_to_emoji_code(realm: Realm, emoji_name: Text) -> Tuple[Text, Text]:
    realm_emojis = realm.get_emoji()
    if emoji_name in realm_emojis and not realm_emojis[emoji_name]['deactivated']:
        return emoji_name, Reaction.REALM_EMOJI
    if emoji_name == 'zulip':
        return emoji_name, Reaction.ZULIP_EXTRA_EMOJI
    if emoji_name in name_to_codepoint:
        return name_to_codepoint[emoji_name], Reaction.UNICODE_EMOJI
    raise JsonableError(_("Emoji '%s' does not exist" % (emoji_name,)))
def bulk_get_subscriber_peer_info(
    realm: Realm,
    streams: List[Stream],
) -> SubscriberPeerInfo:
    """
    Glossary:

        subscribed_ids:
            This shows the users who are actually subscribed to the
            stream, which we generally send to the person subscribing
            to the stream.

        peer_ids:
            These are the folks that need to know about a new subscriber.
            It's usually a superset of the subscribers.
    """
    subscribed_ids = {}
    peer_ids = {}

    private_stream_ids = {stream.id for stream in streams if stream.invite_only}
    public_stream_ids = {stream.id for stream in streams if not stream.invite_only}

    stream_user_ids = get_user_ids_for_streams(private_stream_ids | public_stream_ids)

    if private_stream_ids:
        realm_admin_ids = {user.id for user in realm.get_admin_users_and_bots()}

        for stream_id in private_stream_ids:
            subscribed_user_ids = stream_user_ids.get(stream_id, set())
            subscribed_ids[stream_id] = subscribed_user_ids
            peer_ids[stream_id] = subscribed_user_ids | realm_admin_ids

    if public_stream_ids:
        non_guests = active_non_guest_user_ids(realm.id)
        for stream_id in public_stream_ids:
            subscribed_user_ids = stream_user_ids.get(stream_id, set())
            subscribed_ids[stream_id] = subscribed_user_ids
            peer_ids[stream_id] = set(non_guests)

    return SubscriberPeerInfo(
        subscribed_ids=subscribed_ids,
        peer_ids=peer_ids,
    )
def send_message_to_signup_notification_stream(
    sender: UserProfile, realm: Realm, message: str, topic_name: str = _("signups")
) -> None:
    signup_notifications_stream = realm.get_signup_notifications_stream()
    if signup_notifications_stream is None:
        return

    with override_language(realm.default_language):
        internal_send_stream_message(sender, signup_notifications_stream, topic_name, message)
def set_realm_permissions_based_on_org_type(realm: Realm) -> None:
    """This function implements overrides for the default configuration
    for new organizations when the administrator selected specific
    organization types.

    This substantially simplifies our /help/ advice for folks setting
    up new organizations of these types.
    """

    # Custom configuration for educational organizations.  The present
    # defaults are designed for a single class, not a department or
    # larger institution, since those are more common.
    if (
        realm.org_type == Realm.ORG_TYPES["education_nonprofit"]["id"]
        or realm.org_type == Realm.ORG_TYPES["education"]["id"]
    ):
        # Limit email address visibility and user creation to administrators.
        realm.email_address_visibility = Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS
        realm.invite_to_realm_policy = Realm.POLICY_ADMINS_ONLY
        # Restrict public stream creation to staff, but allow private
        # streams (useful for study groups, etc.).
        realm.create_public_stream_policy = Realm.POLICY_ADMINS_ONLY
        # Don't allow members (students) to manage user groups or
        # stream subscriptions.
        realm.user_group_edit_policy = Realm.POLICY_MODERATORS_ONLY
        realm.invite_to_stream_policy = Realm.POLICY_MODERATORS_ONLY
        # Allow moderators (TAs?) to move topics between streams.
        realm.move_messages_between_streams_policy = Realm.POLICY_MODERATORS_ONLY
def do_change_logo_source(
    realm: Realm, logo_source: str, night: bool, *, acting_user: Optional[UserProfile]
) -> None:
    if not night:
        realm.logo_source = logo_source
        realm.logo_version += 1
        realm.save(update_fields=["logo_source", "logo_version"])
    else:
        realm.night_logo_source = logo_source
        realm.night_logo_version += 1
        realm.save(update_fields=["night_logo_source", "night_logo_version"])

    RealmAuditLog.objects.create(
        event_type=RealmAuditLog.REALM_LOGO_CHANGED,
        realm=realm,
        event_time=timezone_now(),
        acting_user=acting_user,
    )

    event = dict(
        type="realm",
        op="update_dict",
        property="night_logo" if night else "logo",
        data=get_realm_logo_data(realm, night),
    )
    transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
def set_http_headers(self, kwargs: Dict[str, Any]) -> None:
    if 'subdomain' in kwargs:
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(kwargs['subdomain'])
        del kwargs['subdomain']
    elif 'HTTP_HOST' not in kwargs:
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(self.DEFAULT_SUBDOMAIN)

    # set User-Agent
    if 'HTTP_AUTHORIZATION' in kwargs:
        # An API request; use mobile as the default user agent
        default_user_agent = "ZulipMobile/26.22.145 (iOS 10.3.1)"
    else:
        # A webapp request; use a browser User-Agent string.
        default_user_agent = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) " +
                              "AppleWebKit/537.36 (KHTML, like Gecko) " +
                              "Chrome/79.0.3945.130 Safari/537.36")
    if kwargs.get('skip_user_agent'):
        # Provide a way to disable setting User-Agent if desired.
        assert 'HTTP_USER_AGENT' not in kwargs
        del kwargs['skip_user_agent']
    elif 'HTTP_USER_AGENT' not in kwargs:
        kwargs['HTTP_USER_AGENT'] = default_user_agent
def build_zerver_realm(
    realm_id: int, realm_subdomain: str, time: float, other_product: str
) -> List[ZerverFieldsT]:
    realm = Realm(
        id=realm_id,
        date_created=time,
        name=realm_subdomain,
        string_id=realm_subdomain,
        description=f"Organization imported from {other_product}!",
    )
    auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods]
    realm_dict = model_to_dict(realm, exclude="authentication_methods")
    realm_dict["authentication_methods"] = auth_methods
    return [realm_dict]
def add_api_uri_context(context: Dict[str, Any], request: HttpRequest) -> None:
    subdomain = get_subdomain(request)
    if (subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
            or not settings.ROOT_DOMAIN_LANDING_PAGE):
        display_subdomain = subdomain
        html_settings_links = True
    else:
        display_subdomain = 'yourZulipDomain'
        html_settings_links = False

    display_host = Realm.host_for_subdomain(display_subdomain)
    api_url_scheme_relative = display_host + "/api"
    api_url = settings.EXTERNAL_URI_SCHEME + api_url_scheme_relative

    context['api_url'] = api_url
    context['api_url_scheme_relative'] = api_url_scheme_relative
    context["html_settings_links"] = html_settings_links
def do_import_realm(import_dir):
    logging.info("Importing realm dump %s" % (import_dir,))
    if not os.path.exists(import_dir):
        raise Exception("Missing import directory!")

    realm_data_filename = os.path.join(import_dir, "realm.json")
    if not os.path.exists(realm_data_filename):
        raise Exception("Missing realm.json file!")

    logging.info("Importing realm data from %s" % (realm_data_filename,))
    with open(realm_data_filename) as f:
        data = ujson.load(f)

    fix_foreign_keys(data, 'zerver_realm', 'notifications_stream')
    fix_datetime_fields(data, 'zerver_realm', 'date_created')
    realm = Realm(**data['zerver_realm'][0])
    if realm.notifications_stream_id is not None:
        notifications_stream_id = int(realm.notifications_stream_id)
    else:
        notifications_stream_id = None
    realm.notifications_stream_id = None
    realm.save()
    bulk_import_client(data, Client, 'zerver_client')

    # Email tokens will automatically be randomly generated when the
    # Stream objects are created by Django.
    fix_datetime_fields(data, 'zerver_stream', 'date_created')
    fix_foreign_keys(data, 'zerver_stream', 'realm')
    bulk_import_model(data, Stream, 'zerver_stream')
    realm.notifications_stream_id = notifications_stream_id
    realm.save()

    fix_foreign_keys(data, "zerver_defaultstream", 'stream')
    for (table, model) in realm_tables:
        fix_foreign_keys(data, table, 'realm')
        bulk_import_model(data, model, table)

    # Remap the user IDs for notification_bot and friends to their
    # appropriate IDs on this server
    for item in data['zerver_userprofile_crossrealm']:
        logging.info("Adding to ID map: %s %s" % (item['id'],
                     get_user_profile_by_email(item['email']).id))
        id_maps["user_profile"][item['id']] = get_user_profile_by_email(item['email']).id

    fix_datetime_fields(data, 'zerver_userprofile', 'date_joined')
    fix_datetime_fields(data, 'zerver_userprofile', 'last_login')
    fix_datetime_fields(data, 'zerver_userprofile', 'last_reminder')
    fix_foreign_keys(data, 'zerver_userprofile', 'realm')
    fix_foreign_keys(data, 'zerver_userprofile', 'bot_owner', id_map_table="user_profile")
    fix_foreign_keys(data, 'zerver_userprofile', 'default_sending_stream')
    fix_foreign_keys(data, 'zerver_userprofile', 'default_events_register_stream')
    for user_profile_dict in data['zerver_userprofile']:
        user_profile_dict['password'] = None
        user_profile_dict['api_key'] = random_api_key()
        # Since Zulip doesn't use these permissions, drop them
        del user_profile_dict['user_permissions']
        del user_profile_dict['groups']

    user_profiles = [UserProfile(**item) for item in data['zerver_userprofile']]
    for user_profile in user_profiles:
        user_profile.set_unusable_password()
    UserProfile.objects.bulk_create(user_profiles)

    if 'zerver_huddle' in data:
        bulk_import_model(data, Huddle, 'zerver_huddle')

    bulk_import_model(data, Recipient, 'zerver_recipient')
    fix_foreign_keys(data, 'zerver_subscription', 'user_profile', id_map_table="user_profile")
    fix_foreign_keys(data, 'zerver_subscription', 'recipient')
    bulk_import_model(data, Subscription, 'zerver_subscription')

    fix_datetime_fields(data, 'zerver_userpresence', 'timestamp')
    fix_foreign_keys(data, 'zerver_userpresence', 'user_profile', id_map_table="user_profile")
    fix_foreign_keys(data, 'zerver_userpresence', 'client', id_map_table='client')
    bulk_import_model(data, UserPresence, 'zerver_userpresence')

    fix_datetime_fields(data, 'zerver_useractivity', 'last_visit')
    fix_foreign_keys(data, 'zerver_useractivity', 'user_profile', id_map_table="user_profile")
    fix_foreign_keys(data, 'zerver_useractivity', 'client', id_map_table='client')
    bulk_import_model(data, UserActivity, 'zerver_useractivity')

    fix_datetime_fields(data, 'zerver_useractivityinterval', 'start')
    fix_datetime_fields(data, 'zerver_useractivityinterval', 'end')
    fix_foreign_keys(data, 'zerver_useractivityinterval', 'user_profile', id_map_table="user_profile")
    bulk_import_model(data, UserActivityInterval, 'zerver_useractivityinterval')

    # Import uploaded files and avatars
    import_uploads(os.path.join(import_dir, "avatars"), avatar_bucket=True)
    import_uploads(os.path.join(import_dir, "uploads"))

    dump_file_id = 1
    while True:
        message_filename = os.path.join(import_dir, "messages-%06d.json" % (dump_file_id,))
        if not os.path.exists(message_filename):
            break

        with open(message_filename) as f:
            data = ujson.load(f)

        logging.info("Importing message dump %s" % (message_filename,))
        fix_foreign_keys(data, 'zerver_message', 'sender', id_map_table="user_profile")
        fix_foreign_keys(data, 'zerver_message', 'recipient')
        fix_foreign_keys(data, 'zerver_message', 'sending_client', id_map_table='client')
        fix_datetime_fields(data, 'zerver_message', 'pub_date')
        fix_datetime_fields(data, 'zerver_message', 'last_edit_time')
        bulk_import_model(data, Message, 'zerver_message')

        # Due to the structure of these message chunks, we're
        # guaranteed to have already imported all the Message objects
        # for this batch of UserMessage objects.
        fix_foreign_keys(data, 'zerver_usermessage', 'message')
        fix_foreign_keys(data, 'zerver_usermessage', 'user_profile', id_map_table="user_profile")
        fix_bitfield_keys(data, 'zerver_usermessage', 'flags')
        bulk_import_model(data, UserMessage, 'zerver_usermessage')

        dump_file_id += 1

    fix_datetime_fields(data, 'zerver_attachment', 'create_time')
    fix_foreign_keys(data, 'zerver_attachment', 'owner', id_map_table="user_profile")
    fix_foreign_keys(data, 'zerver_attachment', 'realm')
    # TODO: Handle the `messages` keys.
    # fix_foreign_keys(data, 'zerver_attachment', 'messages')
    bulk_import_model(data, Attachment, 'zerver_attachment')
def send_email_to_admins(template_prefix: str, realm: Realm, from_name: Optional[str]=None,
                         from_address: Optional[str]=None, context: Dict[str, Any]={}) -> None:
    admins = realm.get_admin_users()
    admin_user_ids = [admin.id for admin in admins]
    send_email(template_prefix, to_user_ids=admin_user_ids, from_name=from_name,
               from_address=from_address, context=context)
def do_import_realm(import_dir: Path, subdomain: str) -> Realm:
    logging.info("Importing realm dump %s" % (import_dir,))
    if not os.path.exists(import_dir):
        raise Exception("Missing import directory!")

    realm_data_filename = os.path.join(import_dir, "realm.json")
    if not os.path.exists(realm_data_filename):
        raise Exception("Missing realm.json file!")

    logging.info("Importing realm data from %s" % (realm_data_filename,))
    with open(realm_data_filename) as f:
        data = ujson.load(f)

    update_model_ids(Stream, data, 'zerver_stream', 'stream')
    re_map_foreign_keys(data, 'zerver_realm', 'notifications_stream', related_table="stream")

    fix_datetime_fields(data, 'zerver_realm')
    # Fix realm subdomain information
    data['zerver_realm'][0]['string_id'] = subdomain
    data['zerver_realm'][0]['name'] = subdomain
    fix_realm_authentication_bitfield(data, 'zerver_realm', 'authentication_methods')
    update_model_ids(Realm, data, 'zerver_realm', 'realm')
    realm = Realm(**data['zerver_realm'][0])
    if realm.notifications_stream_id is not None:
        notifications_stream_id = int(realm.notifications_stream_id)  # type: Optional[int]
    else:
        notifications_stream_id = None
    realm.notifications_stream_id = None
    realm.save()
    bulk_import_client(data, Client, 'zerver_client')

    # Email tokens will automatically be randomly generated when the
    # Stream objects are created by Django.
    fix_datetime_fields(data, 'zerver_stream')
    re_map_foreign_keys(data, 'zerver_stream', 'realm', related_table="realm")
    bulk_import_model(data, Stream, 'zerver_stream')
    realm.notifications_stream_id = notifications_stream_id
    realm.save()

    re_map_foreign_keys(data, 'zerver_defaultstream', 'stream', related_table="stream")
    re_map_foreign_keys(data, 'zerver_realmemoji', 'author', related_table="user_profile")
    for (table, model, related_table) in realm_tables:
        re_map_foreign_keys(data, table, 'realm', related_table="realm")
        update_model_ids(model, data, table, related_table)
        bulk_import_model(data, model, table)

    # Remap the user IDs for notification_bot and friends to their
    # appropriate IDs on this server
    for item in data['zerver_userprofile_crossrealm']:
        logging.info("Adding to ID map: %s %s" % (item['id'], get_system_bot(item['email']).id))
        new_user_id = get_system_bot(item['email']).id
        update_id_map(table='user_profile', old_id=item['id'], new_id=new_user_id)

    # Merge in zerver_userprofile_mirrordummy
    data['zerver_userprofile'] = data['zerver_userprofile'] + data['zerver_userprofile_mirrordummy']
    del data['zerver_userprofile_mirrordummy']
    data['zerver_userprofile'].sort(key=lambda r: r['id'])

    # To remap foreign key for UserProfile.last_active_message_id
    update_message_foreign_keys(import_dir)

    fix_datetime_fields(data, 'zerver_userprofile')
    update_model_ids(UserProfile, data, 'zerver_userprofile', 'user_profile')
    re_map_foreign_keys(data, 'zerver_userprofile', 'realm', related_table="realm")
    re_map_foreign_keys(data, 'zerver_userprofile', 'bot_owner', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_userprofile', 'default_sending_stream', related_table="stream")
    re_map_foreign_keys(data, 'zerver_userprofile', 'default_events_register_stream', related_table="stream")
    re_map_foreign_keys(data, 'zerver_userprofile', 'last_active_message_id', related_table="message", id_field=True)
    for user_profile_dict in data['zerver_userprofile']:
        user_profile_dict['password'] = None
        user_profile_dict['api_key'] = random_api_key()
        # Since Zulip doesn't use these permissions, drop them
        del user_profile_dict['user_permissions']
        del user_profile_dict['groups']

    user_profiles = [UserProfile(**item) for item in data['zerver_userprofile']]
    for user_profile in user_profiles:
        user_profile.set_unusable_password()
    UserProfile.objects.bulk_create(user_profiles)

    if 'zerver_huddle' in data:
        bulk_import_model(data, Huddle, 'zerver_huddle')

    re_map_foreign_keys(data, 'zerver_recipient', 'type_id', related_table="stream",
                        recipient_field=True, id_field=True)
    re_map_foreign_keys(data, 'zerver_recipient', 'type_id', related_table="user_profile",
                        recipient_field=True, id_field=True)
    update_model_ids(Recipient, data, 'zerver_recipient', 'recipient')
    bulk_import_model(data, Recipient, 'zerver_recipient')

    re_map_foreign_keys(data, 'zerver_subscription', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_subscription', 'recipient', related_table="recipient")
    update_model_ids(Subscription, data, 'zerver_subscription', 'subscription')
    bulk_import_model(data, Subscription, 'zerver_subscription')

    fix_datetime_fields(data, 'zerver_userpresence')
    re_map_foreign_keys(data, 'zerver_userpresence', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_userpresence', 'client', related_table='client')
    update_model_ids(UserPresence, data, 'zerver_userpresence', 'user_presence')
    bulk_import_model(data, UserPresence, 'zerver_userpresence')

    fix_datetime_fields(data, 'zerver_useractivity')
    re_map_foreign_keys(data, 'zerver_useractivity', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_useractivity', 'client', related_table='client')
    update_model_ids(UserActivity, data, 'zerver_useractivity', 'useractivity')
    bulk_import_model(data, UserActivity, 'zerver_useractivity')

    fix_datetime_fields(data, 'zerver_useractivityinterval')
    re_map_foreign_keys(data, 'zerver_useractivityinterval', 'user_profile', related_table="user_profile")
    update_model_ids(UserActivityInterval, data, 'zerver_useractivityinterval', 'useractivityinterval')
    bulk_import_model(data, UserActivityInterval, 'zerver_useractivityinterval')

    if 'zerver_customprofilefield' in data:
        # As the export of Custom Profile fields is not supported, Zulip exported
        # data would not contain this field.
        # However this is supported in slack importer script
        re_map_foreign_keys(data, 'zerver_customprofilefield', 'realm', related_table="realm")
        update_model_ids(CustomProfileField, data, 'zerver_customprofilefield',
                         related_table="customprofilefield")
        bulk_import_model(data, CustomProfileField, 'zerver_customprofilefield')

        re_map_foreign_keys(data, 'zerver_customprofilefield_value', 'user_profile',
                            related_table="user_profile")
        re_map_foreign_keys(data, 'zerver_customprofilefield_value', 'field',
                            related_table="customprofilefield")
        update_model_ids(CustomProfileFieldValue, data, 'zerver_customprofilefield_value',
                         related_table="customprofilefield_value")
        bulk_import_model(data, CustomProfileFieldValue, 'zerver_customprofilefield_value')

    # Import uploaded files and avatars
    import_uploads(os.path.join(import_dir, "avatars"), processing_avatars=True)
    import_uploads(os.path.join(import_dir, "uploads"))

    # We need to have this check as the emoji files are only present in the data
    # importer from slack
    # For Zulip export, this doesn't exist
    if os.path.exists(os.path.join(import_dir, "emoji")):
        import_uploads(os.path.join(import_dir, "emoji"), processing_emojis=True)

    # Import zerver_message and zerver_usermessage
    import_message_data(import_dir)

    # Do attachments AFTER message data is loaded.
    # TODO: de-dup how we read these json files.
    fn = os.path.join(import_dir, "attachment.json")
    if not os.path.exists(fn):
        raise Exception("Missing attachment.json file!")

    logging.info("Importing attachment data from %s" % (fn,))
    with open(fn) as f:
        data = ujson.load(f)

    import_attachments(data)
    return realm
def do_import_realm(import_dir):
    # type: (Path) -> None
    logging.info("Importing realm dump %s" % (import_dir,))
    if not os.path.exists(import_dir):
        raise Exception("Missing import directory!")

    realm_data_filename = os.path.join(import_dir, "realm.json")
    if not os.path.exists(realm_data_filename):
        raise Exception("Missing realm.json file!")

    logging.info("Importing realm data from %s" % (realm_data_filename,))
    with open(realm_data_filename) as f:
        data = ujson.load(f)

    convert_to_id_fields(data, 'zerver_realm', 'notifications_stream')
    fix_datetime_fields(data, 'zerver_realm')
    realm = Realm(**data['zerver_realm'][0])
    if realm.notifications_stream_id is not None:
        notifications_stream_id = int(realm.notifications_stream_id)
    else:
        notifications_stream_id = None
    realm.notifications_stream_id = None
    realm.save()
    bulk_import_client(data, Client, 'zerver_client')

    # Email tokens will automatically be randomly generated when the
    # Stream objects are created by Django.
    fix_datetime_fields(data, 'zerver_stream')
    convert_to_id_fields(data, 'zerver_stream', 'realm')
    bulk_import_model(data, Stream, 'zerver_stream')
    realm.notifications_stream_id = notifications_stream_id
    realm.save()

    convert_to_id_fields(data, "zerver_defaultstream", 'stream')
    for (table, model) in realm_tables:
        convert_to_id_fields(data, table, 'realm')
        bulk_import_model(data, model, table)

    # Remap the user IDs for notification_bot and friends to their
    # appropriate IDs on this server
    for item in data['zerver_userprofile_crossrealm']:
        logging.info("Adding to ID map: %s %s" % (item['id'], get_user_profile_by_email(item['email']).id))
        new_user_id = get_user_profile_by_email(item['email']).id
        update_id_map(table='user_profile', old_id=item['id'], new_id=new_user_id)

    fix_datetime_fields(data, 'zerver_userprofile')
    convert_to_id_fields(data, 'zerver_userprofile', 'realm')
    re_map_foreign_keys(data, 'zerver_userprofile', 'bot_owner', related_table="user_profile")
    convert_to_id_fields(data, 'zerver_userprofile', 'default_sending_stream')
    convert_to_id_fields(data, 'zerver_userprofile', 'default_events_register_stream')
    for user_profile_dict in data['zerver_userprofile']:
        user_profile_dict['password'] = None
        user_profile_dict['api_key'] = random_api_key()
        # Since Zulip doesn't use these permissions, drop them
        del user_profile_dict['user_permissions']
        del user_profile_dict['groups']

    user_profiles = [UserProfile(**item) for item in data['zerver_userprofile']]
    for user_profile in user_profiles:
        user_profile.set_unusable_password()
    UserProfile.objects.bulk_create(user_profiles)

    if 'zerver_huddle' in data:
        bulk_import_model(data, Huddle, 'zerver_huddle')

    bulk_import_model(data, Recipient, 'zerver_recipient')
    re_map_foreign_keys(data, 'zerver_subscription', 'user_profile', related_table="user_profile")
    convert_to_id_fields(data, 'zerver_subscription', 'recipient')
    bulk_import_model(data, Subscription, 'zerver_subscription')

    fix_datetime_fields(data, 'zerver_userpresence')
    re_map_foreign_keys(data, 'zerver_userpresence', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_userpresence', 'client', related_table='client')
    bulk_import_model(data, UserPresence, 'zerver_userpresence')

    fix_datetime_fields(data, 'zerver_useractivity')
    re_map_foreign_keys(data, 'zerver_useractivity', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_useractivity', 'client', related_table='client')
    bulk_import_model(data, UserActivity, 'zerver_useractivity')

    fix_datetime_fields(data, 'zerver_useractivityinterval')
    re_map_foreign_keys(data, 'zerver_useractivityinterval', 'user_profile', related_table="user_profile")
    bulk_import_model(data, UserActivityInterval, 'zerver_useractivityinterval')

    # Import uploaded files and avatars
    import_uploads(os.path.join(import_dir, "avatars"), avatar_bucket=True)
    import_uploads(os.path.join(import_dir, "uploads"))

    # Import zerver_message and zerver_usermessage
    import_message_data(import_dir)

    # Do attachments AFTER message data is loaded.
    import_attachments(data)
def do_import_realm(import_dir: Path, subdomain: str) -> Realm:
    logging.info("Importing realm dump %s" % (import_dir,))
    if not os.path.exists(import_dir):
        raise Exception("Missing import directory!")

    realm_data_filename = os.path.join(import_dir, "realm.json")
    if not os.path.exists(realm_data_filename):
        raise Exception("Missing realm.json file!")

    logging.info("Importing realm data from %s" % (realm_data_filename,))
    with open(realm_data_filename) as f:
        data = ujson.load(f)

    bulk_import_client(data, Client, 'zerver_client')

    # We don't import the Stream model yet, since it depends on Realm,
    # which isn't imported yet.  But we need the Stream model IDs for
    # notifications_stream.
    update_model_ids(Stream, data, 'stream')
    re_map_foreign_keys(data, 'zerver_realm', 'notifications_stream', related_table="stream")
    re_map_foreign_keys(data, 'zerver_realm', 'signup_notifications_stream', related_table="stream")

    fix_datetime_fields(data, 'zerver_realm')
    # Fix realm subdomain information
    data['zerver_realm'][0]['string_id'] = subdomain
    data['zerver_realm'][0]['name'] = subdomain
    fix_realm_authentication_bitfield(data, 'zerver_realm', 'authentication_methods')
    update_model_ids(Realm, data, 'realm')
    realm = Realm(**data['zerver_realm'][0])

    if settings.BILLING_ENABLED:
        realm.plan_type = Realm.LIMITED
    else:
        realm.plan_type = Realm.SELF_HOSTED

    if realm.notifications_stream_id is not None:
        notifications_stream_id = int(realm.notifications_stream_id)  # type: Optional[int]
    else:
        notifications_stream_id = None
    realm.notifications_stream_id = None
    if realm.signup_notifications_stream_id is not None:
        signup_notifications_stream_id = int(realm.signup_notifications_stream_id)  # type: Optional[int]
    else:
        signup_notifications_stream_id = None
    realm.signup_notifications_stream_id = None
    realm.save()

    # Email tokens will automatically be randomly generated when the
    # Stream objects are created by Django.
    fix_datetime_fields(data, 'zerver_stream')
    re_map_foreign_keys(data, 'zerver_stream', 'realm', related_table="realm")
    bulk_import_model(data, Stream)

    realm.notifications_stream_id = notifications_stream_id
    realm.signup_notifications_stream_id = signup_notifications_stream_id
    realm.save()

    # Remap the user IDs for notification_bot and friends to their
    # appropriate IDs on this server
    for item in data['zerver_userprofile_crossrealm']:
        logging.info("Adding to ID map: %s %s" % (item['id'], get_system_bot(item['email']).id))
        new_user_id = get_system_bot(item['email']).id
        update_id_map(table='user_profile', old_id=item['id'], new_id=new_user_id)
        new_recipient_id = Recipient.objects.get(type=Recipient.PERSONAL, type_id=new_user_id).id
        update_id_map(table='recipient', old_id=item['recipient_id'], new_id=new_recipient_id)

    # Merge in zerver_userprofile_mirrordummy
    data['zerver_userprofile'] = data['zerver_userprofile'] + data['zerver_userprofile_mirrordummy']
    del data['zerver_userprofile_mirrordummy']
    data['zerver_userprofile'].sort(key=lambda r: r['id'])

    # To remap foreign key for UserProfile.last_active_message_id
    update_message_foreign_keys(import_dir)

    fix_datetime_fields(data, 'zerver_userprofile')
    update_model_ids(UserProfile, data, 'user_profile')
    re_map_foreign_keys(data, 'zerver_userprofile', 'realm', related_table="realm")
    re_map_foreign_keys(data, 'zerver_userprofile', 'bot_owner', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_userprofile', 'default_sending_stream', related_table="stream")
    re_map_foreign_keys(data, 'zerver_userprofile', 'default_events_register_stream', related_table="stream")
    re_map_foreign_keys(data, 'zerver_userprofile', 'last_active_message_id', related_table="message", id_field=True)
    for user_profile_dict in data['zerver_userprofile']:
        user_profile_dict['password'] = None
        user_profile_dict['api_key'] = generate_api_key()
        # Since Zulip doesn't use these permissions, drop them
        del user_profile_dict['user_permissions']
        del user_profile_dict['groups']

    user_profiles = [UserProfile(**item) for item in data['zerver_userprofile']]
    for user_profile in user_profiles:
        user_profile.set_unusable_password()
    UserProfile.objects.bulk_create(user_profiles)

    re_map_foreign_keys(data, 'zerver_defaultstream', 'stream', related_table="stream")
    re_map_foreign_keys(data, 'zerver_realmemoji', 'author', related_table="user_profile")
    for (table, model, related_table) in realm_tables:
        re_map_foreign_keys(data, table, 'realm', related_table="realm")
        update_model_ids(model, data, related_table)
        bulk_import_model(data, model)

    if 'zerver_huddle' in data:
        update_model_ids(Huddle, data, 'huddle')
        # We don't import Huddle yet, since we don't have the data to
        # compute huddle hashes until we've imported some of the
        # tables below.
        # TODO: double-check this.

    re_map_foreign_keys(data, 'zerver_recipient', 'type_id', related_table="stream",
                        recipient_field=True, id_field=True)
    re_map_foreign_keys(data, 'zerver_recipient', 'type_id', related_table="user_profile",
                        recipient_field=True, id_field=True)
    re_map_foreign_keys(data, 'zerver_recipient', 'type_id', related_table="huddle",
                        recipient_field=True, id_field=True)
    update_model_ids(Recipient, data, 'recipient')
    bulk_import_model(data, Recipient)

    re_map_foreign_keys(data, 'zerver_subscription', 'user_profile', related_table="user_profile")
    get_huddles_from_subscription(data, 'zerver_subscription')
    re_map_foreign_keys(data, 'zerver_subscription', 'recipient', related_table="recipient")
    update_model_ids(Subscription, data, 'subscription')
    bulk_import_model(data, Subscription)

    if 'zerver_realmauditlog' in data:
        fix_datetime_fields(data, 'zerver_realmauditlog')
        re_map_foreign_keys(data, 'zerver_realmauditlog', 'realm', related_table="realm")
        re_map_foreign_keys(data, 'zerver_realmauditlog', 'modified_user', related_table='user_profile')
        re_map_foreign_keys(data, 'zerver_realmauditlog', 'acting_user', related_table='user_profile')
        re_map_foreign_keys(data, 'zerver_realmauditlog', 'modified_stream', related_table="stream")
        update_model_ids(RealmAuditLog, data, related_table="realmauditlog")
        bulk_import_model(data, RealmAuditLog)
    else:
        logging.info('about to call create_subscription_events')
        create_subscription_events(
            data=data,
            realm_id=realm.id,
        )
        logging.info('done with create_subscription_events')

    if 'zerver_huddle' in data:
        process_huddle_hash(data, 'zerver_huddle')
        bulk_import_model(data, Huddle)

    if 'zerver_userhotspot' in data:
        fix_datetime_fields(data, 'zerver_userhotspot')
        re_map_foreign_keys(data, 'zerver_userhotspot', 'user', related_table='user_profile')
        update_model_ids(UserHotspot, data, 'userhotspot')
        bulk_import_model(data, UserHotspot)

    if 'zerver_mutedtopic' in data:
        re_map_foreign_keys(data, 'zerver_mutedtopic', 'user_profile', related_table='user_profile')
        re_map_foreign_keys(data, 'zerver_mutedtopic', 'stream', related_table='stream')
        re_map_foreign_keys(data, 'zerver_mutedtopic', 'recipient', related_table='recipient')
        update_model_ids(MutedTopic, data, 'mutedtopic')
        bulk_import_model(data, MutedTopic)

    if 'zerver_service' in data:
        re_map_foreign_keys(data, 'zerver_service', 'user_profile', related_table='user_profile')
        fix_service_tokens(data, 'zerver_service')
        update_model_ids(Service, data, 'service')
        bulk_import_model(data, Service)

    if 'zerver_usergroup' in data:
        re_map_foreign_keys(data, 'zerver_usergroup', 'realm', related_table='realm')
        re_map_foreign_keys_many_to_many(data, 'zerver_usergroup', 'members', related_table='user_profile')
        update_model_ids(UserGroup, data, 'usergroup')
        bulk_import_model(data, UserGroup)

        re_map_foreign_keys(data, 'zerver_usergroupmembership', 'user_group', related_table='usergroup')
        re_map_foreign_keys(data, 'zerver_usergroupmembership', 'user_profile', related_table='user_profile')
        update_model_ids(UserGroupMembership, data, 'usergroupmembership')
        bulk_import_model(data, UserGroupMembership)

    if 'zerver_botstoragedata' in data:
        re_map_foreign_keys(data, 'zerver_botstoragedata', 'bot_profile', related_table='user_profile')
        update_model_ids(BotStorageData, data, 'botstoragedata')
        bulk_import_model(data, BotStorageData)

    if 'zerver_botconfigdata' in data:
        re_map_foreign_keys(data, 'zerver_botconfigdata', 'bot_profile', related_table='user_profile')
        update_model_ids(BotConfigData, data, 'botconfigdata')
        bulk_import_model(data, BotConfigData)

    fix_datetime_fields(data, 'zerver_userpresence')
    re_map_foreign_keys(data, 'zerver_userpresence', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_userpresence', 'client', related_table='client')
    update_model_ids(UserPresence, data, 'user_presence')
    bulk_import_model(data, UserPresence)

    fix_datetime_fields(data, 'zerver_useractivity')
    re_map_foreign_keys(data, 'zerver_useractivity', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_useractivity', 'client', related_table='client')
    update_model_ids(UserActivity, data, 'useractivity')
    bulk_import_model(data, UserActivity)

    fix_datetime_fields(data, 'zerver_useractivityinterval')
    re_map_foreign_keys(data, 'zerver_useractivityinterval', 'user_profile', related_table="user_profile")
    update_model_ids(UserActivityInterval, data, 'useractivityinterval')
    bulk_import_model(data, UserActivityInterval)

    re_map_foreign_keys(data, 'zerver_customprofilefield', 'realm', related_table="realm")
    update_model_ids(CustomProfileField, data, related_table="customprofilefield")
    bulk_import_model(data, CustomProfileField)

    re_map_foreign_keys(data, 'zerver_customprofilefieldvalue', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_customprofilefieldvalue', 'field', related_table="customprofilefield")
    fix_customprofilefield(data)
    update_model_ids(CustomProfileFieldValue, data, related_table="customprofilefieldvalue")
    bulk_import_model(data, CustomProfileFieldValue)

    # Import uploaded files and avatars
    import_uploads(os.path.join(import_dir, "avatars"), processing_avatars=True)
    import_uploads(os.path.join(import_dir, "uploads"))

    # We need to have this check as the emoji files are only present in the data
    # importer from slack
    # For Zulip export, this doesn't exist
    if os.path.exists(os.path.join(import_dir, "emoji")):
        import_uploads(os.path.join(import_dir, "emoji"), processing_emojis=True)

    # Import zerver_message and zerver_usermessage
    import_message_data(import_dir)

    re_map_foreign_keys(data, 'zerver_reaction', 'message', related_table="message")
    re_map_foreign_keys(data, 'zerver_reaction', 'user_profile', related_table="user_profile")
    re_map_foreign_keys(data, 'zerver_reaction', 'emoji_code', related_table="realmemoji",
                        id_field=True, reaction_field=True)
    update_model_ids(Reaction, data, 'reaction')
    bulk_import_model(data, Reaction)

    # Do attachments AFTER message data is loaded.
    # TODO: de-dup how we read these json files.
    fn = os.path.join(import_dir, "attachment.json")
    if not os.path.exists(fn):
        raise Exception("Missing attachment.json file!")

    logging.info("Importing attachment data from %s" % (fn,))
    with open(fn) as f:
        data = ujson.load(f)

    import_attachments(data)
    return realm