def can_access_stream_history(user_profile: UserProfile, stream: Stream) -> bool:
    """Determine whether the provided user is allowed to access the
    history of the target stream.

    This is used by the caller to determine whether this user can get
    historical messages before they joined for a narrowing search.

    Because of the way our search is currently structured, we may be
    passed an invalid stream here.  We return False in that situation,
    and subsequent code will do validation and raise the appropriate
    JsonableError.

    Note that this function should only be used in contexts where
    access_stream is being called elsewhere to confirm that the user
    can actually see this stream.
    """
    # Realm-public history is available to every non-guest member.
    if not user_profile.is_guest and stream.is_history_realm_public():
        return True

    if not stream.is_history_public_to_subscribers():
        return False

    # History is public to subscribers; the user has access to it
    # exactly when they can access the stream itself.
    error = _("Invalid stream name '%s'") % (stream.name,)
    try:
        access_stream_common(user_profile, stream, error)
    except JsonableError:
        return False
    return True
def do_change_stream_post_policy(
    stream: Stream, stream_post_policy: int, *, acting_user: UserProfile
) -> None:
    """Change who may post to the stream, recording the change in the
    audit log, notifying clients, and posting a notification message.

    Args:
        stream: the stream being modified.
        stream_post_policy: the new policy value (compared against
            Stream.STREAM_POST_POLICY_ADMINS below).
        acting_user: the user performing the change, for the audit log.
    """
    old_post_policy = stream.stream_post_policy
    # Persist the new policy and its audit-log record atomically, so we
    # never save one without the other.
    with transaction.atomic():
        stream.stream_post_policy = stream_post_policy
        stream.save(update_fields=["stream_post_policy"])
        RealmAuditLog.objects.create(
            realm=stream.realm,
            acting_user=acting_user,
            modified_stream=stream,
            event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
            event_time=timezone_now(),
            extra_data=orjson.dumps(
                {
                    RealmAuditLog.OLD_VALUE: old_post_policy,
                    RealmAuditLog.NEW_VALUE: stream_post_policy,
                    "property": "stream_post_policy",
                }
            ).decode(),
        )

    # Notify every client that can see this stream of the new policy.
    event = dict(
        op="update",
        type="stream",
        property="stream_post_policy",
        value=stream_post_policy,
        stream_id=stream.id,
        name=stream.name,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    # Backwards-compatibility code: We removed the
    # is_announcement_only property in early 2020, but we send a
    # duplicate event for legacy mobile clients that might want the
    # data.
    event = dict(
        op="update",
        type="stream",
        property="is_announcement_only",
        value=stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS,
        stream_id=stream.id,
        name=stream.name,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    send_change_stream_post_policy_notification(
        stream,
        old_post_policy=old_post_policy,
        new_post_policy=stream_post_policy,
        acting_user=acting_user,
    )
def subscriber_ids_with_stream_history_access(stream: Stream) -> Set[int]:
    """Returns the set of active user IDs who can access any message
    history on this stream (regardless of whether they have a
    UserMessage) based on the stream's configuration.

    1. if !history_public_to_subscribers:
          History is not available to anyone
    2. if history_public_to_subscribers and is_web_public:
          All subscribers can access the history including guests
    3. if history_public_to_subscribers and !is_web_public:
          All subscribers can access the history excluding guests
    """
    if not stream.is_history_public_to_subscribers():
        return set()

    # In every remaining case, only active accounts are eligible, so
    # apply that filter once rather than in each branch.
    active_subscriptions = get_active_subscriptions_for_stream_id(
        stream.id).filter(user_profile__is_active=True)
    if stream.is_web_public:
        return set(
            active_subscriptions.values_list("user_profile__id", flat=True))
    # Exclude guests; use the named role constant rather than the
    # magic number 600, matching the docstring's "excluding guests".
    return set(
        active_subscriptions.exclude(
            user_profile__role=UserProfile.ROLE_GUEST).values_list(
                "user_profile__id", flat=True))
def access_stream_common(
        user_profile: UserProfile, stream: Stream, error: Text,
        require_active: bool = True) -> Tuple[Recipient, Subscription]:
    """Common function for backend code where the target user attempts to
    access the target stream, returning all the data fetched along the
    way.  If that user does not have permission to access that stream,
    we throw an exception.  A design goal is that the error message is
    the same for streams you can't access and streams that don't exist."""

    # Cross-realm access is never allowed; report the same generic
    # error so we don't leak whether the stream exists.
    if stream.realm_id != user_profile.realm_id:
        raise JsonableError(error)

    recipient = get_stream_recipient(stream.id)
    try:
        sub = Subscription.objects.get(
            user_profile=user_profile,
            recipient=recipient,
            active=require_active,
        )
    except Subscription.DoesNotExist:
        sub = None

    # A public stream in your realm is accessible to everyone, and any
    # stream you're subscribed to is accessible as well.
    if stream.is_public() or sub is not None:
        return (recipient, sub)

    # Private stream the user isn't on: deny with the generic error.
    raise JsonableError(error)
def bulk_create_streams(realm: Realm, stream_dict: Dict[Text, Dict[Text, Any]]) -> None:
    """Create every stream in stream_dict (keyed by stream name) that
    doesn't already exist in the realm, along with a matching Recipient
    row for each new stream."""
    existing_streams = frozenset(
        name.lower()
        for name in Stream.objects.filter(realm=realm).values_list('name', flat=True)
    )
    streams_to_create = [
        Stream(
            realm=realm,
            name=name,
            description=options["description"],
            invite_only=options["invite_only"],
            is_in_zephyr_realm=realm.is_zephyr_mirror_realm,
        )
        for name, options in stream_dict.items()
        if name.lower() not in existing_streams
    ]  # type: List[Stream]
    # Sort streams by name before creating them so that we can have a
    # reliable ordering of `stream_id` across different python versions.
    # This is required for test fixtures which contain `stream_id`:
    # hash randomization (python 3.3+) makes iteration order over
    # `stream_dict` unpredictable.
    streams_to_create.sort(key=lambda stream: stream.name)
    Stream.objects.bulk_create(streams_to_create)

    # Re-query so we see the database-assigned IDs of the new streams.
    recipients_to_create = [
        Recipient(type_id=stream['id'], type=Recipient.STREAM)
        for stream in Stream.objects.filter(realm=realm).values('id', 'name')
        if stream['name'].lower() not in existing_streams
    ]  # type: List[Recipient]
    Recipient.objects.bulk_create(recipients_to_create)
def build_stream(
    date_created: Any,
    realm_id: int,
    name: str,
    description: str,
    stream_id: int,
    deactivated: bool = False,
    invite_only: bool = False,
    stream_post_policy: int = 1,
) -> ZerverFieldsT:
    """Build a stream dict suitable for third-party data imports."""
    # Other applications don't have the distinction of "private stream
    # with public history" vs "private stream with hidden history" - and
    # we've traditionally imported private "streams" of other products
    # as private streams with hidden history.  So we derive the
    # history_public_to_subscribers value from the invite_only flag.
    stream = Stream(
        name=name,
        deactivated=deactivated,
        # Newlines are stripped from the description.
        description=description.replace("\n", " "),
        # We don't set rendered_description here; it'll be added on import
        date_created=date_created,
        invite_only=invite_only,
        id=stream_id,
        stream_post_policy=stream_post_policy,
        history_public_to_subscribers=not invite_only,
    )
    stream_dict = model_to_dict(stream, exclude=["realm"])
    # model_to_dict can't serialize the realm FK; store the ID instead.
    stream_dict["realm"] = realm_id
    return stream_dict
def access_stream_common(user_profile: UserProfile, stream: Stream, error: Text, require_active: bool=True) -> Tuple[Recipient, Subscription]: """Common function for backend code where the target use attempts to access the target stream, returning all the data fetched along the way. If that user does not have permission to access that stream, we throw an exception. A design goal is that the error message is the same for streams you can't access and streams that don't exist.""" # First, we don't allow any access to streams in other realms. if stream.realm_id != user_profile.realm_id: raise JsonableError(error) recipient = get_stream_recipient(stream.id) try: sub = Subscription.objects.get(user_profile=user_profile, recipient=recipient, active=require_active) except Subscription.DoesNotExist: sub = None # If the stream is in your realm and public, you can access it. if stream.is_public(): return (recipient, sub) # Or if you are subscribed to the stream, you can access it. if sub is not None: return (recipient, sub) # Otherwise it is a private stream and you're not on it, so throw # an error. raise JsonableError(error)
def do_change_stream_description(
    stream: Stream, new_description: str, *, acting_user: UserProfile
) -> None:
    """Change the stream's description, recording the change in the
    audit log, notifying clients, and posting a notification message
    to the stream."""
    old_description = stream.description

    # Persist the description (and its rendered form) together with the
    # audit-log record atomically.
    with transaction.atomic():
        stream.description = new_description
        stream.rendered_description = render_stream_description(new_description)
        stream.save(update_fields=["description", "rendered_description"])
        RealmAuditLog.objects.create(
            realm=stream.realm,
            acting_user=acting_user,
            modified_stream=stream,
            event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
            event_time=timezone_now(),
            extra_data=orjson.dumps(
                {
                    RealmAuditLog.OLD_VALUE: old_description,
                    RealmAuditLog.NEW_VALUE: new_description,
                    "property": "description",
                }
            ).decode(),
        )

    # Notify every client that can see this stream.
    event = dict(
        type="stream",
        op="update",
        property="description",
        name=stream.name,
        stream_id=stream.id,
        value=new_description,
        rendered_description=stream.rendered_description,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    send_change_stream_description_notification(
        stream,
        old_description=old_description,
        new_description=new_description,
        acting_user=acting_user,
    )
def can_access_stream_user_ids(stream: Stream) -> Set[int]:
    """Return the IDs of users who can access the attributes of the
    stream, such as its name/description.  Useful for sending events
    to all users with access to a stream's attributes."""
    if not stream.is_public():
        # For a private stream, it's subscribers plus realm admins.
        admin_ids = {user.id for user in stream.realm.get_admin_users_and_bots()}
        return private_stream_user_ids(stream.id) | admin_ids
    # For a public stream, this is everyone in the realm except
    # unsubscribed guest users.
    return public_stream_user_ids(stream)
def bulk_create_streams(
        realm: Realm,
        stream_dict: Dict[str, Dict[str, Any]]) -> None:  # nocoverage
    """Create every stream in stream_dict (mapping stream name ->
    options) that doesn't already exist in the realm, plus a matching
    Recipient row for each new stream, wiring the recipient fields.

    NOTE(review): this mutates the caller's per-stream options dicts by
    filling in a default for 'history_public_to_subscribers'.
    """
    existing_streams = {
        name.lower() for name in Stream.objects.filter(
            realm=realm).values_list('name', flat=True)
    }
    streams_to_create: List[Stream] = []
    for name, options in stream_dict.items():
        if 'history_public_to_subscribers' not in options:
            # Default: history is public to subscribers for public
            # streams, except in Zephyr-mirror realms.
            options['history_public_to_subscribers'] = (
                not options.get("invite_only", False)
                and not realm.is_zephyr_mirror_realm)
        if name.lower() not in existing_streams:
            streams_to_create.append(
                Stream(
                    realm=realm,
                    name=name,
                    description=options["description"],
                    rendered_description=render_stream_description(
                        options["description"]),
                    invite_only=options.get("invite_only", False),
                    stream_post_policy=options.get(
                        "stream_post_policy",
                        Stream.STREAM_POST_POLICY_EVERYONE),
                    history_public_to_subscribers=options[
                        "history_public_to_subscribers"],
                    is_web_public=options.get("is_web_public", False),
                    is_in_zephyr_realm=realm.is_zephyr_mirror_realm,
                ),
            )

    # Sort streams by name before creating them so that we can have a
    # reliable ordering of `stream_id` across different python versions.
    # This is required for test fixtures which contain `stream_id`. Prior
    # to python 3.3 hashes were not randomized but after a security fix
    # hash randomization was enabled in python 3.3 which made iteration
    # of dictionaries and sets completely unpredictable. Here the order
    # of elements while iterating `stream_dict` will be completely random
    # for python 3.3 and later versions.
    streams_to_create.sort(key=lambda x: x.name)
    Stream.objects.bulk_create(streams_to_create)

    # Re-query so we see the database-assigned IDs of the new streams.
    recipients_to_create: List[Recipient] = []
    for stream in Stream.objects.filter(realm=realm).values('id', 'name'):
        if stream['name'].lower() not in existing_streams:
            recipients_to_create.append(
                Recipient(type_id=stream['id'], type=Recipient.STREAM))
    Recipient.objects.bulk_create(recipients_to_create)

    bulk_set_users_or_streams_recipient_fields(Stream, streams_to_create,
                                               recipients_to_create)
def do_change_stream_message_retention_days(
        stream: Stream, acting_user: UserProfile,
        message_retention_days: Optional[int] = None) -> None:
    """Change the stream's message retention policy, recording the
    change in the audit log, notifying clients, and posting a
    notification message to the stream.

    # NOTE(review): presumably message_retention_days=None means the
    # realm-level policy applies - confirm against the Stream model.
    """
    old_message_retention_days_value = stream.message_retention_days

    # Persist the new value and its audit-log record atomically.
    with transaction.atomic():
        stream.message_retention_days = message_retention_days
        stream.save(update_fields=["message_retention_days"])
        RealmAuditLog.objects.create(
            realm=stream.realm,
            acting_user=acting_user,
            modified_stream=stream,
            event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED,
            event_time=timezone_now(),
            extra_data=orjson.dumps({
                RealmAuditLog.OLD_VALUE: old_message_retention_days_value,
                RealmAuditLog.NEW_VALUE: message_retention_days,
            }).decode(),
        )

    # Notify every client that can see this stream.
    event = dict(
        op="update",
        type="stream",
        property="message_retention_days",
        value=message_retention_days,
        stream_id=stream.id,
        name=stream.name,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    send_change_stream_message_retention_days_notification(
        user_profile=acting_user,
        stream=stream,
        old_value=old_message_retention_days_value,
        new_value=message_retention_days,
    )
def build_stream(date_created: Any, realm_id: int, name: str, description: str,
                 stream_id: int, deactivated: bool=False,
                 invite_only: bool=False) -> ZerverFieldsT:
    """Build a stream dict suitable for data-import tooling."""
    stream = Stream(
        id=stream_id,
        name=name,
        description=description,
        date_created=date_created,
        deactivated=deactivated,
        invite_only=invite_only,
    )
    # model_to_dict can't serialize the realm FK; store the ID instead.
    fields = model_to_dict(stream, exclude=['realm'])
    fields['realm'] = realm_id
    return fields
def bulk_create_streams(realms, stream_list):
    """Create every (domain, name) stream in stream_list that doesn't
    already exist, plus a matching Recipient row for each new stream."""
    existing_streams = {
        (stream.realm.domain, stream.name.lower())
        for stream in Stream.objects.select_related().all()
    }
    streams_to_create = [
        Stream(realm=realms[domain], name=name)
        for (domain, name) in stream_list
        if (domain, name.lower()) not in existing_streams
    ]
    Stream.objects.bulk_create(streams_to_create)

    # Re-query so we see the database-assigned IDs of the new streams.
    recipients_to_create = [
        Recipient(type_id=stream.id, type=Recipient.STREAM)
        for stream in Stream.objects.select_related().all()
        if (stream.realm.domain, stream.name.lower()) not in existing_streams
    ]
    Recipient.objects.bulk_create(recipients_to_create)
def build_stream(date_created: Any, realm_id: int, name: str, description: str,
                 stream_id: int, deactivated: bool=False,
                 invite_only: bool=False) -> ZerverFieldsT:
    """Build a stream dict suitable for data-import tooling."""
    stream = Stream(
        id=stream_id,
        name=name,
        # Newlines are stripped from the description.
        description=description.replace("\n", " "),
        # We don't set rendered_description here; it'll be added on import
        date_created=date_created,
        deactivated=deactivated,
        invite_only=invite_only,
    )
    # model_to_dict can't serialize the realm FK; store the ID instead.
    fields = model_to_dict(stream, exclude=['realm'])
    fields['realm'] = realm_id
    return fields
def access_stream_common( user_profile: UserProfile, stream: Stream, error: str, require_active: bool = True, allow_realm_admin: bool = False, ) -> Optional[Subscription]: """Common function for backend code where the target use attempts to access the target stream, returning all the data fetched along the way. If that user does not have permission to access that stream, we throw an exception. A design goal is that the error message is the same for streams you can't access and streams that don't exist.""" # First, we don't allow any access to streams in other realms. if stream.realm_id != user_profile.realm_id: # Callers should verify this on their own, so this functions as defensive code. raise AssertionError("user_profile and stream realms don't match") try: assert stream.recipient_id is not None sub = Subscription.objects.get(user_profile=user_profile, recipient_id=stream.recipient_id, active=require_active) except Subscription.DoesNotExist: sub = None # Any realm user, even guests, can access web_public streams. if stream.is_web_public: return sub # If the stream is in your realm and public, you can access it. if stream.is_public() and not user_profile.is_guest: return sub # Or if you are subscribed to the stream, you can access it. if sub is not None: return sub # For some specific callers (e.g. getting list of subscribers, # removing other users from a stream, and updating stream name and # description), we allow realm admins to access stream even if # they are not subscribed to a private stream. if user_profile.is_realm_admin and allow_realm_admin: return sub # Otherwise it is a private stream and you're not on it, so throw # an error. raise JsonableError(error)
def bulk_create_streams(realms, stream_list):
    # type: (Mapping[text_type, Realm], Iterable[Tuple[text_type, text_type]]) -> None
    """Create every (domain, name) stream in stream_list that doesn't
    already exist, plus a matching Recipient row for each new stream."""
    existing_streams = set(
        (stream.realm.domain, stream.name.lower())
        for stream in Stream.objects.select_related().all())
    streams_to_create = [
        Stream(realm=realms[domain], name=name)
        for (domain, name) in stream_list
        if (domain, name.lower()) not in existing_streams
    ]  # type: List[Stream]
    Stream.objects.bulk_create(streams_to_create)

    # Re-query so we see the database-assigned IDs of the new streams.
    recipients_to_create = [
        Recipient(type_id=stream.id, type=Recipient.STREAM)
        for stream in Stream.objects.select_related().all()
        if (stream.realm.domain, stream.name.lower()) not in existing_streams
    ]  # type: List[Recipient]
    Recipient.objects.bulk_create(recipients_to_create)
def access_stream_common(
        user_profile: UserProfile, stream: Stream, error: str,
        require_active: bool = True,
        allow_realm_admin: bool = False
) -> Tuple[Recipient, Optional[Subscription]]:
    """Common function for backend code where the target user attempts to
    access the target stream, returning all the data fetched along the
    way.  If that user does not have permission to access that stream,
    we throw an exception.  A design goal is that the error message is
    the same for streams you can't access and streams that don't exist."""

    # Cross-realm access is never allowed; report the same generic
    # error so we don't leak whether the stream exists.
    if stream.realm_id != user_profile.realm_id:
        raise JsonableError(error)

    recipient = get_stream_recipient(stream.id)
    try:
        sub = Subscription.objects.get(
            user_profile=user_profile,
            recipient=recipient,
            active=require_active,
        )
    except Subscription.DoesNotExist:
        sub = None

    # Non-guests can access any public stream in their realm.
    if stream.is_public() and not user_profile.is_guest:
        return (recipient, sub)

    # Subscribers can access the stream regardless of its privacy.
    if sub is not None:
        return (recipient, sub)

    # For some specific callers (e.g. getting list of subscribers,
    # removing other users from a stream, and updating stream name and
    # description), we allow realm admins to access a private stream
    # even without a subscription.
    if allow_realm_admin and user_profile.is_realm_admin:
        return (recipient, sub)

    # Otherwise it is a private stream and you're not on it.
    raise JsonableError(error)
def bulk_create_streams(realms, stream_dict):
    # type: (Mapping[text_type, Realm], Dict[text_type, Dict[text_type, Any]]) -> None
    """Create each stream in stream_dict (name -> options, where the
    options carry the realm domain) that doesn't already exist, plus a
    matching Recipient row for each new stream."""
    existing_streams = set(
        (stream.realm.domain, stream.name.lower())
        for stream in Stream.objects.select_related().all())
    streams_to_create = [
        Stream(realm=realms[options["domain"]],
               name=name,
               description=options["description"],
               invite_only=options["invite_only"])
        for name, options in stream_dict.items()
        if (options["domain"], name.lower()) not in existing_streams
    ]  # type: List[Stream]
    Stream.objects.bulk_create(streams_to_create)

    # Re-query so we see the database-assigned IDs of the new streams.
    recipients_to_create = [
        Recipient(type_id=stream.id, type=Recipient.STREAM)
        for stream in Stream.objects.select_related().all()
        if (stream.realm.domain, stream.name.lower()) not in existing_streams
    ]  # type: List[Recipient]
    Recipient.objects.bulk_create(recipients_to_create)
def access_stream_common(user_profile: UserProfile, stream: Stream, error: str, require_active: bool=True, allow_realm_admin: bool=False) -> Tuple[Recipient, Optional[Subscription]]: """Common function for backend code where the target use attempts to access the target stream, returning all the data fetched along the way. If that user does not have permission to access that stream, we throw an exception. A design goal is that the error message is the same for streams you can't access and streams that don't exist.""" # First, we don't allow any access to streams in other realms. if stream.realm_id != user_profile.realm_id: raise JsonableError(error) recipient = get_stream_recipient(stream.id) try: sub = Subscription.objects.get(user_profile=user_profile, recipient=recipient, active=require_active) except Subscription.DoesNotExist: sub = None # If the stream is in your realm and public, you can access it. if stream.is_public() and not user_profile.is_guest: return (recipient, sub) # Or if you are subscribed to the stream, you can access it. if sub is not None: return (recipient, sub) # For some specific callers (e.g. getting list of subscribers, # removing other users from a stream, and updating stream name and # description), we allow realm admins to access stream even if # they are not subscribed to a private stream. if user_profile.is_realm_admin and allow_realm_admin: return (recipient, sub) # Otherwise it is a private stream and you're not on it, so throw # an error. raise JsonableError(error)
def bulk_create_streams(realm, stream_dict):
    # type: (Realm, Dict[Text, Dict[Text, Any]]) -> None
    """Create every stream in stream_dict (mapping stream name ->
    options) that doesn't already exist in the realm, plus a matching
    Recipient row for each new stream."""
    existing_streams = frozenset([
        name.lower() for name in Stream.objects.filter(
            realm=realm).values_list('name', flat=True)
    ])
    streams_to_create = []  # type: List[Stream]
    for name, options in stream_dict.items():
        if name.lower() not in existing_streams:
            streams_to_create.append(
                Stream(realm=realm, name=name,
                       description=options["description"],
                       invite_only=options["invite_only"]))
    Stream.objects.bulk_create(streams_to_create)

    # Re-query so we see the database-assigned IDs of the new streams.
    recipients_to_create = []  # type: List[Recipient]
    for stream in Stream.objects.filter(realm=realm).values('id', 'name'):
        if stream['name'].lower() not in existing_streams:
            recipients_to_create.append(
                Recipient(type_id=stream['id'], type=Recipient.STREAM))
    Recipient.objects.bulk_create(recipients_to_create)
def subscriber_ids_with_stream_history_access(stream: Stream) -> Set[int]:
    """Returns the set of active user IDs who can access any message
    history on this stream (regardless of whether they have a
    UserMessage) based on the stream's configuration.

    1. if !history_public_to_subscribers:
          History is not available to anyone
    2. if history_public_to_subscribers:
          All subscribers can access the history including guests

    The results of this function need to be kept consistent with what
    can_access_stream_history would dictate.
    """
    if not stream.is_history_public_to_subscribers():
        return set()

    subscriptions = get_active_subscriptions_for_stream_id(
        stream.id, include_deactivated_users=False)
    return set(subscriptions.values_list("user_profile_id", flat=True))
def _set_stream_message_retention_value(
        self, stream: Stream,
        retention_period: Optional[int]) -> None:
    # Directly set and persist the stream's message_retention_days,
    # without the audit logging or client notification done by
    # do_change_stream_message_retention_days.
    stream.message_retention_days = retention_period
    stream.save()
def send_stream_creation_event(stream: Stream, user_ids: List[int]) -> None:
    """Notify the given users that the stream has been created."""
    creation_event = dict(
        type="stream",
        op="create",
        streams=[stream.to_dict()],
    )
    send_event(stream.realm, creation_event, user_ids)
def do_rename_stream(stream: Stream, new_name: str, user_profile: UserProfile) -> Dict[str, str]:
    """Rename the stream, updating the database, caches, the audit log,
    and clients, and posting a notification message to the stream.

    Returns a dict containing the stream's new email forwarding address.
    """
    old_name = stream.name
    stream.name = new_name
    stream.save(update_fields=["name"])

    RealmAuditLog.objects.create(
        realm=stream.realm,
        acting_user=user_profile,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_NAME_CHANGED,
        event_time=timezone_now(),
        extra_data=orjson.dumps(
            {
                RealmAuditLog.OLD_VALUE: old_name,
                RealmAuditLog.NEW_VALUE: new_name,
            }
        ).decode(),
    )

    recipient_id = stream.recipient_id
    messages = Message.objects.filter(recipient_id=recipient_id).only("id")

    # Update the display recipient and stream, which are easy single
    # items to set.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    new_cache_key = get_stream_cache_key(stream.name, stream.realm_id)
    if old_cache_key != new_cache_key:
        cache_delete(old_cache_key)
        cache_set(new_cache_key, stream)
    cache_set(display_recipient_cache_key(recipient_id), stream.name)

    # Delete cache entries for everything else, which is cheaper and
    # clearer than trying to set them. display_recipient is the out of
    # date field in all cases.
    cache_delete_many(to_dict_cache_key_id(message.id) for message in messages)
    new_email = encode_email_address(stream, show_sender=True)

    # We will tell our users to essentially
    # update stream.name = new_name where name = old_name
    # and update stream.email = new_email where name = old_name.
    # We could optimize this by trying to send one message, but the
    # client code really wants one property update at a time, and
    # updating stream names is a pretty infrequent operation.
    # More importantly, we want to key these updates by id, not name,
    # since id is the immutable primary key, and obviously name is not.
    data_updates = [
        ["email_address", new_email],
        ["name", new_name],
    ]
    for property, value in data_updates:
        event = dict(
            op="update",
            type="stream",
            property=property,
            value=value,
            stream_id=stream.id,
            name=old_name,
        )
        send_event(stream.realm, event, can_access_stream_user_ids(stream))
    sender = get_system_bot(settings.NOTIFICATION_BOT, stream.realm_id)
    # Post the rename notification in the stream's realm's language.
    with override_language(stream.realm.default_language):
        internal_send_stream_message(
            sender,
            stream,
            Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
            _("{user_name} renamed stream {old_stream_name} to {new_stream_name}.").format(
                user_name=silent_mention_syntax_for_user(user_profile),
                old_stream_name=f"**{old_name}**",
                new_stream_name=f"**{new_name}**",
            ),
        )
    # Even though the token doesn't change, the web client needs to update the
    # email forwarding address to display the correctly-escaped new name.
    return {"email_address": new_email}
def do_change_stream_permission(
    stream: Stream,
    *,
    invite_only: Optional[bool] = None,
    history_public_to_subscribers: Optional[bool] = None,
    is_web_public: Optional[bool] = None,
    acting_user: UserProfile,
) -> None:
    """Change the stream's privacy configuration (invite_only,
    history_public_to_subscribers, is_web_public), recording audit-log
    entries for each changed property, resetting attachment-permission
    caches, notifying clients, and posting a notification message."""
    old_invite_only_value = stream.invite_only
    old_history_public_to_subscribers_value = stream.history_public_to_subscribers
    old_is_web_public_value = stream.is_web_public

    # A note on these assertions: It's possible we'd be better off
    # making all callers of this function pass the full set of
    # parameters, rather than having default values.  Doing so would
    # allow us to remove the messy logic below, where we sometimes
    # ignore the passed parameters.
    #
    # But absent such a refactoring, it's important to assert that
    # we're not requesting an unsupported configurations.
    if is_web_public:
        assert history_public_to_subscribers is not False
        assert invite_only is not True
        stream.is_web_public = True
        stream.invite_only = False
        stream.history_public_to_subscribers = True
    else:
        assert invite_only is not None
        # is_web_public is falsey
        history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
            stream.realm,
            invite_only,
            history_public_to_subscribers,
        )
        stream.invite_only = invite_only
        stream.history_public_to_subscribers = history_public_to_subscribers
        stream.is_web_public = False

    # Persist the new settings and the audit-log rows atomically.
    with transaction.atomic():
        stream.save(update_fields=["invite_only", "history_public_to_subscribers", "is_web_public"])

        event_time = timezone_now()
        if old_invite_only_value != stream.invite_only:
            # Reset the Attachment.is_realm_public cache for all
            # messages in the stream whose permissions were changed.
            Attachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_realm_public=None
            )
            # We need to do the same for ArchivedAttachment to avoid
            # bugs if deleted attachments are later restored.
            ArchivedAttachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_realm_public=None
            )

            RealmAuditLog.objects.create(
                realm=stream.realm,
                acting_user=acting_user,
                modified_stream=stream,
                event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
                event_time=event_time,
                extra_data=orjson.dumps(
                    {
                        RealmAuditLog.OLD_VALUE: old_invite_only_value,
                        RealmAuditLog.NEW_VALUE: stream.invite_only,
                        "property": "invite_only",
                    }
                ).decode(),
            )

        if old_history_public_to_subscribers_value != stream.history_public_to_subscribers:
            RealmAuditLog.objects.create(
                realm=stream.realm,
                acting_user=acting_user,
                modified_stream=stream,
                event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
                event_time=event_time,
                extra_data=orjson.dumps(
                    {
                        RealmAuditLog.OLD_VALUE: old_history_public_to_subscribers_value,
                        RealmAuditLog.NEW_VALUE: stream.history_public_to_subscribers,
                        "property": "history_public_to_subscribers",
                    }
                ).decode(),
            )

        if old_is_web_public_value != stream.is_web_public:
            # Reset the Attachment.is_realm_public cache for all
            # messages in the stream whose permissions were changed.
            Attachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_web_public=None
            )
            # We need to do the same for ArchivedAttachment to avoid
            # bugs if deleted attachments are later restored.
            ArchivedAttachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_web_public=None
            )

            RealmAuditLog.objects.create(
                realm=stream.realm,
                acting_user=acting_user,
                modified_stream=stream,
                event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
                event_time=event_time,
                extra_data=orjson.dumps(
                    {
                        RealmAuditLog.OLD_VALUE: old_is_web_public_value,
                        RealmAuditLog.NEW_VALUE: stream.is_web_public,
                        "property": "is_web_public",
                    }
                ).decode(),
            )

    # Notify clients of the full new permission configuration.
    event = dict(
        op="update",
        type="stream",
        property="invite_only",
        value=stream.invite_only,
        history_public_to_subscribers=stream.history_public_to_subscribers,
        is_web_public=stream.is_web_public,
        stream_id=stream.id,
        name=stream.name,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    old_policy_name = get_stream_permission_policy_name(
        invite_only=old_invite_only_value,
        history_public_to_subscribers=old_history_public_to_subscribers_value,
        is_web_public=old_is_web_public_value,
    )
    new_policy_name = get_stream_permission_policy_name(
        invite_only=stream.invite_only,
        history_public_to_subscribers=stream.history_public_to_subscribers,
        is_web_public=stream.is_web_public,
    )
    send_change_stream_permission_notification(
        stream,
        old_policy_name=old_policy_name,
        new_policy_name=new_policy_name,
        acting_user=acting_user,
    )
def do_deactivate_stream(
    stream: Stream, log: bool = True, *, acting_user: Optional[UserProfile]
) -> None:
    """Deactivate the stream: unsubscribe everyone, rename the stream
    out of the way, remove it from default-stream settings, invalidate
    caches, and notify clients.

    The `log` parameter is not referenced in this body; it appears to be
    kept only for caller compatibility.
    """
    # We want to mark all messages in the to-be-deactivated stream as
    # read for all users; otherwise they will pollute queries like
    # "Get the user's first unread message".  Since this can be an
    # expensive operation, we do it via the deferred_work queue
    # processor.
    deferred_work_event = {
        "type": "mark_stream_messages_as_read_for_everyone",
        "stream_recipient_id": stream.recipient_id,
    }
    transaction.on_commit(lambda: queue_json_publish("deferred_work", deferred_work_event))

    # Get the affected user ids *before* we deactivate everybody.
    affected_user_ids = can_access_stream_user_ids(stream)

    get_active_subscriptions_for_stream_id(stream.id, include_deactivated_users=True).update(
        active=False
    )

    was_invite_only = stream.invite_only
    stream.deactivated = True
    stream.invite_only = True
    # Preserve as much as possible the original stream name while giving it a
    # special prefix that both indicates that the stream is deactivated and
    # frees up the original name for reuse.
    old_name = stream.name

    # Prepend a substring of the hashed stream ID to the new stream name
    streamID = str(stream.id)
    stream_id_hash_object = hashlib.sha512(streamID.encode())
    hashed_stream_id = stream_id_hash_object.hexdigest()[0:7]

    new_name = (hashed_stream_id + "!DEACTIVATED:" + old_name)[: Stream.MAX_NAME_LENGTH]

    # NOTE(review): new_name is already truncated above, so this second
    # [: Stream.MAX_NAME_LENGTH] slice is redundant (harmless).
    stream.name = new_name[: Stream.MAX_NAME_LENGTH]
    stream.save(update_fields=["name", "deactivated", "invite_only"])

    # If this is a default stream, remove it, properly sending a
    # notification to browser clients.
    if DefaultStream.objects.filter(realm_id=stream.realm_id, stream_id=stream.id).exists():
        do_remove_default_stream(stream)

    default_stream_groups_for_stream = DefaultStreamGroup.objects.filter(streams__id=stream.id)
    for group in default_stream_groups_for_stream:
        do_remove_streams_from_default_stream_group(stream.realm, group, [stream])

    # Remove the old stream information from remote cache.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    cache_delete(old_cache_key)

    # Send the delete event under the stream's pre-deactivation
    # name/privacy so clients can identify it.
    stream_dict = stream.to_dict()
    stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
    event = dict(type="stream", op="delete", streams=[stream_dict])
    transaction.on_commit(lambda: send_event(stream.realm, event, affected_user_ids))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=stream.realm,
        acting_user=acting_user,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_DEACTIVATED,
        event_time=event_time,
    )
def get_web_public_streams(realm: Realm) -> List[APIStreamDict]:  # nocoverage
    """Fetch client-facing data for all web-public streams in the realm."""
    return Stream.get_client_data(get_web_public_streams_queryset(realm))
def do_get_streams(
    user_profile: UserProfile,
    include_public: bool = True,
    include_web_public: bool = False,
    include_subscribed: bool = True,
    include_all_active: bool = False,
    include_default: bool = False,
    include_owner_subscribed: bool = False,
) -> List[APIStreamDict]:
    """Return client-facing data for the streams the user requested,
    combining the selected sources (subscribed, public, web-public,
    bot-owner-subscribed) with an OR of query filters.

    Raises JsonableError if include_all_active is requested by a
    non-admin.
    """
    # This function is only used by API clients now.
    if include_all_active and not user_profile.is_realm_admin:
        raise JsonableError(_("User not authorized for this query"))

    # Guests (and similar) can't see public streams they aren't in.
    include_public = include_public and user_profile.can_access_public_streams()

    # Start out with all active streams in the realm.
    query = Stream.objects.filter(realm=user_profile.realm, deactivated=False)

    if include_all_active:
        streams = Stream.get_client_data(query)
    else:
        # We construct a query as the or (|) of the various sources
        # this user requested streams from.
        query_filter: Optional[Q] = None

        def add_filter_option(option: Q) -> None:
            # OR the new source into the accumulated filter.
            nonlocal query_filter
            if query_filter is None:
                query_filter = option
            else:
                query_filter |= option

        if include_subscribed:
            subscribed_stream_ids = get_subscribed_stream_ids_for_user(user_profile)
            recipient_check = Q(id__in=set(subscribed_stream_ids))
            add_filter_option(recipient_check)
        if include_public:
            invite_only_check = Q(invite_only=False)
            add_filter_option(invite_only_check)
        if include_web_public:
            # This should match get_web_public_streams_queryset
            web_public_check = Q(
                is_web_public=True,
                invite_only=False,
                history_public_to_subscribers=True,
                deactivated=False,
            )
            add_filter_option(web_public_check)
        if include_owner_subscribed and user_profile.is_bot:
            bot_owner = user_profile.bot_owner
            assert bot_owner is not None
            owner_stream_ids = get_subscribed_stream_ids_for_user(bot_owner)
            owner_subscribed_check = Q(id__in=set(owner_stream_ids))
            add_filter_option(owner_subscribed_check)

        if query_filter is not None:
            query = query.filter(query_filter)
            streams = Stream.get_client_data(query)
        else:
            # Don't bother going to the database with no valid sources
            streams = []

    streams.sort(key=lambda elt: elt["name"])

    if include_default:
        is_default = {}
        default_streams = get_default_streams_for_realm(user_profile.realm_id)
        for default_stream in default_streams:
            is_default[default_stream.id] = True
        for stream in streams:
            stream["is_default"] = is_default.get(stream["stream_id"], False)

    return streams