Example #1
def check_url(var_name: str, val: Text) -> None:
    validate = URLValidator()
    try:
        validate(val)
    except ValidationError as err:
        raise JsonableError(str(err.message))
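
For orientation, here is a minimal sketch of the pattern these examples share: a JsonableError-style exception carries a message that is safe to serialize back to API clients. The class below is hypothetical and only illustrates the idea; it is not Zulip's actual implementation.

# Hypothetical minimal JsonableError, for illustration only.
class JsonableError(Exception):
    def __init__(self, msg: str) -> None:
        super().__init__(msg)
        self.msg = msg

    def to_json(self) -> dict:
        return {"result": "error", "msg": self.msg}

# A view layer would catch it and turn it into a JSON error response, e.g.:
#     try:
#         check_url("url", "not a valid url")
#     except JsonableError as err:
#         return JsonResponse(err.to_json(), status=400)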
Example #2
def list_to_streams(
    streams_raw: Iterable[StreamDict],
    user_profile: UserProfile,
    autocreate: bool = False,
    admin_access_required: bool = False,
) -> Tuple[List[Stream], List[Stream]]:
    """Converts list of dicts to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our
    requirements for a proper stream name using check_stream_name.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream dictionaries to process;
      names should already be stripped of whitespace by the caller.
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    """
    # Validate all streams, getting extant ones, then get-or-creating the rest.

    stream_set = {stream_dict["name"] for stream_dict in streams_raw}

    for stream_name in stream_set:
        # Stream names should already have been stripped by the
        # caller, but it makes sense to verify anyway.
        assert stream_name == stream_name.strip()
        check_stream_name(stream_name)

    existing_streams: List[Stream] = []
    missing_stream_dicts: List[StreamDict] = []
    existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)

    if admin_access_required:
        existing_recipient_ids = [
            stream.recipient_id for stream in existing_stream_map.values()
        ]
        subs = Subscription.objects.filter(
            user_profile=user_profile,
            recipient_id__in=existing_recipient_ids,
            active=True)
        sub_map = {sub.recipient_id: sub for sub in subs}
        for stream in existing_stream_map.values():
            sub = sub_map.get(stream.recipient_id, None)
            check_stream_access_for_delete_or_update(user_profile, stream, sub)

    message_retention_days_not_none = False
    for stream_dict in streams_raw:
        stream_name = stream_dict["name"]
        stream = existing_stream_map.get(stream_name.lower())
        if stream is None:
            if stream_dict.get("message_retention_days", None) is not None:
                message_retention_days_not_none = True
            missing_stream_dicts.append(stream_dict)
        else:
            existing_streams.append(stream)

    if len(missing_stream_dicts) == 0:
        # This is the happy path for callers who expected all of these
        # streams to exist already.
        created_streams: List[Stream] = []
    else:
        # autocreate=True path starts here
        if not user_profile.can_create_streams():
            if user_profile.realm.create_stream_policy == Realm.POLICY_ADMINS_ONLY:
                raise JsonableError(
                    _("Only administrators can create streams."))
            if user_profile.realm.create_stream_policy == Realm.POLICY_FULL_MEMBERS_ONLY:
                raise JsonableError(
                    _("Your account is too new to create streams."))
            raise JsonableError(_("Not allowed for guest users"))
        elif not autocreate:
            raise JsonableError(
                _("Stream(s) ({}) do not exist").format(
                    ", ".join(stream_dict["name"]
                              for stream_dict in missing_stream_dicts), ))
        elif message_retention_days_not_none:
            if not user_profile.is_realm_owner:
                raise JsonableError(
                    _("User cannot create stream with this settings."))
            user_profile.realm.ensure_not_on_limited_plan()

        # We already filtered out existing streams, so dup_streams
        # will normally be an empty list below, but we protect against
        # somebody else racing to create the same stream.  (This is not
        # entirely paranoid: on Zulip, two people will often discuss
        # creating a new stream, and both will eagerly create it.)
        created_streams, dup_streams = create_streams_if_needed(
            realm=user_profile.realm,
            stream_dicts=missing_stream_dicts,
            acting_user=user_profile)
        existing_streams += dup_streams

    return existing_streams, created_streams
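
A hedged usage sketch for list_to_streams: the caller passes stream dicts whose names are already stripped of whitespace and unpacks the two returned lists. The field values below are illustrative only.

# Illustrative call (names already stripped by the caller):
streams_raw = [
    {"name": "engineering"},
    {"name": "design", "message_retention_days": 30},
]
existing_streams, created_streams = list_to_streams(
    streams_raw, user_profile, autocreate=True,
)
# existing_streams: streams that already existed in the realm (plus any lost races)
# created_streams:  streams newly created by this call (always [] when autocreate=False)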
Example #3
def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
    if bot_type not in user_profile.allowed_bot_types:
        raise JsonableError(_("Invalid bot type"))
Example #4
def do_events_register(user_profile,
                       user_client,
                       apply_markdown=True,
                       event_types=None,
                       queue_lifespan_secs=0,
                       all_public_streams=False,
                       include_subscribers=True,
                       narrow=[],
                       fetch_event_types=None):
    # type: (UserProfile, Client, bool, Optional[Iterable[str]], int, bool, bool, Iterable[Sequence[Text]], Optional[Iterable[str]]) -> Dict[str, Any]

    # Technically we don't need to check this here because
    # build_narrow_filter will check it, but it's nicer from an error
    # handling perspective to do it before contacting Tornado
    check_supported_events_narrow_filter(narrow)

    # Note that we pass event_types, not fetch_event_types here, since
    # that's what controls which future events are sent.
    queue_id = request_event_queue(user_profile,
                                   user_client,
                                   apply_markdown,
                                   queue_lifespan_secs,
                                   event_types,
                                   all_public_streams,
                                   narrow=narrow)

    if queue_id is None:
        raise JsonableError(_("Could not allocate event queue"))

    if fetch_event_types is not None:
        event_types_set = set(fetch_event_types)  # type: Optional[Set[str]]
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

    # Fill up the UserMessage rows if a soft-deactivated user has returned
    maybe_catch_up_soft_deactivated_user(user_profile)

    ret = fetch_initial_state_data(user_profile,
                                   event_types_set,
                                   queue_id,
                                   include_subscribers=include_subscribers)

    # Apply events that came in while we were fetching initial data
    events = get_user_events(user_profile, queue_id, -1)
    apply_events(ret,
                 events,
                 user_profile,
                 include_subscribers=include_subscribers,
                 fetch_event_types=fetch_event_types)
    '''
    NOTE:

    Below is an example of post-processing initial state data AFTER we
    apply events.  For large payloads like `unread_msgs`, it's helpful
    to have an intermediate data structure that is easy to manipulate
    with O(1)-type operations as we apply events.

    Then, only at the end, we put it in the form that's more appropriate
    for clients.
    '''
    if 'raw_unread_msgs' in ret:
        ret['unread_msgs'] = aggregate_unread_data(ret['raw_unread_msgs'])
        del ret['raw_unread_msgs']

    if len(events) > 0:
        ret['last_event_id'] = events[-1]['id']
    else:
        ret['last_event_id'] = -1
    return ret
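
A small self-contained sketch of the post-processing idea in the NOTE above: keep a raw dict that supports O(1) updates while events are applied, then reshape it once into the client-facing form. The aggregation below is only a stand-in for aggregate_unread_data.

# Raw per-message structure, easy to update as events arrive (stand-in data).
state = {
    "raw_unread_msgs": {
        101: {"stream_id": 7, "topic": "lunch"},
        102: {"stream_id": 7, "topic": "lunch"},
    },
}

def aggregate(raw):
    # Collapse per-message entries into per-topic unread counts.
    counts = {}
    for info in raw.values():
        key = (info["stream_id"], info["topic"])
        counts[key] = counts.get(key, 0) + 1
    return [
        {"stream_id": sid, "topic": topic, "unread_count": n}
        for (sid, topic), n in counts.items()
    ]

# Only at the end do we convert to the client-facing shape.
state["unread_msgs"] = aggregate(state["raw_unread_msgs"])
del state["raw_unread_msgs"]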
Example #5
def validate_login_email(email: Text) -> None:
    try:
        validate_email(email)
    except ValidationError as err:
        raise JsonableError(str(err.message))
Example #6
def update_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    name: Optional[str] = REQ(validator=check_string, default=None),
    description: Optional[str] = REQ(validator=check_string, default=None),
    emails_restricted_to_domains: Optional[bool] = REQ(validator=check_bool,
                                                       default=None),
    disallow_disposable_email_addresses: Optional[bool] = REQ(
        validator=check_bool, default=None),
    invite_required: Optional[bool] = REQ(validator=check_bool, default=None),
    invite_by_admins_only: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    name_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    email_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                 default=None),
    avatar_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                  default=None),
    inline_image_preview: Optional[bool] = REQ(validator=check_bool,
                                               default=None),
    inline_url_embed_preview: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
    add_emoji_by_admins_only: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
    allow_message_deleting: Optional[bool] = REQ(validator=check_bool,
                                                 default=None),
    message_content_delete_limit_seconds: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    allow_message_editing: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    allow_community_topic_editing: Optional[bool] = REQ(validator=check_bool,
                                                        default=None),
    mandatory_topics: Optional[bool] = REQ(validator=check_bool, default=None),
    message_content_edit_limit_seconds: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    allow_edit_history: Optional[bool] = REQ(validator=check_bool,
                                             default=None),
    default_language: Optional[str] = REQ(validator=check_string,
                                          default=None),
    waiting_period_threshold: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    authentication_methods: Optional[Dict[Any,
                                          Any]] = REQ(validator=check_dict([]),
                                                      default=None),
    notifications_stream_id: Optional[int] = REQ(validator=check_int,
                                                 default=None),
    signup_notifications_stream_id: Optional[int] = REQ(validator=check_int,
                                                        default=None),
    message_retention_days: Optional[int] = REQ(
        converter=to_not_negative_int_or_none, default=None),
    send_welcome_emails: Optional[bool] = REQ(validator=check_bool,
                                              default=None),
    digest_emails_enabled: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    message_content_allowed_in_email_notifications: Optional[bool] = REQ(
        validator=check_bool, default=None),
    bot_creation_policy: Optional[int] = REQ(
        converter=to_not_negative_int_or_none, default=None),
    create_stream_policy: Optional[int] = REQ(validator=check_int,
                                              default=None),
    invite_to_stream_policy: Optional[int] = REQ(validator=check_int,
                                                 default=None),
    email_address_visibility: Optional[int] = REQ(
        converter=to_not_negative_int_or_none, default=None),
    default_twenty_four_hour_time: Optional[bool] = REQ(validator=check_bool,
                                                        default=None),
    video_chat_provider: Optional[str] = REQ(validator=check_string,
                                             default=None),
    google_hangouts_domain: Optional[str] = REQ(validator=check_string,
                                                default=None),
    zoom_user_id: Optional[str] = REQ(validator=check_string, default=None),
    zoom_api_key: Optional[str] = REQ(validator=check_string, default=None),
    zoom_api_secret: Optional[str] = REQ(validator=check_string, default=None),
    digest_weekday: Optional[int] = REQ(validator=check_int, default=None),
) -> HttpResponse:
    realm = user_profile.realm

    # Additional validation/error checking beyond types go here, so
    # the entire request can succeed or fail atomically.
    if default_language is not None and default_language not in get_available_language_codes(
    ):
        raise JsonableError(_("Invalid language '%s'") % (default_language, ))
    if description is not None and len(description) > 1000:
        return json_error(_("Organization description is too long."))
    if name is not None and len(name) > Realm.MAX_REALM_NAME_LENGTH:
        return json_error(_("Organization name is too long."))
    if authentication_methods is not None and True not in list(
            authentication_methods.values()):
        return json_error(
            _("At least one authentication method must be enabled."))
    if video_chat_provider == "Google Hangouts":
        try:
            validate_domain(google_hangouts_domain)
        except ValidationError as e:
            return json_error(_('Invalid domain: {}').format(e.messages[0]))
    if video_chat_provider == "Zoom":
        if not zoom_api_secret:
            # Use the saved Zoom API secret if a new value isn't being sent
            zoom_api_secret = user_profile.realm.zoom_api_secret
        if not zoom_user_id:
            return json_error(_('User ID cannot be empty'))
        if not zoom_api_key:
            return json_error(_('API key cannot be empty'))
        if not zoom_api_secret:
            return json_error(_('API secret cannot be empty'))
        # If any of the Zoom settings have changed, validate the Zoom credentials.
        #
        # Technically, we could call some other API endpoint that
        # doesn't create a video call link, but this is a nicer
        # end-to-end test, since it verifies that the Zoom API user's
        # scopes include the ability to create video calls, which is
        # the only capability we use.
        if ((zoom_user_id != realm.zoom_user_id
             or zoom_api_key != realm.zoom_api_key
             or zoom_api_secret != realm.zoom_api_secret)
                and not request_zoom_video_call_url(zoom_user_id, zoom_api_key,
                                                    zoom_api_secret)):
            return json_error(
                _('Invalid credentials for the %(third_party_service)s API.') %
                dict(third_party_service="Zoom"))

    # Additional validation of enum-style values
    if bot_creation_policy is not None and bot_creation_policy not in Realm.BOT_CREATION_POLICY_TYPES:
        return json_error(_("Invalid bot creation policy"))
    if email_address_visibility is not None and \
            email_address_visibility not in Realm.EMAIL_ADDRESS_VISIBILITY_TYPES:
        return json_error(_("Invalid email address visibility policy"))

    # The use of `locals()` here is a bit of a code smell, but it's
    # restricted to the elements present in realm.property_types.
    #
    # TODO: It should be possible to deduplicate this function up
    # further by some more advanced usage of the
    # `REQ/has_request_variables` extraction.
    req_vars = {
        k: v
        for k, v in list(locals().items()) if k in realm.property_types
    }
    data = {}  # type: Dict[str, Any]

    for k, v in list(req_vars.items()):
        if v is not None and getattr(realm, k) != v:
            do_set_realm_property(realm, k, v)
            if isinstance(v, str):
                data[k] = 'updated'
            else:
                data[k] = v

    # The following realm properties do not fit the pattern above:
    # authentication_methods is not supported by the do_set_realm_property
    # framework because it is a bitfield.
    if authentication_methods is not None and (
            realm.authentication_methods_dict() != authentication_methods):
        do_set_realm_authentication_methods(realm, authentication_methods)
        data['authentication_methods'] = authentication_methods
    # The message_editing settings are coupled to each other, and thus don't fit
    # into the do_set_realm_property framework.
    if ((allow_message_editing is not None
         and realm.allow_message_editing != allow_message_editing)
            or (message_content_edit_limit_seconds is not None
                and realm.message_content_edit_limit_seconds !=
                message_content_edit_limit_seconds)
            or (allow_community_topic_editing is not None
                and realm.allow_community_topic_editing !=
                allow_community_topic_editing)):
        if allow_message_editing is None:
            allow_message_editing = realm.allow_message_editing
        if message_content_edit_limit_seconds is None:
            message_content_edit_limit_seconds = realm.message_content_edit_limit_seconds
        if allow_community_topic_editing is None:
            allow_community_topic_editing = realm.allow_community_topic_editing
        do_set_realm_message_editing(realm, allow_message_editing,
                                     message_content_edit_limit_seconds,
                                     allow_community_topic_editing)
        data['allow_message_editing'] = allow_message_editing
        data[
            'message_content_edit_limit_seconds'] = message_content_edit_limit_seconds
        data['allow_community_topic_editing'] = allow_community_topic_editing

    if (message_content_delete_limit_seconds is not None
            and realm.message_content_delete_limit_seconds !=
            message_content_delete_limit_seconds):
        do_set_realm_message_deleting(realm,
                                      message_content_delete_limit_seconds)
        data[
            'message_content_delete_limit_seconds'] = message_content_delete_limit_seconds
    # Realm.notifications_stream and Realm.signup_notifications_stream are not boolean,
    # str, or integer fields, and thus don't fit into the do_set_realm_property framework.
    if notifications_stream_id is not None:
        if realm.notifications_stream is None or (realm.notifications_stream.id
                                                  != notifications_stream_id):
            new_notifications_stream = None
            if notifications_stream_id >= 0:
                (new_notifications_stream, recipient,
                 sub) = access_stream_by_id(user_profile,
                                            notifications_stream_id)
            do_set_realm_notifications_stream(realm, new_notifications_stream,
                                              notifications_stream_id)
            data['notifications_stream_id'] = notifications_stream_id

    if signup_notifications_stream_id is not None:
        if realm.signup_notifications_stream is None or (
                realm.signup_notifications_stream.id !=
                signup_notifications_stream_id):
            new_signup_notifications_stream = None
            if signup_notifications_stream_id >= 0:
                (new_signup_notifications_stream, recipient,
                 sub) = access_stream_by_id(user_profile,
                                            signup_notifications_stream_id)
            do_set_realm_signup_notifications_stream(
                realm, new_signup_notifications_stream,
                signup_notifications_stream_id)
            data[
                'signup_notifications_stream_id'] = signup_notifications_stream_id

    return json_success(data)
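
A reduced sketch of the locals()-based deduplication described in the comment above: only keyword arguments whose names appear in the realm's property_types mapping are considered, so adding a simple realm property only requires adding a parameter. The names here are illustrative stand-ins, not Zulip's real API.

# Illustrative stand-in for realm.property_types.
property_types = {"name": str, "invite_required": bool}

def update(realm, **req_vars):
    data = {}
    for k, v in req_vars.items():
        if k not in property_types:
            continue  # ignore arguments that are not simple realm properties
        if v is not None and getattr(realm, k) != v:
            setattr(realm, k, v)  # stand-in for do_set_realm_property(realm, k, v)
            data[k] = "updated" if isinstance(v, str) else v
    return data  # echoes back what changed, mirroring json_success(data)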
Example #7
def check_valid_emoji_name(emoji_name):
    # type: (Text) -> None
    if re.match(r'^[0-9a-z.\-_]+(?<![.\-_])$', emoji_name):
        return
    raise JsonableError(_("Invalid characters in emoji name"))
Example #8
def check_supported_events_narrow_filter(
        narrow: Iterable[Sequence[Text]]) -> None:
    for element in narrow:
        operator = element[0]
        if operator not in ["stream", "topic", "sender", "is"]:
            raise JsonableError(_("Operator %s not supported.") % (operator, ))
Example #9
def add_reaction(
        request: HttpRequest,
        user_profile: UserProfile,
        message_id: int,
        emoji_name: str = REQ(),
        emoji_code: Optional[str] = REQ(default=None),
        reaction_type: Optional[str] = REQ(default=None),
) -> HttpResponse:
    message, user_message = access_message(user_profile, message_id)

    if emoji_code is None:
        # The emoji_code argument is only required for rare corner
        # cases discussed in the long block comment below.  For simple
        # API clients, we allow specifying just the name, and just
        # look up the code using the current name->code mapping.
        emoji_code = emoji_name_to_emoji_code(message.sender.realm,
                                              emoji_name)[0]

    if reaction_type is None:
        reaction_type = emoji_name_to_emoji_code(message.sender.realm,
                                                 emoji_name)[1]

    if Reaction.objects.filter(
            user_profile=user_profile,
            message=message,
            emoji_code=emoji_code,
            reaction_type=reaction_type,
    ).exists():
        raise JsonableError(_("Reaction already exists."))

    query = Reaction.objects.filter(message=message,
                                    emoji_code=emoji_code,
                                    reaction_type=reaction_type)
    if query.exists():
        # If another user has already reacted to this message with
        # same emoji code, we treat the new reaction as a vote for the
        # existing reaction.  So the emoji name used by that earlier
        # reaction takes precedence over whatever was passed in this
        # request.  This is necessary to avoid a message having 2
        # "different" emoji reactions with the same emoji code (and
        # thus same image) on the same message, which looks ugly.
        #
        # In this "voting for an existing reaction" case, we shouldn't
        # check whether the emoji code and emoji name match, since
        # it's possible that the (emoji_type, emoji_name, emoji_code)
        # triple for this existing reaction may not pass validation
        # now (e.g. because it is for a realm emoji that has since
        # been deactivated).  We still want to allow users to add a
        # vote to any old reaction they see in the UI even if that is
        # a deactivated custom emoji, so we just use the emoji name
        # from the existing reaction with no further validation.
        emoji_name = query.first().emoji_name
    else:
        # Otherwise, use the name provided in this request, but verify
        # it is valid in the user's realm (e.g. not a deactivated
        # realm emoji).
        check_emoji_request(user_profile.realm, emoji_name, emoji_code,
                            reaction_type)

    if user_message is None:
        create_historical_message(user_profile, message)

    do_add_reaction(user_profile, message, emoji_name, emoji_code,
                    reaction_type)

    return json_success()
Example #10
def log_into_subdomain(request: HttpRequest, token: str) -> HttpResponse:
    """Given a valid authentication token (generated by
    redirect_and_log_into_subdomain called on auth.zulip.example.com),
    call login_or_register_remote_user, passing all the authentication
    result data that has been stored in redis, associated with this token.
    Obligatory fields for the data are 'subdomain' and 'email', because this endpoint
    needs to know which user and realm to log into. Others are optional and only used
    if the user account still needs to be created; in that case they are passed as
    arguments to the register_remote_user function.
    """
    if not has_api_key_format(token):  # The tokens are intended to have the same format as API keys.
        logging.warning("log_into_subdomain: Malformed token given: %s" % (token,))
        return HttpResponse(status=400)

    data = get_login_data(token)
    if data is None:
        logging.warning("log_into_subdomain: Invalid token given: %s" % (token,))
        return HttpResponse(status=400)

    # We extract fields provided by the caller via the data object.
    # The only fields that are required are email and subdomain (if we
    # are simply doing login); more fields are expected if this is a
    # new account registration flow or we're going to a specific
    # narrow after login.
    subdomain = get_subdomain(request)
    if data['subdomain'] != subdomain:
        raise JsonableError(_("Invalid subdomain"))
    email_address = data['email']

    full_name = data.get('name', '')
    is_signup = data.get('is_signup', False)
    redirect_to = data.get('next', '')
    mobile_flow_otp = data.get('mobile_flow_otp')
    desktop_flow_otp = data.get('desktop_flow_otp')
    full_name_validated = data.get('full_name_validated', False)
    multiuse_object_key = data.get('multiuse_object_key', '')

    # We cannot pass the actual authenticated user_profile object that
    # was fetched by the original authentication backend and passed
    # into redirect_and_log_into_subdomain through a signed URL token,
    # so we need to re-fetch it from the database.
    if is_signup:
        # If we are creating a new user account, user_profile will
        # always have been None, so we set that here.  In the event
        # that a user account with this email was somehow created in a
        # race, the eventual registration code will catch that and
        # throw an error, so we don't need to check for that here.
        user_profile = None
    else:
        # We're just trying to log in.  We can be reasonably confident
        # that this subdomain actually has a corresponding active
        # realm, since the signed cookie proves there was one very
        # recently.  But as part of fetching the UserProfile object
        # for the target user, we use DummyAuthBackend, which
        # conveniently re-validates that the realm and user account
        # were not deactivated in the meantime.

        # Note: Ideally, we'd have a nice user-facing error message
        # for the case where this auth fails (because e.g. the realm
        # or user was deactivated since the signed cookie was
        # generated < 15 seconds ago), but the authentication result
        # is correct in those cases and such a race would be very
        # rare, so a nice error message is low priority.
        realm = get_realm(subdomain)
        user_profile = authenticate_remote_user(realm, email_address)

    return login_or_register_remote_user(request, email_address, user_profile,
                                         full_name,
                                         is_signup=is_signup, redirect_to=redirect_to,
                                         mobile_flow_otp=mobile_flow_otp,
                                         desktop_flow_otp=desktop_flow_otp,
                                         multiuse_object_key=multiuse_object_key,
                                         full_name_validated=full_name_validated)
Example #11
def do_events_register(user_profile,
                       user_client,
                       apply_markdown=True,
                       client_gravatar=False,
                       event_types=None,
                       queue_lifespan_secs=0,
                       all_public_streams=False,
                       include_subscribers=True,
                       narrow=[],
                       fetch_event_types=None):
    # type: (UserProfile, Client, bool, bool, Optional[Iterable[str]], int, bool, bool, Iterable[Sequence[Text]], Optional[Iterable[str]]) -> Dict[str, Any]

    # Technically we don't need to check this here because
    # build_narrow_filter will check it, but it's nicer from an error
    # handling perspective to do it before contacting Tornado
    check_supported_events_narrow_filter(narrow)

    # Note that we pass event_types, not fetch_event_types here, since
    # that's what controls which future events are sent.
    queue_id = request_event_queue(user_profile,
                                   user_client,
                                   apply_markdown,
                                   client_gravatar,
                                   queue_lifespan_secs,
                                   event_types,
                                   all_public_streams,
                                   narrow=narrow)

    if queue_id is None:
        raise JsonableError(_("Could not allocate event queue"))

    if fetch_event_types is not None:
        event_types_set = set(fetch_event_types)  # type: Optional[Set[str]]
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

    # Fill up the UserMessage rows if a soft-deactivated user has returned
    maybe_catch_up_soft_deactivated_user(user_profile)

    ret = fetch_initial_state_data(user_profile,
                                   event_types_set,
                                   queue_id,
                                   client_gravatar=client_gravatar,
                                   include_subscribers=include_subscribers)

    # Apply events that came in while we were fetching initial data
    events = get_user_events(user_profile, queue_id, -1)
    apply_events(ret,
                 events,
                 user_profile,
                 include_subscribers=include_subscribers,
                 client_gravatar=client_gravatar,
                 fetch_event_types=fetch_event_types)
    '''
    NOTE:

    Below is an example of post-processing initial state data AFTER we
    apply events.  For large payloads like `unread_msgs`, it's helpful
    to have an intermediate data structure that is easy to manipulate
    with O(1)-type operations as we apply events.

    Then, only at the end, we put it in the form that's more appropriate
    for clients.
    '''
    if 'raw_unread_msgs' in ret:
        ret['unread_msgs'] = aggregate_unread_data(ret['raw_unread_msgs'])
        del ret['raw_unread_msgs']
    '''
    See the note above; the same technique applies below.
    '''
    if 'raw_users' in ret:
        user_dicts = list(ret['raw_users'].values())

        ret['realm_users'] = [d for d in user_dicts if d['is_active']]
        ret['realm_non_active_users'] = [
            d for d in user_dicts if not d['is_active']
        ]
        '''
        Be aware that we do intentional aliasing in the below code.
        We can now safely remove the `is_active` field from all the
        dicts that got partitioned into the two lists above.

        We remove the field because it's already implied, and sending
        it to clients makes clients prone to bugs where they "trust"
        the field but don't actually update in live updates.  It also
        wastes bandwidth.
        '''
        for d in user_dicts:
            d.pop('is_active')

        del ret['raw_users']

    if len(events) > 0:
        ret['last_event_id'] = events[-1]['id']
    else:
        ret['last_event_id'] = -1
    return ret
Example #12
def fetch_events(query: Mapping[str, Any]) -> Dict[str, Any]:
    queue_id: Optional[str] = query["queue_id"]
    dont_block: bool = query["dont_block"]
    last_event_id: Optional[int] = query["last_event_id"]
    user_profile_id: int = query["user_profile_id"]
    new_queue_data: Optional[MutableMapping[str, Any]] = query.get("new_queue_data")
    client_type_name: str = query["client_type_name"]
    handler_id: int = query["handler_id"]

    try:
        was_connected = False
        orig_queue_id = queue_id
        extra_log_data = ""
        if queue_id is None:
            if dont_block:
                assert new_queue_data is not None
                client = allocate_client_descriptor(new_queue_data)
                queue_id = client.event_queue.id
            else:
                raise JsonableError(_("Missing 'queue_id' argument"))
        else:
            if last_event_id is None:
                raise JsonableError(_("Missing 'last_event_id' argument"))
            client = get_client_descriptor(queue_id)
            if user_profile_id != client.user_profile_id:
                raise JsonableError(_("You are not authorized to get events from this queue"))
            if (
                client.event_queue.newest_pruned_id is not None
                and last_event_id < client.event_queue.newest_pruned_id
            ):
                raise JsonableError(_("An event newer than {event_id} has already been pruned!").format(
                    event_id=last_event_id,
                ))
            client.event_queue.prune(last_event_id)
            if (
                client.event_queue.newest_pruned_id is not None
                and last_event_id != client.event_queue.newest_pruned_id
            ):
                raise JsonableError(_("Event {event_id} was not in this queue").format(
                    event_id=last_event_id,
                ))
            was_connected = client.finish_current_handler()

        if not client.event_queue.empty() or dont_block:
            response: Dict[str, Any] = dict(
                events=client.event_queue.contents(),
                handler_id=handler_id,
            )
            if orig_queue_id is None:
                response['queue_id'] = queue_id
            if len(response["events"]) == 1:
                extra_log_data = "[{}/{}/{}]".format(queue_id, len(response["events"]),
                                                     response["events"][0]["type"])
            else:
                extra_log_data = "[{}/{}]".format(queue_id, len(response["events"]))
            if was_connected:
                extra_log_data += " [was connected]"
            return dict(type="response", response=response, extra_log_data=extra_log_data)

        # After this point, dont_block=False, the queue is empty, and we
        # have a pre-existing queue, so we wait for new events.
        if was_connected:
            logging.info("Disconnected handler for queue %s (%s/%s)",
                         queue_id, user_profile_id, client_type_name)
    except JsonableError as e:
        return dict(type="error", exception=e)

    client.connect_handler(handler_id, client_type_name)
    return dict(type="async")
Example #13
def list_to_streams(streams_raw,
                    user_profile,
                    autocreate=False,
                    invite_only=False):
    # type: (Iterable[text_type], UserProfile, Optional[bool], Optional[bool]) -> Tuple[List[Stream], List[Stream]]
    """Converts plaintext stream names to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our
    requirements for a proper stream name: that is, that it is shorter
    than Stream.MAX_NAME_LENGTH characters and passes
    valid_stream_name.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream names to process
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    @param invite_only Whether newly created streams should have the invite_only bit set
    """
    existing_streams = []
    created_streams = []
    # Validate all streams, getting extant ones, then get-or-creating the rest.
    stream_set = set(stream_name.strip() for stream_name in streams_raw)
    rejects = []
    for stream_name in stream_set:
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            raise JsonableError(
                _("Stream name (%s) too long.") % (stream_name, ))
        if not valid_stream_name(stream_name):
            raise JsonableError(
                _("Invalid stream name (%s).") % (stream_name, ))

    existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)

    for stream_name in stream_set:
        stream = existing_stream_map.get(stream_name.lower())
        if stream is None:
            rejects.append(stream_name)
        else:
            existing_streams.append(stream)
    if rejects:
        if not user_profile.can_create_streams():
            raise JsonableError(_('User cannot create streams.'))
        elif not autocreate:
            raise JsonableError(
                _("Stream(s) (%s) do not exist") % ", ".join(rejects))

        for stream_name in rejects:
            stream, created = create_stream_if_needed(user_profile.realm,
                                                      stream_name,
                                                      invite_only=invite_only)
            if created:
                created_streams.append(stream)
            else:
                # We already checked for existing streams above; this
                # next line is present to handle races where a stream
                # was created while this function was executing.
                existing_streams.append(stream)

    return existing_streams, created_streams
Example #14
def access_default_stream_group_by_id(realm: Realm, group_id: int) -> DefaultStreamGroup:
    try:
        return DefaultStreamGroup.objects.get(realm=realm, id=group_id)
    except DefaultStreamGroup.DoesNotExist:
        raise JsonableError(_("Default stream group with id '%s' does not exist." % (group_id,)))
Example #15
def update_realm(request,
                 user_profile,
                 name=REQ(validator=check_string, default=None),
                 restricted_to_domain=REQ(validator=check_bool, default=None),
                 invite_required=REQ(validator=check_bool, default=None),
                 invite_by_admins_only=REQ(validator=check_bool, default=None),
                 create_stream_by_admins_only=REQ(validator=check_bool,
                                                  default=None),
                 allow_message_editing=REQ(validator=check_bool, default=None),
                 message_content_edit_limit_seconds=REQ(
                     converter=to_non_negative_int, default=None),
                 default_language=REQ(validator=check_string, default=None),
                 authentication_methods=REQ(validator=check_dict([]),
                                            default=None)):
    # type: (HttpRequest, UserProfile, Optional[str], Optional[bool], Optional[bool], Optional[bool], Optional[bool], Optional[bool], Optional[int], Optional[str], Optional[dict]) -> HttpResponse
    # Validation for default_language
    if default_language is not None and default_language not in get_available_language_codes(
    ):
        raise JsonableError(_("Invalid language '%s'" % (default_language, )))
    realm = user_profile.realm
    data = {}  # type: Dict[str, Any]
    if name is not None and realm.name != name:
        do_set_realm_name(realm, name)
        data['name'] = 'updated'
    if restricted_to_domain is not None and realm.restricted_to_domain != restricted_to_domain:
        do_set_realm_restricted_to_domain(realm, restricted_to_domain)
        data['restricted_to_domain'] = restricted_to_domain
    if invite_required is not None and realm.invite_required != invite_required:
        do_set_realm_invite_required(realm, invite_required)
        data['invite_required'] = invite_required
    if invite_by_admins_only is not None and realm.invite_by_admins_only != invite_by_admins_only:
        do_set_realm_invite_by_admins_only(realm, invite_by_admins_only)
        data['invite_by_admins_only'] = invite_by_admins_only
    if authentication_methods is not None and realm.authentication_methods != authentication_methods:
        if True not in list(authentication_methods.values()):
            return json_error(
                _("At least one authentication method must be enabled."),
                data={"reason": "no authentication"},
                status=403)
        else:
            do_set_realm_authentication_methods(realm, authentication_methods)
        data['authentication_methods'] = authentication_methods
    if create_stream_by_admins_only is not None and realm.create_stream_by_admins_only != create_stream_by_admins_only:
        do_set_realm_create_stream_by_admins_only(
            realm, create_stream_by_admins_only)
        data['create_stream_by_admins_only'] = create_stream_by_admins_only
    if (allow_message_editing is not None and realm.allow_message_editing != allow_message_editing) or \
       (message_content_edit_limit_seconds is not None and
        realm.message_content_edit_limit_seconds != message_content_edit_limit_seconds):
        if allow_message_editing is None:
            allow_message_editing = realm.allow_message_editing
        if message_content_edit_limit_seconds is None:
            message_content_edit_limit_seconds = realm.message_content_edit_limit_seconds
        do_set_realm_message_editing(realm, allow_message_editing,
                                     message_content_edit_limit_seconds)
        data['allow_message_editing'] = allow_message_editing
        data[
            'message_content_edit_limit_seconds'] = message_content_edit_limit_seconds
    if default_language is not None and realm.default_language != default_language:
        do_set_realm_default_language(realm, default_language)
        data['default_language'] = default_language
    return json_success(data)
Example #16
def get_chart_data(request,
                   user_profile,
                   chart_name=REQ(),
                   min_length=REQ(converter=to_non_negative_int, default=None),
                   start=REQ(converter=to_utc_datetime, default=None),
                   end=REQ(converter=to_utc_datetime, default=None)):
    # type: (HttpRequest, UserProfile, Text, Optional[int], Optional[datetime], Optional[datetime]) -> HttpResponse
    if chart_name == 'number_of_humans':
        stat = COUNT_STATS['active_users:is_bot:day']
        tables = [RealmCount]
        subgroups = ['false', 'true']
        labels = ['human', 'bot']
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        tables = [RealmCount, UserCount]
        subgroups = ['false', 'true']
        labels = ['human', 'bot']
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stat = COUNT_STATS['messages_sent:message_type:day']
        tables = [RealmCount, UserCount]
        subgroups = [
            'public_stream', 'private_stream', 'private_message',
            'huddle_message'
        ]
        labels = [
            'Public streams', 'Private streams', 'Private messages',
            'Group private messages'
        ]
        labels_sort_function = lambda data: sort_by_totals(data['realm'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stat = COUNT_STATS['messages_sent:client:day']
        tables = [RealmCount, UserCount]
        subgroups = [
            str(x)
            for x in Client.objects.values_list('id', flat=True).order_by('id')
        ]
        # these are further re-written by client_label_map
        labels = list(
            Client.objects.values_list('name', flat=True).order_by('id'))
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name, ))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: %(start)s, End: %(end)s"
              ) % {
                  'start': start,
                  'end': end
              })

    realm = user_profile.realm
    if start is None:
        start = realm.date_created
    if end is None:
        end = last_successful_fill(stat.property)
    if end is None or start > end:
        logging.warning(
            "User from realm %s attempted to access /stats, but the computed "
            "start time: %s (creation time of realm) is later than the computed "
            "end time: %s (last successful analytics update). Is the "
            "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(
            _("No analytics data available. Please contact your server administrator."
              ))

    end_times = time_range(start, end, stat.frequency, min_length)
    data = {
        'end_times': end_times,
        'frequency': stat.frequency,
        'interval': stat.interval
    }
    for table in tables:
        if table == RealmCount:
            data['realm'] = get_time_series_by_subgroup(
                stat, RealmCount, realm.id, end_times, subgroups, labels,
                include_empty_subgroups)
        if table == UserCount:
            data['user'] = get_time_series_by_subgroup(
                stat, UserCount, user_profile.id, end_times, subgroups, labels,
                include_empty_subgroups)
    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
Example #17
def wrapper(request, user_profile, *args, **kwargs):
    # type: (HttpRequest, UserProfile, *Any, **Any) -> HttpResponse
    if not user_profile.is_realm_admin:
        raise JsonableError(_("Must be a realm administrator"))
    return func(request, user_profile, *args, **kwargs)
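
The snippet above is only the inner wrapper; a hedged sketch of the enclosing decorator (loosely modeled on Zulip's require_realm_admin, with typing simplified) shows how func gets bound:

from functools import wraps

def require_realm_admin(func):
    # Sketch only: the real decorator also preserves the view's type signature.
    @wraps(func)
    def wrapper(request, user_profile, *args, **kwargs):
        if not user_profile.is_realm_admin:
            raise JsonableError(_("Must be a realm administrator"))
        return func(request, user_profile, *args, **kwargs)
    return wrapper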
Example #18
def do_events_register(
    user_profile: UserProfile,
    user_client: Client,
    apply_markdown: bool = True,
    client_gravatar: bool = False,
    slim_presence: bool = False,
    event_types: Optional[Iterable[str]] = None,
    queue_lifespan_secs: int = 0,
    all_public_streams: bool = False,
    include_subscribers: bool = True,
    include_streams: bool = True,
    client_capabilities: Dict[str, bool] = {},
    narrow: Iterable[Sequence[str]] = [],
    fetch_event_types: Optional[Iterable[str]] = None
) -> Dict[str, Any]:
    # Technically we don't need to check this here because
    # build_narrow_filter will check it, but it's nicer from an error
    # handling perspective to do it before contacting Tornado
    check_supported_events_narrow_filter(narrow)

    notification_settings_null = client_capabilities.get('notification_settings_null', False)
    bulk_message_deletion = client_capabilities.get('bulk_message_deletion', False)
    user_avatar_url_field_optional = client_capabilities.get('user_avatar_url_field_optional', False)

    if user_profile.realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        # If real email addresses are not available to the user, their
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    # Note that we pass event_types, not fetch_event_types here, since
    # that's what controls which future events are sent.
    queue_id = request_event_queue(user_profile, user_client,
                                   apply_markdown, client_gravatar, slim_presence,
                                   queue_lifespan_secs, event_types, all_public_streams,
                                   narrow=narrow,
                                   bulk_message_deletion=bulk_message_deletion)

    if queue_id is None:
        raise JsonableError(_("Could not allocate event queue"))

    if fetch_event_types is not None:
        event_types_set: Optional[Set[str]] = set(fetch_event_types)
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

    # Fill up the UserMessage rows if a soft-deactivated user has returned
    reactivate_user_if_soft_deactivated(user_profile)

    ret = fetch_initial_state_data(
        user_profile,
        event_types=event_types_set,
        queue_id=queue_id,
        client_gravatar=client_gravatar,
        user_avatar_url_field_optional=user_avatar_url_field_optional,
        slim_presence=slim_presence,
        include_subscribers=include_subscribers,
        include_streams=include_streams,
    )

    # Apply events that came in while we were fetching initial data
    events = get_user_events(user_profile, queue_id, -1)
    apply_events(
        user_profile,
        state=ret,
        events=events,
        fetch_event_types=fetch_event_types,
        client_gravatar=client_gravatar,
        slim_presence=slim_presence,
        include_subscribers=include_subscribers,
    )

    post_process_state(user_profile, ret, notification_settings_null)

    if len(events) > 0:
        ret['last_event_id'] = events[-1]['id']
    else:
        ret['last_event_id'] = -1
    return ret
Example #19
def fetch_events(query: Mapping[str, Any]) -> Dict[str, Any]:
    queue_id = query["queue_id"]  # type: str
    dont_block = query["dont_block"]  # type: bool
    last_event_id = query["last_event_id"]  # type: int
    user_profile_id = query["user_profile_id"]  # type: int
    new_queue_data = query.get(
        "new_queue_data")  # type: Optional[MutableMapping[str, Any]]
    user_profile_email = query["user_profile_email"]  # type: str
    client_type_name = query["client_type_name"]  # type: str
    handler_id = query["handler_id"]  # type: int

    try:
        was_connected = False
        orig_queue_id = queue_id
        extra_log_data = ""
        if queue_id is None:
            if dont_block:
                client = allocate_client_descriptor(new_queue_data)
                queue_id = client.event_queue.id
            else:
                raise JsonableError(_("Missing 'queue_id' argument"))
        else:
            if last_event_id is None:
                raise JsonableError(_("Missing 'last_event_id' argument"))
            client = get_client_descriptor(queue_id)
            if client is None:
                raise BadEventQueueIdError(queue_id)
            if user_profile_id != client.user_profile_id:
                raise JsonableError(
                    _("You are not authorized to get events from this queue"))
            client.event_queue.prune(last_event_id)
            was_connected = client.finish_current_handler()

        if not client.event_queue.empty() or dont_block:
            response = dict(events=client.event_queue.contents(),
                            handler_id=handler_id)  # type: Dict[str, Any]
            if orig_queue_id is None:
                response['queue_id'] = queue_id
            if len(response["events"]) == 1:
                extra_log_data = "[%s/%s/%s]" % (queue_id,
                                                 len(response["events"]),
                                                 response["events"][0]["type"])
            else:
                extra_log_data = "[%s/%s]" % (queue_id, len(
                    response["events"]))
            if was_connected:
                extra_log_data += " [was connected]"
            return dict(type="response",
                        response=response,
                        extra_log_data=extra_log_data)

        # After this point, dont_block=False, the queue is empty, and we
        # have a pre-existing queue, so we wait for new events.
        if was_connected:
            logging.info("Disconnected handler for queue %s (%s/%s)" %
                         (queue_id, user_profile_email, client_type_name))
    except JsonableError as e:
        return dict(type="error", exception=e)

    client.connect_handler(handler_id, client_type_name)
    return dict(type="async")
Example #20
def check_valid_emoji_name(emoji_name: str) -> None:
    if emoji_name:
        if re.match(r"^[0-9a-z.\-_]+(?<![.\-_])$", emoji_name):
            return
        raise JsonableError(_("Invalid characters in emoji name"))
    raise JsonableError(_("Emoji name is missing"))
Example #21
def update_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    name: Optional[str] = REQ(validator=check_string, default=None),
    description: Optional[str] = REQ(validator=check_string, default=None),
    emails_restricted_to_domains: Optional[bool] = REQ(validator=check_bool,
                                                       default=None),
    disallow_disposable_email_addresses: Optional[bool] = REQ(
        validator=check_bool, default=None),
    invite_required: Optional[bool] = REQ(validator=check_bool, default=None),
    invite_by_admins_only: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    name_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    email_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                 default=None),
    avatar_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                  default=None),
    inline_image_preview: Optional[bool] = REQ(validator=check_bool,
                                               default=None),
    inline_url_embed_preview: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
    add_emoji_by_admins_only: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
    allow_message_deleting: Optional[bool] = REQ(validator=check_bool,
                                                 default=None),
    message_content_delete_limit_seconds: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    allow_message_editing: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    allow_community_topic_editing: Optional[bool] = REQ(validator=check_bool,
                                                        default=None),
    mandatory_topics: Optional[bool] = REQ(validator=check_bool, default=None),
    message_content_edit_limit_seconds: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    allow_edit_history: Optional[bool] = REQ(validator=check_bool,
                                             default=None),
    default_language: Optional[str] = REQ(validator=check_string,
                                          default=None),
    waiting_period_threshold: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    authentication_methods: Optional[Dict[str,
                                          Any]] = REQ(validator=check_dict([]),
                                                      default=None),
    notifications_stream_id: Optional[int] = REQ(validator=check_int,
                                                 default=None),
    signup_notifications_stream_id: Optional[int] = REQ(validator=check_int,
                                                        default=None),
    message_retention_days_raw: Optional[Union[int, str]] = REQ(
        "message_retention_days", validator=check_string_or_int, default=None),
    send_welcome_emails: Optional[bool] = REQ(validator=check_bool,
                                              default=None),
    digest_emails_enabled: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    message_content_allowed_in_email_notifications: Optional[bool] = REQ(
        validator=check_bool, default=None),
    bot_creation_policy: Optional[int] = REQ(validator=check_int_in(
        Realm.BOT_CREATION_POLICY_TYPES),
                                             default=None),
    create_stream_policy: Optional[int] = REQ(validator=check_int_in(
        Realm.COMMON_POLICY_TYPES),
                                              default=None),
    invite_to_stream_policy: Optional[int] = REQ(validator=check_int_in(
        Realm.COMMON_POLICY_TYPES),
                                                 default=None),
    user_group_edit_policy: Optional[int] = REQ(validator=check_int_in(
        Realm.USER_GROUP_EDIT_POLICY_TYPES),
                                                default=None),
    private_message_policy: Optional[int] = REQ(validator=check_int_in(
        Realm.PRIVATE_MESSAGE_POLICY_TYPES),
                                                default=None),
    wildcard_mention_policy: Optional[int] = REQ(validator=check_int_in(
        Realm.WILDCARD_MENTION_POLICY_TYPES),
                                                 default=None),
    email_address_visibility: Optional[int] = REQ(validator=check_int_in(
        Realm.EMAIL_ADDRESS_VISIBILITY_TYPES),
                                                  default=None),
    default_twenty_four_hour_time: Optional[bool] = REQ(validator=check_bool,
                                                        default=None),
    video_chat_provider: Optional[int] = REQ(validator=check_int,
                                             default=None),
    default_code_block_language: Optional[str] = REQ(validator=check_string,
                                                     default=None),
    digest_weekday: Optional[int] = REQ(validator=check_int_in(
        Realm.DIGEST_WEEKDAY_VALUES),
                                        default=None),
) -> HttpResponse:
    realm = user_profile.realm

    # Additional validation/error checking beyond types goes here, so
    # the entire request can succeed or fail atomically.
    if (default_language is not None
            and default_language not in get_available_language_codes()):
        raise JsonableError(
            _("Invalid language '{}'").format(default_language))
    if description is not None and len(description) > 1000:
        return json_error(_("Organization description is too long."))
    if name is not None and len(name) > Realm.MAX_REALM_NAME_LENGTH:
        return json_error(_("Organization name is too long."))
    if authentication_methods is not None:
        if not user_profile.is_realm_owner:
            raise OrganizationOwnerRequired()
        if True not in list(authentication_methods.values()):
            return json_error(
                _("At least one authentication method must be enabled."))
    if (video_chat_provider is not None and video_chat_provider
            not in {p['id']
                    for p in Realm.VIDEO_CHAT_PROVIDERS.values()}):
        return json_error(
            _("Invalid video_chat_provider {}").format(video_chat_provider))

    message_retention_days: Optional[int] = None
    if message_retention_days_raw is not None:
        if not user_profile.is_realm_owner:
            raise OrganizationOwnerRequired()
        realm.ensure_not_on_limited_plan()
        message_retention_days = parse_message_retention_days(
            message_retention_days_raw,
            Realm.MESSAGE_RETENTION_SPECIAL_VALUES_MAP)

    # The use of `locals()` here is a bit of a code smell, but it's
    # restricted to the elements present in realm.property_types.
    #
    # TODO: It should be possible to deduplicate this function up
    # further by some more advanced usage of the
    # `REQ/has_request_variables` extraction.
    req_vars = {
        k: v
        for k, v in list(locals().items()) if k in realm.property_types
    }
    data: Dict[str, Any] = {}

    for k, v in list(req_vars.items()):
        if v is not None and getattr(realm, k) != v:
            do_set_realm_property(realm, k, v, acting_user=user_profile)
            if isinstance(v, str):
                data[k] = 'updated'
            else:
                data[k] = v

    # The following realm properties do not fit the pattern above.
    # authentication_methods is not supported by the do_set_realm_property
    # framework because it is stored as a bitfield.
    if authentication_methods is not None and (
            realm.authentication_methods_dict() != authentication_methods):
        do_set_realm_authentication_methods(realm,
                                            authentication_methods,
                                            acting_user=user_profile)
        data['authentication_methods'] = authentication_methods
    # The message_editing settings are coupled to each other, and thus don't fit
    # into the do_set_realm_property framework.
    if ((allow_message_editing is not None
         and realm.allow_message_editing != allow_message_editing)
            or (message_content_edit_limit_seconds is not None
                and realm.message_content_edit_limit_seconds !=
                message_content_edit_limit_seconds)
            or (allow_community_topic_editing is not None
                and realm.allow_community_topic_editing !=
                allow_community_topic_editing)):
        if allow_message_editing is None:
            allow_message_editing = realm.allow_message_editing
        if message_content_edit_limit_seconds is None:
            message_content_edit_limit_seconds = realm.message_content_edit_limit_seconds
        if allow_community_topic_editing is None:
            allow_community_topic_editing = realm.allow_community_topic_editing
        do_set_realm_message_editing(realm,
                                     allow_message_editing,
                                     message_content_edit_limit_seconds,
                                     allow_community_topic_editing,
                                     acting_user=user_profile)
        data['allow_message_editing'] = allow_message_editing
        data['message_content_edit_limit_seconds'] = message_content_edit_limit_seconds
        data['allow_community_topic_editing'] = allow_community_topic_editing

    # Realm.notifications_stream and Realm.signup_notifications_stream are not
    # boolean, str, or integer fields, and thus don't fit into the
    # do_set_realm_property framework.
    if notifications_stream_id is not None:
        if realm.notifications_stream is None or (realm.notifications_stream.id
                                                  != notifications_stream_id):
            new_notifications_stream = None
            if notifications_stream_id >= 0:
                (new_notifications_stream,
                 sub) = access_stream_by_id(user_profile,
                                            notifications_stream_id)
            do_set_realm_notifications_stream(realm,
                                              new_notifications_stream,
                                              notifications_stream_id,
                                              acting_user=user_profile)
            data['notifications_stream_id'] = notifications_stream_id

    if signup_notifications_stream_id is not None:
        if realm.signup_notifications_stream is None or (
                realm.signup_notifications_stream.id !=
                signup_notifications_stream_id):
            new_signup_notifications_stream = None
            if signup_notifications_stream_id >= 0:
                (new_signup_notifications_stream,
                 sub) = access_stream_by_id(user_profile,
                                            signup_notifications_stream_id)
            do_set_realm_signup_notifications_stream(
                realm,
                new_signup_notifications_stream,
                signup_notifications_stream_id,
                acting_user=user_profile)
            data['signup_notifications_stream_id'] = signup_notifications_stream_id

    if default_code_block_language is not None:
        # Migrate '', used in the API to encode the default/None behavior of this feature.
        if default_code_block_language == '':
            data['default_code_block_language'] = None
        else:
            data['default_code_block_language'] = default_code_block_language

    return json_success(data)
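The `locals()` / `realm.property_types` loop above is easy to misread out of context. Below is a minimal, self-contained sketch of the same pattern; the `FakeRealm` class, its property values, and `update_fake_realm` are purely illustrative and not Zulip code. The idea: collect only the keyword arguments whose names appear in `property_types`, then apply just the ones that actually changed.

from typing import Any, Dict, Optional

class FakeRealm:
    # Hypothetical stand-in for Realm: maps settable property names to their types.
    property_types: Dict[str, type] = {"name": str, "invite_required": bool}

    def __init__(self) -> None:
        self.name = "Acme"
        self.invite_required = False

def update_fake_realm(realm: FakeRealm,
                      name: Optional[str] = None,
                      invite_required: Optional[bool] = None,
                      unrelated_arg: Optional[int] = None) -> Dict[str, Any]:
    # Only arguments listed in property_types participate in the generic loop;
    # unrelated_arg is filtered out, mirroring the locals() filtering above.
    req_vars = {k: v for k, v in list(locals().items()) if k in realm.property_types}
    data: Dict[str, Any] = {}
    for k, v in req_vars.items():
        if v is not None and getattr(realm, k) != v:
            setattr(realm, k, v)  # stands in for do_set_realm_property
            data[k] = v
    return data

realm = FakeRealm()
print(update_fake_realm(realm, invite_required=True))  # {'invite_required': True}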
Ejemplo n.º 22
0
def access_playground_by_id(realm: Realm, playground_id: int) -> RealmPlayground:
    try:
        realm_playground = RealmPlayground.objects.get(id=playground_id, realm=realm)
    except RealmPlayground.DoesNotExist:
        raise JsonableError(_("Invalid playground"))
    return realm_playground
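A helper like this is normally called from a view that then acts on the resolved object. The following is a hedged sketch of that usage; the view function, the do_remove_realm_playground helper, and the surrounding Django/Zulip imports are assumed for illustration and are not part of this listing.

def delete_realm_playground(request: HttpRequest, user_profile: UserProfile,
                            playground_id: int) -> HttpResponse:
    # Resolve the id within the caller's realm, or fail with a uniform
    # "Invalid playground" error.
    playground = access_playground_by_id(user_profile.realm, playground_id)
    do_remove_realm_playground(user_profile.realm, playground)  # illustrative action helper
    return json_success()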
Ejemplo n.º 23
0
def list_to_streams(streams_raw, user_profile, autocreate=False):
    # type: (Iterable[Mapping[str, Any]], UserProfile, Optional[bool]) -> Tuple[List[Stream], List[Stream]]
    """Converts list of dicts to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our
    requirements for a proper stream name: that is, that it is shorter
    than Stream.MAX_NAME_LENGTH characters and passes
    valid_stream_name.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream dictionaries to process;
      names should already be stripped of whitespace by the caller.
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    """
    # Validate all streams, getting extant ones, then get-or-creating the rest.

    stream_set = set(stream_dict["name"] for stream_dict in streams_raw)

    for stream_name in stream_set:
        # Stream names should already have been stripped by the
        # caller, but it makes sense to verify anyway.
        assert stream_name == stream_name.strip()
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            raise JsonableError(
                _("Stream name (%s) too long.") % (stream_name, ))
        if not valid_stream_name(stream_name):
            raise JsonableError(
                _("Invalid stream name (%s).") % (stream_name, ))

    existing_streams = []  # type: List[Stream]
    missing_stream_dicts = []  # type: List[Mapping[str, Any]]
    existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)

    for stream_dict in streams_raw:
        stream_name = stream_dict["name"]
        stream = existing_stream_map.get(stream_name.lower())
        if stream is None:
            missing_stream_dicts.append(stream_dict)
        else:
            existing_streams.append(stream)

    if len(missing_stream_dicts) == 0:
        # This is the happy path for callers who expected all of these
        # streams to exist already.
        created_streams = []  # type: List[Stream]
    else:
        # autocreate=True path starts here
        if not user_profile.can_create_streams():
            raise JsonableError(_('User cannot create streams.'))
        elif not autocreate:
            raise JsonableError(
                _("Stream(s) (%s) do not exist") %
                ", ".join(stream_dict["name"]
                          for stream_dict in missing_stream_dicts))

        # We already filtered out existing streams, so dup_streams
        # will normally be an empty list below, but we protect against somebody
        # else racing to create the same stream.  (This is not an entirely
        # paranoid approach, since often on Zulip two people will discuss
        # creating a new stream, and both people eagerly do it.)
        created_streams, dup_streams = create_streams_if_needed(
            realm=user_profile.realm, stream_dicts=missing_stream_dicts)
        existing_streams += dup_streams

    return existing_streams, created_streams
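As a usage note on the returned tuple: callers typically combine both lists, since streams that already existed and streams that were just created need the same follow-up work. A hedged sketch, assuming a user_profile already resolved from the request and illustrative stream dicts:

streams_raw = [{"name": "design"}, {"name": "social"}]
existing_streams, created_streams = list_to_streams(streams_raw, user_profile,
                                                    autocreate=True)
for stream in existing_streams + created_streams:
    # e.g. go on to subscribe user_profile to each stream here
    print(stream.name)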
Ejemplo n.º 24
0
def check_emoji_admin(user_profile):
    # type: (UserProfile) -> None
    if user_profile.realm.add_emoji_by_admins_only and not user_profile.is_realm_admin:
        raise JsonableError(_("Must be a realm administrator"))
Ejemplo n.º 25
0
def check_dev_auth_backend() -> None:
    if settings.PRODUCTION:
        raise JsonableError(_("Endpoint not available in production."))
    if not dev_auth_enabled():
        raise JsonableError(_("DevAuthBackend not enabled."))
Ejemplo n.º 26
0
def update_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    name: Optional[str] = REQ(validator=check_string, default=None),
    description: Optional[str] = REQ(validator=check_string, default=None),
    emails_restricted_to_domains: Optional[bool] = REQ(validator=check_bool,
                                                       default=None),
    disallow_disposable_email_addresses: Optional[bool] = REQ(
        validator=check_bool, default=None),
    invite_required: Optional[bool] = REQ(validator=check_bool, default=None),
    invite_by_admins_only: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    name_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    email_changes_disabled: Optional[bool] = REQ(validator=check_bool,
                                                 default=None),
    inline_image_preview: Optional[bool] = REQ(validator=check_bool,
                                               default=None),
    inline_url_embed_preview: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
    create_stream_by_admins_only: Optional[bool] = REQ(validator=check_bool,
                                                       default=None),
    add_emoji_by_admins_only: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
    allow_message_deleting: Optional[bool] = REQ(validator=check_bool,
                                                 default=None),
    message_content_delete_limit_seconds: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    allow_message_editing: Optional[bool] = REQ(validator=check_bool,
                                                default=None),
    allow_community_topic_editing: Optional[bool] = REQ(validator=check_bool,
                                                        default=None),
    mandatory_topics: Optional[bool] = REQ(validator=check_bool, default=None),
    message_content_edit_limit_seconds: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    allow_edit_history: Optional[bool] = REQ(validator=check_bool,
                                             default=None),
    default_language: Optional[str] = REQ(validator=check_string,
                                          default=None),
    waiting_period_threshold: Optional[int] = REQ(
        converter=to_non_negative_int, default=None),
    authentication_methods: Optional[Dict[Any,
                                          Any]] = REQ(validator=check_dict([]),
                                                      default=None),
    notifications_stream_id: Optional[int] = REQ(validator=check_int,
                                                 default=None),
    signup_notifications_stream_id: Optional[int] = REQ(validator=check_int,
                                                        default=None),
    message_retention_days: Optional[int] = REQ(
        converter=to_not_negative_int_or_none, default=None),
    send_welcome_emails: Optional[bool] = REQ(validator=check_bool,
                                              default=None),
    bot_creation_policy: Optional[int] = REQ(
        converter=to_not_negative_int_or_none, default=None),
    default_twenty_four_hour_time: Optional[bool] = REQ(validator=check_bool,
                                                        default=None),
    video_chat_provider: Optional[str] = REQ(validator=check_string,
                                             default=None),
    google_hangouts_domain: Optional[str] = REQ(validator=check_string,
                                                default=None)
) -> HttpResponse:
    realm = user_profile.realm

    # Additional validation/error checking beyond types goes here, so
    # the entire request can succeed or fail atomically.
    if (default_language is not None
            and default_language not in get_available_language_codes()):
        # Interpolate outside _() so the untranslated format string is what
        # gets looked up in the translation catalog.
        raise JsonableError(_("Invalid language '%s'") % (default_language, ))
    if description is not None and len(description) > 1000:
        return json_error(_("Organization description is too long."))
    if name is not None and len(name) > Realm.MAX_REALM_NAME_LENGTH:
        return json_error(_("Organization name is too long."))
    if authentication_methods is not None and True not in list(
            authentication_methods.values()):
        return json_error(
            _("At least one authentication method must be enabled."))
    if video_chat_provider == "Google Hangouts":
        try:
            validate_domain(google_hangouts_domain)
        except ValidationError as e:
            return json_error(_('Invalid domain: {}').format(e.messages[0]))

    # Additional validation of the permission value controlling who can add new bots
    if bot_creation_policy is not None and bot_creation_policy not in Realm.BOT_CREATION_POLICY_TYPES:
        return json_error(_("Invalid bot creation policy"))
    # The use of `locals()` here is a bit of a code smell, but it's
    # restricted to the elements present in realm.property_types.
    #
    # TODO: It should be possible to deduplicate this function up
    # further by some more advanced usage of the
    # `REQ/has_request_variables` extraction.
    req_vars = {
        k: v
        for k, v in list(locals().items()) if k in realm.property_types
    }
    data = {}  # type: Dict[str, Any]

    for k, v in list(req_vars.items()):
        if v is not None and getattr(realm, k) != v:
            do_set_realm_property(realm, k, v)
            if isinstance(v, str):
                data[k] = 'updated'
            else:
                data[k] = v

    # The following realm properties do not fit the pattern above.
    # authentication_methods is not supported by the do_set_realm_property
    # framework because it is stored as a bitfield.
    if authentication_methods is not None and (
            realm.authentication_methods_dict() != authentication_methods):
        do_set_realm_authentication_methods(realm, authentication_methods)
        data['authentication_methods'] = authentication_methods
    # The message_editing settings are coupled to each other, and thus don't fit
    # into the do_set_realm_property framework.
    if ((allow_message_editing is not None
         and realm.allow_message_editing != allow_message_editing)
            or (message_content_edit_limit_seconds is not None
                and realm.message_content_edit_limit_seconds !=
                message_content_edit_limit_seconds)
            or (allow_community_topic_editing is not None
                and realm.allow_community_topic_editing !=
                allow_community_topic_editing)):
        if allow_message_editing is None:
            allow_message_editing = realm.allow_message_editing
        if message_content_edit_limit_seconds is None:
            message_content_edit_limit_seconds = realm.message_content_edit_limit_seconds
        if allow_community_topic_editing is None:
            allow_community_topic_editing = realm.allow_community_topic_editing
        do_set_realm_message_editing(realm, allow_message_editing,
                                     message_content_edit_limit_seconds,
                                     allow_community_topic_editing)
        data['allow_message_editing'] = allow_message_editing
        data['message_content_edit_limit_seconds'] = message_content_edit_limit_seconds
        data['allow_community_topic_editing'] = allow_community_topic_editing

    if (message_content_delete_limit_seconds is not None
            and realm.message_content_delete_limit_seconds !=
            message_content_delete_limit_seconds):
        do_set_realm_message_deleting(realm,
                                      message_content_delete_limit_seconds)
        data['message_content_delete_limit_seconds'] = message_content_delete_limit_seconds
    # Realm.notifications_stream and Realm.signup_notifications_stream are not
    # boolean, str, or integer fields, and thus don't fit into the
    # do_set_realm_property framework.
    if notifications_stream_id is not None:
        if realm.notifications_stream is None or (realm.notifications_stream.id
                                                  != notifications_stream_id):
            new_notifications_stream = None
            if notifications_stream_id >= 0:
                (new_notifications_stream, recipient,
                 sub) = access_stream_by_id(user_profile,
                                            notifications_stream_id)
            do_set_realm_notifications_stream(realm, new_notifications_stream,
                                              notifications_stream_id)
            data['notifications_stream_id'] = notifications_stream_id

    if signup_notifications_stream_id is not None:
        if realm.signup_notifications_stream is None or (
                realm.signup_notifications_stream.id !=
                signup_notifications_stream_id):
            new_signup_notifications_stream = None
            if signup_notifications_stream_id >= 0:
                (new_signup_notifications_stream, recipient,
                 sub) = access_stream_by_id(user_profile,
                                            signup_notifications_stream_id)
            do_set_realm_signup_notifications_stream(
                realm, new_signup_notifications_stream,
                signup_notifications_stream_id)
            data['signup_notifications_stream_id'] = signup_notifications_stream_id

    return json_success(data)
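The comment about authentication_methods being a bitfield is the key reason it bypasses do_set_realm_property. Here is a self-contained sketch of that idea; the backend names and bit positions are made up for illustration. The dict of per-backend booleans collapses into a single integer, so there is no separate model attribute per flag for a generic setter to update.

from typing import Dict

BACKEND_BITS = {"Email": 1 << 0, "GitHub": 1 << 1, "LDAP": 1 << 2}  # illustrative bit layout

def pack_authentication_methods(methods: Dict[str, bool]) -> int:
    # Fold the per-backend booleans into a single bitfield integer.
    bitfield = 0
    for name, enabled in methods.items():
        if enabled:
            bitfield |= BACKEND_BITS[name]
    return bitfield

assert pack_authentication_methods({"Email": True, "GitHub": False, "LDAP": True}) == 0b101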
Ejemplo n.º 27
0
def do_events_register(
        user_profile: UserProfile,
        user_client: Client,
        apply_markdown: bool = True,
        client_gravatar: bool = False,
        event_types: Optional[Iterable[str]] = None,
        queue_lifespan_secs: int = 0,
        all_public_streams: bool = False,
        include_subscribers: bool = True,
        narrow: Iterable[Sequence[str]] = [],
        fetch_event_types: Optional[Iterable[str]] = None) -> Dict[str, Any]:
    # Technically we don't need to check this here because
    # build_narrow_filter will check it, but it's nicer from an error
    # handling perspective to do it before contacting Tornado
    check_supported_events_narrow_filter(narrow)

    # Note that we pass event_types, not fetch_event_types here, since
    # that's what controls which future events are sent.
    queue_id = request_event_queue(user_profile,
                                   user_client,
                                   apply_markdown,
                                   client_gravatar,
                                   queue_lifespan_secs,
                                   event_types,
                                   all_public_streams,
                                   narrow=narrow)

    if queue_id is None:
        raise JsonableError(_("Could not allocate event queue"))

    if fetch_event_types is not None:
        event_types_set = set(fetch_event_types)  # type: Optional[Set[str]]
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

    # Fill up the UserMessage rows if a soft-deactivated user has returned
    maybe_catch_up_soft_deactivated_user(user_profile)

    ret = fetch_initial_state_data(user_profile,
                                   event_types_set,
                                   queue_id,
                                   client_gravatar=client_gravatar,
                                   include_subscribers=include_subscribers)

    # Apply events that came in while we were fetching initial data
    events = get_user_events(user_profile, queue_id, -1)
    apply_events(ret,
                 events,
                 user_profile,
                 include_subscribers=include_subscribers,
                 client_gravatar=client_gravatar,
                 fetch_event_types=fetch_event_types)

    post_process_state(ret)

    if len(events) > 0:
        ret['last_event_id'] = events[-1]['id']
    else:
        ret['last_event_id'] = -1
    return ret
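From the client's point of view, the state assembled here (including queue_id and last_event_id) feeds a register-then-poll loop. A hedged sketch of that loop follows; the zulip_request callable is a stand-in for whatever HTTP helper the client uses, and the endpoint paths follow Zulip's public events API.

from typing import Any, Callable, Dict

def event_loop(zulip_request: Callable[[str, str, Dict[str, Any]], Dict[str, Any]]) -> None:
    # Register once to get an event queue plus the initial state snapshot.
    state = zulip_request("POST", "/api/v1/register", {"event_types": '["message"]'})
    queue_id, last_event_id = state["queue_id"], state["last_event_id"]
    while True:
        # Long-poll for events newer than the last one we have applied.
        result = zulip_request("GET", "/api/v1/events",
                               {"queue_id": queue_id, "last_event_id": last_event_id})
        for event in result["events"]:
            last_event_id = max(last_event_id, event["id"])
            print("handling", event["type"])  # application-specific handling goes here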
Ejemplo n.º 28
0
def check_short_name(short_name_raw: str) -> str:
    short_name = short_name_raw.strip()
    if len(short_name) == 0:
        raise JsonableError(_("Bad name or username"))
    return short_name
Ejemplo n.º 29
0
def check_valid_interface_type(interface_type: Optional[int]) -> None:
    if interface_type not in Service.ALLOWED_INTERFACE_TYPES:
        raise JsonableError(_("Invalid interface type"))
Ejemplo n.º 30
0
def list_to_streams(
        streams_raw: Iterable[Mapping[str, Any]],
        user_profile: UserProfile,
        autocreate: bool = False) -> Tuple[List[Stream], List[Stream]]:
    """Converts list of dicts to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our
    requirements for a proper stream name using check_stream_name.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream dictionaries to process;
      names should already be stripped of whitespace by the caller.
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    """
    # Validate all streams, getting extant ones, then get-or-creating the rest.

    stream_set = {stream_dict["name"] for stream_dict in streams_raw}

    for stream_name in stream_set:
        # Stream names should already have been stripped by the
        # caller, but it makes sense to verify anyway.
        assert stream_name == stream_name.strip()
        check_stream_name(stream_name)

    existing_streams: List[Stream] = []
    missing_stream_dicts: List[Mapping[str, Any]] = []
    existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)

    message_retention_days_not_none = False
    for stream_dict in streams_raw:
        stream_name = stream_dict["name"]
        stream = existing_stream_map.get(stream_name.lower())
        if stream is None:
            if stream_dict.get('message_retention_days', None) is not None:
                message_retention_days_not_none = True
            missing_stream_dicts.append(stream_dict)
        else:
            existing_streams.append(stream)

    if len(missing_stream_dicts) == 0:
        # This is the happy path for callers who expected all of these
        # streams to exist already.
        created_streams: List[Stream] = []
    else:
        # autocreate=True path starts here
        if not user_profile.can_create_streams():
            raise JsonableError(_('User cannot create streams.'))
        elif not autocreate:
            raise JsonableError(
                _("Stream(s) ({}) do not exist").format(
                    ", ".join(stream_dict["name"]
                              for stream_dict in missing_stream_dicts), ))
        elif message_retention_days_not_none:
            if not user_profile.is_realm_owner:
                raise JsonableError(
                    _('User cannot create streams with these settings.'))
            user_profile.realm.ensure_not_on_limited_plan()

        # We already filtered out existing streams, so dup_streams
        # will normally be an empty list below, but we protect against somebody
        # else racing to create the same stream.  (This is not an entirely
        # paranoid approach, since often on Zulip two people will discuss
        # creating a new stream, and both people eagerly do it.)
        created_streams, dup_streams = create_streams_if_needed(
            realm=user_profile.realm, stream_dicts=missing_stream_dicts)
        existing_streams += dup_streams

    return existing_streams, created_streams
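The race-protection comment above (two users creating the same stream at roughly the same time) boils down to a get-or-create that treats "someone beat us to it" as success. A minimal, self-contained sketch of that idea, using plain dicts instead of database rows; the names are illustrative, not Zulip code.

from typing import Dict, List, Tuple

def create_if_needed(store: Dict[str, dict], wanted: List[dict]) -> Tuple[List[dict], List[dict]]:
    created, dups = [], []
    for item in wanted:
        existing = store.get(item["name"])
        if existing is not None:
            # Lost the race: another caller created it first; report it as a
            # duplicate rather than failing.
            dups.append(existing)
        else:
            store[item["name"]] = item
            created.append(item)
    return created, dups

store: Dict[str, dict] = {"design": {"name": "design"}}
print(create_if_needed(store, [{"name": "design"}, {"name": "social"}]))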