Example #1
def bulk_delete_cache_keys(message_ids_to_clear):
    # type: (List[int]) -> None
    while len(message_ids_to_clear) > 0:
        batch = message_ids_to_clear[0:5000]

        keys_to_delete = [to_dict_cache_key_id(message_id, True) for message_id in batch]
        cache_delete_many(keys_to_delete)
        keys_to_delete = [to_dict_cache_key_id(message_id, False) for message_id in batch]
        cache_delete_many(keys_to_delete)

        message_ids_to_clear = message_ids_to_clear[5000:]
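
This older snapshot computes two cache keys per message, one for each value of an apply_markdown flag. A minimal stub of to_dict_cache_key_id for trying the batching pattern in isolation (the key format below is an assumption for illustration; the real helper lives in zerver/lib/cache.py and may differ):

def to_dict_cache_key_id(message_id, apply_markdown):
    # type: (int, bool) -> str
    # Hypothetical key format: only the (message_id, apply_markdown)
    # signature is taken from the calls above.
    return "message_dict:%d:%d" % (message_id, apply_markdown)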
Example #2
    def test_cached_reaction_data(self) -> None:
        """
        Formatted reactions data is saved in cache.
        """
        sender = self.example_user("hamlet")
        reaction_info = {
            "emoji_name": "smile",
        }
        result = self.api_post(sender, "/api/v1/messages/1/reactions",
                               reaction_info)

        self.assert_json_success(result)
        self.assertEqual(200, result.status_code)
        key = to_dict_cache_key_id(1)
        message = extract_message_dict(cache_get(key)[0])

        expected_reaction_data = [{
            "emoji_name": "smile",
            "emoji_code": "1f642",
            "reaction_type": "unicode_emoji",
            "user": {
                "email": "*****@*****.**",
                "id": 10,
                "full_name": "King Hamlet",
            },
            "user_id": 10,
        }]
        self.assertEqual(expected_reaction_data, message["reactions"])
Example #3
    def test_cached_reaction_data(self) -> None:
        """
        Formatted reactions data is saved in cache.
        """
        sender = self.example_user("hamlet")
        reaction_info = {
            'emoji_name': 'smile'
        }
        result = self.api_post(sender, '/api/v1/messages/1/reactions',
                               reaction_info)

        self.assert_json_success(result)
        self.assertEqual(200, result.status_code)
        key = to_dict_cache_key_id(1)
        message = extract_message_dict(cache_get(key)[0])

        expected_reaction_data = [{
            'emoji_name': 'smile',
            'emoji_code': '263a',
            'reaction_type': 'unicode_emoji',
            'user': {
                'email': '*****@*****.**',
                'id': 10,
                'full_name': 'King Hamlet'
            },
            'user_id': 10
        }]
        self.assertEqual(expected_reaction_data, message['reactions'])
Example #4
def bulk_delete_cache_keys(message_ids_to_clear: List[int]) -> None:
    while len(message_ids_to_clear) > 0:
        batch = message_ids_to_clear[0:5000]

        keys_to_delete = [to_dict_cache_key_id(message_id) for message_id in batch]
        cache_delete_many(keys_to_delete)

        message_ids_to_clear = message_ids_to_clear[5000:]
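
Later versions drop the apply_markdown flag, so each message has exactly one dict-cache entry to delete. A matching single-argument stub (again, an assumed key format, for illustration only):

def to_dict_cache_key_id(message_id: int) -> str:
    # Hypothetical key format mirroring the one-key-per-message call above.
    return "message_dict:%d" % (message_id,)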
Example #5
def message_cache_items(items_for_remote_cache: Dict[str, Tuple[bytes]],
                        message: Message) -> None:
    """
    Note: this code is untested, and the caller has been
    commented out for a while.
    """
    key = to_dict_cache_key_id(message.id)
    value = MessageDict.to_dict_uncached([message])[message.id]
    items_for_remote_cache[key] = (value, )
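
message_cache_items only fills a dict, and its docstring notes the caller has been commented out. A plausible fill-then-flush caller, assuming a bulk cache_set_many counterpart to the cache_delete_many seen in the other examples (warm_message_cache and the flush step are sketched here, not taken from this listing):

from typing import Dict, List, Tuple

def warm_message_cache(messages: List[Message]) -> None:
    # Accumulate key -> (value,) pairs, then write them in one round trip.
    items_for_remote_cache: Dict[str, Tuple[bytes]] = {}
    for message in messages:
        message_cache_items(items_for_remote_cache, message)
    cache_set_many(items_for_remote_cache)  # assumed bulk-set helper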
Example #6
def message_cache_items(items_for_remote_cache: Dict[Text, Tuple[bytes]],
                        message: Message) -> None:
    '''
    Note: this code is untested, and the caller has been
    commented out for a while.
    '''
    key = to_dict_cache_key_id(message.id)
    value = MessageDict.to_dict_uncached(message)
    items_for_remote_cache[key] = (value,)
Example #7
def message_cache_items(items_for_remote_cache, message):
    # type: (Dict[Text, Tuple[bytes]], Message) -> None
    '''
    Note: this code is untested, and the caller has been
    commented out for a while.
    '''
    key = to_dict_cache_key_id(message.id)
    value = MessageDict.to_dict_uncached(message)
    items_for_remote_cache[key] = (value, )
Example #8
    def test_cached_reaction_data(self) -> None:
        """
        Formatted reactions data is saved in cache.
        """
        senders = [self.example_user("hamlet"), self.example_user("cordelia")]
        emojis = ["smile", "tada"]
        expected_emoji_codes = ["1f642", "1f389"]

        for sender, emoji in zip(senders, emojis):
            reaction_info = {
                "emoji_name": emoji,
            }
            result = self.api_post(sender, "/api/v1/messages/1/reactions",
                                   reaction_info)

            self.assert_json_success(result)
            self.assertEqual(200, result.status_code)

        key = to_dict_cache_key_id(1)
        message = extract_message_dict(cache_get(key)[0])

        expected_reaction_data = [
            {
                "emoji_name": emoji,
                "emoji_code": emoji_code,
                "reaction_type": "unicode_emoji",
                "user": {
                    "email": f"user{sender.id}@zulip.testserver",
                    "id": sender.id,
                    "full_name": sender.full_name,
                },
                "user_id": sender.id,
            }
            # It's important that we preserve the loop order in this
            # test, since this is our test to verify that we're
            # returning reactions in chronological order.
            for sender, emoji, emoji_code in zip(senders, emojis,
                                                 expected_emoji_codes)
        ]
        self.assertEqual(expected_reaction_data, message["reactions"])
Example #9
def message_cache_items(items_for_remote_cache, message):
    # type: (Dict[text_type, Tuple[binary_type]], Message) -> None
    items_for_remote_cache[to_dict_cache_key_id(message.id, True)] = (message.to_dict_uncached(True),)
Example #10
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             use_first_unread_anchor = REQ(default=False, converter=ujson.loads),
                             apply_markdown=REQ(default=True,
                                                converter=ujson.loads)):
    # type: (HttpRequest, UserProfile, int, int, int, Optional[List[Dict[str, Any]]], bool, bool) -> HttpResponse
    include_history = ok_to_include_history(narrow, user_profile.realm)

    if include_history and not use_first_unread_anchor:
        query = select([column("id").label("message_id")], None, table("zerver_message"))
        inner_msg_id_col = literal_column("zerver_message.id")
    elif narrow is None:
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       table("zerver_usermessage"))
        inner_msg_id_col = column("message_id")
    else:
        # TODO: Don't do this join if we're not doing a search
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       join(table("zerver_usermessage"), table("zerver_message"),
                            literal_column("zerver_usermessage.message_id") ==
                            literal_column("zerver_message.id")))
        inner_msg_id_col = column("message_id")

    num_extra_messages = 1
    is_search = False

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term['operator'] == "is":
                verbose_operators.append("is:" + term['operand'])
            else:
                verbose_operators.append(term['operator'])
        request._log_data['extra'] = "[%s]" % (",".join(verbose_operators),)

        # Build the query for the narrow
        num_extra_messages = 0
        builder = NarrowBuilder(user_profile, inner_msg_id_col)
        search_term = None # type: Optional[Dict[str, Any]]
        for term in narrow:
            if term['operator'] == 'search':
                if not is_search:
                    search_term = term
                    query = query.column(column("subject")).column(column("rendered_content"))
                    is_search = True
                else:
                    # Join the search operators if there are multiple of them
                    search_term['operand'] += ' ' + term['operand']
            else:
                query = builder.add_term(query, term)
        if is_search:
            query = builder.add_term(query, search_term)

    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message.  If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages

    sa_conn = get_sqlalchemy_connection()
    if use_first_unread_anchor:
        condition = column("flags").op("&")(UserMessage.flags.read.mask) == 0

        # We exclude messages on muted topics when finding the first unread
        # message in this narrow
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        if muting_conditions:
            condition = and_(condition, *muting_conditions)

        first_unread_query = query.where(condition)
        first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
        first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
        if len(first_unread_result) > 0:
            anchor = first_unread_result[0][0]
        else:
            anchor = LARGER_THAN_MAX_MESSAGE_ID

    before_query = None
    after_query = None
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        before_query = query.where(inner_msg_id_col <= before_anchor) \
                            .order_by(inner_msg_id_col.desc()).limit(num_before)
    if num_after != 0:
        after_query = query.where(inner_msg_id_col >= anchor) \
                           .order_by(inner_msg_id_col.asc()).limit(num_after)

    if anchor == LARGER_THAN_MAX_MESSAGE_ID:
        # There's no need for an after_query if we're targeting just the target message.
        after_query = None

    if before_query is not None:
        if after_query is not None:
            query = union_all(before_query.self_group(), after_query.self_group())
        else:
            query = before_query
    elif after_query is not None:
        query = after_query
    else:
        # This can happen when a narrow is specified.
        query = query.where(inner_msg_id_col == anchor)

    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id").asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_old_messages */")
    query_result = list(sa_conn.execute(query).fetchall())

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    search_fields = dict() # type: Dict[int, Dict[str, Text]]
    message_ids = [] # type: List[int]
    user_message_flags = {} # type: Dict[int, List[str]]
    if include_history:
        message_ids = [row[0] for row in query_result]

        # TODO: This could be done with an outer join instead of two queries
        user_message_flags = dict((user_message.message_id, user_message.flags_list()) for user_message in
                                  UserMessage.objects.filter(user_profile=user_profile,
                                                             message__id__in=message_ids))
        for row in query_result:
            message_id = row[0]
            if user_message_flags.get(message_id) is None:
                user_message_flags[message_id] = ["read", "historical"]
            if is_search:
                (_, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)
    else:
        for row in query_result:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = parse_usermessage_flags(flags)

            message_ids.append(message_id)

            if is_search:
                (_, _, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)

    cache_transformer = lambda row: MessageDict.build_dict_from_raw_db_row(row, apply_markdown)
    id_fetcher = lambda row: row['id']

    message_dicts = generic_bulk_cached_fetch(lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
                                              Message.get_raw_db_rows,
                                              message_ids,
                                              id_fetcher=id_fetcher,
                                              cache_transformer=cache_transformer,
                                              extractor=extract_message_dict,
                                              setter=stringify_message_dict)

    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)

    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)
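
generic_bulk_cached_fetch is the heart of this view: a bulk cache-aside fetch keyed by to_dict_cache_key_id. A simplified sketch of that pattern, inferred from how the call above is wired (the cache_get_many/cache_set_many helpers and the (value,) hit format are assumptions; the real implementation in zerver/lib/cache.py also handles key versioning and serialization):

from typing import Any, Callable, Dict, List

def simple_bulk_cached_fetch(
    cache_key_function: Callable[[int], str],
    query_function: Callable[[List[int]], List[Any]],
    object_ids: List[int],
    id_fetcher: Callable[[Any], int],
    cache_transformer: Callable[[Any], Any],
    extractor: Callable[[Any], Any],
    setter: Callable[[Any], Any],
) -> Dict[int, Any]:
    # 1. Look up every key in the remote cache in one round trip.
    keys = [cache_key_function(object_id) for object_id in object_ids]
    cached = cache_get_many(keys)  # assumed: returns {key: (value,)} for hits
    results: Dict[int, Any] = {}
    missed_ids: List[int] = []
    for object_id, key in zip(object_ids, keys):
        if key in cached:
            results[object_id] = extractor(cached[key][0])
        else:
            missed_ids.append(object_id)
    # 2. Fetch the misses from the database in one query, transform the
    #    raw rows for the client, and backfill the cache for next time.
    if missed_ids:
        items_to_set: Dict[str, Any] = {}
        for row in query_function(missed_ids):
            obj = cache_transformer(row)
            object_id = id_fetcher(row)
            results[object_id] = obj
            items_to_set[cache_key_function(object_id)] = (setter(obj),)
        cache_set_many(items_to_set)
    return results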
Example #11
def reload_message(msg_id: int) -> Message:
    # Get a clean copy of the message, and
    # clear the cache.
    cache_delete(to_dict_cache_key_id(msg_id))
    msg = Message.objects.get(id=msg_id)
    return msg
Example #12
def do_rename_stream(stream: Stream, new_name: str, user_profile: UserProfile) -> Dict[str, str]:
    old_name = stream.name
    stream.name = new_name
    stream.save(update_fields=["name"])

    RealmAuditLog.objects.create(
        realm=stream.realm,
        acting_user=user_profile,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_NAME_CHANGED,
        event_time=timezone_now(),
        extra_data=orjson.dumps(
            {
                RealmAuditLog.OLD_VALUE: old_name,
                RealmAuditLog.NEW_VALUE: new_name,
            }
        ).decode(),
    )

    recipient_id = stream.recipient_id
    messages = Message.objects.filter(recipient_id=recipient_id).only("id")

    # Update the display recipient and stream, which are easy single
    # items to set.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    new_cache_key = get_stream_cache_key(stream.name, stream.realm_id)
    if old_cache_key != new_cache_key:
        cache_delete(old_cache_key)
        cache_set(new_cache_key, stream)
    cache_set(display_recipient_cache_key(recipient_id), stream.name)

    # Delete cache entries for everything else, which is cheaper and
    # clearer than trying to set them. display_recipient is the out of
    # date field in all cases.
    cache_delete_many(to_dict_cache_key_id(message.id) for message in messages)
    new_email = encode_email_address(stream, show_sender=True)

    # We will tell our users to essentially
    # update stream.name = new_name where name = old_name
    # and update stream.email = new_email where name = old_name.
    # We could optimize this by trying to send one message, but the
    # client code really wants one property update at a time, and
    # updating stream names is a pretty infrequent operation.
    # More importantly, we want to key these updates by id, not name,
    # since id is the immutable primary key, and obviously name is not.
    data_updates = [
        ["email_address", new_email],
        ["name", new_name],
    ]
    for property, value in data_updates:
        event = dict(
            op="update",
            type="stream",
            property=property,
            value=value,
            stream_id=stream.id,
            name=old_name,
        )
        send_event(stream.realm, event, can_access_stream_user_ids(stream))
    sender = get_system_bot(settings.NOTIFICATION_BOT, stream.realm_id)
    with override_language(stream.realm.default_language):
        internal_send_stream_message(
            sender,
            stream,
            Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
            _("{user_name} renamed stream {old_stream_name} to {new_stream_name}.").format(
                user_name=silent_mention_syntax_for_user(user_profile),
                old_stream_name=f"**{old_name}**",
                new_stream_name=f"**{new_name}**",
            ),
        )
    # Even though the token doesn't change, the web client needs to update the
    # email forwarding address to display the correctly-escaped new name.
    return {"email_address": new_email}
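
Note how the bulk invalidation above streams a generator of to_dict_cache_key_id keys straight into cache_delete_many, so the full key list is never materialized. The same pattern in isolation (recipient_id is a placeholder here):

# Invalidate the cached message dict of every message sent to one
# recipient, fetching only the ids from the database.
message_ids = Message.objects.filter(recipient_id=recipient_id).values_list("id", flat=True)
cache_delete_many(to_dict_cache_key_id(message_id) for message_id in message_ids)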