Example #1
def flush_realm_filter(sender, **kwargs):
    # Invalidate both the shared cache entry and this process's
    # per-request cache for the realm's filters.
    realm = kwargs['instance'].realm
    cache_delete(get_realm_filters_cache_key(realm.domain))
    try:
        per_request_realm_filters_cache.pop(realm.domain.lower())
    except KeyError:
        pass
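The kwargs['instance'] access marks this as a Django signal receiver. A minimal sketch of how such a receiver is typically wired up; the RealmFilter sender model is an assumption here:

from django.db.models.signals import post_delete, post_save

# Flush the caches whenever a realm filter is created, edited, or removed.
# RealmFilter is assumed to be the model whose instances carry .realm.
post_save.connect(flush_realm_filter, sender=RealmFilter)
post_delete.connect(flush_realm_filter, sender=RealmFilter)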
Example #2
    @contextmanager
    def tor_mock(
        self,
        side_effect: Optional[Exception] = None,
        read_data: Sequence[str] = ["1.2.3.4", "5.6.7.8"],
    ) -> Iterator[mock.Mock]:
        # We need to reset the circuitbreaker before starting.  We
        # patch the .opened property to be false, then call the
        # function, so it resets to closed.
        with mock.patch("builtins.open", mock.mock_open(read_data=orjson.dumps(["1.2.3.4"]))):
            with mock.patch(
                "circuitbreaker.CircuitBreaker.opened", new_callable=mock.PropertyMock
            ) as mock_opened:
                mock_opened.return_value = False
                decorator.get_tor_ips()

        # Having closed it, it's now cached.  Clear the cache.
        assert CircuitBreakerMonitor.get("get_tor_ips").closed
        cache_delete("tor_ip_addresses:")

        builtin_open = open
        if side_effect:
            tor_open = mock.MagicMock(side_effect=side_effect)
        else:
            tor_open = mock.mock_open(read_data=orjson.dumps(read_data))

        def selective_mock_open(*args: Any, **kwargs: Any) -> IO[Any]:
            if args[0] == settings.TOR_EXIT_NODE_FILE_PATH:
                return tor_open(*args, **kwargs)
            return builtin_open(*args, **kwargs)

        with mock.patch("builtins.open", selective_mock_open):
            yield tor_open
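As the @contextmanager decorator suggests, tor_mock is used as a context manager in tests. A hedged usage sketch; get_tor_ips returning the parsed IPs is inferred from the mocked file contents:

# Inside a test method of the same class:
with self.tor_mock(read_data=["9.9.9.9"]) as tor_open:
    ips = decorator.get_tor_ips()
# The Tor exit-node file was opened exactly once by the code under test.
tor_open.assert_called_once()
assert "9.9.9.9" in ips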
Example #3
def reset_user_api_key(user_profile: Any) -> None:
    old_api_key = user_profile.api_key
    user_profile.api_key = generate_api_key()
    cache_delete(user_profile_by_api_key_cache_key(old_api_key))

    # Like with any API key change, we need to clear any server-side
    # state for sending push notifications to mobile app clients that
    # could have been registered with the old API key.  Fortunately,
    # we can just write to the queue processor that handles sending
    # those notices to the push notifications bouncer service.
    event = {'type': 'clear_push_device_tokens',
             'user_profile_id': user_profile.id}
    queue_json_publish("deferred_work", event)
Example #4
    def test_cache_functions_raise_exception(self) -> None:
        invalid_key = "invalid_character:\n"
        good_key = "good_key"
        with self.assertRaises(InvalidCacheKeyException):
            cache_get(invalid_key)
        with self.assertRaises(InvalidCacheKeyException):
            cache_set(invalid_key, 0)
        with self.assertRaises(InvalidCacheKeyException):
            cache_delete(invalid_key)

        with self.assertRaises(InvalidCacheKeyException):
            cache_get_many([good_key, invalid_key])
        with self.assertRaises(InvalidCacheKeyException):
            cache_set_many({good_key: 0, invalid_key: 1})
        with self.assertRaises(InvalidCacheKeyException):
            cache_delete_many([good_key, invalid_key])
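This test implies that every cache helper validates keys before they reach memcached. A minimal sketch of such a check, assuming memcached's usual constraints (printable ASCII only, at most 250 bytes); the real check in zerver/lib/cache.py may differ:

import re

MEMCACHED_MAX_KEY_LENGTH = 250  # memcached's documented key-length limit

def validate_cache_key(key: str) -> None:
    # memcached's text protocol forbids whitespace and control characters
    # in keys, which is why the "\n" in the test above must be rejected.
    if not re.fullmatch(r"[!-~]+", key) or len(key) > MEMCACHED_MAX_KEY_LENGTH:
        raise InvalidCacheKeyException(f"Invalid cache key: {key!r}")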
Example #5
def do_regenerate_api_key(user_profile: UserProfile,
                          acting_user: UserProfile) -> str:
    old_api_key = user_profile.api_key
    new_api_key = generate_api_key()
    user_profile.api_key = new_api_key
    user_profile.save(update_fields=["api_key"])

    # We need to explicitly delete the old API key from our caches,
    # because the on-save handler for flushing the UserProfile object
    # in zerver/lib/cache.py only has access to the new API key.
    cache_delete(user_profile_by_api_key_cache_key(old_api_key))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        acting_user=acting_user,
        modified_user=user_profile,
        event_type=RealmAuditLog.USER_API_KEY_CHANGED,
        event_time=event_time,
    )

    if user_profile.is_bot:
        send_event(
            user_profile.realm,
            dict(
                type="realm_bot",
                op="update",
                bot=dict(
                    user_id=user_profile.id,
                    api_key=new_api_key,
                ),
            ),
            bot_owner_user_ids(user_profile),
        )

    event = {
        "type": "clear_push_device_tokens",
        "user_profile_id": user_profile.id
    }
    queue_json_publish("deferred_work", event)

    return new_api_key
Example #6
    def _send_message_with_test_org_url(self,
                                        sender: UserProfile,
                                        queue_should_run: bool = True,
                                        relative_url: bool = False) -> Message:
        url = "http://test.org/"
        # Ensure the cache for this is empty
        cache_delete(preview_url_cache_key(url))
        with mock_queue_publish(
                "zerver.actions.message_send.queue_json_publish") as patched:
            msg_id = self.send_personal_message(
                sender,
                self.example_user("cordelia"),
                content=url,
            )
            if queue_should_run:
                patched.assert_called_once()
                queue = patched.call_args[0][0]
                self.assertEqual(queue, "embed_links")
                event = patched.call_args[0][1]
            else:
                patched.assert_not_called()
                # If nothing was put in the queue, we don't need to
                # run the queue processor or any of the following code.
                return Message.objects.select_related("sender").get(id=msg_id)

        # Verify the initial message doesn't have the embedded links rendered
        msg = Message.objects.select_related("sender").get(id=msg_id)
        self.assertNotIn(f'<a href="{url}" title="The Rock">The Rock</a>',
                         msg.rendered_content)

        self.create_mock_response(url, relative_url=relative_url)

        # Run the queue processor to potentially rerender things
        with self.settings(TEST_SUITE=False):
            with self.assertLogs(level="INFO") as info_logs:
                FetchLinksEmbedData().consume(event)
            self.assertTrue(
                "INFO:root:Time spent on get_link_embed_data for http://test.org/: "
                in info_logs.output[0])

        msg = Message.objects.select_related("sender").get(id=msg_id)
        return msg
Example #7
def reload_message(msg_id: int) -> Message:
    # Clear the cache, then fetch a clean copy of the message.
    cache_delete(to_dict_cache_key_id(msg_id))
    msg = Message.objects.get(id=msg_id)
    return msg
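A hedged sketch of how a helper like reload_message gets used; do_edit_message_content is a hypothetical action standing in for whatever mutated the message:

do_edit_message_content(msg_id, "edited content")  # hypothetical mutation
msg = reload_message(msg_id)
# Assertions now run against fresh, uncached state.
assert msg.content == "edited content"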
Example #8
    def test_get_latest_github_release_download_link_for_platform(self) -> None:
        responses.add(
            responses.GET,
            "https://api.github.com/repos/zulip/zulip-desktop/releases/latest",
            json={"tag_name": "v5.4.3"},
            status=200,
        )

        responses.add(
            responses.HEAD,
            "https://github.com/zulip/zulip-desktop/releases/download/v5.4.3/Zulip-Web-Setup-5.4.3.exe",
            status=302,
        )
        self.assertEqual(
            get_latest_github_release_download_link_for_platform("windows"),
            "https://github.com/zulip/zulip-desktop/releases/download/v5.4.3/Zulip-Web-Setup-5.4.3.exe",
        )

        responses.add(
            responses.HEAD,
            "https://github.com/zulip/zulip-desktop/releases/download/v5.4.3/Zulip-5.4.3-x86_64.AppImage",
            status=302,
        )
        self.assertEqual(
            get_latest_github_release_download_link_for_platform("linux"),
            "https://github.com/zulip/zulip-desktop/releases/download/v5.4.3/Zulip-5.4.3-x86_64.AppImage",
        )

        responses.add(
            responses.HEAD,
            "https://github.com/zulip/zulip-desktop/releases/download/v5.4.3/Zulip-5.4.3-x64.dmg",
            status=302,
        )
        self.assertEqual(
            get_latest_github_release_download_link_for_platform("mac"),
            "https://github.com/zulip/zulip-desktop/releases/download/v5.4.3/Zulip-5.4.3-x64.dmg",
        )

        api_url = "https://api.github.com/repos/zulip/zulip-desktop/releases/latest"
        responses.replace(responses.GET, api_url, body=requests.RequestException())
        cache_delete("download_link:windows")
        with self.assertLogs(logger_string, level="ERROR") as error_log:
            self.assertEqual(
                get_latest_github_release_download_link_for_platform("windows"),
                "https://github.com/zulip/zulip-desktop/releases/latest",
            )
            self.assertEqual(
                error_log.output,
                [
                    f"ERROR:{logger_string}:Unable to fetch the latest release version from GitHub {api_url}"
                ],
            )

        responses.replace(
            responses.GET,
            "https://api.github.com/repos/zulip/zulip-desktop/releases/latest",
            json={"tag_name": "5.4.4"},
            status=200,
        )
        download_link = "https://github.com/zulip/zulip-desktop/releases/download/v5.4.4/Zulip-5.4.4-x86_64.AppImage"
        responses.add(responses.HEAD, download_link, status=404)
        cache_delete("download_link:linux")
        with self.assertLogs(logger_string, level="ERROR") as error_log:
            self.assertEqual(
                get_latest_github_release_download_link_for_platform("linux"),
                "https://github.com/zulip/zulip-desktop/releases/latest",
            )

            self.assertEqual(
                error_log.output,
                [f"ERROR:{logger_string}:App download link is broken {download_link}"],
            )

        with self.assertRaises(InvalidPlatform):
            get_latest_github_release_download_link_for_platform("plan9")
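The cache_delete("download_link:windows") calls reveal that the function memoizes its result per platform. A minimal sketch of that pattern using Django's low-level cache API, with only the key format taken from the test; compute_download_link is a hypothetical helper wrapping the GitHub API lookup and HEAD check exercised above:

from django.core.cache import cache

def get_download_link_cached(platform: str) -> str:
    key = f"download_link:{platform}"
    link = cache.get(key)
    if link is None:
        link = compute_download_link(platform)  # hypothetical helper
        cache.set(key, link, timeout=3600)  # timeout is an assumption
    return link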
Example #9
def do_rename_stream(stream: Stream, new_name: str, user_profile: UserProfile) -> Dict[str, str]:
    old_name = stream.name
    stream.name = new_name
    stream.save(update_fields=["name"])

    RealmAuditLog.objects.create(
        realm=stream.realm,
        acting_user=user_profile,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_NAME_CHANGED,
        event_time=timezone_now(),
        extra_data=orjson.dumps(
            {
                RealmAuditLog.OLD_VALUE: old_name,
                RealmAuditLog.NEW_VALUE: new_name,
            }
        ).decode(),
    )

    recipient_id = stream.recipient_id
    messages = Message.objects.filter(recipient_id=recipient_id).only("id")

    # Update the display recipient and stream, which are easy single
    # items to set.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    new_cache_key = get_stream_cache_key(stream.name, stream.realm_id)
    if old_cache_key != new_cache_key:
        cache_delete(old_cache_key)
        cache_set(new_cache_key, stream)
    cache_set(display_recipient_cache_key(recipient_id), stream.name)

    # Delete cache entries for everything else, which is cheaper and
    # clearer than trying to set them. display_recipient is the
    # out-of-date field in all cases.
    cache_delete_many(to_dict_cache_key_id(message.id) for message in messages)
    new_email = encode_email_address(stream, show_sender=True)

    # We will tell our users to essentially
    # update stream.name = new_name where name = old_name
    # and update stream.email = new_email where name = old_name.
    # We could optimize this by trying to send one message, but the
    # client code really wants one property update at a time, and
    # updating stream names is a pretty infrequent operation.
    # More importantly, we want to key these updates by id, not name,
    # since id is the immutable primary key, and obviously name is not.
    data_updates = [
        ["email_address", new_email],
        ["name", new_name],
    ]
    for property, value in data_updates:
        event = dict(
            op="update",
            type="stream",
            property=property,
            value=value,
            stream_id=stream.id,
            name=old_name,
        )
        send_event(stream.realm, event, can_access_stream_user_ids(stream))
    sender = get_system_bot(settings.NOTIFICATION_BOT, stream.realm_id)
    with override_language(stream.realm.default_language):
        internal_send_stream_message(
            sender,
            stream,
            Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
            _("{user_name} renamed stream {old_stream_name} to {new_stream_name}.").format(
                user_name=silent_mention_syntax_for_user(user_profile),
                old_stream_name=f"**{old_name}**",
                new_stream_name=f"**{new_name}**",
            ),
        )
    # Even though the token doesn't change, the web client needs to update the
    # email forwarding address to display the correctly-escaped new name.
    return {"email_address": new_email}
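For reference, one of the two events sent by the loop above, with illustrative values; the shape follows directly from the dict constructed in the code:

event = {
    "op": "update",
    "type": "stream",
    "property": "name",
    "value": "new-stream-name",
    "stream_id": 42,
    "name": "old-stream-name",
}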
Example #10
def do_deactivate_stream(
    stream: Stream, log: bool = True, *, acting_user: Optional[UserProfile]
) -> None:
    # We want to mark all messages in the to-be-deactivated stream as
    # read for all users; otherwise they will pollute queries like
    # "Get the user's first unread message".  Since this can be an
    # expensive operation, we do it via the deferred_work queue
    # processor.
    deferred_work_event = {
        "type": "mark_stream_messages_as_read_for_everyone",
        "stream_recipient_id": stream.recipient_id,
    }
    transaction.on_commit(lambda: queue_json_publish("deferred_work", deferred_work_event))

    # Get the affected user ids *before* we deactivate everybody.
    affected_user_ids = can_access_stream_user_ids(stream)

    get_active_subscriptions_for_stream_id(stream.id, include_deactivated_users=True).update(
        active=False
    )

    was_invite_only = stream.invite_only
    stream.deactivated = True
    stream.invite_only = True
    # Preserve as much of the original stream name as possible, while
    # giving it a special prefix that both indicates the stream is
    # deactivated and frees up the original name for reuse.
    old_name = stream.name

    # Prepend a substring of the hashed stream ID to the new stream name.
    hashed_stream_id = hashlib.sha512(str(stream.id).encode()).hexdigest()[0:7]

    new_name = (hashed_stream_id + "!DEACTIVATED:" + old_name)[: Stream.MAX_NAME_LENGTH]

    stream.name = new_name
    stream.save(update_fields=["name", "deactivated", "invite_only"])

    # If this is a default stream, remove it, properly sending a
    # notification to browser clients.
    if DefaultStream.objects.filter(realm_id=stream.realm_id, stream_id=stream.id).exists():
        do_remove_default_stream(stream)

    default_stream_groups_for_stream = DefaultStreamGroup.objects.filter(streams__id=stream.id)
    for group in default_stream_groups_for_stream:
        do_remove_streams_from_default_stream_group(stream.realm, group, [stream])

    # Remove the old stream information from remote cache.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    cache_delete(old_cache_key)

    stream_dict = stream.to_dict()
    stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
    event = dict(type="stream", op="delete", streams=[stream_dict])
    transaction.on_commit(lambda: send_event(stream.realm, event, affected_user_ids))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=stream.realm,
        acting_user=acting_user,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_DEACTIVATED,
        event_time=event_time,
    )
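A worked example of the renaming scheme above; the value of Stream.MAX_NAME_LENGTH (60 here) is an assumption:

import hashlib

stream_id = 42
old_name = "general"
hashed_stream_id = hashlib.sha512(str(stream_id).encode()).hexdigest()[0:7]
new_name = (hashed_stream_id + "!DEACTIVATED:" + old_name)[:60]
print(new_name)  # seven hex characters, then "!DEACTIVATED:general"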