Пример #1
0
    def test_set_realm_authentication_methods(self) -> None:
        """Changing realm auth methods writes exactly one audit-log row
        recording both the old and the new method dicts."""
        start_time = timezone_now()
        realm = get_realm("zulip")
        acting_user = self.example_user("hamlet")
        old_methods = realm.authentication_methods_dict()
        new_methods = {
            "Google": False,
            "Email": False,
            "GitHub": False,
            "Apple": False,
            "Dev": True,
            "SAML": True,
            "GitLab": False,
            "OpenID Connect": False,
        }

        do_set_realm_authentication_methods(realm, new_methods, acting_user=acting_user)

        audit_entries = RealmAuditLog.objects.filter(
            realm=realm,
            event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
            event_time__gte=start_time,
            acting_user=acting_user,
        )
        self.assertEqual(audit_entries.count(), 1)
        extra_data = orjson.loads(assert_is_not_none(audit_entries[0].extra_data))
        self.assertEqual(extra_data[RealmAuditLog.OLD_VALUE], old_methods)
        self.assertEqual(extra_data[RealmAuditLog.NEW_VALUE], new_methods)
Пример #2
0
def compute_plan_parameters(
    tier: int,
    automanage_licenses: bool,
    billing_schedule: int,
    discount: Optional[Decimal],
    free_trial: bool = False,
) -> Tuple[datetime, datetime, datetime, int]:
    """Return (billing_cycle_anchor, next_invoice_date, period_end,
    price_per_license) for a plan starting now.

    Raises InvalidBillingSchedule for an unknown billing_schedule.
    """
    # Everything in Stripe is stored as timestamps with 1 second resolution,
    # so standardize on 1 second resolution.
    # TODO talk about leap seconds?
    billing_cycle_anchor = timezone_now().replace(microsecond=0)

    if billing_schedule == CustomerPlan.ANNUAL:
        months_per_cycle = 12
    elif billing_schedule == CustomerPlan.MONTHLY:
        months_per_cycle = 1
    else:  # nocoverage
        raise InvalidBillingSchedule(billing_schedule)
    period_end = add_months(billing_cycle_anchor, months_per_cycle)

    price_per_license = get_price_per_license(tier, billing_schedule, discount)

    # Automanaged-license plans are invoiced monthly regardless of the
    # billing cycle length; otherwise we invoice at the end of the cycle.
    if automanage_licenses:
        next_invoice_date = add_months(billing_cycle_anchor, 1)
    else:
        next_invoice_date = period_end
    if free_trial:
        # A free trial ends after FREE_TRIAL_DAYS and is invoiced at its end.
        period_end = billing_cycle_anchor + timedelta(
            days=assert_is_not_none(settings.FREE_TRIAL_DAYS)
        )
        next_invoice_date = period_end
    return billing_cycle_anchor, next_invoice_date, period_end, price_per_license
Пример #3
0
    def _get_parameters(self) -> pika.ConnectionParameters:
        """Build the pika connection parameters from Django settings."""
        # With BlockingConnection, we are passed
        # self.rabbitmq_heartbeat=0, which asks to explicitly disable
        # the RabbitMQ heartbeat feature.  This is correct since that
        # heartbeat doesn't make sense with BlockingConnection (we do
        # need it for TornadoConnection).
        #
        # Where we've disabled RabbitMQ's heartbeat, the only
        # keepalive on this connection is the TCP keepalive (defaults:
        # `/proc/sys/net/ipv4/tcp_keepalive_*`).  On most Linux
        # systems, the default is to start sending keepalive packets
        # after TCP_KEEPIDLE (7200 seconds) of inactivity; after that
        # point, it send them every TCP_KEEPINTVL (typically 75s).
        # Some Kubernetes / Docker Swarm networks can kill "idle" TCP
        # connections after as little as ~15 minutes of inactivity.
        # To avoid this killing our RabbitMQ connections, we set
        # TCP_KEEPIDLE to something significantly below 15 minutes.
        if self.rabbitmq_heartbeat == 0:
            tcp_options = dict(TCP_KEEPIDLE=60 * 5)
        else:
            tcp_options = None

        return pika.ConnectionParameters(
            settings.RABBITMQ_HOST,
            heartbeat=self.rabbitmq_heartbeat,
            tcp_options=tcp_options,
            credentials=pika.PlainCredentials(
                settings.RABBITMQ_USERNAME,
                assert_is_not_none(settings.RABBITMQ_PASSWORD),
            ),
        )
Пример #4
0
def get_topics_backend(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    stream_id: int = REQ(converter=to_non_negative_int, path_only=True),
) -> HttpResponse:
    """Return the topic history of a stream.

    Unauthenticated requests are served only for web-public streams;
    authenticated users get history subject to the stream's
    history-visibility policy.
    """
    if not maybe_user_profile.is_authenticated:
        # Web-public query: there is no user, and the stream must be
        # web-public to be accessible at all.
        realm = get_valid_realm_from_request(request)
        stream = access_web_public_stream(stream_id, realm)
        result = get_topic_history_for_public_stream(
            recipient_id=assert_is_not_none(stream.recipient_id)
        )
    else:
        # Narrow the Union for mypy; an authenticated request always
        # carries a real UserProfile.
        assert isinstance(maybe_user_profile, UserProfile)
        user_profile = maybe_user_profile

        (stream, sub) = access_stream_by_id(user_profile, stream_id)

        result = get_topic_history_for_stream(
            user_profile=user_profile,
            recipient_id=stream.recipient_id,
            public_history=stream.is_history_public_to_subscribers(),
        )

    return json_success(request, data=dict(topics=result))
Пример #5
0
def write_local_file(type: str, path: str, file_data: bytes) -> None:
    """Write file_data to LOCAL_UPLOADS_DIR/<type>/<path>, creating any
    missing parent directories."""
    uploads_dir = assert_is_not_none(settings.LOCAL_UPLOADS_DIR)
    file_path = os.path.join(uploads_dir, type, path)

    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, "wb") as f:
        f.write(file_data)
Пример #6
0
    def test_realm_activation(self) -> None:
        """Deactivating and reactivating a realm each write an audit-log
        entry whose extra_data carries a role-count snapshot."""
        realm = get_realm("zulip")
        acting_user = self.example_user("desdemona")

        do_deactivate_realm(realm, acting_user=acting_user)
        deactivation_entry = RealmAuditLog.objects.get(
            realm=realm,
            event_type=RealmAuditLog.REALM_DEACTIVATED,
            acting_user=acting_user,
        )
        deactivation_data = orjson.loads(
            assert_is_not_none(deactivation_entry.extra_data)
        )
        self.check_role_count_schema(deactivation_data[RealmAuditLog.ROLE_COUNT])

        do_reactivate_realm(realm)
        reactivation_entry = RealmAuditLog.objects.get(
            realm=realm, event_type=RealmAuditLog.REALM_REACTIVATED
        )
        reactivation_data = orjson.loads(
            assert_is_not_none(reactivation_entry.extra_data)
        )
        self.check_role_count_schema(reactivation_data[RealmAuditLog.ROLE_COUNT])
Пример #7
0
def get_local_file_path(path_id: str) -> Optional[str]:
    """Return the absolute path of the uploaded file for path_id, or
    None if no such file exists on disk."""
    local_path = os.path.join(
        assert_is_not_none(settings.LOCAL_UPLOADS_DIR), "files", path_id
    )
    return local_path if os.path.isfile(local_path) else None
Пример #8
0
def get_realms_to_default_discount_dict() -> Dict[str, Decimal]:
    """Map each realm's string_id to its nonzero default discount.

    Customers whose default_discount is NULL or 0 are excluded, so the
    assert_is_not_none below cannot fire.
    """
    customers = Customer.objects.exclude(default_discount=None).exclude(default_discount=0)
    # Note: the local was previously annotated Dict[str, Any], which
    # contradicted the declared Dict[str, Decimal] return type.
    return {
        customer.realm.string_id: assert_is_not_none(customer.default_discount)
        for customer in customers
    }
Пример #9
0
    def test_no_realm_api_page_og_url(self) -> None:
        """The /api/ page on the root domain must advertise an og:url
        meta tag whose content ends in /api/."""
        response = self.client_get("/api/", subdomain="")
        self.assertEqual(response.status_code, 200)

        soup = BeautifulSoup(response.content, features="lxml")
        og_url_tag = assert_is_not_none(soup.select_one('meta[property="og:url"]'))
        og_url = og_url_tag.get("content")

        assert isinstance(og_url, str)
        self.assertTrue(og_url.endswith("/api/"))
Пример #10
0
def delete_local_file(type: str, path: str) -> bool:
    """Delete LOCAL_UPLOADS_DIR/<type>/<path> if it exists.

    Returns True when a file was removed; logs a warning and returns
    False when the file was already missing.  Empty parent directories
    are left in place either way.
    """
    file_path = os.path.join(assert_is_not_none(settings.LOCAL_UPLOADS_DIR), type, path)
    if not os.path.isfile(file_path):
        file_name = path.split("/")[-1]
        logging.warning("%s does not exist. Its entry in the database will be removed.", file_name)
        return False
    # This removes the file but the empty folders still remain.
    os.remove(file_path)
    return True
Пример #11
0
 def test_change_role(self) -> None:
     """Each role change writes a USER_ROLE_CHANGED audit entry carrying
     a role-count snapshot plus the old and new role values."""
     realm = get_realm("zulip")
     now = timezone_now()
     user_profile = self.example_user("hamlet")
     acting_user = self.example_user("iago")
     # Bounce hamlet through every non-member role, returning to member
     # in between, so every role shows up as both an old and a new value.
     role_sequence = [
         UserProfile.ROLE_REALM_ADMINISTRATOR,
         UserProfile.ROLE_MEMBER,
         UserProfile.ROLE_GUEST,
         UserProfile.ROLE_MEMBER,
         UserProfile.ROLE_REALM_OWNER,
         UserProfile.ROLE_MEMBER,
         UserProfile.ROLE_MODERATOR,
         UserProfile.ROLE_MEMBER,
     ]
     for role in role_sequence:
         do_change_user_role(user_profile, role, acting_user=acting_user)
     old_values_seen = set()
     new_values_seen = set()
     for event in RealmAuditLog.objects.filter(
             event_type=RealmAuditLog.USER_ROLE_CHANGED,
             realm=realm,
             modified_user=user_profile,
             acting_user=acting_user,
             event_time__gte=now,
             event_time__lte=now + timedelta(minutes=60),
     ):
         extra_data = orjson.loads(assert_is_not_none(event.extra_data))
         self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT])
         self.assertIn(RealmAuditLog.OLD_VALUE, extra_data)
         self.assertIn(RealmAuditLog.NEW_VALUE, extra_data)
         old_values_seen.add(extra_data[RealmAuditLog.OLD_VALUE])
         new_values_seen.add(extra_data[RealmAuditLog.NEW_VALUE])
     self.assertEqual(
         old_values_seen,
         {
             UserProfile.ROLE_GUEST,
             UserProfile.ROLE_MEMBER,
             UserProfile.ROLE_REALM_ADMINISTRATOR,
             UserProfile.ROLE_REALM_OWNER,
             UserProfile.ROLE_MODERATOR,
         },
     )
     self.assertEqual(old_values_seen, new_values_seen)
Пример #12
0
def get_user_mutes(user_profile: UserProfile) -> List[Dict[str, int]]:
    """Return the users muted by user_profile as a list of
    {"id": ..., "timestamp": ...} dicts."""
    rows = MutedUser.objects.filter(user_profile=user_profile).values(
        "muted_user_id",
        "date_muted",
    )
    result = []
    for row in rows:
        date_muted = assert_is_not_none(row["date_muted"])
        result.append(
            {
                "id": row["muted_user_id"],
                "timestamp": datetime_to_timestamp(date_muted),
            }
        )
    return result
Пример #13
0
    def check_title_and_description(
        self,
        path: str,
        title: str,
        in_description: List[str],
        not_in_description: List[str],
        status_code: int = 200,
    ) -> None:
        """Fetch path and verify its Open Graph metadata.

        The og:title must equal title exactly; the og:description must
        contain every string in in_description and none of the strings
        in not_in_description.
        """
        response = self.client_get(path)
        self.assertEqual(response.status_code, status_code)
        soup = BeautifulSoup(response.content, features="lxml")

        title_tag = assert_is_not_none(soup.select_one('meta[property="og:title"]'))
        self.assertEqual(title_tag.get("content"), title)

        description_tag = assert_is_not_none(
            soup.select_one('meta[property="og:description"]')
        )
        description = description_tag.get("content")
        for expected in in_description:
            self.assertIn(expected, description)
        for unexpected in not_in_description:
            self.assertNotIn(unexpected, description)
Пример #14
0
    def ensure_avatar_image(self, user_profile: UserProfile, is_medium: bool = False) -> None:
        """Generate the resized avatar file for user_profile from its
        stored .original upload, unless it already exists on disk."""
        file_extension = "-medium.png" if is_medium else ".png"
        file_path = user_avatar_path(user_profile)
        uploads_dir = assert_is_not_none(settings.LOCAL_UPLOADS_DIR)

        output_path = os.path.join(uploads_dir, "avatars", file_path + file_extension)
        if os.path.isfile(output_path):
            # Already generated; nothing to do.
            return

        image_path = os.path.join(uploads_dir, "avatars", file_path + ".original")
        with open(image_path, "rb") as f:
            image_data = f.read()
        if is_medium:
            resized_avatar = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        else:
            resized_avatar = resize_avatar(image_data)
        write_local_file("avatars", file_path + file_extension, resized_avatar)
Пример #15
0
    def test_login_page_realm_icon(self) -> None:
        """With an uploaded ("U") icon source, the login page's og:image
        must be the realm icon URL prefixed with the realm's URI."""
        realm = get_realm("zulip")
        realm.icon_source = "U"
        realm.save(update_fields=["icon_source"])
        realm_icon = get_realm_icon_url(realm)

        response = self.client_get("/login/")
        self.assertEqual(response.status_code, 200)

        soup = BeautifulSoup(response.content, features="lxml")
        image_tag = assert_is_not_none(soup.select_one('meta[property="og:image"]'))
        self.assertEqual(image_tag.get("content"), f"{realm.uri}{realm_icon}")
Пример #16
0
def delete_realm_export(request: HttpRequest, user: UserProfile, export_id: int) -> HttpResponse:
    """Delete a realm data export identified by its audit-log row ID.

    Raises JsonableError when the ID does not match a REALM_EXPORTED
    entry for the user's realm, or when the export's extra_data already
    records a deleted_timestamp (i.e. it was deleted earlier).
    """
    try:
        audit_log_entry = RealmAuditLog.objects.get(
            id=export_id, realm=user.realm, event_type=RealmAuditLog.REALM_EXPORTED
        )
    except RealmAuditLog.DoesNotExist:
        raise JsonableError(_("Invalid data export ID"))

    # extra_data is set when the export is created, so it must be present.
    export_data = orjson.loads(assert_is_not_none(audit_log_entry.extra_data))
    if "deleted_timestamp" in export_data:
        raise JsonableError(_("Export already deleted"))
    do_delete_realm_export(user, audit_log_entry)
    return json_success()
Пример #17
0
    def test_login_page_realm_icon_absolute_url(self) -> None:
        """When the icon backend returns an absolute URL (e.g. S3), the
        login page's og:image must use it unmodified."""
        realm = get_realm("zulip")
        realm.icon_source = "U"
        realm.save(update_fields=["icon_source"])
        icon_url = f"https://foo.s3.amazonaws.com/{realm.id}/realm/icon.png?version={1}"
        with patch(
            "zerver.lib.realm_icon.upload_backend.get_realm_icon_url",
            return_value=icon_url,
        ):
            response = self.client_get("/login/")
        self.assertEqual(response.status_code, 200)

        soup = BeautifulSoup(response.content, features="lxml")
        image_tag = assert_is_not_none(soup.select_one('meta[property="og:image"]'))
        self.assertEqual(image_tag.get("content"), icon_url)
Пример #18
0
 def upload_export_tarball(
     self,
     realm: Realm,
     tarball_path: str,
     percent_callback: Optional[Callable[[Any], None]] = None,
 ) -> str:
     """Copy a realm-export tarball into the local avatars directory
     under a random token and return its public download URL.

     percent_callback is accepted for interface compatibility with
     other upload backends; a local copy reports no progress.
     """
     relative_path = os.path.join(
         "exports",
         str(realm.id),
         secrets.token_urlsafe(18),
         os.path.basename(tarball_path),
     )
     destination = os.path.join(
         assert_is_not_none(settings.LOCAL_UPLOADS_DIR), "avatars", relative_path
     )
     os.makedirs(os.path.dirname(destination), exist_ok=True)
     shutil.copy(tarball_path, destination)
     return realm.uri + "/user_avatars/" + relative_path
Пример #19
0
 def test_user_activation(self) -> None:
     """Creating, deactivating, activating, and reactivating a user
     produce audit-log entries in order, each with a role-count
     snapshot and no OLD_VALUE field."""
     realm = get_realm("zulip")
     now = timezone_now()
     user = do_create_user(
         "email", "password", realm, "full_name", acting_user=None
     )
     do_deactivate_user(user, acting_user=user)
     do_activate_mirror_dummy_user(user, acting_user=user)
     do_deactivate_user(user, acting_user=user)
     do_reactivate_user(user, acting_user=user)
     self.assertEqual(
         RealmAuditLog.objects.filter(event_time__gte=now).count(), 6)
     # Shared filter for the entries written by the calls above.
     audit_filter = dict(
         realm=realm,
         acting_user=user,
         modified_user=user,
         modified_stream=None,
         event_time__gte=now,
         event_time__lte=now + timedelta(minutes=60),
     )
     event_types = list(
         RealmAuditLog.objects.filter(**audit_filter)
         .order_by("event_time")
         .values_list("event_type", flat=True)
     )
     self.assertEqual(
         event_types,
         [
             RealmAuditLog.USER_CREATED,
             RealmAuditLog.USER_DEACTIVATED,
             RealmAuditLog.USER_ACTIVATED,
             RealmAuditLog.USER_DEACTIVATED,
             RealmAuditLog.USER_REACTIVATED,
         ],
     )
     for event in RealmAuditLog.objects.filter(**audit_filter):
         extra_data = orjson.loads(assert_is_not_none(event.extra_data))
         self.check_role_count_schema(extra_data[RealmAuditLog.ROLE_COUNT])
         self.assertNotIn(RealmAuditLog.OLD_VALUE, extra_data)
Пример #20
0
def delete_in_topic(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(converter=to_non_negative_int, path_only=True),
    topic_name: str = REQ("topic_name"),
) -> HttpResponse:
    """Delete every message in the given stream topic that the acting
    user is allowed to access."""
    (stream, sub) = access_stream_by_id(user_profile, stream_id)

    messages = messages_for_topic(assert_is_not_none(stream.recipient_id), topic_name)
    if not stream.is_history_public_to_subscribers():
        # Don't allow the user to delete messages that they don't have access to.
        deletable_message_ids = UserMessage.objects.filter(
            user_profile=user_profile, message_id__in=messages
        ).values_list("message_id", flat=True)
        messages = messages.filter(id__in=deletable_message_ids)

    # Lock the selected message rows for the duration of the transaction.
    messages = messages.select_for_update(of=("self",))

    do_delete_messages(user_profile.realm, messages)

    return json_success()
Пример #21
0
    def test_set_realm_message_editing(self) -> None:
        """Updating message-editing settings writes one audit-log row per
        changed property, in order, with old and new values."""
        now = timezone_now()
        realm = get_realm("zulip")
        acting_user = self.example_user("hamlet")
        expected_entries = [
            {
                "property": "message_content_edit_limit_seconds",
                RealmAuditLog.OLD_VALUE: realm.message_content_edit_limit_seconds,
                RealmAuditLog.NEW_VALUE: 1000,
            },
            {
                "property": "edit_topic_policy",
                RealmAuditLog.OLD_VALUE: Realm.POLICY_EVERYONE,
                RealmAuditLog.NEW_VALUE: Realm.POLICY_ADMINS_ONLY,
            },
        ]

        do_set_realm_message_editing(
            realm, True, 1000, Realm.POLICY_ADMINS_ONLY, acting_user=acting_user
        )
        audit_entries = RealmAuditLog.objects.filter(
            realm=realm,
            event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
            event_time__gte=now,
            acting_user=acting_user,
        ).order_by("id")
        self.assertEqual(audit_entries.count(), 2)
        actual_entries = [
            orjson.loads(assert_is_not_none(entry.extra_data))
            for entry in audit_entries
        ]
        self.assertEqual(actual_entries, expected_entries)
Пример #22
0
def read_local_file(type: str, path: str) -> bytes:
    """Read and return the contents of LOCAL_UPLOADS_DIR/<type>/<path>."""
    uploads_dir = assert_is_not_none(settings.LOCAL_UPLOADS_DIR)
    with open(os.path.join(uploads_dir, type, path), "rb") as f:
        return f.read()
Пример #23
0
    def test_openapi_arguments(self) -> None:
        """This end-to-end API documentation test compares the arguments
        defined in the actual code using @has_request_variables and
        REQ(), with the arguments declared in our API documentation
        for every API endpoint in Zulip.

        First, we import the fancy-Django version of zproject/urls.py
        by doing this, each has_request_variables wrapper around each
        imported view function gets called to generate the wrapped
        view function and thus filling the global arguments_map variable.
        Basically, we're exploiting code execution during import.

            Then we need to import some view modules not already imported in
        urls.py. We use this different syntax because of the linters complaining
        of an unused import (which is correct, but we do this for triggering the
        has_request_variables decorator).

            At the end, we perform a reverse mapping test that verifies that
        every URL pattern defined in the OpenAPI documentation actually exists
        in code.
        """

        from zproject import urls as urlconf

        # We loop through all the API patterns, looking in particular
        # for those using the rest_dispatch decorator; we then parse
        # its mapping of (HTTP_METHOD -> FUNCTION).
        for p in urlconf.v1_api_and_json_patterns + urlconf.v1_api_mobile_patterns:
            if p.callback is not rest_dispatch:
                # Endpoints not using rest_dispatch don't have extra data.
                methods_endpoints: Dict[str, Any] = dict(
                    GET=p.callback,
                )
            else:
                methods_endpoints = assert_is_not_none(p.default_args)

            # since the module was already imported and is now residing in
            # memory, we won't actually face any performance penalties here.
            for method, value in methods_endpoints.items():
                # Each mapping value is either a bare view function or a
                # (function, tags) pair from rest_dispatch.
                if callable(value):
                    function: Callable[..., HttpResponse] = value
                    tags: Set[str] = set()
                else:
                    function, tags = value

                if function is get_events:
                    # Work around the fact that the registered
                    # get_events view function isn't where we do
                    # @has_request_variables.
                    #
                    # TODO: Make this configurable via an optional argument
                    # to has_request_variables, e.g.
                    # @has_request_variables(view_func_name="zerver.tornado.views.get_events")
                    function = get_events_backend

                function_name = f"{function.__module__}.{function.__name__}"

                # Our accounting logic in the `has_request_variables()`
                # code means we have the list of all arguments
                # accepted by every view function in arguments_map.
                accepted_arguments = set(arguments_map[function_name])

                # Translate the Django URL regex into the OpenAPI-style
                # path used as the key into our documentation.
                regex_pattern = p.pattern.regex.pattern
                url_pattern = self.convert_regex_to_url_pattern(regex_pattern)

                if "intentionally_undocumented" in tags:
                    self.ensure_no_documentation_if_intentionally_undocumented(url_pattern, method)
                    continue

                if url_pattern in self.pending_endpoints:
                    # HACK: After all pending_endpoints have been resolved, we should remove
                    # this segment and the "msg" part of the `ensure_no_...` method.
                    msg = f"""
We found some OpenAPI documentation for {method} {url_pattern},
so maybe we shouldn't include it in pending_endpoints.
"""
                    self.ensure_no_documentation_if_intentionally_undocumented(
                        url_pattern, method, msg
                    )
                    continue

                try:
                    # Don't include OpenAPI parameters that live in
                    # the path; these are not extracted by REQ.
                    openapi_parameters = get_openapi_parameters(
                        url_pattern, method, include_url_parameters=False
                    )
                except Exception:  # nocoverage
                    raise AssertionError(f"Could not find OpenAPI docs for {method} {url_pattern}")

                # We now have everything we need to understand the
                # function as defined in our urls.py:
                #
                # * method is the HTTP method, e.g. GET, POST, or PATCH
                #
                # * p.pattern.regex.pattern is the URL pattern; might require
                #   some processing to match with OpenAPI rules
                #
                # * accepted_arguments is the full set of arguments
                #   this method accepts (from the REQ declarations in
                #   code).
                #
                # * The documented parameters for the endpoint as recorded in our
                #   OpenAPI data in zerver/openapi/zulip.yaml.
                #
                # We now compare these to confirm that the documented
                # argument list matches what actually appears in the
                # codebase.

                openapi_parameter_names = {parameter["name"] for parameter in openapi_parameters}

                if len(accepted_arguments - openapi_parameter_names) > 0:  # nocoverage
                    print("Undocumented parameters for", url_pattern, method, function_name)
                    print(" +", openapi_parameter_names)
                    print(" -", accepted_arguments)
                    assert url_pattern in self.buggy_documentation_endpoints
                elif len(openapi_parameter_names - accepted_arguments) > 0:  # nocoverage
                    print("Documented invalid parameters for", url_pattern, method, function_name)
                    print(" -", openapi_parameter_names)
                    print(" +", accepted_arguments)
                    assert url_pattern in self.buggy_documentation_endpoints
                else:
                    self.assertEqual(openapi_parameter_names, accepted_arguments)
                    self.check_argument_types(function, openapi_parameters)
                    self.checked_endpoints.add(url_pattern)

        self.check_for_non_existant_openapi_endpoints()
Пример #24
0
def support(request: HttpRequest) -> HttpResponse:
    context: Dict[str, Any] = {}

    if "success_message" in request.session:
        context["success_message"] = request.session["success_message"]
        del request.session["success_message"]

    if settings.BILLING_ENABLED and request.method == "POST":
        # We check that request.POST only has two keys in it: The
        # realm_id and a field to change.
        keys = set(request.POST.keys())
        if "csrfmiddlewaretoken" in keys:
            keys.remove("csrfmiddlewaretoken")
        if len(keys) != 2:
            raise JsonableError(_("Invalid parameters"))

        realm_id: str = assert_is_not_none(request.POST.get("realm_id"))
        realm = Realm.objects.get(id=realm_id)

        acting_user = request.user
        assert isinstance(acting_user, UserProfile)
        if request.POST.get("plan_type", None) is not None:
            new_plan_type = int(
                assert_is_not_none(request.POST.get("plan_type")))
            current_plan_type = realm.plan_type
            do_change_plan_type(realm, new_plan_type, acting_user=acting_user)
            msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(new_plan_type)} "
            context["success_message"] = msg
        elif request.POST.get("discount", None) is not None:
            new_discount = Decimal(
                assert_is_not_none(request.POST.get("discount")))
            current_discount = get_discount_for_realm(realm) or 0
            attach_discount_to_realm(realm,
                                     new_discount,
                                     acting_user=acting_user)
            context[
                "success_message"] = f"Discount of {realm.string_id} changed to {new_discount}% from {current_discount}%."
        elif request.POST.get("new_subdomain", None) is not None:
            new_subdomain: str = assert_is_not_none(
                request.POST.get("new_subdomain"))
            old_subdomain = realm.string_id
            try:
                check_subdomain_available(new_subdomain)
            except ValidationError as error:
                context["error_message"] = error.message
            else:
                do_change_realm_subdomain(realm,
                                          new_subdomain,
                                          acting_user=acting_user)
                request.session[
                    "success_message"] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
                return HttpResponseRedirect(
                    reverse("support") + "?" + urlencode({"q": new_subdomain}))
        elif request.POST.get("status", None) is not None:
            status = request.POST.get("status")
            if status == "active":
                do_send_realm_reactivation_email(realm,
                                                 acting_user=acting_user)
                context[
                    "success_message"] = f"Realm reactivation email sent to admins of {realm.string_id}."
            elif status == "deactivated":
                do_deactivate_realm(realm, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} deactivated."
        elif request.POST.get("billing_method", None) is not None:
            billing_method = request.POST.get("billing_method")
            if billing_method == "send_invoice":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=False, acting_user=acting_user)
                context[
                    "success_message"] = f"Billing method of {realm.string_id} updated to pay by invoice."
            elif billing_method == "charge_automatically":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=True, acting_user=acting_user)
                context[
                    "success_message"] = f"Billing method of {realm.string_id} updated to charge automatically."
        elif request.POST.get("sponsorship_pending", None) is not None:
            sponsorship_pending = request.POST.get("sponsorship_pending")
            if sponsorship_pending == "true":
                update_sponsorship_status(realm, True, acting_user=acting_user)
                context[
                    "success_message"] = f"{realm.string_id} marked as pending sponsorship."
            elif sponsorship_pending == "false":
                update_sponsorship_status(realm,
                                          False,
                                          acting_user=acting_user)
                context[
                    "success_message"] = f"{realm.string_id} is no longer pending sponsorship."
        elif request.POST.get("approve_sponsorship") is not None:
            if request.POST.get(
                    "approve_sponsorship") == "approve_sponsorship":
                approve_sponsorship(realm, acting_user=acting_user)
                context[
                    "success_message"] = f"Sponsorship approved for {realm.string_id}"
        elif request.POST.get("downgrade_method", None) is not None:
            downgrade_method = request.POST.get("downgrade_method")
            if downgrade_method == "downgrade_at_billing_cycle_end":
                downgrade_at_the_end_of_billing_cycle(realm)
                context[
                    "success_message"] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
            elif downgrade_method == "downgrade_now_without_additional_licenses":
                downgrade_now_without_creating_additional_invoices(realm)
                context[
                    "success_message"] = f"{realm.string_id} downgraded without creating additional invoices"
            elif downgrade_method == "downgrade_now_void_open_invoices":
                downgrade_now_without_creating_additional_invoices(realm)
                voided_invoices_count = void_all_open_invoices(realm)
                context[
                    "success_message"] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
        elif request.POST.get("scrub_realm", None) is not None:
            if request.POST.get("scrub_realm") == "scrub_realm":
                do_scrub_realm(realm, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} scrubbed."

    query = request.GET.get("q", None)
    if query:
        key_words = get_invitee_emails_set(query)

        users = set(UserProfile.objects.filter(delivery_email__in=key_words))
        realms = set(Realm.objects.filter(string_id__in=key_words))

        for key_word in key_words:
            try:
                URLValidator()(key_word)
                parse_result = urllib.parse.urlparse(key_word)
                hostname = parse_result.hostname
                assert hostname is not None
                if parse_result.port:
                    hostname = f"{hostname}:{parse_result.port}"
                subdomain = get_subdomain_from_hostname(hostname)
                try:
                    realms.add(get_realm(subdomain))
                except Realm.DoesNotExist:
                    pass
            except ValidationError:
                users.update(
                    UserProfile.objects.filter(full_name__iexact=key_word))

        for realm in realms:
            realm.customer = get_customer_by_realm(realm)

            current_plan = get_current_plan_by_realm(realm)
            if current_plan is not None:
                new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
                    current_plan, timezone_now())
                if last_ledger_entry is not None:
                    if new_plan is not None:
                        realm.current_plan = new_plan
                    else:
                        realm.current_plan = current_plan
                    realm.current_plan.licenses = last_ledger_entry.licenses
                    realm.current_plan.licenses_used = get_latest_seat_count(
                        realm)

        # full_names can have , in them
        users.update(UserProfile.objects.filter(full_name__iexact=query))

        context["users"] = users
        context["realms"] = realms

        confirmations: List[Dict[str, Any]] = []

        preregistration_users = PreregistrationUser.objects.filter(
            email__in=key_words)
        confirmations += get_confirmations(
            [
                Confirmation.USER_REGISTRATION, Confirmation.INVITATION,
                Confirmation.REALM_CREATION
            ],
            preregistration_users,
            hostname=request.get_host(),
        )

        multiuse_invites = MultiuseInvite.objects.filter(realm__in=realms)
        confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE],
                                           multiuse_invites)

        confirmations += get_confirmations([Confirmation.REALM_REACTIVATION],
                                           [realm.id for realm in realms])

        context["confirmations"] = confirmations

    def get_realm_owner_emails_as_string(realm: Realm) -> str:
        # Comma-separated delivery emails of the realm's human owners,
        # sorted alphabetically by email.
        owner_emails = (realm.get_human_owner_users()
                        .order_by("delivery_email")
                        .values_list("delivery_email", flat=True))
        return ", ".join(owner_emails)

    def get_realm_admin_emails_as_string(realm: Realm) -> str:
        # Comma-separated delivery emails of the realm's human
        # administrators (owners excluded), sorted alphabetically.
        admin_emails = (realm.get_human_admin_users(include_realm_owners=False)
                        .order_by("delivery_email")
                        .values_list("delivery_email", flat=True))
        return ", ".join(admin_emails)

    context[
        "get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
    context[
        "get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
    context["get_discount_for_realm"] = get_discount_for_realm
    context["get_org_type_display_name"] = get_org_type_display_name
    context["realm_icon_url"] = realm_icon_url
    context["Confirmation"] = Confirmation
    return render(request, "analytics/support.html", context=context)
Пример #25
0
def add_missing_messages(user_profile: UserProfile) -> None:
    """This function takes a soft-deactivated user, and computes and adds
    to the database any UserMessage rows that were not created while
    the user was soft-deactivated.  The end result is that from the
    perspective of the message database, it should be impossible to
    tell that the user was soft-deactivated at all.

    At a high level, the algorithm is as follows:

    * Find all the streams that the user was at any time a subscriber
      of when or after they were soft-deactivated (`recipient_ids`
      below).

    * Find all the messages sent to those streams since the user was
      soft-deactivated.  This will be a superset of the target
      UserMessages we need to create in two ways: (1) some UserMessage
      rows will have already been created in do_send_messages because
      the user had a nonzero set of flags (the fact that we do so in
      do_send_messages simplifies things considerably, since it means
      we don't need to inspect message content to look for things like
      mentions here), and (2) the user might not have been subscribed
      to all of the streams in recipient_ids for the entire time
      window.

    * Correct the list from the previous state by excluding those with
      existing UserMessage rows.

    * Correct the list from the previous state by excluding those
      where the user wasn't subscribed at the time, using the
      RealmAuditLog data to determine exactly when the user was
      subscribed/unsubscribed.

    * Create the UserMessage rows.

    For further documentation, see:

      https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html#soft-deactivation

    """
    # last_active_message_id is the newest message for which UserMessage
    # rows are known to exist for this user; everything after it is the
    # window we need to backfill.
    assert user_profile.last_active_message_id is not None
    all_stream_subs = list(
        Subscription.objects.filter(user_profile=user_profile,
                                    recipient__type=Recipient.STREAM).values(
                                        "recipient_id", "recipient__type_id"))

    # For stream messages we need to check messages against data from
    # RealmAuditLog for visibility to user. So we fetch the subscription logs.
    stream_ids = [sub["recipient__type_id"] for sub in all_stream_subs]
    events = [
        RealmAuditLog.SUBSCRIPTION_CREATED,
        RealmAuditLog.SUBSCRIPTION_DEACTIVATED,
        RealmAuditLog.SUBSCRIPTION_ACTIVATED,
    ]

    # Important: We order first by event_last_message_id, which is the
    # official ordering, and then tiebreak by RealmAuditLog event ID.
    # That second tiebreak is important in case a user is subscribed
    # and then unsubscribed without any messages being sent in the
    # meantime.  Without that tiebreak, we could end up incorrectly
    # processing the ordering of those two subscription changes.
    subscription_logs = list(
        RealmAuditLog.objects.select_related("modified_stream").filter(
            modified_user=user_profile,
            modified_stream_id__in=stream_ids,
            event_type__in=events).order_by("event_last_message_id", "id"))

    # Group the subscription-change events by stream ID, preserving the
    # ordering established by the query above.
    all_stream_subscription_logs: DefaultDict[
        int, List[RealmAuditLog]] = defaultdict(list)
    for log in subscription_logs:
        all_stream_subscription_logs[assert_is_not_none(
            log.modified_stream_id)].append(log)

    recipient_ids = []
    for sub in all_stream_subs:
        stream_subscription_logs = all_stream_subscription_logs[
            sub["recipient__type_id"]]
        if stream_subscription_logs[
                -1].event_type == RealmAuditLog.SUBSCRIPTION_DEACTIVATED:
            assert stream_subscription_logs[
                -1].event_last_message_id is not None
            if (stream_subscription_logs[-1].event_last_message_id <=
                    user_profile.last_active_message_id):
                # Skip this stream entirely: the user's final
                # unsubscription happened at or before soft-deactivation,
                # so no messages in our window could be visible to them.
                continue
        recipient_ids.append(sub["recipient_id"])

    all_stream_msgs = list(
        Message.objects.filter(
            recipient_id__in=recipient_ids,
            id__gt=user_profile.last_active_message_id).order_by("id").values(
                "id", "recipient__type_id"))
    already_created_ums = set(
        UserMessage.objects.filter(
            user_profile=user_profile,
            message__recipient__type=Recipient.STREAM,
            message_id__gt=user_profile.last_active_message_id,
        ).values_list("message_id", flat=True))

    # Filter those messages for which UserMessage rows have been already created
    all_stream_msgs = [
        msg for msg in all_stream_msgs if msg["id"] not in already_created_ums
    ]

    # Group the candidate message rows (dicts from .values(), not Message
    # instances) by stream ID.
    stream_messages: DefaultDict[int, List[Dict[str, Any]]] = defaultdict(list)
    for msg in all_stream_msgs:
        stream_messages[msg["recipient__type_id"]].append(msg)

    # Calling this function to filter out stream messages based upon
    # subscription logs and then store all UserMessage objects for bulk insert
    # This function does not perform any SQL related task and gets all the data
    # required for its operation in its params.
    user_messages_to_insert = filter_by_subscription_history(
        user_profile, stream_messages, all_stream_subscription_logs)

    # Doing a bulk create for all the UserMessage objects stored for creation.
    # We advance last_active_message_id after each batch so that an
    # interrupted run can be retried safely; the already_created_ums
    # filter above prevents duplicate rows on retry.
    while len(user_messages_to_insert) > 0:
        messages, user_messages_to_insert = (
            user_messages_to_insert[0:BULK_CREATE_BATCH_SIZE],
            user_messages_to_insert[BULK_CREATE_BATCH_SIZE:],
        )
        UserMessage.objects.bulk_create(messages)
        user_profile.last_active_message_id = messages[-1].message_id
        user_profile.save(update_fields=["last_active_message_id"])
Пример #26
0
def filter_by_subscription_history(
    user_profile: UserProfile,
    all_stream_messages: DefaultDict[int, List[Dict[str, Any]]],
    all_stream_subscription_logs: DefaultDict[int, List[RealmAuditLog]],
) -> List[UserMessage]:
    """Determine which of the given stream messages were visible to the
    user per their subscription history, and return unsaved UserMessage
    rows for exactly those messages.

    Parameters:
      user_profile: the soft-deactivated user being caught up.
      all_stream_messages: stream ID -> message row dicts (each a
        Message.objects.values("id", "recipient__type_id") row),
        expected in ascending message-ID order.
      all_stream_subscription_logs: stream ID -> the user's
        subscription-change RealmAuditLog entries for that stream,
        ordered by (event_last_message_id, id).

    Performs no database queries; all required data arrives via the
    parameters, which keeps this logic easy to unit-test.
    """
    user_messages_to_insert: List[UserMessage] = []

    def store_user_message_to_insert(message_row: Dict[str, Any]) -> None:
        # message_row is a values() dict, not a Message instance; we
        # only need its ID to build the UserMessage.
        user_message = UserMessage(user_profile=user_profile,
                                   message_id=message_row["id"],
                                   flags=0)
        user_messages_to_insert.append(user_message)

    for (stream_id, stream_messages_raw) in all_stream_messages.items():
        stream_subscription_logs = all_stream_subscription_logs[stream_id]
        # Make a copy of the original list of messages, which we will
        # mutate in the loop below.
        stream_messages = list(stream_messages_raw)

        for log_entry in stream_subscription_logs:
            # For each stream, we iterate through all of the changes
            # to the user's subscription to that stream, ordered by
            # event_last_message_id, to determine whether the user was
            # subscribed to the target stream at that time.
            #
            # For each message, we're looking for the first event for
            # the user's subscription to the target stream after the
            # message was sent.
            # * If it's an unsubscribe, we know the user was subscribed
            #   when the message was sent, and create a UserMessage
            # * If it's a subscribe, we know the user was not, and we
            #   skip the message by mutating the stream_messages list
            #   to skip that message.

            if len(stream_messages) == 0:
                # Because stream_messages gets mutated below, this
                # check belongs in this inner loop, not the outer loop.
                break

            event_last_message_id = assert_is_not_none(
                log_entry.event_last_message_id)

            if log_entry.event_type == RealmAuditLog.SUBSCRIPTION_DEACTIVATED:
                # If the event shows the user was unsubscribed after
                # event_last_message_id, we know they must have been
                # subscribed immediately before the event.
                for stream_message in stream_messages:
                    if stream_message["id"] <= event_last_message_id:
                        store_user_message_to_insert(stream_message)
                    else:
                        break
            elif log_entry.event_type in (
                    RealmAuditLog.SUBSCRIPTION_ACTIVATED,
                    RealmAuditLog.SUBSCRIPTION_CREATED,
            ):
                # Drop the leading messages sent while the user was not
                # yet subscribed (IDs at or before this subscribe
                # event's event_last_message_id).
                initial_msg_count = len(stream_messages)
                for i, stream_message in enumerate(stream_messages):
                    if stream_message["id"] > event_last_message_id:
                        stream_messages = stream_messages[i:]
                        break
                final_msg_count = len(stream_messages)
                if initial_msg_count == final_msg_count:
                    # The loop above never truncated the list; if even
                    # the newest message predates this subscribe event,
                    # none of the remaining messages were visible.
                    if stream_messages[-1]["id"] <= event_last_message_id:
                        stream_messages = []
            else:
                raise AssertionError(
                    f"{log_entry.event_type} is not a subscription event.")

        if len(stream_messages) > 0:
            # We do this check for last event since if the last subscription
            # event was a subscription_deactivated then we don't want to create
            # UserMessage rows for any of the remaining messages.
            if stream_subscription_logs[-1].event_type in (
                    RealmAuditLog.SUBSCRIPTION_ACTIVATED,
                    RealmAuditLog.SUBSCRIPTION_CREATED,
            ):
                for stream_message in stream_messages:
                    store_user_message_to_insert(stream_message)
    return user_messages_to_insert