def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = REQ("subscriptions",
                                                   validator=check_list(check_string)),
                                 principals = REQ(validator=check_list(check_string), default=None)):
    removing_someone_else = principals and \
        set(principals) != set((user_profile.email,))
    if removing_someone_else and not user_profile.is_realm_admin:
        # You can only unsubscribe other people from a stream if you are a realm
        # admin.
        return json_error("This action requires administrative rights")

    streams, _ = list_to_streams(streams_raw, user_profile)

    for stream in streams:
        if removing_someone_else and stream.invite_only and \
                not subscribed_to_stream(user_profile, stream):
            # Even as an admin, you can't remove other people from an
            # invite-only stream you're not on.
            return json_error("Cannot administer invite-only streams this way")

    if principals:
        people_to_unsub = set(principal_to_user_profile(
            user_profile, principal) for principal in principals)
    else:
        people_to_unsub = set([user_profile])

    result = dict(removed=[], not_subscribed=[])  # type: Dict[str, List[str]]
    (removed, not_subscribed) = bulk_remove_subscriptions(people_to_unsub, streams)

    for (subscriber, stream) in removed:
        result["removed"].append(stream.name)
    for (subscriber, stream) in not_subscribed:
        result["not_subscribed"].append(stream.name)

    return json_success(result)
def test_muted_topics_events(self):
    muted_topics_checker = check_dict([
        ('type', equals('muted_topics')),
        ('muted_topics', check_list(check_list(check_string, 2))),
    ])
    events = self.do_test(lambda: do_set_muted_topics(self.user_profile,
                                                      [["Denmark", "topic"]]))
    error = muted_topics_checker('events[0]', events[0])
    self.assert_on_error(error)
def remote_server_post_analytics(request: HttpRequest,
                                 entity: Union[UserProfile, RemoteZulipServer],
                                 realm_counts: List[Dict[str, Any]]=REQ(
                                     validator=check_list(check_dict_only([
                                         ('property', check_string),
                                         ('realm', check_int),
                                         ('id', check_int),
                                         ('end_time', check_float),
                                         ('subgroup', check_none_or(check_string)),
                                         ('value', check_int),
                                     ]))),
                                 installation_counts: List[Dict[str, Any]]=REQ(
                                     validator=check_list(check_dict_only([
                                         ('property', check_string),
                                         ('id', check_int),
                                         ('end_time', check_float),
                                         ('subgroup', check_none_or(check_string)),
                                         ('value', check_int),
                                     ])))) -> HttpResponse:
    validate_entity(entity)
    server = cast(RemoteZulipServer, entity)

    validate_count_stats(server, RemoteRealmCount, realm_counts)
    validate_count_stats(server, RemoteInstallationCount, installation_counts)

    BATCH_SIZE = 1000
    while len(realm_counts) > 0:
        batch = realm_counts[0:BATCH_SIZE]
        realm_counts = realm_counts[BATCH_SIZE:]

        objects_to_create = []
        for item in batch:
            objects_to_create.append(RemoteRealmCount(
                property=item['property'],
                realm_id=item['realm'],
                remote_id=item['id'],
                server=server,
                end_time=datetime.datetime.fromtimestamp(item['end_time'], tz=timezone_utc),
                subgroup=item['subgroup'],
                value=item['value']))
        RemoteRealmCount.objects.bulk_create(objects_to_create)

    while len(installation_counts) > 0:
        batch = installation_counts[0:BATCH_SIZE]
        installation_counts = installation_counts[BATCH_SIZE:]

        objects_to_create = []
        for item in batch:
            objects_to_create.append(RemoteInstallationCount(
                property=item['property'],
                remote_id=item['id'],
                server=server,
                end_time=datetime.datetime.fromtimestamp(item['end_time'], tz=timezone_utc),
                subgroup=item['subgroup'],
                value=item['value']))
        RemoteInstallationCount.objects.bulk_create(objects_to_create)

    return json_success()
def get_events_backend(request, user_profile, handler,
                       user_client=REQ(converter=get_client, default=None),
                       last_event_id=REQ(converter=int, default=None),
                       queue_id=REQ(default=None),
                       apply_markdown=REQ(default=False, validator=check_bool),
                       all_public_streams=REQ(default=False, validator=check_bool),
                       event_types=REQ(default=None, validator=check_list(check_string)),
                       dont_block=REQ(default=False, validator=check_bool),
                       narrow=REQ(default=[], validator=check_list(None)),
                       lifespan_secs=REQ(default=0, converter=int)):
    # type: (HttpRequest, UserProfile, BaseHandler, Optional[Client], Optional[int], Optional[List[text_type]], bool, bool, Optional[text_type], bool, Iterable[Sequence[text_type]], int) -> Union[HttpResponse, _RespondAsynchronously]
    if user_client is None:
        user_client = request.client

    events_query = dict(
        user_profile_id=user_profile.id,
        user_profile_email=user_profile.email,
        queue_id=queue_id,
        last_event_id=last_event_id,
        event_types=event_types,
        client_type_name=user_client.name,
        all_public_streams=all_public_streams,
        lifespan_secs=lifespan_secs,
        narrow=narrow,
        dont_block=dont_block,
        handler_id=handler.handler_id,
    )

    if queue_id is None:
        events_query["new_queue_data"] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm.id,
            user_profile_email=user_profile.email,
            event_types=event_types,
            client_type_name=user_client.name,
            apply_markdown=apply_markdown,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow,
        )

    result = fetch_events(events_query)

    if "extra_log_data" in result:
        request._log_data["extra"] = result["extra_log_data"]

    if result["type"] == "async":
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        return json_error(result["message"])
    return json_success(result["response"])
def get_events_backend(request: HttpRequest, user_profile: UserProfile,
                       handler: BaseHandler,
                       user_client: Optional[Client]=REQ(converter=get_client, default=None),
                       last_event_id: Optional[int]=REQ(converter=int, default=None),
                       queue_id: Optional[List[str]]=REQ(default=None),
                       apply_markdown: bool=REQ(default=False, validator=check_bool),
                       client_gravatar: bool=REQ(default=False, validator=check_bool),
                       all_public_streams: bool=REQ(default=False, validator=check_bool),
                       event_types: Optional[str]=REQ(default=None, validator=check_list(check_string)),
                       dont_block: bool=REQ(default=False, validator=check_bool),
                       narrow: Iterable[Sequence[str]]=REQ(default=[], validator=check_list(None)),
                       lifespan_secs: int=REQ(default=0, converter=int)
                       ) -> Union[HttpResponse, _RespondAsynchronously]:
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    events_query = dict(
        user_profile_id = user_profile.id,
        user_profile_email = user_profile.email,
        queue_id = queue_id,
        last_event_id = last_event_id,
        event_types = event_types,
        client_type_name = valid_user_client.name,
        all_public_streams = all_public_streams,
        lifespan_secs = lifespan_secs,
        narrow = narrow,
        dont_block = dont_block,
        handler_id = handler.handler_id)

    if queue_id is None:
        events_query['new_queue_data'] = dict(
            user_profile_id = user_profile.id,
            realm_id = user_profile.realm_id,
            user_profile_email = user_profile.email,
            event_types = event_types,
            client_type_name = valid_user_client.name,
            apply_markdown = apply_markdown,
            client_gravatar = client_gravatar,
            all_public_streams = all_public_streams,
            queue_timeout = lifespan_secs,
            last_connection_time = time.time(),
            narrow = narrow)

    result = fetch_events(events_query)

    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

    if result["type"] == "async":
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(validator=check_list(check_string), default=[]),
                                 add=REQ(validator=check_list(check_dict([('name', check_string)])), default=[])):
    # type: (HttpRequest, UserProfile, Iterable[Text], Iterable[Mapping[str, Any]]) -> HttpResponse
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    method_kwarg_pairs = [
        (add_subscriptions_backend, dict(streams_raw=add)),
        (remove_subscriptions_backend, dict(streams_raw=delete))
    ]  # type: List[FuncKwargPair]
    return compose_views(request, user_profile, method_kwarg_pairs)
def get_events_backend(request, user_profile, handler = None,
                       user_client = REQ(converter=get_client, default=None),
                       last_event_id = REQ(converter=int, default=None),
                       queue_id = REQ(default=None),
                       apply_markdown = REQ(default=False, validator=check_bool),
                       all_public_streams = REQ(default=False, validator=check_bool),
                       event_types = REQ(default=None, validator=check_list(check_string)),
                       dont_block = REQ(default=False, validator=check_bool),
                       narrow = REQ(default=[], validator=check_list(None)),
                       lifespan_secs = REQ(default=0, converter=int)):
    if user_client is None:
        user_client = request.client

    was_connected = False
    orig_queue_id = queue_id
    if queue_id is None:
        if dont_block:
            client = allocate_client_descriptor(user_profile.id, user_profile.realm.id,
                                                event_types, user_client, apply_markdown,
                                                all_public_streams, lifespan_secs,
                                                narrow=narrow)
            queue_id = client.event_queue.id
        else:
            return json_error("Missing 'queue_id' argument")
    else:
        if last_event_id is None:
            return json_error("Missing 'last_event_id' argument")
        client = get_client_descriptor(queue_id)
        if client is None:
            return json_error("Bad event queue id: %s" % (queue_id,))
        if user_profile.id != client.user_profile_id:
            return json_error("You are not authorized to get events from this queue")
        client.event_queue.prune(last_event_id)
        was_connected = client.finish_current_handler()

    if not client.event_queue.empty() or dont_block:
        ret = {'events': client.event_queue.contents()}
        if orig_queue_id is None:
            ret['queue_id'] = queue_id
        request._log_data['extra'] = "[%s/%s]" % (queue_id, len(ret["events"]))
        if was_connected:
            request._log_data['extra'] += " [was connected]"
        return json_success(ret)

    handler._request = request
    if was_connected:
        logging.info("Disconnected handler for queue %s (%s/%s)" % (queue_id, user_profile.email,
                                                                    user_client.name))
    client.connect_handler(handler)

    # runtornado recognizes this special return value.
    return RespondAsynchronously
def events_register_backend(request, user_profile, apply_markdown=True,
                            all_public_streams=None,
                            event_types=REQ(validator=check_list(check_string), default=None),
                            narrow=REQ(validator=check_list(check_list(check_string, length=2)), default=[]),
                            queue_lifespan_secs=REQ(converter=int, default=0)):
    # type: (HttpRequest, UserProfile, bool, Optional[bool], Optional[Iterable[str]], Iterable[Sequence[Text]], int) -> HttpResponse
    all_public_streams = _default_all_public_streams(user_profile, all_public_streams)
    narrow = _default_narrow(user_profile, narrow)

    ret = do_events_register(user_profile, request.client, apply_markdown,
                             event_types, queue_lifespan_secs, all_public_streams,
                             narrow=narrow)
    return json_success(ret)
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(validator=check_list(check_string), default=[]),
                                 add=REQ(validator=check_list(check_dict([['name', check_string]])), default=[])):
    if not add and not delete:
        return json_error('Nothing to do. Specify at least one of "add" or "delete".')

    json_dict = {}  # type: Dict[str, Any]
    for method, items in ((add_subscriptions_backend, add),
                          (remove_subscriptions_backend, delete)):
        response = method(request, user_profile, streams_raw=items)
        if response.status_code != 200:
            transaction.rollback()
            return response
        json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)
def update_user_group_backend(request: HttpRequest, user_profile: UserProfile,
                              user_group_id: int=REQ(validator=check_int),
                              delete: List[int]=REQ(validator=check_list(check_int), default=[]),
                              add: List[int]=REQ(validator=check_list(check_int), default=[])
                              ) -> HttpResponse:
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    method_kwarg_pairs = [
        (add_members_to_group_backend, dict(user_group_id=user_group_id, members=add)),
        (remove_members_from_group_backend, dict(user_group_id=user_group_id, members=delete))
    ]  # type: List[FuncKwargPair]
    return compose_views(request, user_profile, method_kwarg_pairs)
def test_check_dict(self):
    keys = [
        ('names', check_list(check_string)),
        ('city', check_string),
    ]

    x = {
        'names': ['alice', 'bob'],
        'city': 'Boston',
    }
    error = check_dict(keys)('x', x)
    self.assertEqual(error, None)

    x = 999
    error = check_dict(keys)('x', x)
    self.assertEqual(error, 'x is not a dict')

    x = {}
    error = check_dict(keys)('x', x)
    self.assertEqual(error, 'names key is missing from x')

    x = {
        'names': ['alice', 'bob', {}]
    }
    error = check_dict(keys)('x', x)
    self.assertEqual(error, 'x["names"][2] is not a string')

    x = {
        'names': ['alice', 'bob'],
        'city': 5
    }
    error = check_dict(keys)('x', x)
    self.assertEqual(error, 'x["city"] is not a string')
def test_register_events(self):
    realm_user_add_checker = check_dict([
        ('type', equals('realm_user')),
        ('op', equals('add')),
        ('person', check_dict([
            ('email', check_string),
            ('full_name', check_string),
            ('is_admin', check_bool),
            ('is_bot', check_bool),
        ])),
    ])
    stream_create_checker = check_dict([
        ('type', equals('stream')),
        ('op', equals('create')),
        ('streams', check_list(check_dict([
            ('description', check_string),
            ('invite_only', check_bool),
            ('name', check_string),
            ('stream_id', check_int),
        ])))
    ])

    events = self.do_test(lambda: self.register("test1", "test1"))
    error = realm_user_add_checker('events[0]', events[0])
    self.assert_on_error(error)
    error = stream_create_checker('events[1]', events[1])
    self.assert_on_error(error)
def test_check_list(self):
    x = 999
    error = check_list(check_string)('x', x)
    self.assertEqual(error, 'x is not a list')

    x = ["hello", 5]
    error = check_list(check_string)('x', x)
    self.assertEqual(error, 'x[1] is not a string')

    x = [["yo"], ["hello", "goodbye", 5]]
    error = check_list(check_list(check_string))('x', x)
    self.assertEqual(error, 'x[1][2] is not a string')

    x = ["hello", "goodbye", "hello again"]
    error = check_list(check_string, length=2)('x', x)
    self.assertEqual(error, 'x should have exactly 2 items')
def messages_in_narrow_backend(request, user_profile,
                               msg_ids = REQ(validator=check_list(check_int)),
                               narrow = REQ(converter=narrow_parameter)):
    # type: (HttpRequest, UserProfile, List[int], List[Dict[str, Any]]) -> HttpResponse
    # Note that this function will only work on messages the user
    # actually received

    # TODO: We assume that the narrow is a search. For now this works because
    # the browser only ever calls this function for searches, since it can't
    # apply that narrow operator itself.

    query = select([column("message_id"), column("subject"), column("rendered_content")],
                   and_(column("user_profile_id") == literal(user_profile.id),
                        column("message_id").in_(msg_ids)),
                   join(table("zerver_usermessage"), table("zerver_message"),
                        literal_column("zerver_usermessage.message_id") ==
                        literal_column("zerver_message.id")))

    builder = NarrowBuilder(user_profile, column("message_id"))
    for term in narrow:
        query = builder.add_term(query, term)

    sa_conn = get_sqlalchemy_connection()
    query_result = list(sa_conn.execute(query).fetchall())

    search_fields = dict()
    for row in query_result:
        (message_id, subject, rendered_content, content_matches, subject_matches) = row
        search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                      content_matches, subject_matches)

    return json_success({"messages": search_fields})
def update_user_custom_profile_data(
        request: HttpRequest,
        user_profile: UserProfile,
        data: List[Dict[str, Union[int, str]]]=REQ(validator=check_list(
            check_dict([('id', check_int)])))) -> HttpResponse:
    for item in data:
        field_id = item['id']
        try:
            field = CustomProfileField.objects.get(id=field_id)
        except CustomProfileField.DoesNotExist:
            return json_error(_('Field id {id} not found.').format(id=field_id))

        validators = CustomProfileField.FIELD_VALIDATORS
        extended_validators = CustomProfileField.EXTENDED_FIELD_VALIDATORS
        field_type = field.field_type
        value = item['value']
        var_name = '{}'.format(field.name)

        if field_type in validators:
            validator = validators[field_type]
            result = validator(var_name, value)
        else:
            # Check extended validators.
            extended_validator = extended_validators[field_type]
            field_data = field.field_data
            result = extended_validator(var_name, field_data, value)

        if result is not None:
            return json_error(result)

    do_update_user_custom_profile_data(user_profile, data)
    # We need to call this explicitly, otherwise constraints are not checked.
    return json_success()
def add_user_group(request: HttpRequest, user_profile: UserProfile,
                   name: str=REQ(),
                   members: List[int]=REQ(validator=check_list(check_int), default=[]),
                   description: str=REQ()) -> HttpResponse:
    user_profiles = user_ids_to_users(members, user_profile.realm)
    check_add_user_group(user_profile.realm, name, user_profiles, description)
    return json_success()
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(validator=check_list(check_string), default=[]),
                                 add=REQ(validator=check_list(check_dict([('name', check_string)])), default=[])):
    # type: (HttpRequest, UserProfile, Iterable[text_type], Iterable[Mapping[str, Any]]) -> HttpResponse
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    json_dict = {}  # type: Dict[str, Any]
    method_items_pairs = ((add_subscriptions_backend, add),
                          (remove_subscriptions_backend, delete))  # type: Tuple[FuncItPair, FuncItPair]
    for method, items in method_items_pairs:
        response = method(request, user_profile, streams_raw=items)
        if response.status_code != 200:
            transaction.rollback()
            return response
        json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)
def remove_storage(request, user_profile,
                   keys=REQ(validator=check_list(check_string), default=None)):
    # type: (HttpRequest, UserProfile, Optional[List[str]]) -> HttpResponse
    keys = keys or get_keys_in_bot_storage(user_profile)
    try:
        remove_bot_storage(user_profile, keys)
    except StateError as e:
        return json_error(str(e))
    return json_success()
def create_default_stream_group(request: HttpRequest, user_profile: UserProfile,
                                group_name: Text=REQ(), description: Text=REQ(),
                                stream_names: List[Text]=REQ(validator=check_list(check_string))
                                ) -> HttpResponse:
    streams = []
    for stream_name in stream_names:
        (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
        streams.append(stream)
    do_create_default_stream_group(user_profile.realm, group_name, description, streams)
    return json_success()
def json_subscription_property(request, user_profile,
                               subscription_data=REQ(
                                   validator=check_list(
                                       check_dict([("stream", check_string),
                                                   ("property", check_string),
                                                   ("value", check_variable_type(
                                                       [check_string, check_bool]))])))):
    # type: (HttpRequest, UserProfile, List[Dict[str, Any]]) -> HttpResponse
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream": "devel", "property": "in_home_view", "value": False},
     {"stream": "devel", "property": "color", "value": "#c2c2c2"}]
    """
    if request.method != "POST":
        return json_error(_("Invalid verb"))

    property_converters = {"color": check_string,
                           "in_home_view": check_bool,
                           "desktop_notifications": check_bool,
                           "audible_notifications": check_bool,
                           "pin_to_top": check_bool}
    response_data = []

    for change in subscription_data:
        stream_name = change["stream"]
        property = change["property"]
        value = change["value"]

        if property not in property_converters:
            return json_error(_("Unknown subscription property: %s") % (property,))

        sub = get_subscription_or_die(stream_name, user_profile)[0]

        property_conversion = property_converters[property](property, value)
        if property_conversion:
            return json_error(property_conversion)

        do_change_subscription_property(user_profile, sub, stream_name,
                                        property, value)

        response_data.append({"stream": stream_name,
                              "property": property,
                              "value": value})

    return json_success({"subscription_data": response_data})
def update_message_flags(request, user_profile,
                         messages=REQ('messages', validator=check_list(check_int)),
                         operation=REQ('op'), flag=REQ('flag'),
                         all=REQ('all', validator=check_bool, default=False)):
    request._log_data["extra"] = "[%s %s]" % (operation, flag)
    do_update_message_flags(user_profile, operation, flag, messages, all)
    return json_success({'result': 'success',
                         'messages': messages,
                         'msg': ''})
def update_user_custom_profile_data(
        request: HttpRequest,
        user_profile: UserProfile,
        data: List[Dict[str, Union[int, str, List[int]]]]=REQ(validator=check_list(
            check_dict([('id', check_int)])))) -> HttpResponse:
    validate_user_custom_profile_data(user_profile.realm.id, data)
    do_update_user_custom_profile_data(user_profile, data)
    # We need to call this explicitly, otherwise constraints are not checked.
    return json_success()
def events_register_backend(
        request: HttpRequest, user_profile: UserProfile,
        apply_markdown: bool=REQ(default=False, validator=check_bool),
        client_gravatar: bool=REQ(default=False, validator=check_bool),
        all_public_streams: Optional[bool]=REQ(default=None, validator=check_bool),
        include_subscribers: bool=REQ(default=False, validator=check_bool),
        event_types: Optional[Iterable[str]]=REQ(validator=check_list(check_string), default=None),
        fetch_event_types: Optional[Iterable[str]]=REQ(validator=check_list(check_string), default=None),
        narrow: NarrowT=REQ(validator=check_list(check_list(check_string, length=2)), default=[]),
        queue_lifespan_secs: int=REQ(converter=int, default=0)
) -> HttpResponse:
    all_public_streams = _default_all_public_streams(user_profile, all_public_streams)
    narrow = _default_narrow(user_profile, narrow)

    ret = do_events_register(user_profile, request.client, apply_markdown, client_gravatar,
                             event_types, queue_lifespan_secs, all_public_streams,
                             narrow=narrow, include_subscribers=include_subscribers,
                             fetch_event_types=fetch_event_types)
    return json_success(ret)
def remove_subscriptions_backend(
        request: HttpRequest, user_profile: UserProfile,
        streams_raw: Iterable[Text]=REQ("subscriptions", validator=check_list(check_string)),
        principals: Optional[Iterable[Text]]=REQ(validator=check_list(check_string), default=None),
) -> HttpResponse:
    removing_someone_else = principals and \
        set(principals) != set((user_profile.email,))
    if removing_someone_else and not user_profile.is_realm_admin:
        # You can only unsubscribe other people from a stream if you are a realm
        # admin.
        return json_error(_("This action requires administrative rights"))

    streams_as_dict = []
    for stream_name in streams_raw:
        streams_as_dict.append({"name": stream_name.strip()})

    streams, __ = list_to_streams(streams_as_dict, user_profile)

    for stream in streams:
        if removing_someone_else and stream.invite_only and \
                not subscribed_to_stream(user_profile, stream.id):
            # Even as an admin, you can't remove other people from an
            # invite-only stream you're not on.
            return json_error(_("Cannot administer invite-only streams this way"))

    if principals:
        people_to_unsub = set(principal_to_user_profile(
            user_profile, principal) for principal in principals)
    else:
        people_to_unsub = set([user_profile])

    result = dict(removed=[], not_subscribed=[])  # type: Dict[str, List[Text]]
    (removed, not_subscribed) = bulk_remove_subscriptions(people_to_unsub, streams,
                                                          acting_user=user_profile)

    for (subscriber, removed_stream) in removed:
        result["removed"].append(removed_stream.name)
    for (subscriber, not_subscribed_stream) in not_subscribed:
        result["not_subscribed"].append(not_subscribed_stream.name)

    return json_success(result)
def get_storage(
        request: HttpRequest, user_profile: UserProfile,
        keys: Optional[List[str]]=REQ(validator=check_list(check_string), default=None)
) -> HttpResponse:
    keys = keys or get_keys_in_bot_storage(user_profile)
    try:
        storage = {key: get_bot_storage(user_profile, key) for key in keys}
    except StateError as e:
        return json_error(str(e))
    return json_success({'storage': storage})
def update_subscription_properties_backend(
        request: HttpRequest, user_profile: UserProfile,
        subscription_data: List[Dict[str, Any]]=REQ(
            validator=check_list(
                check_dict([("stream_id", check_int),
                            ("property", check_string),
                            ("value", check_variable_type([check_string, check_bool]))])
            )
        ),
) -> HttpResponse:
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": "1", "property": "in_home_view", "value": False},
     {"stream_id": "1", "property": "color", "value": "#c2c2c2"}]
    """
    property_converters = {"color": check_color,
                           "in_home_view": check_bool,
                           "desktop_notifications": check_bool,
                           "audible_notifications": check_bool,
                           "push_notifications": check_bool,
                           "email_notifications": check_bool,
                           "pin_to_top": check_bool}
    response_data = []

    for change in subscription_data:
        stream_id = change["stream_id"]
        property = change["property"]
        value = change["value"]

        if property not in property_converters:
            return json_error(_("Unknown subscription property: %s") % (property,))

        (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            return json_error(_("Not subscribed to stream id %d") % (stream_id,))

        property_conversion = property_converters[property](property, value)
        if property_conversion:
            return json_error(property_conversion)

        do_change_subscription_property(user_profile, sub, stream,
                                        property, value)

        response_data.append({'stream_id': stream_id,
                              'property': property,
                              'value': value})

    return json_success({"subscription_data": response_data})
def get_events_backend(request, user_profile, handler,
                       user_client = REQ(converter=get_client, default=None),
                       last_event_id = REQ(converter=int, default=None),
                       queue_id = REQ(default=None),
                       apply_markdown = REQ(default=False, validator=check_bool),
                       all_public_streams = REQ(default=False, validator=check_bool),
                       event_types = REQ(default=None, validator=check_list(check_string)),
                       dont_block = REQ(default=False, validator=check_bool),
                       narrow = REQ(default=[], validator=check_list(None)),
                       lifespan_secs = REQ(default=0, converter=int)):
    if user_client is None:
        user_client = request.client

    (result, log_data) = fetch_events(
        user_profile.id, user_profile.realm_id, user_profile.email, queue_id,
        last_event_id, event_types, user_client.name, apply_markdown,
        all_public_streams, lifespan_secs, narrow, dont_block, handler.handler_id)
    request._log_data['extra'] = log_data
    if result == RespondAsynchronously:
        handler._request = request
        return result
    return json_success(result)
def generate_multiuse_invite_backend(request: HttpRequest, user_profile: UserProfile,
                                     stream_ids: List[int]=REQ(validator=check_list(check_int),
                                                               default=[])) -> HttpResponse:
    streams = []
    for stream_id in stream_ids:
        try:
            (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
        except JsonableError:
            return json_error(_("Invalid stream id {}. No invites were sent.".format(stream_id)))
        streams.append(stream)

    invite_link = do_create_multiuse_invite_link(user_profile, streams)
    return json_success({'invite_link': invite_link})
def test_alert_words_events(self):
    alert_words_checker = check_dict([
        ('type', equals('alert_words')),
        ('alert_words', check_list(check_string)),
    ])

    events = self.do_test(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
    error = alert_words_checker('events[0]', events[0])
    self.assert_on_error(error)

    events = self.do_test(lambda: do_remove_alert_words(self.user_profile, ["alert_word"]))
    error = alert_words_checker('events[0]', events[0])
    self.assert_on_error(error)
def test_realm_filter_events(self):
    schema_checker = check_dict([
        ('type', equals('realm_filters')),
        ('realm_filters', check_list(None)),  # TODO: validate tuples in the list
    ])
    events = self.do_test(lambda: do_add_realm_filter(get_realm("zulip.com"), "#[123]",
                                                      "https://realm.com/my_realm_filter/%(id)s"))
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    self.do_test(lambda: do_remove_realm_filter(get_realm("zulip.com"), "#[123]"))
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)
def update_message_flags(request, user_profile,
                         messages=REQ(validator=check_list(check_int)),
                         operation=REQ('op'), flag=REQ(),
                         all=REQ(validator=check_bool, default=False),
                         stream_name=REQ(default=None),
                         topic_name=REQ(default=None)):
    # type: (HttpRequest, UserProfile, List[int], Text, Text, bool, Optional[Text], Optional[Text]) -> HttpResponse
    if all:
        target_count_str = "all"
    else:
        target_count_str = str(len(messages))
    log_data_str = "[%s %s/%s]" % (operation, flag, target_count_str)
    request._log_data["extra"] = log_data_str

    stream = None
    if stream_name is not None:
        stream = get_stream(stream_name, user_profile.realm)
        if not stream:
            raise JsonableError(_('No such stream \'%s\'') % (stream_name,))
    if topic_name:
        topic_exists = UserMessage.objects.filter(user_profile=user_profile,
                                                  message__recipient__type_id=stream.id,
                                                  message__recipient__type=Recipient.STREAM,
                                                  message__subject__iexact=topic_name).exists()
        if not topic_exists:
            raise JsonableError(_('No such topic \'%s\'') % (topic_name,))

    count = do_update_message_flags(user_profile, operation, flag, messages,
                                    all, stream, topic_name)

    # If we succeed, update log data str with the actual count for how
    # many messages were updated.
    if count != len(messages):
        log_data_str = "[%s %s/%s] actually %s" % (operation, flag, target_count_str, count)
        request._log_data["extra"] = log_data_str

    return json_success({'result': 'success',
                         'messages': messages,
                         'msg': ''})
def invite_users_backend(
        request: HttpRequest,
        user_profile: UserProfile,
        invitee_emails_raw: str = REQ("invitee_emails"),
        invite_as: Optional[int] = REQ(
            validator=check_int, default=PreregistrationUser.INVITE_AS['MEMBER']),
        stream_ids: List[int] = REQ(validator=check_list(check_int)),
) -> HttpResponse:
    if user_profile.realm.invite_by_admins_only and not user_profile.is_realm_admin:
        raise OrganizationAdministratorRequired()
    if invite_as not in PreregistrationUser.INVITE_AS.values():
        return json_error(_("Must be invited as a valid type of user"))
    if invite_as == PreregistrationUser.INVITE_AS['REALM_ADMIN'] and not user_profile.is_realm_admin:
        return json_error(_("Must be an organization administrator"))
    if not invitee_emails_raw:
        return json_error(_("You must specify at least one email address."))
    if not stream_ids:
        return json_error(_("You must specify at least one stream for invitees to join."))

    invitee_emails = get_invitee_emails_set(invitee_emails_raw)

    streams: List[Stream] = []
    for stream_id in stream_ids:
        try:
            (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
        except JsonableError:
            return json_error(
                _("Stream does not exist with id: {}. No invites were sent.").format(stream_id))
        streams.append(stream)

    do_invite_users(user_profile, invitee_emails, streams, invite_as)
    return json_success()
def update_message_flags(request, user_profile,
                         messages=REQ('messages', validator=check_list(check_int)),
                         operation=REQ('op'), flag=REQ('flag'),
                         all=REQ('all', validator=check_bool, default=False),
                         stream_name=REQ('stream_name', default=None),
                         topic_name=REQ('topic_name', default=None)):
    request._log_data["extra"] = "[%s %s]" % (operation, flag)

    stream = None
    if stream_name is not None:
        stream = get_stream(stream_name, user_profile.realm)
        if not stream:
            raise JsonableError(_('No such stream \'%s\'') % (stream_name,))
    if topic_name:
        topic_exists = UserMessage.objects.filter(user_profile=user_profile,
                                                  message__recipient__type_id=stream.id,
                                                  message__recipient__type=Recipient.STREAM,
                                                  message__subject__iexact=topic_name).exists()
        if not topic_exists:
            raise JsonableError(_('No such topic \'%s\'') % (topic_name,))

    do_update_message_flags(user_profile, operation, flag, messages, all, stream, topic_name)
    return json_success({'result': 'success',
                         'messages': messages,
                         'msg': ''})
def update_default_stream_group_streams(
        request: HttpRequest, user_profile: UserProfile,
        group_id: int, op: str = REQ(),
        stream_names: List[str] = REQ(validator=check_list(check_string))) -> HttpResponse:
    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    streams = []
    for stream_name in stream_names:
        (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
        streams.append(stream)

    if op == 'add':
        do_add_streams_to_default_stream_group(user_profile.realm, group, streams)
    elif op == 'remove':
        do_remove_streams_from_default_stream_group(user_profile.realm, group, streams)
    else:
        return json_error(_('Invalid value for "op". Specify one of "add" or "remove".'))
    return json_success()
def remote_server_post_analytics(
    request: HttpRequest,
    entity: Union[UserProfile, RemoteZulipServer],
    realm_counts: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict_only([
                ("property", check_string),
                ("realm", check_int),
                ("id", check_int),
                ("end_time", check_float),
                ("subgroup", check_none_or(check_string)),
                ("value", check_int),
            ])
        )
    ),
    installation_counts: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict_only([
                ("property", check_string),
                ("id", check_int),
                ("end_time", check_float),
                ("subgroup", check_none_or(check_string)),
                ("value", check_int),
            ])
        )
    ),
    realmauditlog_rows: Optional[List[Dict[str, Any]]] = REQ(
        json_validator=check_list(
            check_dict_only([
                ("id", check_int),
                ("realm", check_int),
                ("event_time", check_float),
                ("backfilled", check_bool),
                ("extra_data", check_none_or(check_string)),
                ("event_type", check_int),
            ])
        ),
        default=None,
    ),
) -> HttpResponse:
    server = validate_entity(entity)

    validate_incoming_table_data(server, RemoteRealmCount, realm_counts, True)
    validate_incoming_table_data(server, RemoteInstallationCount, installation_counts, True)
    if realmauditlog_rows is not None:
        validate_incoming_table_data(server, RemoteRealmAuditLog, realmauditlog_rows)

    row_objects = [
        RemoteRealmCount(
            property=row["property"],
            realm_id=row["realm"],
            remote_id=row["id"],
            server=server,
            end_time=datetime.datetime.fromtimestamp(row["end_time"], tz=datetime.timezone.utc),
            subgroup=row["subgroup"],
            value=row["value"],
        )
        for row in realm_counts
    ]
    batch_create_table_data(server, RemoteRealmCount, row_objects)

    row_objects = [
        RemoteInstallationCount(
            property=row["property"],
            remote_id=row["id"],
            server=server,
            end_time=datetime.datetime.fromtimestamp(row["end_time"], tz=datetime.timezone.utc),
            subgroup=row["subgroup"],
            value=row["value"],
        )
        for row in installation_counts
    ]
    batch_create_table_data(server, RemoteInstallationCount, row_objects)

    if realmauditlog_rows is not None:
        row_objects = [
            RemoteRealmAuditLog(
                realm_id=row["realm"],
                remote_id=row["id"],
                server=server,
                event_time=datetime.datetime.fromtimestamp(
                    row["event_time"], tz=datetime.timezone.utc),
                backfilled=row["backfilled"],
                extra_data=row["extra_data"],
                event_type=row["event_type"],
            )
            for row in realmauditlog_rows
        ]
        batch_create_table_data(server, RemoteRealmAuditLog, row_objects)

    return json_success()
                               allow_deactivated=True, allow_bots=True, for_admin=True)
    if target.is_bot:
        assert target.bot_type is not None
        check_bot_creation_policy(user_profile, target.bot_type)
    do_reactivate_user(target, acting_user=user_profile)
    return json_success()


check_profile_data: Validator[
    List[Dict[str, Optional[Union[int, ProfileDataElementValue]]]]
] = check_list(
    check_dict_only([
        ("id", check_int),
        (
            "value",
            check_none_or(
                check_union([check_string, check_list(check_int)]),
            ),
        ),
    ]),
)


@has_request_variables
def update_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int,
    full_name: Optional[str] = REQ(default=None, json_validator=check_string),
    role: Optional[int] = REQ(
        default=None,
        json_validator=check_int_in(UserProfile.ROLE_TYPES, ),
    target = access_user_by_id(user_profile, user_id,
                               allow_deactivated=True, allow_bots=True)
    if target.is_bot:
        assert target.bot_type is not None
        check_bot_creation_policy(user_profile, target.bot_type)
    do_reactivate_user(target, acting_user=user_profile)
    return json_success()


check_profile_data: Validator[
    List[Dict[str, Optional[Union[int, str, List[int]]]]]
] = check_list(
    check_dict_only([
        ('id', check_int),
        ('value', check_none_or(
            check_union([check_int, check_string, check_list(check_int)]),
        )),
    ]),
)


@has_request_variables
def update_user_backend(
        request: HttpRequest, user_profile: UserProfile, user_id: int,
        full_name: Optional[str] = REQ(default=None, validator=check_string),
        role: Optional[int] = REQ(default=None, validator=check_int_in(
            UserProfile.ROLE_TYPES,
        )),
        profile_data: Optional[List[Dict[str, Optional[Union[
def add_subscriptions_backend(
        request: HttpRequest, user_profile: UserProfile,
        streams_raw: Iterable[Mapping[str, str]]=REQ(
            "subscriptions", validator=check_list(check_dict([('name', check_string)]))),
        invite_only: bool=REQ(validator=check_bool, default=False),
        history_public_to_subscribers: Optional[bool]=REQ(validator=check_bool, default=None),
        announce: bool=REQ(validator=check_bool, default=False),
        principals: List[str]=REQ(validator=check_list(check_string), default=[]),
        authorization_errors_fatal: bool=REQ(validator=check_bool, default=True),
) -> HttpResponse:
    stream_dicts = []
    for stream_dict in streams_raw:
        stream_dict_copy = {}  # type: Dict[str, Any]
        for field in stream_dict:
            stream_dict_copy[field] = stream_dict[field]
        # Strip the stream name here.
        stream_dict_copy['name'] = stream_dict_copy['name'].strip()
        stream_dict_copy["invite_only"] = invite_only
        stream_dict_copy["history_public_to_subscribers"] = history_public_to_subscribers
        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of
    # can_create_streams policy and check_stream_name policy is inside
    # list_to_streams.
    existing_streams, created_streams = \
        list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error(_("Unable to access stream (%s).") % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if user_profile.realm.is_zephyr_mirror_realm and not all(stream.invite_only
                                                                 for stream in streams):
            return json_error(_("You can only invite other Zephyr mirroring users to invite-only streams."))
        subscribers = set(principal_to_user_profile(user_profile, principal)
                          for principal in principals)
    else:
        subscribers = set([user_profile])

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers,
                                                              acting_user=user_profile)

    # We can assume unique emails here for now, but we should eventually
    # convert this function to be more id-centric.
    email_to_user_profile = dict()  # type: Dict[str, UserProfile]

    result = dict(subscribed=defaultdict(list),
                  already_subscribed=defaultdict(list))  # type: Dict[str, Any]
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
        email_to_user_profile[subscriber.email] = subscriber
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    bots = dict((subscriber.email, subscriber.is_bot) for subscriber in subscribers)

    newly_created_stream_names = {s.name for s in created_streams}
    private_stream_names = {s.name for s in streams if s.invite_only}

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if len(principals) > 0 and result["subscribed"]:
        for email, subscribed_stream_names in result["subscribed"].items():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            # For each user, we notify them about newly subscribed streams, except for
            # streams that were newly created.
            notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names

            if not notify_stream_names:
                continue

            msg = you_were_just_subscribed_message(
                acting_user=user_profile,
                stream_names=notify_stream_names,
                private_stream_names=private_stream_names)

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            notifications.append(
                internal_prep_private_message(
                    realm=user_profile.realm,
                    sender=sender,
                    recipient_user=email_to_user_profile[email],
                    content=msg))

    if announce and len(created_streams) > 0 and settings.NOTIFICATION_BOT is not None:
        notifications_stream = user_profile.realm.get_notifications_stream()
        if notifications_stream is not None:
            if len(created_streams) > 1:
                stream_strs = ", ".join('#**%s**' % s.name for s in created_streams)
                stream_msg = "the following streams: %s" % (stream_strs,)
            else:
                stream_msg = "a new stream #**%s**." % created_streams[0].name
            msg = ("%s just created %s" % (user_profile.full_name, stream_msg))

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            stream_name = notifications_stream.name
            topic = 'Streams'

            notifications.append(
                internal_prep_stream_message(realm=user_profile.realm, sender=sender,
                                             stream_name=stream_name, topic=topic,
                                             content=msg))

    if not user_profile.realm.is_zephyr_mirror_realm:
        for stream in created_streams:
            notifications.append(prep_stream_welcome_message(stream))

    if len(notifications) > 0:
        do_send_messages(notifications)

    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [s.name for s in unauthorized_streams]
    return json_success(result)
def remove_alert_words(request: HttpRequest, user_profile: UserProfile,
                       alert_words: List[str]=REQ(validator=check_list(check_string))
                       ) -> HttpResponse:
    do_remove_alert_words(user_profile, alert_words)
    return json_success({'alert_words': user_alert_words(user_profile)})
def events_register_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    apply_markdown: bool = REQ(default=False, json_validator=check_bool),
    client_gravatar: bool = REQ(default=False, json_validator=check_bool),
    slim_presence: bool = REQ(default=False, json_validator=check_bool),
    all_public_streams: Optional[bool] = REQ(default=None, json_validator=check_bool),
    include_subscribers: bool = REQ(default=False, json_validator=check_bool),
    client_capabilities: Optional[Dict[str, bool]] = REQ(
        json_validator=check_dict(
            [
                # This field was accidentally made required when it was added in v2.0.0-781;
                # this was not realized until after the release of Zulip 2.1.2. (It remains
                # required to help ensure backwards compatibility of client code.)
                ("notification_settings_null", check_bool),
            ],
            [
                # Any new fields of `client_capabilities` should be optional. Add them here.
                ("bulk_message_deletion", check_bool),
                ("user_avatar_url_field_optional", check_bool),
                ("stream_typing_notifications", check_bool),
            ],
            value_validator=check_bool,
        ),
        default=None,
    ),
    event_types: Optional[Sequence[str]] = REQ(
        json_validator=check_list(check_string), default=None
    ),
    fetch_event_types: Optional[Sequence[str]] = REQ(
        json_validator=check_list(check_string), default=None
    ),
    narrow: NarrowT = REQ(
        json_validator=check_list(check_list(check_string, length=2)), default=[]
    ),
    queue_lifespan_secs: int = REQ(converter=int, default=0, documentation_pending=True),
) -> HttpResponse:
    if all_public_streams and not user_profile.can_access_public_streams():
        return json_error(_("User not authorized for this query"))

    all_public_streams = _default_all_public_streams(user_profile, all_public_streams)
    narrow = _default_narrow(user_profile, narrow)

    if client_capabilities is None:
        client_capabilities = {}

    ret = do_events_register(
        user_profile,
        request.client,
        apply_markdown,
        client_gravatar,
        slim_presence,
        event_types,
        queue_lifespan_secs,
        all_public_streams,
        narrow=narrow,
        include_subscribers=include_subscribers,
        client_capabilities=client_capabilities,
        fetch_event_types=fetch_event_types,
    )
    return json_success(ret)
def get_events_backend(request: HttpRequest, user_profile: UserProfile, handler: BaseHandler,
                       # user_client is intended only for internal Django=>Tornado requests
                       # and thus shouldn't be documented for external use.
                       user_client: Optional[Client]=REQ(converter=get_client, default=None,
                                                         intentionally_undocumented=True),
                       last_event_id: Optional[int]=REQ(converter=int, default=None),
                       queue_id: Optional[str]=REQ(default=None),
                       # apply_markdown, client_gravatar, all_public_streams, and various
                       # other parameters are only used when registering a new queue via this
                       # endpoint.  This is a feature used primarily by get_events_internal
                       # and not expected to be used by third-party clients.
                       apply_markdown: bool=REQ(default=False, validator=check_bool,
                                                intentionally_undocumented=True),
                       client_gravatar: bool=REQ(default=False, validator=check_bool,
                                                 intentionally_undocumented=True),
                       all_public_streams: bool=REQ(default=False, validator=check_bool,
                                                    intentionally_undocumented=True),
                       event_types: Optional[str]=REQ(default=None, validator=check_list(check_string),
                                                      intentionally_undocumented=True),
                       dont_block: bool=REQ(default=False, validator=check_bool),
                       narrow: Iterable[Sequence[str]]=REQ(default=[], validator=check_list(None),
                                                           intentionally_undocumented=True),
                       lifespan_secs: int=REQ(default=0, converter=to_non_negative_int,
                                              intentionally_undocumented=True)
                       ) -> Union[HttpResponse, _RespondAsynchronously]:
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    events_query = dict(
        user_profile_id = user_profile.id,
        user_profile_email = user_profile.email,
        queue_id = queue_id,
        last_event_id = last_event_id,
        event_types = event_types,
        client_type_name = valid_user_client.name,
        all_public_streams = all_public_streams,
        lifespan_secs = lifespan_secs,
        narrow = narrow,
        dont_block = dont_block,
        handler_id = handler.handler_id)

    if queue_id is None:
        events_query['new_queue_data'] = dict(
            user_profile_id = user_profile.id,
            realm_id = user_profile.realm_id,
            user_profile_email = user_profile.email,
            event_types = event_types,
            client_type_name = valid_user_client.name,
            apply_markdown = apply_markdown,
            client_gravatar = client_gravatar,
            all_public_streams = all_public_streams,
            queue_timeout = lifespan_secs,
            last_connection_time = time.time(),
            narrow = narrow)

    result = fetch_events(events_query)

    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

    if result["type"] == "async":
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
def add_alert_words(request: HttpRequest, user_profile: UserProfile,
                    alert_words: List[str]=REQ(validator=check_list(check_string), default=[])
                    ) -> HttpResponse:
    do_add_alert_words(user_profile, clean_alert_words(alert_words))
    return json_success()
def update_subscription_properties_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    subscription_data: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict([
                ("stream_id", check_int),
                ("property", check_string),
                ("value", check_union([check_string, check_bool])),
            ]),
        ),
    ),
) -> HttpResponse:
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": "1", "property": "is_muted", "value": False},
     {"stream_id": "1", "property": "color", "value": "#c2c2c2"}]
    """
    property_converters = {
        "color": check_color,
        "in_home_view": check_bool,
        "is_muted": check_bool,
        "desktop_notifications": check_bool,
        "audible_notifications": check_bool,
        "push_notifications": check_bool,
        "email_notifications": check_bool,
        "pin_to_top": check_bool,
        "wildcard_mentions_notify": check_bool,
    }

    for change in subscription_data:
        stream_id = change["stream_id"]
        property = change["property"]
        value = change["value"]

        if property not in property_converters:
            raise JsonableError(_("Unknown subscription property: {}").format(property))

        (stream, sub) = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            raise JsonableError(_("Not subscribed to stream id {}").format(stream_id))

        try:
            value = property_converters[property](property, value)
        except ValidationError as error:
            raise JsonableError(error.message)

        do_change_subscription_property(user_profile, sub, stream, property, value,
                                        acting_user=user_profile)

    # TODO: Do this more generally, see update_realm_user_settings_defaults.realm.py
    from zerver.lib.request import RequestNotes
    request_notes = RequestNotes.get_notes(request)
    for req_var in request.POST:
        if req_var not in request_notes.processed_parameters:
            request_notes.ignored_parameters.add(req_var)

    result: Dict[str, Any] = {}
    if len(request_notes.ignored_parameters) > 0:
        result["ignored_parameters_unsupported"] = list(request_notes.ignored_parameters)

    return json_success(request, data=result)
def check_send_webhook_message(
    request: HttpRequest,
    user_profile: UserProfile,
    topic: str,
    body: str,
    complete_event_type: Optional[str] = None,
    stream: Optional[str] = REQ(default=None),
    user_specified_topic: Optional[str] = REQ("topic", default=None),
    only_events: Optional[List[str]] = REQ(default=None, json_validator=check_list(check_string)),
    exclude_events: Optional[List[str]] = REQ(default=None, json_validator=check_list(check_string)),
    unquote_url_parameters: bool = False,
) -> None:
    if complete_event_type is not None:
        # Here, we implement Zulip's generic support for filtering
        # events sent by the third-party service.
        #
        # If complete_event_type is passed to this function, we will check the event
        # type against user configured lists of only_events and exclude events.
        # If the event does not satisfy the configuration, the function will return
        # without sending any messages.
        #
        # We match items in only_events and exclude_events using Unix
        # shell-style wildcards.
        if (
            only_events is not None
            and all(not fnmatch.fnmatch(complete_event_type, pattern) for pattern in only_events)
        ) or (
            exclude_events is not None
            and any(fnmatch.fnmatch(complete_event_type, pattern) for pattern in exclude_events)
        ):
            return

    client = RequestNotes.get_notes(request).client
    assert client is not None
    if stream is None:
        assert user_profile.bot_owner is not None
        check_send_private_message(user_profile, client, user_profile.bot_owner, body)
    else:
        # Some third-party websites (such as Atlassian's Jira), tend to
        # double escape their URLs in a manner that escaped space characters
        # (%20) are never properly decoded. We work around that by making sure
        # that the URL parameters are decoded on our end.
        if unquote_url_parameters:
            stream = unquote(stream)

        if user_specified_topic is not None:
            topic = user_specified_topic
            if unquote_url_parameters:
                topic = unquote(topic)

        try:
            if stream.isdecimal():
                check_send_stream_message_by_id(user_profile, client, int(stream), topic, body)
            else:
                check_send_stream_message(user_profile, client, stream, topic, body)
        except StreamDoesNotExistError:
            # A PM will be sent to the bot_owner by check_message, notifying
            # that the webhook bot just tried to send a message to a non-existent
            # stream, so we don't need to re-raise it since it clutters up
            # webhook-errors.log
            pass
def add_subscriptions_backend(
        request: HttpRequest, user_profile: UserProfile,
        streams_raw: Iterable[Dict[str, str]]=REQ(
            "subscriptions",
            validator=check_list(check_dict_only(
                [('name', check_string)],
                optional_keys=[
                    ('color', check_color),
                    ('description', check_capped_string(Stream.MAX_DESCRIPTION_LENGTH)),
                ]))),
        invite_only: bool=REQ(validator=check_bool, default=False),
        is_announcement_only: bool=REQ(validator=check_bool, default=False),
        history_public_to_subscribers: Optional[bool]=REQ(validator=check_bool, default=None),
        announce: bool=REQ(validator=check_bool, default=False),
        principals: List[str]=REQ(validator=check_list(check_string), default=[]),
        authorization_errors_fatal: bool=REQ(validator=check_bool, default=True),
) -> HttpResponse:
    stream_dicts = []
    color_map = {}
    for stream_dict in streams_raw:
        # 'color' field is optional
        # check for its presence in the streams_raw first
        if 'color' in stream_dict:
            color_map[stream_dict['name']] = stream_dict['color']
        if 'description' in stream_dict:
            # We don't allow newline characters in stream descriptions.
            stream_dict['description'] = stream_dict['description'].replace("\n", " ")

        stream_dict_copy = {}  # type: Dict[str, Any]
        for field in stream_dict:
            stream_dict_copy[field] = stream_dict[field]
        # Strip the stream name here.
        stream_dict_copy['name'] = stream_dict_copy['name'].strip()
        stream_dict_copy["invite_only"] = invite_only
        stream_dict_copy["is_announcement_only"] = is_announcement_only
        stream_dict_copy["history_public_to_subscribers"] = history_public_to_subscribers
        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of
    # can_create_streams policy and check_stream_name policy is inside
    # list_to_streams.
    existing_streams, created_streams = \
        list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error(_("Unable to access stream (%s).") % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if user_profile.realm.is_zephyr_mirror_realm and not all(stream.invite_only
                                                                 for stream in streams):
            return json_error(_("You can only invite other Zephyr mirroring users to private streams."))
        if not user_profile.can_subscribe_other_users():
            if user_profile.realm.invite_to_stream_policy == Realm.INVITE_TO_STREAM_POLICY_ADMINS:
                return json_error(_("Only administrators can modify other users' subscriptions."))
            # Realm.INVITE_TO_STREAM_POLICY_MEMBERS only fails if the
            # user is a guest, which happens in the decorator above.
            assert user_profile.realm.invite_to_stream_policy == \
                Realm.INVITE_TO_STREAM_POLICY_WAITING_PERIOD
            return json_error(_("Your account is too new to modify other users' subscriptions."))
        subscribers = set(principal_to_user_profile(user_profile, principal)
                          for principal in principals)
    else:
        subscribers = set([user_profile])

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers,
                                                              acting_user=user_profile,
                                                              color_map=color_map)

    # We can assume unique emails here for now, but we should eventually
    # convert this function to be more id-centric.
    email_to_user_profile = dict()  # type: Dict[str, UserProfile]

    result = dict(subscribed=defaultdict(list),
                  already_subscribed=defaultdict(list))  # type: Dict[str, Any]
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
        email_to_user_profile[subscriber.email] = subscriber
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    bots = dict((subscriber.email, subscriber.is_bot) for subscriber in subscribers)

    newly_created_stream_names = {s.name for s in created_streams}

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if len(principals) > 0 and result["subscribed"]:
        for email, subscribed_stream_names in result["subscribed"].items():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            # For each user, we notify them about newly subscribed streams, except for
            # streams that were newly created.
            notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names

            if not notify_stream_names:
                continue

            msg = you_were_just_subscribed_message(
                acting_user=user_profile,
                stream_names=notify_stream_names,
            )

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            notifications.append(
                internal_prep_private_message(
                    realm=user_profile.realm,
                    sender=sender,
                    recipient_user=email_to_user_profile[email],
                    content=msg))

    if announce and len(created_streams) > 0 and settings.NOTIFICATION_BOT is not None:
        notifications_stream = user_profile.realm.get_notifications_stream()
        if notifications_stream is not None:
            if len(created_streams) > 1:
                stream_strs = ", ".join('#**%s**' % (s.name,) for s in created_streams)
                stream_msg = "the following streams: %s" % (stream_strs,)
            else:
                stream_msg = "a new stream #**%s**." % (created_streams[0].name,)
            msg = ("@_**%s|%d** just created %s" % (user_profile.full_name,
                                                    user_profile.id, stream_msg))

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            topic = 'Streams'

            notifications.append(
                internal_prep_stream_message(
                    realm=user_profile.realm,
                    sender=sender,
                    stream=notifications_stream,
                    topic=topic,
                    content=msg,
                ))

    if not user_profile.realm.is_zephyr_mirror_realm:
        for stream in created_streams:
            notifications.append(prep_stream_welcome_message(stream))

    if len(notifications) > 0:
        do_send_messages(notifications)

    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [s.name for s in unauthorized_streams]
    return json_success(result)
def get_total(request, numbers=REQ(validator=check_list(check_int))):
    return sum(numbers)
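# Hedged sketch, not from the source: exercising check_list(check_int) directly,
# outside of REQ. In the older validator API used by several snippets in this
# collection, a checker is called as checker(var_name, value) and returns None
# on success or an error string on failure (the newer API instead returns the
# validated value and raises ValidationError); the older convention is assumed here.
numbers_validator = check_list(check_int)
assert numbers_validator("numbers", [1, 2, 3]) is None          # valid payload
assert numbers_validator("numbers", [1, "two", 3]) is not None  # error string returned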
def get_story_update_batch_body(payload: WildValue, action: WildValue) -> Optional[str]:
    # When the user selects one or more stories with the checkbox, they can perform
    # a batch update on multiple stories while changing multiple attributes at the
    # same time.
    changes = action["changes"]
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
        "workflow_state_template": "",
    }

    templates = []
    last_change = "other"

    move_sub_templates = []
    if "epic_id" in changes:
        last_change = "epic"
        epic_id = changes["epic_id"]
        old_reference = get_reference_by_id(
            payload, epic_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, epic_id.get("new").tame(check_none_or(check_int))
        )
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Epic",
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )
    if "project_id" in changes:
        last_change = "project"
        project_id = changes["project_id"]
        old_reference = get_reference_by_id(
            payload, project_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, project_id.get("new").tame(check_none_or(check_int))
        )
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Project",
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )
    if len(move_sub_templates) > 0:
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                operation="was moved",
                sub_templates=", ".join(move_sub_templates),
            )
        )

    if "story_type" in changes:
        last_change = "type"
        story_type = changes["story_type"]
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                operation="{} changed".format("was" if len(templates) == 0 else "and"),
                sub_templates=STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                    entity_type="type",
                    old=story_type.get("old").tame(check_none_or(check_string)),
                    new=story_type.get("new").tame(check_none_or(check_string)),
                ),
            )
        )

    if "label_ids" in changes:
        label_ids = changes["label_ids"]
        # If this is a payload for when no label is added, ignore it
        if "adds" in label_ids:
            label_ids_added = label_ids["adds"].tame(check_list(check_int))
            last_change = "label"
            labels = get_story_joined_label_list(payload, action, label_ids_added)
            templates.append(
                STORY_UPDATE_BATCH_ADD_REMOVE_TEMPLATE.format(
                    operation="{} added".format("was" if len(templates) == 0 else "and"),
                    entity="the new label{plural} {labels}".format(
                        plural="s" if len(label_ids) > 1 else "", labels=labels
                    ),
                )
            )

    if "workflow_state_id" in changes:
        last_change = "state"
        workflow_state_id = changes["workflow_state_id"]
        old_reference = get_reference_by_id(
            payload, workflow_state_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, workflow_state_id.get("new").tame(check_none_or(check_int))
        )
        kwargs.update(
            workflow_state_template=TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE.format(
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )

    # Use the default template for the state change if it is the only change.
    if len(templates) <= 1 or (len(templates) == 0 and last_change == "state"):
        event: str = "{}_{}".format("story_update", last_change)
        alternative_body_func = EVENT_BODY_FUNCTION_MAPPER.get(event)
        # If last_change is not one of "epic", "project", "type", "label" and "state",
        # we should ignore the action as there is no way for us to render the changes.
        if alternative_body_func is None:
            return None
        return alternative_body_func(payload, action)

    kwargs.update(templates=", ".join(templates))

    return STORY_UPDATE_BATCH_TEMPLATE.format(**kwargs)
def remove_user_custom_profile_data(
        request: HttpRequest, user_profile: UserProfile,
        data: List[int] = REQ(validator=check_list(check_int)),
) -> HttpResponse:
    for field_id in data:
        check_remove_custom_profile_field_value(user_profile, field_id)
    return json_success()
def remove_alert_words(request, user_profile,
                       alert_words=REQ(validator=check_list(check_string), default=[])):
    do_remove_alert_words(user_profile, alert_words)
    return json_success()
    check_required_string,
    check_string,
    check_string_in,
    check_union,
)
from zerver.models import Draft, UserProfile
from zerver.tornado.django_api import send_event

VALID_DRAFT_TYPES: Set[str] = {"", "private", "stream"}

# A validator to verify if the structure (syntax) of a dictionary
# meets the requirements to be a draft dictionary:
draft_dict_validator = check_dict_only(
    required_keys=[
        ("type", check_string_in(VALID_DRAFT_TYPES)),
        ("to", check_list(check_int)),  # The ID of the stream to send to, or a list of user IDs.
        ("topic", check_string),  # This string can simply be empty for private type messages.
        ("content", check_required_string),
    ],
    optional_keys=[
        ("timestamp", check_union([check_int, check_float])),  # A Unix timestamp.
    ],
)


def further_validated_draft_dict(draft_dict: Dict[str, Any],
                                 user_profile: UserProfile) -> Dict[str, Any]:
    """Take a draft_dict that was already validated by draft_dict_validator then
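# Hedged sketch, not from the source: a dictionary shaped the way draft_dict_validator
# above expects. This snippet uses the newer validator API, in which a checker is called
# as checker(var_name, value), returns the validated value, and raises ValidationError
# on bad input; that calling convention is assumed here, and the values are illustrative.
sample_draft = {
    "type": "stream",
    "to": [10],                   # a stream ID; for "private" drafts, a list of user IDs
    "topic": "release planning",
    "content": "Some draft text.",
    "timestamp": 1595479019,      # optional Unix timestamp
}
sample_draft = draft_dict_validator("draft", sample_draft)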
def add_alert_words(
        request: HttpRequest, user_profile: UserProfile,
        alert_words: List[str] = REQ(validator=check_list(check_capped_string(100))),
) -> HttpResponse:
    do_add_alert_words(user_profile, clean_alert_words(alert_words))
    return json_success({'alert_words': user_alert_words(user_profile)})
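# Hedged sketch, not from the source: check_capped_string(100) is assumed to accept
# strings of at most 100 characters and to report an error for longer ones, using the
# older error-string calling convention.
capped = check_capped_string(100)
assert capped("alert_words[0]", "x" * 100) is None       # at the cap: accepted
assert capped("alert_words[0]", "x" * 101) is not None   # over the cap: error string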
def reorder_realm_custom_profile_fields(
        request: HttpRequest, user_profile: UserProfile,
        order: List[int] = REQ(validator=check_list(check_int)),
) -> HttpResponse:
    try_reorder_realm_custom_profile_fields(user_profile.realm, order)
    return json_success()
def add_subscriptions_backend( request, user_profile, streams_raw=REQ("subscriptions", validator=check_list(check_dict([('name', check_string) ]))), invite_only=REQ(validator=check_bool, default=False), announce=REQ(validator=check_bool, default=False), principals=REQ(validator=check_list(check_string), default=None), authorization_errors_fatal=REQ(validator=check_bool, default=True)): # type: (HttpRequest, UserProfile, Iterable[Mapping[str, text_type]], bool, bool, Optional[List[text_type]], bool) -> HttpResponse stream_names = [] for stream_dict in streams_raw: stream_name = stream_dict["name"].strip() if len(stream_name) > Stream.MAX_NAME_LENGTH: return json_error( _("Stream name (%s) too long.") % (stream_name, )) if not valid_stream_name(stream_name): return json_error(_("Invalid stream name (%s).") % (stream_name, )) stream_names.append(stream_name) # Enforcement of can_create_streams policy is inside list_to_streams. existing_streams, created_streams = \ list_to_streams(stream_names, user_profile, autocreate=True, invite_only=invite_only) authorized_streams, unauthorized_streams = \ filter_stream_authorization(user_profile, existing_streams) if len(unauthorized_streams) > 0 and authorization_errors_fatal: return json_error( _("Unable to access stream (%s).") % unauthorized_streams[0].name) # Newly created streams are also authorized for the creator streams = authorized_streams + created_streams if principals is not None: if user_profile.realm.is_zephyr_mirror_realm and not all( stream.invite_only for stream in streams): return json_error( _("You can only invite other Zephyr mirroring users to invite-only streams." )) subscribers = set( principal_to_user_profile(user_profile, principal) for principal in principals) else: subscribers = set([user_profile]) (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers) result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list)) # type: Dict[str, Any] for (subscriber, stream) in subscribed: result["subscribed"][subscriber.email].append(stream.name) for (subscriber, stream) in already_subscribed: result["already_subscribed"][subscriber.email].append(stream.name) private_streams = dict( (stream.name, stream.invite_only) for stream in streams) bots = dict( (subscriber.email, subscriber.is_bot) for subscriber in subscribers) # Inform the user if someone else subscribed them to stuff, # or if a new stream was created with the "announce" option. notifications = [] if principals and result["subscribed"]: for email, subscriptions in six.iteritems(result["subscribed"]): if email == user_profile.email: # Don't send a Zulip if you invited yourself. continue if bots[email]: # Don't send invitation Zulips to bots continue if len(subscriptions) == 1: msg = ("Hi there! We thought you'd like to know that %s just " "subscribed you to the%s stream [%s](%s)." % ( user_profile.full_name, " **invite-only**" if private_streams[subscriptions[0]] else "", subscriptions[0], stream_link(subscriptions[0]), )) else: msg = ("Hi there! We thought you'd like to know that %s just " "subscribed you to the following streams: \n\n" % (user_profile.full_name, )) for stream in subscriptions: msg += "* [%s](%s)%s\n" % (stream, stream_link(stream), " (**invite-only**)" if private_streams[stream] else "") if len([s for s in subscriptions if not private_streams[s]]) > 0: msg += "\nYou can see historical content on a non-invite-only stream by narrowing to it." 
notifications.append( internal_prep_message(settings.NOTIFICATION_BOT, "private", email, "", msg)) if announce and len(created_streams) > 0: notifications_stream = user_profile.realm.notifications_stream if notifications_stream is not None: if len(created_streams) > 1: stream_msg = "the following streams: %s" % \ (", ".join('`%s`' % (s.name,) for s in created_streams),) else: stream_msg = "a new stream `%s`" % (created_streams[0].name) stream_buttons = ' '.join( stream_button(s.name) for s in created_streams) msg = ("%s just created %s. %s" % (user_profile.full_name, stream_msg, stream_buttons)) notifications.append( internal_prep_message(settings.NOTIFICATION_BOT, "stream", notifications_stream.name, "Streams", msg, realm=notifications_stream.realm)) else: msg = ("Hi there! %s just created a new stream '%s'. %s" % (user_profile.full_name, created_streams[0].name, stream_button(created_streams[0].name))) for realm_user_dict in get_active_user_dicts_in_realm( user_profile.realm): # Don't announce to yourself or to people you explicitly added # (who will get the notification above instead). if realm_user_dict['email'] in principals or realm_user_dict[ 'email'] == user_profile.email: continue notifications.append( internal_prep_message(settings.NOTIFICATION_BOT, "private", realm_user_dict['email'], "", msg)) if len(notifications) > 0: do_send_messages(notifications) result["subscribed"] = dict(result["subscribed"]) result["already_subscribed"] = dict(result["already_subscribed"]) if not authorization_errors_fatal: result["unauthorized"] = [ stream.name for stream in unauthorized_streams ] return json_success(result)
                                 allow_bots=True, for_admin=True)
    if target.is_bot:
        assert target.bot_type is not None
        check_bot_creation_policy(user_profile, target.bot_type)
    do_reactivate_user(target, acting_user=user_profile)
    return json_success()


check_profile_data: Validator[List[Dict[str, Optional[Union[int, str, List[int]]]]]] = check_list(
    check_dict_only([
        ("id", check_int),
        (
            "value",
            check_none_or(
                check_union([check_int, check_string, check_list(check_int)]),
            ),
        ),
    ]),
)


@has_request_variables
def update_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int,
    full_name: Optional[str] = REQ(default=None, json_validator=check_string),
    role: Optional[int] = REQ(
        default=None,
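# Hedged sketch, not from the source: data that the check_profile_data validator above
# would accept, assuming the newer validator API (the checker returns the validated
# value and raises ValidationError on failure). Field IDs and values are illustrative.
sample_profile_data = [
    {"id": 1, "value": "Product"},   # text-valued custom field
    {"id": 2, "value": 2017},        # integer-valued field
    {"id": 3, "value": [5, 7]},      # user field: a list of user IDs
    {"id": 4, "value": None},        # clearing a field
]
sample_profile_data = check_profile_data("profile_data", sample_profile_data)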
def add_subscriptions_backend( request: HttpRequest, user_profile: UserProfile, streams_raw: Iterable[Dict[str, str]] = REQ( "subscriptions", validator=check_list( check_dict_only([('name', check_string)], optional_keys=[ ('color', check_color), ('description', check_capped_string( Stream.MAX_DESCRIPTION_LENGTH)), ]), )), invite_only: bool = REQ(validator=check_bool, default=False), stream_post_policy: int = REQ(validator=check_int_in( Stream.STREAM_POST_POLICY_TYPES), default=Stream.STREAM_POST_POLICY_EVERYONE), history_public_to_subscribers: Optional[bool] = REQ(validator=check_bool, default=None), message_retention_days: Union[str, int] = REQ(validator=check_string_or_int, default="realm_default"), announce: bool = REQ(validator=check_bool, default=False), principals: Union[Sequence[str], Sequence[int]] = REQ(validator=check_union( [check_list(check_string), check_list(check_int)]), default=[]), authorization_errors_fatal: bool = REQ(validator=check_bool, default=True), ) -> HttpResponse: stream_dicts = [] color_map = {} for stream_dict in streams_raw: # 'color' field is optional # check for its presence in the streams_raw first if 'color' in stream_dict: color_map[stream_dict['name']] = stream_dict['color'] if 'description' in stream_dict: # We don't allow newline characters in stream descriptions. stream_dict['description'] = stream_dict['description'].replace( "\n", " ") stream_dict_copy: Dict[str, Any] = {} for field in stream_dict: stream_dict_copy[field] = stream_dict[field] # Strip the stream name here. stream_dict_copy['name'] = stream_dict_copy['name'].strip() stream_dict_copy["invite_only"] = invite_only stream_dict_copy["stream_post_policy"] = stream_post_policy stream_dict_copy[ "history_public_to_subscribers"] = history_public_to_subscribers stream_dict_copy[ "message_retention_days"] = parse_message_retention_days( message_retention_days) stream_dicts.append(stream_dict_copy) # Validation of the streams arguments, including enforcement of # can_create_streams policy and check_stream_name policy is inside # list_to_streams. existing_streams, created_streams = \ list_to_streams(stream_dicts, user_profile, autocreate=True) authorized_streams, unauthorized_streams = \ filter_stream_authorization(user_profile, existing_streams) if len(unauthorized_streams) > 0 and authorization_errors_fatal: return json_error( _("Unable to access stream ({stream_name}).").format( stream_name=unauthorized_streams[0].name, )) # Newly created streams are also authorized for the creator streams = authorized_streams + created_streams if len(principals) > 0: if user_profile.realm.is_zephyr_mirror_realm and not all( stream.invite_only for stream in streams): return json_error( _("You can only invite other Zephyr mirroring users to private streams." )) if not user_profile.can_subscribe_other_users(): if user_profile.realm.invite_to_stream_policy == Realm.POLICY_ADMINS_ONLY: return json_error( _("Only administrators can modify other users' subscriptions." )) # Realm.POLICY_MEMBERS_ONLY only fails if the # user is a guest, which happens in the decorator above. assert user_profile.realm.invite_to_stream_policy == \ Realm.POLICY_FULL_MEMBERS_ONLY return json_error( _("Your account is too new to modify other users' subscriptions." 
)) subscribers = { principal_to_user_profile(user_profile, principal) for principal in principals } else: subscribers = {user_profile} (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers, acting_user=user_profile, color_map=color_map) # We can assume unique emails here for now, but we should eventually # convert this function to be more id-centric. email_to_user_profile: Dict[str, UserProfile] = dict() result: Dict[str, Any] = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list)) for (subscriber, stream) in subscribed: result["subscribed"][subscriber.email].append(stream.name) email_to_user_profile[subscriber.email] = subscriber for (subscriber, stream) in already_subscribed: result["already_subscribed"][subscriber.email].append(stream.name) bots = {subscriber.email: subscriber.is_bot for subscriber in subscribers} newly_created_stream_names = {s.name for s in created_streams} # Inform the user if someone else subscribed them to stuff, # or if a new stream was created with the "announce" option. notifications = [] if len(principals) > 0 and result["subscribed"]: for email, subscribed_stream_names in result["subscribed"].items(): if email == user_profile.email: # Don't send a Zulip if you invited yourself. continue if bots[email]: # Don't send invitation Zulips to bots continue # For each user, we notify them about newly subscribed streams, except for # streams that were newly created. notify_stream_names = set( subscribed_stream_names) - newly_created_stream_names if not notify_stream_names: continue msg = you_were_just_subscribed_message( acting_user=user_profile, stream_names=notify_stream_names, ) sender = get_system_bot(settings.NOTIFICATION_BOT) notifications.append( internal_prep_private_message( realm=user_profile.realm, sender=sender, recipient_user=email_to_user_profile[email], content=msg)) if announce and len(created_streams) > 0: notifications_stream = user_profile.realm.get_notifications_stream() if notifications_stream is not None: if len(created_streams) > 1: content = _( "@_**%(user_name)s|%(user_id)d** created the following streams: %(stream_str)s." ) else: content = _( "@_**%(user_name)s|%(user_id)d** created a new stream %(stream_str)s." ) content = content % { 'user_name': user_profile.full_name, 'user_id': user_profile.id, 'stream_str': ", ".join(f'#**{s.name}**' for s in created_streams) } sender = get_system_bot(settings.NOTIFICATION_BOT) topic = _('new streams') notifications.append( internal_prep_stream_message( realm=user_profile.realm, sender=sender, stream=notifications_stream, topic=topic, content=content, ), ) if not user_profile.realm.is_zephyr_mirror_realm and len( created_streams) > 0: sender = get_system_bot(settings.NOTIFICATION_BOT) for stream in created_streams: notifications.append( internal_prep_stream_message( realm=user_profile.realm, sender=sender, stream=stream, topic=Realm.STREAM_EVENTS_NOTIFICATION_TOPIC, content=_('Stream created by @_**{user_name}|{user_id}**.' ).format( user_name=user_profile.full_name, user_id=user_profile.id, ), ), ) if len(notifications) > 0: do_send_messages(notifications, mark_as_read=[user_profile.id]) result["subscribed"] = dict(result["subscribed"]) result["already_subscribed"] = dict(result["already_subscribed"]) if not authorization_errors_fatal: result["unauthorized"] = [s.name for s in unauthorized_streams] return json_success(result)
def remote_server_post_analytics(
    request: HttpRequest,
    entity: Union[UserProfile, RemoteZulipServer],
    realm_counts: List[Dict[str, Any]] = REQ(validator=check_list(
        check_dict_only([
            ('property', check_string),
            ('realm', check_int),
            ('id', check_int),
            ('end_time', check_float),
            ('subgroup', check_none_or(check_string)),
            ('value', check_int),
        ]))),
    installation_counts: List[Dict[str, Any]] = REQ(validator=check_list(
        check_dict_only([
            ('property', check_string),
            ('id', check_int),
            ('end_time', check_float),
            ('subgroup', check_none_or(check_string)),
            ('value', check_int),
        ]))),
) -> HttpResponse:
    validate_entity(entity)
    server = cast(RemoteZulipServer, entity)

    validate_count_stats(server, RemoteRealmCount, realm_counts)
    validate_count_stats(server, RemoteInstallationCount, installation_counts)

    BATCH_SIZE = 1000
    while len(realm_counts) > 0:
        batch = realm_counts[0:BATCH_SIZE]
        realm_counts = realm_counts[BATCH_SIZE:]

        objects_to_create = []
        for item in batch:
            objects_to_create.append(
                RemoteRealmCount(
                    property=item['property'],
                    realm_id=item['realm'],
                    remote_id=item['id'],
                    server=server,
                    end_time=datetime.datetime.fromtimestamp(item['end_time'], tz=timezone_utc),
                    subgroup=item['subgroup'],
                    value=item['value']))
        RemoteRealmCount.objects.bulk_create(objects_to_create)

    while len(installation_counts) > 0:
        batch = installation_counts[0:BATCH_SIZE]
        installation_counts = installation_counts[BATCH_SIZE:]

        objects_to_create = []
        for item in batch:
            objects_to_create.append(
                RemoteInstallationCount(
                    property=item['property'],
                    remote_id=item['id'],
                    server=server,
                    end_time=datetime.datetime.fromtimestamp(item['end_time'], tz=timezone_utc),
                    subgroup=item['subgroup'],
                    value=item['value']))
        RemoteInstallationCount.objects.bulk_create(objects_to_create)

    return json_success()
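# Generic sketch, not from the source: the slice-and-consume batching pattern used
# above, factored into a helper. It takes BATCH_SIZE items off the front of the list
# until the list is empty, yielding one batch at a time for a bulk insert.
def batched(items, batch_size=1000):
    while items:
        batch, items = items[:batch_size], items[batch_size:]
        yield batch

# Usage (build_rows is a hypothetical helper turning dicts into model instances):
# for batch in batched(realm_counts):
#     RemoteRealmCount.objects.bulk_create(build_rows(batch))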
def get_events_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    # user_client is intended only for internal Django=>Tornado requests
    # and thus shouldn't be documented for external use.
    user_client: Optional[Client] = REQ(converter=get_client, default=None,
                                        intentionally_undocumented=True),
    last_event_id: Optional[int] = REQ(converter=int, default=None),
    queue_id: Optional[str] = REQ(default=None),
    # apply_markdown, client_gravatar, all_public_streams, and various
    # other parameters are only used when registering a new queue via this
    # endpoint.  This is a feature used primarily by get_events_internal
    # and not expected to be used by third-party clients.
    apply_markdown: bool = REQ(default=False, validator=check_bool,
                               intentionally_undocumented=True),
    client_gravatar: bool = REQ(default=False, validator=check_bool,
                                intentionally_undocumented=True),
    slim_presence: bool = REQ(default=False, validator=check_bool,
                              intentionally_undocumented=True),
    all_public_streams: bool = REQ(default=False, validator=check_bool,
                                   intentionally_undocumented=True),
    event_types: Optional[Sequence[str]] = REQ(default=None,
                                               validator=check_list(check_string),
                                               intentionally_undocumented=True),
    dont_block: bool = REQ(default=False, validator=check_bool),
    narrow: Iterable[Sequence[str]] = REQ(default=[],
                                          validator=check_list(check_list(check_string)),
                                          intentionally_undocumented=True),
    lifespan_secs: int = REQ(default=0, converter=to_non_negative_int,
                             intentionally_undocumented=True),
    bulk_message_deletion: bool = REQ(default=False, validator=check_bool,
                                      intentionally_undocumented=True),
) -> HttpResponse:
    # Extract the Tornado handler from the request
    handler: AsyncDjangoHandler = request._tornado_handler

    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    events_query = dict(
        user_profile_id=user_profile.id,
        queue_id=queue_id,
        last_event_id=last_event_id,
        event_types=event_types,
        client_type_name=valid_user_client.name,
        all_public_streams=all_public_streams,
        lifespan_secs=lifespan_secs,
        narrow=narrow,
        dont_block=dont_block,
        handler_id=handler.handler_id,
    )

    if queue_id is None:
        events_query["new_queue_data"] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm_id,
            event_types=event_types,
            client_type_name=valid_user_client.name,
            apply_markdown=apply_markdown,
            client_gravatar=client_gravatar,
            slim_presence=slim_presence,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow,
            bulk_message_deletion=bulk_message_deletion,
        )

    result = fetch_events(events_query)

    if "extra_log_data" in result:
        request._log_data["extra"] = result["extra_log_data"]

    if result["type"] == "async":
        # Mark this response with .asynchronous; this will result in
        # Tornado discarding the response and instead long-polling the
        # request.  See zulip_finish for more design details.
        handler._request = request
        response = json_success()
        response.asynchronous = True
        return response
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
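# Hedged sketch, not from the source: the long-polling loop a client would run against
# this endpoint, using the public Zulip API path GET /api/v1/events with queue_id and
# last_event_id. The base_url, auth credentials, and queue_id values are illustrative.
import requests

def poll_events(base_url, auth, queue_id, last_event_id=-1):
    while True:
        # The request blocks server-side until new events arrive (dont_block=False).
        resp = requests.get(
            f"{base_url}/api/v1/events",
            params={"queue_id": queue_id, "last_event_id": last_event_id},
            auth=auth,  # (bot_email, api_key) basic auth
        ).json()
        for event in resp["events"]:
            last_event_id = max(last_event_id, event["id"])
            yield event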
    request: HttpRequest,
    user_profile: UserProfile,
    include_subscribers: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    subscribed, _ = gather_subscriptions(
        user_profile,
        include_subscribers=include_subscribers,
    )
    return json_success({"subscriptions": subscribed})


add_subscriptions_schema = check_list(
    check_dict_only(
        required_keys=[("name", check_string)],
        optional_keys=[
            ("color", check_color),
            ("description", check_capped_string(Stream.MAX_DESCRIPTION_LENGTH)),
        ],
    ),
)

remove_subscriptions_schema = check_list(check_string)


@has_request_variables
def update_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    delete: Sequence[str] = REQ(json_validator=remove_subscriptions_schema, default=[]),
    add: Sequence[Mapping[str,
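# Hedged sketch, not from the source: payloads that the two schemas above accept,
# assuming the newer validator API (return the validated value, raise ValidationError
# on failure). Stream names, color, and description are illustrative.
add_payload = [
    {"name": "design"},
    {"name": "frontend", "color": "#94c849", "description": "UI work"},
]
add_payload = add_subscriptions_schema("add", add_payload)

delete_payload = remove_subscriptions_schema("delete", ["design", "frontend"])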
def update_subscription_properties_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    subscription_data: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict([
                ("stream_id", check_int),
                ("property", check_string),
                ("value", check_union([check_string, check_bool])),
            ]),
        ),
    ),
) -> HttpResponse:
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": 1, "property": "is_muted", "value": False},
     {"stream_id": 1, "property": "color", "value": "#c2c2c2"}]
    """
    property_converters = {
        "color": check_color,
        "in_home_view": check_bool,
        "is_muted": check_bool,
        "desktop_notifications": check_bool,
        "audible_notifications": check_bool,
        "push_notifications": check_bool,
        "email_notifications": check_bool,
        "pin_to_top": check_bool,
        "wildcard_mentions_notify": check_bool,
    }
    response_data = []

    for change in subscription_data:
        stream_id = change["stream_id"]
        property = change["property"]
        value = change["value"]

        if property not in property_converters:
            return json_error(
                _("Unknown subscription property: {}").format(property))

        (stream, sub) = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            return json_error(
                _("Not subscribed to stream id {}").format(stream_id))

        try:
            value = property_converters[property](property, value)
        except ValidationError as error:
            return json_error(error.message)

        do_change_subscription_property(user_profile, sub, stream,
                                        property, value,
                                        acting_user=user_profile)

        response_data.append({
            "stream_id": stream_id,
            "property": property,
            "value": value,
        })

    return json_success({"subscription_data": response_data})
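# Hedged sketch, not from the source: validating one entry from subscription_data the
# same way the view above does, with a trimmed-down converters table. The newer
# validator API (return the validated value, raise ValidationError) is assumed.
from django.core.exceptions import ValidationError
from zerver.lib.validator import check_bool, check_color

converters = {"color": check_color, "is_muted": check_bool}

change = {"stream_id": 1, "property": "color", "value": "#c2c2c2"}
try:
    value = converters[change["property"]](change["property"], change["value"])
except ValidationError:
    value = None  # a real view would return json_error(error.message) here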
check_optional_value = check_union(
    [
        # force vertical formatting
        check_bool,
        check_int,
        check_string,
        equals(None),
    ]
)

check_alert_words = check_events_dict(
    required_keys=[
        # force vertical formatting
        ("type", equals("alert_words")),
        ("alert_words", check_list(check_string)),
    ]
)

_check_custom_profile_field = check_dict_only(
    required_keys=[
        ("id", check_int),
        ("type", check_int),
        ("name", check_string),
        ("hint", check_string),
        ("field_data", check_string),
        ("order", check_int),
    ]
)

check_custom_profile_fields = check_events_dict(