def add_subscriptions_backend(
        request, user_profile,
        streams_raw=REQ("subscriptions",
                        validator=check_list(check_dict([['name', check_string]]))),
        invite_only=REQ(validator=check_bool, default=False),
        announce=REQ(validator=check_bool, default=False),
        principals=REQ(validator=check_list(check_string), default=None),
        authorization_errors_fatal=REQ(validator=check_bool, default=True)):
    """Subscribe the requesting user (or the users named in `principals`)
    to the requested streams, creating any that don't exist yet.

    On success, returns a payload mapping each subscriber's email to the
    streams they were newly subscribed to ("subscribed") or were already
    on ("already_subscribed"); also sends notification messages about the
    new subscriptions / newly created streams.
    """
    if not user_profile.can_create_streams():
        return json_error('User cannot create streams.')

    # Validate and normalize the requested stream names up front so we
    # fail before creating anything.
    stream_names = []
    for stream in streams_raw:
        stream_name = stream["name"].strip()
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            return json_error("Stream name (%s) too long." % (stream_name, ))
        if not valid_stream_name(stream_name):
            return json_error("Invalid stream name (%s)." % (stream_name, ))
        stream_names.append(stream_name)

    existing_streams, created_streams = \
        list_to_streams(stream_names, user_profile, autocreate=True,
                        invite_only=invite_only)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error("Unable to access stream (%s)."
                          % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if principals is not None:
        if user_profile.realm.domain == 'mit.edu' and not all(
                stream.invite_only for stream in streams):
            return json_error(
                "You can only invite other mit.edu users to invite-only streams.")
        subscribers = set(
            principal_to_user_profile(user_profile, principal)
            for principal in principals)
    else:
        subscribers = set([user_profile])

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers)

    result = dict(subscribed=defaultdict(list),
                  already_subscribed=defaultdict(list))
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    private_streams = dict(
        (stream.name, stream.invite_only) for stream in streams)
    bots = dict(
        (subscriber.email, subscriber.is_bot) for subscriber in subscribers)

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if principals and result["subscribed"]:
        # .items() rather than the Python-2-only .iteritems(), so this
        # code runs under both Python 2 and Python 3 (the rest of this
        # file already contains Python 3-style code).
        for email, subscriptions in result["subscribed"].items():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue
            if len(subscriptions) == 1:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the%s stream [%s](%s)." % (
                           user_profile.full_name,
                           " **invite-only**" if private_streams[subscriptions[0]] else "",
                           subscriptions[0],
                           stream_link(subscriptions[0]),
                       ))
            else:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the following streams: \n\n"
                       % (user_profile.full_name, ))
                for stream in subscriptions:
                    msg += "* [%s](%s)%s\n" % (
                        stream, stream_link(stream),
                        " (**invite-only**)" if private_streams[stream] else "")

            if len([s for s in subscriptions if not private_streams[s]]) > 0:
                msg += "\nYou can see historical content on a non-invite-only stream by narrowing to it."
            notifications.append(
                internal_prep_message(settings.NOTIFICATION_BOT, "private",
                                      email, "", msg))

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.notifications_stream
        if notifications_stream is not None:
            if len(created_streams) > 1:
                stream_msg = "the following streams: %s" % \
                    (", ".join('`%s`' % (s.name,) for s in created_streams),)
            else:
                stream_msg = "a new stream `%s`" % (created_streams[0].name)
            stream_buttons = ' '.join(
                stream_button(s.name) for s in created_streams)
            msg = ("%s just created %s. %s" % (user_profile.full_name,
                                               stream_msg, stream_buttons))
            notifications.append(
                internal_prep_message(settings.NOTIFICATION_BOT, "stream",
                                      notifications_stream.name, "Streams", msg,
                                      realm=notifications_stream.realm))
        else:
            msg = ("Hi there! %s just created a new stream '%s'. %s"
                   % (user_profile.full_name, created_streams[0].name,
                      stream_button(created_streams[0].name)))
            for realm_user_dict in get_active_user_dicts_in_realm(
                    user_profile.realm):
                # Don't announce to yourself or to people you explicitly added
                # (who will get the notification above instead).
                #
                # Bug fix: guard against `principals` being None (its
                # default); the old `email in principals` raised TypeError
                # whenever announce was set without an explicit principals
                # list.
                if principals is not None and realm_user_dict['email'] in principals:
                    continue
                if realm_user_dict['email'] == user_profile.email:
                    continue
                notifications.append(
                    internal_prep_message(settings.NOTIFICATION_BOT, "private",
                                          realm_user_dict['email'], "", msg))

    if len(notifications) > 0:
        do_send_messages(notifications)

    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [
            stream.name for stream in unauthorized_streams
        ]
    return json_success(result)
def json_change_notify_settings(
        request, user_profile,
        enable_stream_desktop_notifications=REQ(validator=check_bool, default=None),
        enable_stream_sounds=REQ(validator=check_bool, default=None),
        enable_desktop_notifications=REQ(validator=check_bool, default=None),
        enable_sounds=REQ(validator=check_bool, default=None),
        enable_offline_email_notifications=REQ(validator=check_bool, default=None),
        enable_offline_push_notifications=REQ(validator=check_bool, default=None),
        enable_online_push_notifications=REQ(validator=check_bool, default=None),
        enable_digest_emails=REQ(validator=check_bool, default=None),
        pm_content_in_desktop_notifications=REQ(validator=check_bool, default=None)):
    # type: (HttpRequest, UserProfile, Optional[bool], Optional[bool], Optional[bool], Optional[bool], Optional[bool], Optional[bool], Optional[bool], Optional[bool], Optional[bool]) -> HttpResponse
    """Update any notification settings the client supplied.

    Parameters left at their default of None are untouched.  Each setting
    that actually changed is applied via its do_change_* action and echoed
    back in the response payload.
    """
    result = {}

    # (attribute name, requested value, setter) triples; the attribute
    # name doubles as the key used in the JSON response.  Covers the
    # stream notification settings first, then PM/@-mention and the rest.
    changes = [
        ('enable_stream_desktop_notifications',
         enable_stream_desktop_notifications,
         do_change_enable_stream_desktop_notifications),
        ('enable_stream_sounds', enable_stream_sounds,
         do_change_enable_stream_sounds),
        ('enable_desktop_notifications', enable_desktop_notifications,
         do_change_enable_desktop_notifications),
        ('enable_sounds', enable_sounds, do_change_enable_sounds),
        ('enable_offline_email_notifications',
         enable_offline_email_notifications,
         do_change_enable_offline_email_notifications),
        ('enable_offline_push_notifications',
         enable_offline_push_notifications,
         do_change_enable_offline_push_notifications),
        ('enable_online_push_notifications',
         enable_online_push_notifications,
         do_change_enable_online_push_notifications),
        ('enable_digest_emails', enable_digest_emails,
         do_change_enable_digest_emails),
        ('pm_content_in_desktop_notifications',
         pm_content_in_desktop_notifications,
         do_change_pm_content_in_desktop_notifications),
    ]

    for name, requested, setter in changes:
        # Skip settings the client didn't send or that already hold the
        # requested value; getattr is evaluated lazily, right before the
        # comparison, just as the original chain of if-statements did.
        if requested is None or getattr(user_profile, name) == requested:
            continue
        setter(user_profile, requested)
        result[name] = requested

    return json_success(result)
def json_change_notify_settings(
        request, user_profile,
        enable_stream_desktop_notifications=REQ(validator=check_bool, default=None),
        enable_stream_sounds=REQ(validator=check_bool, default=None),
        enable_desktop_notifications=REQ(validator=check_bool, default=None),
        enable_sounds=REQ(validator=check_bool, default=None),
        enable_offline_email_notifications=REQ(validator=check_bool, default=None),
        enable_offline_push_notifications=REQ(validator=check_bool, default=None),
        enable_digest_emails=REQ(validator=check_bool, default=None)):
    """Update any notification settings the client supplied.

    Parameters left at their default of None are untouched; each changed
    setting is applied via its do_change_* action and echoed back in the
    response payload.
    """
    result = {}

    # (attribute name, requested value, setter) triples; the attribute
    # name doubles as the key used in the JSON response.  Stream settings
    # first, then PM/@-mention and digest settings.
    changes = [
        ('enable_stream_desktop_notifications',
         enable_stream_desktop_notifications,
         do_change_enable_stream_desktop_notifications),
        ('enable_stream_sounds', enable_stream_sounds,
         do_change_enable_stream_sounds),
        ('enable_desktop_notifications', enable_desktop_notifications,
         do_change_enable_desktop_notifications),
        ('enable_sounds', enable_sounds, do_change_enable_sounds),
        ('enable_offline_email_notifications',
         enable_offline_email_notifications,
         do_change_enable_offline_email_notifications),
        ('enable_offline_push_notifications',
         enable_offline_push_notifications,
         do_change_enable_offline_push_notifications),
        ('enable_digest_emails', enable_digest_emails,
         do_change_enable_digest_emails),
    ]

    for name, requested, setter in changes:
        # Skip settings the client didn't send or that already hold the
        # requested value (getattr is evaluated lazily, like the original
        # if-chain did).
        if requested is None or getattr(user_profile, name) == requested:
            continue
        setter(user_profile, requested)
        result[name] = requested

    return json_success(result)
def json_fetch_raw_message(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    """Return the raw (unrendered markdown) content of a single message.

    access_message enforces that user_profile may read the message; it
    also returns the UserMessage row, which we don't need here.
    """
    message = access_message(user_profile, message_id)[0]
    return json_success({"raw_content": message.content})
def api_github_landing(
        request, user_profile, event=REQ(),
        payload=REQ(validator=check_dict([])),
        branches=REQ(default=''),
        stream=REQ(default=''),
        version=REQ(converter=to_non_negative_int, default=1),
        commit_stream=REQ(default=''),
        issue_stream=REQ(default=''),
        exclude_pull_requests=REQ(converter=flexible_boolean, default=False),
        exclude_issues=REQ(converter=flexible_boolean, default=False),
        exclude_commits=REQ(converter=flexible_boolean, default=False),
        emphasize_branch_in_topic=REQ(converter=flexible_boolean, default=False),
):
    # type: (HttpRequest, UserProfile, Text, Mapping[Text, Any], Text, Text, int, Text, Text, bool, bool, bool, bool) -> HttpResponse
    """Legacy GitHub webhook endpoint: turn a GitHub event payload into a
    Zulip stream message.

    Events can be filtered out via the exclude_* flags and the `branches`
    whitelist; `version` selects between the v1 and v2 payload formatters.
    """
    repository = payload['repository']

    # Special hook for capturing event data. If we see our special test repo, log the payload from github.
    try:
        if is_test_repository(repository) and settings.PRODUCTION:
            with open('/var/log/zulip/github-payloads', 'a') as f:
                f.write(
                    ujson.dumps({
                        'event': event,
                        'payload': payload,
                        'branches': branches,
                        'stream': stream,
                        'version': version,
                        'commit_stream': commit_stream,
                        'issue_stream': issue_stream,
                        'exclude_pull_requests': exclude_pull_requests,
                        'exclude_issues': exclude_issues,
                        'exclude_commits': exclude_commits,
                        'emphasize_branch_in_topic': emphasize_branch_in_topic,
                    }))
                f.write('\n')
    except Exception:
        # Logging the test payload is best-effort; never fail the webhook
        # because of it.
        logging.exception('Error while capturing Github event')

    if not stream:
        stream = 'commits'

    short_ref = re.sub(r'^refs/heads/', '', payload.get('ref', ''))
    kwargs = dict()

    if emphasize_branch_in_topic and short_ref:
        kwargs['topic_focus'] = short_ref

    allowed_events = set()
    if not exclude_pull_requests:
        allowed_events.add('pull_request')
    if not exclude_issues:
        allowed_events.add('issues')
        allowed_events.add('issue_comment')
    if not exclude_commits:
        allowed_events.add('push')
        allowed_events.add('commit_comment')

    if event not in allowed_events:
        return json_success()

    # We filter issue_comment events for issue creation events
    if event == 'issue_comment' and payload['action'] != 'created':
        return json_success()

    if event == 'push':
        # If we are given a whitelist of branches, then we silently ignore
        # any push notification on a branch that is not in our whitelist.
        # Bug fix: the separator pattern must be a raw string so that \s
        # is a regex escape, not an (invalid, py3.6+-deprecated) Python
        # string escape.
        if branches and short_ref not in re.split(r'[\s,;|]+', branches):
            return json_success()

    # Map payload to the handler with the right version
    if version == 2:
        target_stream, subject, content = api_github_v2(
            user_profile, event, payload, branches, stream, commit_stream,
            issue_stream, **kwargs)
    else:
        target_stream, subject, content = api_github_v1(
            user_profile, event, payload, branches, stream, **kwargs)

    request.client = get_client('ZulipGitHubWebhook')
    return send_message_backend(request, user_profile,
                                message_type_name='stream',
                                message_to=[target_stream],
                                forged=False, subject_name=subject,
                                message_content=content)
def mark_hotspot_as_read(request, user, hotspot=REQ(validator=check_string)):
    # type: (HttpRequest, UserProfile, str) -> HttpResponse
    """Record that `user` has dismissed the named onboarding hotspot.

    Rejects hotspot names that are not in ALL_HOTSPOTS.
    """
    if hotspot in ALL_HOTSPOTS:
        do_mark_hotspot_as_read(user, hotspot)
        return json_success()
    return json_error(_('Unknown hotspot: %s') % (hotspot,))
def send_message_backend(request, user_profile,
                         message_type_name = REQ('type'),
                         message_to = REQ('to', converter=extract_recipients, default=[]),
                         forged = REQ(default=False),
                         subject_name = REQ('subject', lambda x: x.strip(), None),
                         message_content = REQ('content'),
                         realm_str = REQ('realm_str', default=None),
                         local_id = REQ(default=None),
                         queue_id = REQ(default=None)):
    # type: (HttpRequest, UserProfile, Text, List[Text], bool, Optional[Text], Text, Optional[Text], Optional[Text], Optional[Text]) -> HttpResponse
    """Send a stream or private message on behalf of `user_profile`.

    Supports forging (API super-users only), cross-realm sends by
    super-users via `realm_str`, and the zephyr/irc/jabber mirroring
    clients, which may substitute a different sender.  Returns the new
    message's id on success.
    """
    client = request.client
    is_super_user = request.user.is_api_super_user
    # Only API super-users (e.g. mirroring bots) may forge the sender.
    if forged and not is_super_user:
        return json_error(_("User not authorized for this query"))

    realm = None
    if realm_str and realm_str != user_profile.realm.string_id:
        if not is_super_user:
            # The email gateway bot needs to be able to send messages in
            # any realm.
            return json_error(_("User not authorized for this query"))
        realm = get_realm(realm_str)
        if not realm:
            return json_error(_("Unknown realm %s") % (realm_str,))

    if client.name in ["zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror"]:
        # Here's how security works for mirroring:
        #
        # For private messages, the message must be (1) both sent and
        # received exclusively by users in your realm, and (2)
        # received by the forwarding user.
        #
        # For stream messages, the message must be (1) being forwarded
        # by an API superuser for your realm and (2) being sent to a
        # mirrored stream (any stream for the Zephyr and Jabber
        # mirrors, but only streams with names starting with a "#" for
        # IRC mirrors)
        #
        # The security checks are split between the below code
        # (especially create_mirrored_message_users which checks the
        # same-realm constraint) and recipient_for_emails (which
        # checks that PMs are received by the forwarding user)
        if "sender" not in request.POST:
            return json_error(_("Missing sender"))
        if message_type_name != "private" and not is_super_user:
            return json_error(_("User not authorized for this query"))
        # Resolve the forged "sender" POST parameter into a UserProfile
        # (creating mirror dummy users as needed).
        (valid_input, mirror_sender) = \
            create_mirrored_message_users(request, user_profile, message_to)
        if not valid_input:
            return json_error(_("Invalid mirrored message"))
        if client.name == "zephyr_mirror" and not user_profile.realm.is_zephyr_mirror_realm:
            return json_error(_("Invalid mirrored realm"))
        if (client.name == "irc_mirror" and message_type_name != "private" and
                not message_to[0].startswith("#")):
            return json_error(_("IRC stream names must start with #"))
        sender = mirror_sender
    else:
        sender = user_profile

    ret = check_send_message(sender, client, message_type_name, message_to,
                             subject_name, message_content, forged=forged,
                             forged_timestamp = request.POST.get('time'),
                             forwarder_user_profile=user_profile, realm=realm,
                             local_id=local_id, sender_queue_id=queue_id)
    return json_success({"id": ret})
def update_display_settings_backend(
        request: HttpRequest, user_profile: UserProfile,
        twenty_four_hour_time: Optional[bool] = REQ(validator=check_bool, default=None),
        dense_mode: Optional[bool] = REQ(validator=check_bool, default=None),
        starred_message_counts: Optional[bool] = REQ(validator=check_bool, default=None),
        fluid_layout_width: Optional[bool] = REQ(validator=check_bool, default=None),
        high_contrast_mode: Optional[bool] = REQ(validator=check_bool, default=None),
        night_mode: Optional[bool] = REQ(validator=check_bool, default=None),
        translate_emoticons: Optional[bool] = REQ(validator=check_bool, default=None),
        # Annotation fix: this is a language-code string (validated with
        # check_string), not a bool as previously annotated.
        default_language: Optional[str] = REQ(validator=check_string, default=None),
        left_side_userlist: Optional[bool] = REQ(validator=check_bool, default=None),
        emojiset: Optional[str] = REQ(validator=check_string, default=None),
        demote_inactive_streams: Optional[int] = REQ(validator=check_int, default=None),
        timezone: Optional[str] = REQ(validator=check_string, default=None)
) -> HttpResponse:
    """Update any subset of the user's display settings.

    Parameters left at None are untouched.  Values are validated against
    the sets of legal languages / timezones / emojisets / demote choices;
    each changed setting is applied and echoed back in the response.
    """
    if (default_language is not None and
            default_language not in get_available_language_codes()):
        raise JsonableError(_("Invalid language '%s'") % (default_language, ))

    if (timezone is not None and timezone not in get_all_timezones()):
        raise JsonableError(_("Invalid timezone '%s'") % (timezone, ))

    if (emojiset is not None and
            emojiset not in UserProfile.emojiset_choices()):
        raise JsonableError(_("Invalid emojiset '%s'") % (emojiset, ))

    if (demote_inactive_streams is not None and
            demote_inactive_streams not in UserProfile.DEMOTE_STREAMS_CHOICES):
        raise JsonableError(
            _("Invalid setting value '%s'") % (demote_inactive_streams, ))

    # Snapshot the request arguments via locals(), keeping only names
    # that correspond to real user display-setting properties.
    request_settings = {
        k: v
        for k, v in list(locals().items())
        if k in user_profile.property_types
    }
    result = {}  # type: Dict[str, Any]
    for k, v in list(request_settings.items()):
        # Only touch settings that were supplied and actually changed.
        if v is not None and getattr(user_profile, k) != v:
            do_set_user_display_setting(user_profile, k, v)
            result[k] = v

    return json_success(result)
def json_change_notify_settings(
        request: HttpRequest, user_profile: UserProfile,
        enable_stream_desktop_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        enable_stream_email_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        enable_stream_push_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        enable_stream_sounds: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
        enable_desktop_notifications: Optional[bool] = REQ(validator=check_bool,
                                                           default=None),
        enable_sounds: Optional[bool] = REQ(validator=check_bool, default=None),
        enable_offline_email_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        enable_offline_push_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        enable_online_push_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        enable_digest_emails: Optional[bool] = REQ(validator=check_bool,
                                                   default=None),
        message_content_in_email_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        pm_content_in_desktop_notifications: Optional[bool] = REQ(
            validator=check_bool, default=None),
        realm_name_in_notifications: Optional[bool] = REQ(validator=check_bool,
                                                          default=None)
) -> HttpResponse:
    """Update any notification settings the client supplied.

    Parameters left at None are untouched; each changed setting is
    applied via do_change_notification_settings and echoed back in the
    response payload.
    """
    result = {}

    # Snapshot the request arguments, keeping only names that are real
    # notification settings.  Note: locals() sits in the comprehension's
    # outermost iterable, which is evaluated in this function's scope —
    # it must stay there to see the parameters.
    requested = {
        name: value
        for name, value in locals().items()
        if name in user_profile.notification_setting_types
    }

    for name, value in requested.items():
        # Skip settings that weren't supplied or already hold the value.
        if value is None or getattr(user_profile, name) == value:
            continue
        do_change_notification_settings(user_profile, name, value)
        result[name] = value

    return json_success(result)
def report_error(request, deployment,
                 type=REQ(),
                 report=REQ(validator=check_dict([]))):
    # type: (HttpRequest, Deployment, text_type, Dict[str, Any]) -> HttpResponse
    """Accept an error report from a remote deployment and forward it.

    `type` (which shadows the builtin, but is part of the request API and
    so cannot be renamed) categorizes the report; `report` is an
    arbitrary dict payload passed through to do_report_error.
    """
    return do_report_error(deployment.name, type, report)
def api_stripe_webhook(request, user_profile,
                       payload=REQ(argument_type='body'),
                       stream=REQ(default='test'),
                       topic=REQ(default=None)):
    # type: (HttpRequest, UserProfile, Dict[str, Any], Text, Optional[Text]) -> HttpResponse
    """Turn a Stripe webhook event into a Zulip stream message.

    Handles charge, customer (incl. subscription), invoice, order and
    transfer events; unknown event types and payloads missing expected
    keys produce a JSON error response.
    """
    body = None
    event_type = payload["type"]
    try:
        data_object = payload["data"]["object"]
        if event_type.startswith('charge'):
            charge_url = "https://dashboard.stripe.com/payments/{}"
            amount_string = amount(payload["data"]["object"]["amount"],
                                   payload["data"]["object"]["currency"])
            if event_type.startswith('charge.dispute'):
                charge_id = data_object["charge"]
                link = charge_url.format(charge_id)
                body_template = "A charge dispute for **{amount}** has been {rest}.\n"\
                                "The charge in dispute {verb} **[{charge}]({link})**."
                if event_type == "charge.dispute.closed":
                    rest = "closed as **{}**".format(data_object['status'])
                    verb = 'was'
                else:
                    rest = "created"
                    verb = 'is'
                body = body_template.format(amount=amount_string, rest=rest,
                                            verb=verb, charge=charge_id,
                                            link=link)
            else:
                charge_id = data_object["id"]
                link = charge_url.format(charge_id)
                body_template = "A charge with id **[{charge_id}]({link})** for **{amount}** has {verb}."
                if event_type == "charge.failed":
                    verb = "failed"
                else:
                    verb = "succeeded"
                body = body_template.format(charge_id=charge_id, link=link,
                                            amount=amount_string, verb=verb)
            if topic is None:
                topic = "Charge {}".format(charge_id)
        elif event_type.startswith('customer'):
            object_id = data_object["id"]
            if event_type.startswith('customer.subscription'):
                link = "https://dashboard.stripe.com/subscriptions/{}".format(object_id)
                if event_type == "customer.subscription.created":
                    amount_string = amount(data_object["plan"]["amount"],
                                           data_object["plan"]["currency"])
                    body_template = "A new customer subscription for **{amount}** " \
                                    "every **{interval}** has been created.\n" \
                                    "The subscription has id **[{id}]({link})**."
                    body = body_template.format(
                        amount=amount_string,
                        interval=data_object['plan']['interval'],
                        id=object_id,
                        link=link
                    )
                elif event_type == "customer.subscription.deleted":
                    body_template = "The customer subscription with id **[{id}]({link})** was deleted."
                    body = body_template.format(id=object_id, link=link)
                else:
                    # customer.subscription.trial_will_end
                    DAY = 60 * 60 * 24  # seconds in a day
                    # days_left should always be three according to
                    # https://stripe.com/docs/api/python#event_types, but do the
                    # computation just to be safe.
                    days_left = int((data_object["trial_end"] - time.time() + DAY//2) // DAY)
                    body_template = "The customer subscription trial with id **[{id}]({link})** will end in {days} days."
                    body = body_template.format(id=object_id, link=link,
                                                days=days_left)
            else:
                link = "https://dashboard.stripe.com/customers/{}".format(object_id)
                body_template = "{beginning} customer with id **[{id}]({link})** {rest}."
                if event_type == "customer.created":
                    beginning = "A new"
                    if data_object["email"] is None:
                        rest = "has been created"
                    else:
                        rest = "and email **{}** has been created".format(data_object['email'])
                else:
                    beginning = "A"
                    rest = "has been deleted"
                body = body_template.format(beginning=beginning, id=object_id,
                                            link=link, rest=rest)
            if topic is None:
                topic = "Customer {}".format(object_id)
        elif event_type == "invoice.payment_failed":
            object_id = data_object['id']
            link = "https://dashboard.stripe.com/invoices/{}".format(object_id)
            amount_string = amount(data_object["amount_due"],
                                   data_object["currency"])
            body_template = "An invoice payment on invoice with id **[{id}]({link})** and "\
                            "with **{amount}** due has failed."
            body = body_template.format(id=object_id, amount=amount_string,
                                        link=link)
            if topic is None:
                topic = "Invoice {}".format(object_id)
        elif event_type.startswith('order'):
            object_id = data_object['id']
            link = "https://dashboard.stripe.com/orders/{}".format(object_id)
            amount_string = amount(data_object["amount"],
                                   data_object["currency"])
            body_template = "{beginning} order with id **[{id}]({link})** for **{amount}** has {end}."
            if event_type == "order.payment_failed":
                beginning = "An order payment on"
                end = "failed"
            elif event_type == "order.payment_succeeded":
                beginning = "An order payment on"
                end = "succeeded"
            else:
                beginning = "The"
                end = "been updated"
            body = body_template.format(beginning=beginning, id=object_id,
                                        link=link, amount=amount_string,
                                        end=end)
            if topic is None:
                topic = "Order {}".format(object_id)
        elif event_type.startswith('transfer'):
            object_id = data_object['id']
            link = "https://dashboard.stripe.com/transfers/{}".format(object_id)
            amount_string = amount(data_object["amount"],
                                   data_object["currency"])
            body_template = "The transfer with description **{description}** and id **[{id}]({link})** " \
                            "for amount **{amount}** has {end}."
            if event_type == "transfer.failed":
                end = 'failed'
            else:
                end = "been paid"
            body = body_template.format(
                description=data_object['description'],
                id=object_id,
                link=link,
                amount=amount_string,
                end=end
            )
            if topic is None:
                topic = "Transfer {}".format(object_id)
    except KeyError as e:
        # i18n fix: translate the constant template, *then* interpolate.
        # Formatting before `_()` makes the catalog lookup key the already
        # interpolated string, so it could never match a translation.
        return json_error(_("Missing key {} in JSON").format(str(e)))

    if body is None:
        # Same i18n fix as above.
        return json_error(_("We don't support {} event").format(event_type))

    check_send_message(user_profile, request.client, 'stream', [stream],
                       topic, body)
    return json_success()
def zcommand_backend(request: HttpRequest, user_profile: UserProfile,
                     command: str = REQ("command")) -> HttpResponse:
    """Run a slash-style "zcommand" on behalf of the user and return its
    result payload."""
    response_data = process_zcommands(command, user_profile)
    return json_success(response_data)
def send_message_backend(
        request: HttpRequest, user_profile: UserProfile,
        message_type_name: str = REQ("type"),
        req_to: Optional[str] = REQ("to", default=None),
        forged_str: Optional[str] = REQ("forged", default=None,
                                        documentation_pending=True),
        topic_name: Optional[str] = REQ_topic(),
        message_content: str = REQ("content"),
        widget_content: Optional[str] = REQ(default=None,
                                            documentation_pending=True),
        realm_str: Optional[str] = REQ("realm_str", default=None,
                                       documentation_pending=True),
        local_id: Optional[str] = REQ(default=None),
        queue_id: Optional[str] = REQ(default=None),
        delivery_type: str = REQ("delivery_type", default="send_now",
                                 documentation_pending=True),
        defer_until: Optional[str] = REQ("deliver_at", default=None,
                                         documentation_pending=True),
        tz_guess: Optional[str] = REQ("tz_guess", default=None,
                                      documentation_pending=True),
) -> HttpResponse:
    """Send (or schedule) a stream or private message for `user_profile`.

    Handles sender forging (users with can_forge_sender only), the
    zephyr/irc/jabber mirroring clients (which substitute a different
    sender), and deferred delivery via delivery_type/defer_until.
    Returns the new message's id on success.
    """
    # If req_to is None, then we default to an
    # empty list of recipients.
    message_to: Union[Sequence[int], Sequence[str]] = []

    if req_to is not None:
        if message_type_name == "stream":
            stream_indicator = extract_stream_indicator(req_to)

            # For legacy reasons check_send_message expects
            # a list of streams, instead of a single stream.
            #
            # Also, mypy can't detect that a single-item
            # list populated from a Union[int, str] is actually
            # a Union[Sequence[int], Sequence[str]].
            #
            # Both branches build the same list on purpose; the
            # isinstance split exists only so mypy narrows the
            # element type in each arm.
            if isinstance(stream_indicator, int):
                message_to = [stream_indicator]
            else:
                message_to = [stream_indicator]
        else:
            message_to = extract_private_recipients(req_to)

    # Temporary hack: We're transitioning `forged` from accepting
    # `yes` to accepting `true` like all of our normal booleans.
    forged = forged_str is not None and forged_str in ["yes", "true"]

    client = request.client
    can_forge_sender = request.user.can_forge_sender
    if forged and not can_forge_sender:
        return json_error(_("User not authorized for this query"))

    realm = None
    if realm_str and realm_str != user_profile.realm.string_id:
        # The realm_str parameter does nothing, because it has to match
        # the user's realm - but we keep it around for backward compatibility.
        return json_error(_("User not authorized for this query"))

    if client.name in [
            "zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror"
    ]:
        # Here's how security works for mirroring:
        #
        # For private messages, the message must be (1) both sent and
        # received exclusively by users in your realm, and (2)
        # received by the forwarding user.
        #
        # For stream messages, the message must be (1) being forwarded
        # by an API superuser for your realm and (2) being sent to a
        # mirrored stream.
        #
        # The most important security checks are in
        # `create_mirrored_message_users` below, which checks the
        # same-realm constraint.
        if "sender" not in request.POST:
            return json_error(_("Missing sender"))
        if message_type_name != "private" and not can_forge_sender:
            return json_error(_("User not authorized for this query"))

        # For now, mirroring only works with recipient emails, not for
        # recipient user IDs.
        if not all(isinstance(to_item, str) for to_item in message_to):
            return json_error(
                _("Mirroring not allowed with recipient user IDs"))

        # We need this manual cast so that mypy doesn't complain about
        # create_mirrored_message_users not being able to accept a Sequence[int]
        # type parameter.
        message_to = cast(Sequence[str], message_to)

        try:
            mirror_sender = create_mirrored_message_users(
                request, user_profile, message_to)
        except InvalidMirrorInput:
            return json_error(_("Invalid mirrored message"))

        if client.name == "zephyr_mirror" and not user_profile.realm.is_zephyr_mirror_realm:
            return json_error(
                _("Zephyr mirroring is not allowed in this organization"))
        sender = mirror_sender
    else:
        # A "sender" parameter from a non-mirroring client is an error.
        if "sender" in request.POST:
            return json_error(_("Invalid mirrored message"))
        sender = user_profile

    if (delivery_type == "send_later" or
            delivery_type == "remind") and defer_until is None:
        return json_error(
            _("Missing deliver_at in a request for delayed message delivery"))

    # Deferred delivery: hand off to the scheduler instead of sending now.
    if (delivery_type == "send_later" or
            delivery_type == "remind") and defer_until is not None:
        return handle_deferred_message(
            sender,
            client,
            message_type_name,
            message_to,
            topic_name,
            message_content,
            delivery_type,
            defer_until,
            tz_guess,
            forwarder_user_profile=user_profile,
            realm=realm,
        )

    ret = check_send_message(
        sender,
        client,
        message_type_name,
        message_to,
        topic_name,
        message_content,
        forged=forged,
        forged_timestamp=request.POST.get("time"),
        forwarder_user_profile=user_profile,
        realm=realm,
        local_id=local_id,
        sender_queue_id=queue_id,
        widget_content=widget_content,
    )
    return json_success({"id": ret})
def json_stream_exists(request, user_profile, stream=REQ,
                       autosubscribe=REQ(default=False)):
    """Report whether the named stream exists, optionally autosubscribing
    the user to it; thin wrapper delegating to stream_exists_backend.
    """
    # NOTE(review): `stream=REQ` is uncalled (no parentheses), unlike
    # `autosubscribe=REQ(default=False)`.  This presumably relies on the
    # REQ machinery accepting the bare REQ object as equivalent to
    # REQ() — confirm against the REQ/has_request_variables
    # implementation; if not, this parameter is never extracted.
    return stream_exists_backend(request, user_profile, stream, autosubscribe)
def get_events_backend(
        request: HttpRequest, user_profile: UserProfile,
        # user_client is intended only for internal Django=>Tornado requests
        # and thus shouldn't be documented for external use.
        user_client: Optional[Client] = REQ(converter=get_client, default=None,
                                            intentionally_undocumented=True),
        last_event_id: Optional[int] = REQ(converter=int, default=None),
        queue_id: Optional[str] = REQ(default=None),
        # apply_markdown, client_gravatar, all_public_streams, and various
        # other parameters are only used when registering a new queue via this
        # endpoint.  This is a feature used primarily by get_events_internal
        # and not expected to be used by third-party clients.
        apply_markdown: bool = REQ(default=False, validator=check_bool,
                                   intentionally_undocumented=True),
        client_gravatar: bool = REQ(default=False, validator=check_bool,
                                    intentionally_undocumented=True),
        slim_presence: bool = REQ(default=False, validator=check_bool,
                                  intentionally_undocumented=True),
        all_public_streams: bool = REQ(default=False, validator=check_bool,
                                       intentionally_undocumented=True),
        event_types: Optional[str] = REQ(default=None, validator=check_list(check_string),
                                         intentionally_undocumented=True),
        dont_block: bool = REQ(default=False, validator=check_bool),
        narrow: Iterable[Sequence[str]] = REQ(default=[], validator=check_list(None),
                                              intentionally_undocumented=True),
        lifespan_secs: int = REQ(default=0, converter=to_non_negative_int,
                                 intentionally_undocumented=True),
        bulk_message_deletion: bool = REQ(default=False, validator=check_bool,
                                          intentionally_undocumented=True)
) -> HttpResponse:
    """Long-polling events endpoint.

    Builds an events query from the request parameters, hands it to
    fetch_events, and either returns the available events immediately
    or marks the response asynchronous so Tornado long-polls the
    request until events arrive.  When no queue_id is supplied, the
    query also carries the data needed to register a brand-new event
    queue.
    """
    # Extract the Tornado handler from the request
    handler: AsyncDjangoHandler = request._tornado_handler

    # Fall back to the client recorded on the request when the internal
    # user_client override was not supplied.
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    events_query = dict(user_profile_id=user_profile.id,
                        queue_id=queue_id,
                        last_event_id=last_event_id,
                        event_types=event_types,
                        client_type_name=valid_user_client.name,
                        all_public_streams=all_public_streams,
                        lifespan_secs=lifespan_secs,
                        narrow=narrow,
                        dont_block=dont_block,
                        handler_id=handler.handler_id)

    if queue_id is None:
        # No existing queue: include everything needed to register one.
        events_query['new_queue_data'] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm_id,
            event_types=event_types,
            client_type_name=valid_user_client.name,
            apply_markdown=apply_markdown,
            client_gravatar=client_gravatar,
            slim_presence=slim_presence,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow,
            bulk_message_deletion=bulk_message_deletion)

    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

    if result["type"] == "async":
        # Mark this response with .asynchronous; this will result in
        # Tornado discarding the response and instead long-polling the
        # request.  See zulip_finish for more design details.
        handler._request = request
        response = json_success()
        response.asynchronous = True
        return response
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
def add_bot_backend(request, user_profile, full_name=REQ, short_name=REQ,
                    default_sending_stream=REQ(default=None),
                    default_events_register_stream=REQ(default=None),
                    default_all_public_streams=REQ(validator=check_bool, default=None)):
    """Create a new default bot owned by user_profile.

    The bot's email is derived from short_name ("<short_name>-bot@<domain>").
    An uploaded file, if any, becomes the bot's avatar; otherwise the
    avatar falls back to Gravatar.  Default streams, when given, must be
    public or already subscribed-to by the owner.

    Returns the new bot's api_key, avatar URL and default-stream settings.

    Consistency fix: user-facing error strings are now wrapped in _()
    for translation, matching the rest of the file's endpoints.
    """
    short_name += "-bot"
    email = short_name + "@" + user_profile.realm.domain
    form = CreateUserForm({'full_name': full_name, 'email': email})
    if not form.is_valid():
        # We validate client-side as well
        return json_error(_('Bad name or username'))
    try:
        get_user_profile_by_email(email)
        return json_error(_("Username already in use"))
    except UserProfile.DoesNotExist:
        pass

    if len(request.FILES) == 0:
        avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
    elif len(request.FILES) != 1:
        return json_error(_("You may only upload one file at a time"))
    else:
        user_file = list(request.FILES.values())[0]
        upload_avatar_image(user_file, user_profile, email)
        avatar_source = UserProfile.AVATAR_FROM_USER

    # Resolve stream names to Stream objects; a private default stream is
    # only allowed if the owner is subscribed to it.
    if default_sending_stream is not None:
        default_sending_stream = stream_or_none(default_sending_stream, user_profile.realm)
    if default_sending_stream and not default_sending_stream.is_public() and not \
            subscribed_to_stream(user_profile, default_sending_stream):
        return json_error(_('Insufficient permission'))

    if default_events_register_stream is not None:
        default_events_register_stream = stream_or_none(default_events_register_stream,
                                                        user_profile.realm)
    if default_events_register_stream and not default_events_register_stream.is_public() and not \
            subscribed_to_stream(user_profile, default_events_register_stream):
        return json_error(_('Insufficient permission'))

    bot_profile = do_create_user(email=email, password='',
                                 realm=user_profile.realm, full_name=full_name,
                                 short_name=short_name, active=True,
                                 bot_type=UserProfile.DEFAULT_BOT,
                                 bot_owner=user_profile,
                                 avatar_source=avatar_source,
                                 default_sending_stream=default_sending_stream,
                                 default_events_register_stream=default_events_register_stream,
                                 default_all_public_streams=default_all_public_streams)
    json_result = dict(
        api_key=bot_profile.api_key,
        avatar_url=avatar_url(bot_profile),
        default_sending_stream=get_stream_name(bot_profile.default_sending_stream),
        default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream),
        default_all_public_streams=bot_profile.default_all_public_streams,
    )
    return json_success(json_result)
def json_change_settings(request, user_profile, full_name=REQ(default=""),
                         email=REQ(default=""),
                         old_password=REQ(default=""),
                         new_password=REQ(default=""),
                         confirm_password=REQ(default="")):
    # type: (HttpRequest, UserProfile, Text, Text, Text, Text, Text) -> HttpResponse
    """Change the requesting user's name, email and/or password.

    Password changes require the old password and a matching
    confirmation.  Email changes start a confirmation-link flow rather
    than taking effect immediately; name changes are applied directly
    unless the realm disables them.

    Bug fix: the email-change check now compares against the stripped
    new_email (as the newer json_change_settings variant does), so a
    whitespace-padded copy of the current address no longer triggers a
    spurious email-change flow.
    """
    if not (full_name or new_password or email):
        return json_error(_("No new data supplied"))
    if new_password != "" or confirm_password != "":
        if new_password != confirm_password:
            return json_error(_("New password must match confirmation password!"))
        if not authenticate(username=user_profile.email, password=old_password):
            return json_error(_("Wrong password!"))
        do_change_password(user_profile, new_password)
        # In Django 1.10, password changes invalidates sessions, see
        # https://docs.djangoproject.com/en/1.10/topics/auth/default/#session-invalidation-on-password-change
        # for details. To avoid this logging the user out of their own
        # session (which would provide a confusing UX at best), we
        # update the session hash here.
        update_session_auth_hash(request, user_profile)
        # We also save the session to the DB immediately to mitigate
        # race conditions. In theory, there is still a race condition
        # and to completely avoid it we will have to use some kind of
        # mutex lock in `django.contrib.auth.get_user` where session
        # is verified. To make that lock work we will have to control
        # the AuthenticationMiddleware which is currently controlled
        # by Django,
        request.session.save()

    result = {}  # type: Dict[str, Any]
    new_email = email.strip()
    # Compare against the stripped address so surrounding whitespace on
    # the current email does not look like a change.
    if user_profile.email != new_email and new_email != '':
        if user_profile.realm.email_changes_disabled:
            return json_error(_("Email address changes are disabled in this organization."))
        error, skipped = validate_email(user_profile, new_email)
        if error:
            return json_error(error)
        if skipped:
            return json_error(skipped)

        do_start_email_change_process(user_profile, new_email)
        result['account_email'] = _("Check your email for a confirmation link. ")

    if user_profile.full_name != full_name and full_name.strip() != "":
        if name_changes_disabled(user_profile.realm):
            # Failingly silently is fine -- they can't do it through the UI, so
            # they'd have to be trying to break the rules.
            pass
        else:
            # Note that check_change_full_name strips the passed name automatically
            result['full_name'] = check_change_full_name(user_profile, full_name, user_profile)

    return json_success(result)
def lookup_endpoints_for_user(request, email=REQ()):
    """Return the deployment endpoints for the realm that owns `email`.

    Responds with a 404 JSON error when no realm/deployment can be
    resolved for the address (surfaced as an AttributeError on the
    lookup chain).
    """
    try:
        deployment = realm_for_email(email).deployment
        return json_response(deployment.endpoints)
    except AttributeError:
        return json_error("Cannot determine endpoint for user.", status=404)
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             use_first_unread_anchor = REQ(default=False, converter=ujson.loads),
                             apply_markdown=REQ(default=True, converter=ujson.loads)):
    # type: (HttpRequest, UserProfile, int, int, int, Optional[List[Dict[str, Any]]], bool, bool) -> HttpResponse
    """Fetch a window of message history around an anchor message id.

    Builds a SQLAlchemy query over zerver_message/zerver_usermessage
    (narrowed by the optional `narrow` terms), fetches up to
    `num_before` messages at-or-below the anchor and `num_after`
    at-or-above it, then hydrates the resulting ids into rendered
    message dicts via the bulk cache, attaching per-user flags and any
    full-text-search match fields.
    """
    # Whether we may show messages the user never received (public
    # history); decided by ok_to_include_history from the narrow+realm.
    include_history = ok_to_include_history(narrow, user_profile.realm)

    if include_history and not use_first_unread_anchor:
        # Query zerver_message directly; no per-user flags available here.
        query = select([column("id").label("message_id")], None, "zerver_message")
        inner_msg_id_col = literal_column("zerver_message.id")
    elif narrow is None:
        # No narrow: the user's own usermessage rows suffice.
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       "zerver_usermessage")
        inner_msg_id_col = column("message_id")
    else:
        # TODO: Don't do this join if we're not doing a search
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       join("zerver_usermessage", "zerver_message",
                            literal_column("zerver_usermessage.message_id") ==
                            literal_column("zerver_message.id")))
        inner_msg_id_col = column("message_id")

    num_extra_messages = 1
    is_search = False

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term['operator'] == "is":
                verbose_operators.append("is:" + term['operand'])
            else:
                verbose_operators.append(term['operator'])
        request._log_data['extra'] = "[%s]" % (",".join(verbose_operators),)

        # Build the query for the narrow
        num_extra_messages = 0
        builder = NarrowBuilder(user_profile, inner_msg_id_col)
        search_term = None  # type: Optional[Dict[str, Any]]
        for term in narrow:
            if term['operator'] == 'search':
                if not is_search:
                    search_term = term
                    query = query.column("subject").column("rendered_content")
                    is_search = True
                else:
                    # Join the search operators if there are multiple of them
                    search_term['operand'] += ' ' + term['operand']
            else:
                query = builder.add_term(query, term)
        if is_search:
            # The (possibly merged) search term is applied last.
            query = builder.add_term(query, search_term)

    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message. If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages

    sa_conn = get_sqlalchemy_connection()
    if use_first_unread_anchor:
        # Replace the client-supplied anchor with the id of the first
        # unread message in this narrow (flags & read == 0).
        condition = column("flags").op("&")(UserMessage.flags.read.mask) == 0

        # We exclude messages on muted topics when finding the first unread
        # message in this narrow
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        if muting_conditions:
            condition = and_(condition, *muting_conditions)

        first_unread_query = query.where(condition)
        first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
        first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
        if len(first_unread_result) > 0:
            anchor = first_unread_result[0][0]
        else:
            # Nothing unread: use an id larger than any real message id so
            # the "before" window returns the newest messages.
            anchor = 10000000000000000

    before_query = None
    after_query = None
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        before_query = query.where(inner_msg_id_col <= before_anchor) \
                            .order_by(inner_msg_id_col.desc()).limit(num_before)
    if num_after != 0:
        after_query = query.where(inner_msg_id_col >= anchor) \
                           .order_by(inner_msg_id_col.asc()).limit(num_after)

    if num_before == 0 and num_after == 0:
        # This can happen when a narrow is specified.
        after_query = query.where(inner_msg_id_col == anchor)

    if before_query is not None:
        if after_query is not None:
            query = union_all(before_query.self_group(), after_query.self_group())
        else:
            query = before_query
    else:
        query = after_query

    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id").asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_old_messages */")
    query_result = list(sa_conn.execute(query).fetchall())

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history. The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it. We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    search_fields = dict()  # type: Dict[int, Dict[str, Text]]
    message_ids = []  # type: List[int]
    user_message_flags = {}  # type: Dict[int, List[str]]
    if include_history:
        message_ids = [row[0] for row in query_result]

        # TODO: This could be done with an outer join instead of two queries
        user_message_flags = dict((user_message.message_id, user_message.flags_list())
                                  for user_message in
                                  UserMessage.objects.filter(user_profile=user_profile,
                                                             message__id__in=message_ids))
        for row in query_result:
            message_id = row[0]
            if user_message_flags.get(message_id) is None:
                # The user never received this message; synthesize flags.
                user_message_flags[message_id] = ["read", "historical"]
            if is_search:
                (_, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)
    else:
        for row in query_result:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = parse_usermessage_flags(flags)

            message_ids.append(message_id)

            if is_search:
                (_, _, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)

    # Hydrate ids into full message dicts via the bulk cache; the cache
    # key depends on apply_markdown, so both renderings can coexist.
    cache_transformer = lambda row: MessageDict.build_dict_from_raw_db_row(row, apply_markdown)
    id_fetcher = lambda row: row['id']

    message_dicts = generic_bulk_cached_fetch(
        lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
        Message.get_raw_db_rows,
        message_ids,
        id_fetcher=id_fetcher,
        cache_transformer=cache_transformer,
        extractor=extract_message_dict,
        setter=stringify_message_dict)

    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)

    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)
def update_display_settings_backend(
        request: HttpRequest, user_profile: UserProfile,
        twenty_four_hour_time: Optional[bool]=REQ(validator=check_bool, default=None),
        dense_mode: Optional[bool]=REQ(validator=check_bool, default=None),
        starred_message_counts: Optional[bool]=REQ(validator=check_bool, default=None),
        fluid_layout_width: Optional[bool]=REQ(validator=check_bool, default=None),
        high_contrast_mode: Optional[bool]=REQ(validator=check_bool, default=None),
        night_mode: Optional[bool]=REQ(validator=check_bool, default=None),
        translate_emoticons: Optional[bool]=REQ(validator=check_bool, default=None),
        # Annotation corrected: this is a language-code string (validated
        # by check_string), not a bool.
        default_language: Optional[str]=REQ(validator=check_string, default=None),
        left_side_userlist: Optional[bool]=REQ(validator=check_bool, default=None),
        emojiset: Optional[str]=REQ(validator=check_string_in(emojiset_choices), default=None),
        demote_inactive_streams: Optional[int]=REQ(
            validator=check_int_in(UserProfile.DEMOTE_STREAMS_CHOICES), default=None),
        timezone: Optional[str]=REQ(validator=check_string_in(all_timezones),
                                    default=None)) -> HttpResponse:
    """Apply any supplied display settings to the user.

    Parameters left at None are untouched; each changed setting is
    written via do_set_user_display_setting and echoed back in the
    response dict.  Relies on locals() to collect the parameters, so
    parameter names must match user_profile.property_types keys.
    """
    # We can't use REQ for this widget because
    # get_available_language_codes requires provisioning to be
    # complete.
    if (default_language is not None and
            default_language not in get_available_language_codes()):
        raise JsonableError(_("Invalid default_language"))

    # Collect only the parameters that correspond to real display
    # settings; NOTE: this inspects locals(), so no extra locals may be
    # introduced above this point with setting-like names.
    request_settings = {k: v for k, v in list(locals().items())
                        if k in user_profile.property_types}
    result: Dict[str, Any] = {}
    for k, v in list(request_settings.items()):
        if v is not None and getattr(user_profile, k) != v:
            do_set_user_display_setting(user_profile, k, v)
            result[k] = v

    return json_success(result)
def update_message_backend(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int),
                           subject=REQ(default=None),
                           propagate_mode=REQ(default="change_one"),
                           content=REQ(default=None)):
    # type: (HttpRequest, UserProfile, int, Optional[Text], Optional[str], Optional[Text]) -> HttpResponse
    """Edit the topic (`subject`) and/or `content` of an existing message.

    Permission, realm-setting and edit-time-window checks happen here;
    the actual update (including propagation to other messages per
    propagate_mode) is done by do_update_message.  When the new content
    contains previewable links and URL embedding is enabled, an
    'embed_links' event is queued.

    Fix: corrected the grammar of the edit-window error message
    ("has past" -> "has passed").
    """
    if not user_profile.realm.allow_message_editing:
        return json_error(_("Your organization has turned off message editing."))

    try:
        message = Message.objects.select_related().get(id=message_id)
    except Message.DoesNotExist:
        raise JsonableError(_("Unknown message id"))

    # You only have permission to edit a message if:
    # 1. You sent it, OR:
    # 2. This is a topic-only edit for a (no topic) message, OR:
    # 3. This is a topic-only edit and you are an admin.
    if message.sender == user_profile:
        pass
    elif (content is None) and ((message.topic_name() == "(no topic)") or
                                user_profile.is_realm_admin):
        pass
    else:
        raise JsonableError(_("You don't have permission to edit this message"))

    # If there is a change to the content, check that it hasn't been too long.
    # Allow an extra 20 seconds since we potentially allow editing 15 seconds
    # past the limit, and in case there are network issues, etc. The 15 comes
    # from (min_seconds_to_edit + seconds_left_buffer) in message_edit.js; if
    # you change this value also change those two parameters in message_edit.js.
    edit_limit_buffer = 20
    if content is not None and user_profile.realm.message_content_edit_limit_seconds > 0:
        deadline_seconds = user_profile.realm.message_content_edit_limit_seconds + edit_limit_buffer
        if (now() - message.pub_date) > datetime.timedelta(seconds=deadline_seconds):
            raise JsonableError(_("The time limit for editing this message has passed"))

    if subject is None and content is None:
        return json_error(_("Nothing to change"))
    if subject is not None:
        subject = subject.strip()
        if subject == "":
            raise JsonableError(_("Topic can't be empty"))
    rendered_content = None
    links_for_embed = set()  # type: Set[Text]
    if content is not None:
        content = content.strip()
        if content == "":
            content = "(deleted)"
        content = truncate_body(content)

        # We exclude UserMessage.flags.historical rows since those
        # users did not receive the message originally, and thus
        # probably are not relevant for reprocessed alert_words,
        # mentions and similar rendering features. This may be a
        # decision we change in the future.
        ums = UserMessage.objects.filter(message=message.id,
                                         flags=~UserMessage.flags.historical)
        message_users = {get_user_profile_by_id(um.user_profile_id) for um in ums}

        # If rendering fails, the called code will raise a JsonableError.
        rendered_content = render_incoming_message(message,
                                                   content=content,
                                                   message_users=message_users)
        links_for_embed |= message.links_for_preview

    do_update_message(user_profile, message, subject, propagate_mode, content, rendered_content)
    if links_for_embed and getattr(settings, 'INLINE_URL_EMBED_PREVIEW', None):
        event_data = {'message_id': message.id,
                      'message_content': message.content,
                      'urls': links_for_embed}
        queue_json_publish('embed_links', event_data, lambda x: None)
    return json_success()
def json_change_notify_settings(
        request: HttpRequest,
        user_profile: UserProfile,
        enable_stream_desktop_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_stream_email_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_stream_push_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_stream_audible_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        wildcard_mentions_notify: Optional[bool]=REQ(validator=check_bool, default=None),
        notification_sound: Optional[str]=REQ(validator=check_string, default=None),
        enable_desktop_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_sounds: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_offline_email_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_offline_push_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_online_push_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_digest_emails: Optional[bool]=REQ(validator=check_bool, default=None),
        enable_login_emails: Optional[bool]=REQ(validator=check_bool, default=None),
        message_content_in_email_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        pm_content_in_desktop_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        desktop_icon_count_display: Optional[int]=REQ(validator=check_int, default=None),
        realm_name_in_notifications: Optional[bool]=REQ(validator=check_bool, default=None),
        presence_enabled: Optional[bool]=REQ(validator=check_bool, default=None),
) -> HttpResponse:
    """Apply any supplied notification settings to the user.

    Parameters left at None are untouched; each changed setting is
    written via do_change_notification_settings and echoed back in the
    response.  Collects parameters via locals(), so parameter names
    must match user_profile.notification_setting_types keys.
    """
    result = {}

    # Stream notification settings.
    if (notification_sound is not None and
            notification_sound not in get_available_notification_sounds()):
        raise JsonableError(_("Invalid notification sound '%s'") % (notification_sound,))

    # Only keep locals that correspond to actual notification settings;
    # no extra setting-named locals may be introduced before this line.
    req_vars = {k: v for k, v in list(locals().items())
                if k in user_profile.notification_setting_types}

    for k, v in list(req_vars.items()):
        if v is not None and getattr(user_profile, k) != v:
            do_change_notification_settings(user_profile, k, v)
            result[k] = v

    return json_success(result)
def get_chart_data(request, user_profile, chart_name=REQ(),
                   min_length=REQ(converter=to_non_negative_int, default=None),
                   start=REQ(converter=to_utc_datetime, default=None),
                   end=REQ(converter=to_utc_datetime, default=None)):
    # type: (HttpRequest, UserProfile, Text, Optional[int], Optional[datetime], Optional[datetime]) -> HttpResponse
    """Return time-series analytics data for the /stats page.

    chart_name selects which CountStat to query and how subgroups are
    labeled; start/end default to the realm's creation date and the
    last successful analytics fill respectively.  The response contains
    end_times, the stat frequency, per-realm and/or per-user series,
    and an optional display_order for the labels.
    """
    if chart_name == 'number_of_humans':
        stat = COUNT_STATS['realm_active_humans::day']
        tables = [RealmCount]
        subgroup_to_label = {None: 'human'}  # type: Dict[Optional[str], str]
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        tables = [RealmCount, UserCount]
        subgroup_to_label = {'false': 'human', 'true': 'bot'}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stat = COUNT_STATS['messages_sent:message_type:day']
        tables = [RealmCount, UserCount]
        subgroup_to_label = {'public_stream': 'Public streams',
                             'private_stream': 'Private streams',
                             'private_message': 'Private messages',
                             'huddle_message': 'Group private messages'}
        labels_sort_function = lambda data: sort_by_totals(data['realm'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stat = COUNT_STATS['messages_sent:client:day']
        tables = [RealmCount, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {str(id): name for id, name in
                             Client.objects.values_list('id', 'name')}
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name, ))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None and end is not None and start > end:
        raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
                            {'start': start, 'end': end})

    realm = user_profile.realm
    if start is None:
        start = realm.date_created
    if end is None:
        # Use the timestamp of the last completed analytics fill for
        # this stat as the implicit end of the range.
        end = last_successful_fill(stat.property)
    if end is None or start > end:
        logging.warning("User from realm %s attempted to access /stats, but the computed "
                        "start time: %s (creation time of realm) is later than the computed "
                        "end time: %s (last successful analytics update). Is the "
                        "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(_("No analytics data available. Please contact your server administrator."))

    end_times = time_range(start, end, stat.frequency, min_length)
    data = {'end_times': end_times, 'frequency': stat.frequency}
    for table in tables:
        if table == RealmCount:
            data['realm'] = get_time_series_by_subgroup(
                stat, RealmCount, realm.id, end_times, subgroup_to_label, include_empty_subgroups)
        if table == UserCount:
            data['user'] = get_time_series_by_subgroup(
                stat, UserCount, user_profile.id, end_times, subgroup_to_label,
                include_empty_subgroups)
    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
def change_enter_sends(request: HttpRequest,
                       user_profile: UserProfile,
                       enter_sends: bool=REQ(validator=check_bool)) -> HttpResponse:
    """Persist whether pressing Enter sends the composed message."""
    do_change_enter_sends(user_profile, enter_sends)
    response = json_success()
    return response
def remove(request, user_profile, attachment_id=REQ(validator=check_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    """Delete an attachment; the requester must own it (needs_owner=True)."""
    owned_attachment = access_attachment_by_id(user_profile, attachment_id,
                                               needs_owner=True)
    remove_attachment(user_profile, owned_attachment)
    return json_success()
def json_change_settings(request: HttpRequest, user_profile: UserProfile,
                         full_name: str=REQ(default=""),
                         email: str=REQ(default=""),
                         old_password: str=REQ(default=""),
                         new_password: str=REQ(default="")) -> HttpResponse:
    """Change the requesting user's name, email and/or password.

    Password changes re-authenticate with the old password (rate
    limited) and enforce a strength check; LDAP-managed accounts are
    rejected.  Email changes start a confirmation-link flow; name
    changes apply immediately unless disabled for non-admins.
    """
    if not (full_name or new_password or email):
        return json_error(_("Please fill out all fields."))

    if new_password != "":
        return_data: Dict[str, Any] = {}
        if email_belongs_to_ldap(user_profile.realm, user_profile.delivery_email):
            return json_error(_("Your Zulip password is managed in LDAP"))
        try:
            if not authenticate(request,
                                username=user_profile.delivery_email,
                                password=old_password,
                                realm=user_profile.realm,
                                return_data=return_data):
                return json_error(_("Wrong password!"))
        except RateLimited as e:
            # NOTE(review): the RateLimited exception apparently stringifies
            # to the seconds remaining -- confirm against its definition.
            secs_to_freedom = int(float(str(e)))
            return json_error(
                _("You're making too many attempts! Try again in %s seconds.") %
                (secs_to_freedom,)
            )

        if not check_password_strength(new_password):
            return json_error(_("New password is too weak!"))

        do_change_password(user_profile, new_password)
        # In Django 1.10, password changes invalidates sessions, see
        # https://docs.djangoproject.com/en/1.10/topics/auth/default/#session-invalidation-on-password-change
        # for details. To avoid this logging the user out of their own
        # session (which would provide a confusing UX at best), we
        # update the session hash here.
        update_session_auth_hash(request, user_profile)
        # We also save the session to the DB immediately to mitigate
        # race conditions. In theory, there is still a race condition
        # and to completely avoid it we will have to use some kind of
        # mutex lock in `django.contrib.auth.get_user` where session
        # is verified. To make that lock work we will have to control
        # the AuthenticationMiddleware which is currently controlled
        # by Django,
        request.session.save()

    result: Dict[str, Any] = {}
    new_email = email.strip()
    if user_profile.delivery_email != new_email and new_email != '':
        if user_profile.realm.email_changes_disabled and not user_profile.is_realm_admin:
            return json_error(_("Email address changes are disabled in this organization."))

        error = validate_email_is_valid(
            new_email,
            get_realm_email_validator(user_profile.realm),
        )
        if error:
            return json_error(error)

        try:
            validate_email_not_already_in_realm(
                user_profile.realm,
                new_email,
                verbose=False,
            )
        except ValidationError as e:
            return json_error(e.message)

        do_start_email_change_process(user_profile, new_email)
        result['account_email'] = _("Check your email for a confirmation link. ")

    if user_profile.full_name != full_name and full_name.strip() != "":
        if name_changes_disabled(user_profile.realm) and not user_profile.is_realm_admin:
            # Failingly silently is fine -- they can't do it through the UI, so
            # they'd have to be trying to break the rules.
            pass
        else:
            # Note that check_change_full_name strips the passed name automatically
            result['full_name'] = check_change_full_name(user_profile, full_name, user_profile)

    return json_success(result)
def change_enter_sends(request, user_profile,
                       enter_sends=REQ(validator=check_bool)):
    # type: (HttpRequest, UserProfile, bool) -> HttpResponse
    """Persist the user's 'pressing Enter sends the message' preference."""
    do_change_enter_sends(user_profile, enter_sends)
    return json_success()
def json_set_muted_topics(request, user_profile,
                          muted_topics=REQ(validator=check_list(check_list(check_string, length=2)),
                                           default=[])):
    # type: (HttpRequest, UserProfile, List[List[text_type]]) -> HttpResponse
    """Replace the user's muted-topic list.

    muted_topics is a list of two-element string lists (presumably
    [stream_name, topic_name] -- confirm against do_set_muted_topics).
    """
    # NOTE(review): default=[] is a shared mutable default created once at
    # definition time; this is only safe if do_set_muted_topics never
    # mutates its argument -- TODO confirm.
    do_set_muted_topics(user_profile, muted_topics)
    return json_success()
def json_change_enter_sends(request, user_profile,
                            enter_sends=REQ('enter_sends', validator=check_bool)):
    """Set whether pressing Enter sends the message being composed."""
    do_change_enter_sends(user_profile, enter_sends)
    response = json_success()
    return response
def add_bot_backend(request, user_profile, full_name_raw=REQ("full_name"), short_name=REQ(),
                    default_sending_stream_name=REQ('default_sending_stream', default=None),
                    default_events_register_stream_name=REQ('default_events_register_stream',
                                                            default=None),
                    default_all_public_streams=REQ(validator=check_bool, default=None)):
    # type: (HttpRequest, UserProfile, Text, Text, Optional[Text], Optional[Text], Optional[bool]) -> HttpResponse
    """Create a new default bot owned by user_profile.

    The bot's email is "<short_name>-bot@<realm domain>".  An uploaded
    file, if any, becomes the bot's avatar; otherwise the avatar falls
    back to Gravatar.  Default streams, when named, are resolved via
    access_stream_by_name (which enforces the caller's access).

    Returns the bot's api_key, avatar URL and default-stream settings.
    """
    short_name += "-bot"
    full_name = check_full_name(full_name_raw)
    email = short_name + "@" + user_profile.realm.domain
    form = CreateUserForm({'full_name': full_name, 'email': email})
    if not form.is_valid():
        # We validate client-side as well
        return json_error(_('Bad name or username'))
    try:
        get_user_profile_by_email(email)
        return json_error(_("Username already in use"))
    except UserProfile.DoesNotExist:
        pass
    if len(request.FILES) == 0:
        avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
    elif len(request.FILES) != 1:
        return json_error(_("You may only upload one file at a time"))
    else:
        user_file = list(request.FILES.values())[0]
        upload_avatar_image(user_file, user_profile, email)
        avatar_source = UserProfile.AVATAR_FROM_USER

    default_sending_stream = None
    if default_sending_stream_name is not None:
        (default_sending_stream, ignored_rec,
         ignored_sub) = access_stream_by_name(user_profile, default_sending_stream_name)

    default_events_register_stream = None
    if default_events_register_stream_name is not None:
        (default_events_register_stream, ignored_rec,
         ignored_sub) = access_stream_by_name(user_profile,
                                              default_events_register_stream_name)

    bot_profile = do_create_user(email=email, password='',
                                 realm=user_profile.realm, full_name=full_name,
                                 short_name=short_name, active=True,
                                 bot_type=UserProfile.DEFAULT_BOT,
                                 bot_owner=user_profile,
                                 avatar_source=avatar_source,
                                 default_sending_stream=default_sending_stream,
                                 default_events_register_stream=default_events_register_stream,
                                 default_all_public_streams=default_all_public_streams)
    json_result = dict(
        api_key=bot_profile.api_key,
        avatar_url=avatar_url(bot_profile),
        default_sending_stream=get_stream_name(bot_profile.default_sending_stream),
        default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream),
        default_all_public_streams=bot_profile.default_all_public_streams,
    )
    return json_success(json_result)