def points_during_day(data, noon):
    """Return all points in the dataset whose timestamps fall within 12
    hours of the given datetime object, i.e. the 24-hour day centered
    on it. data must be sorted."""
    before = datetime_to_timestamp(noon - timedelta(hours=12))
    after = datetime_to_timestamp(noon + timedelta(hours=12))
    between = [pt for pt in data if before < pt[1] < after]
    return between
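A quick usage sketch for the snippet above: it assumes each data point is a (value, unix_timestamp) pair sorted by timestamp (hence the pt[1] access), and that datetime_to_timestamp is the UTC-only conversion helper exercised by the test that follows; the sample values are made up.

from datetime import datetime, timezone

# Hypothetical (label, unix_timestamp) pairs, sorted by timestamp.
data = [
    ("early", 1483228800),   # 2017-01-01 00:00:00 UTC, exactly noon - 12h: excluded (strict >)
    ("midday", 1483272000),  # 2017-01-01 12:00:00 UTC: inside the window
    ("late", 1483358400),    # 2017-01-02 12:00:00 UTC: past noon + 12h
]
noon = datetime(2017, 1, 1, 12, 0, tzinfo=timezone.utc)
print(points_during_day(data, noon))  # [('midday', 1483272000)]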
def test_datetime_and_timestamp_conversions(self) -> None:
    timestamp = 1483228800
    for dt in [
            parser.parse('2017-01-01 00:00:00.123 UTC'),
            parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc),
            parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=pytz.utc)]:
        self.assertEqual(timestamp_to_datetime(timestamp), dt - timedelta(microseconds=123000))
        self.assertEqual(datetime_to_timestamp(dt), timestamp)

    for dt in [
            parser.parse('2017-01-01 00:00:00.123+01:00'),
            parser.parse('2017-01-01 00:00:00.123')]:
        with self.assertRaises(TimezoneNotUTCException):
            datetime_to_timestamp(dt)
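For context, here is a minimal sketch of the two conversion helpers this test exercises. The real implementations live elsewhere (in Zulip, zerver/lib/timestamp.py); the exception message and the exact UTC check are assumptions inferred from the assertions above, so treat this as an illustration rather than the canonical code.

from datetime import datetime, timedelta, timezone

class TimezoneNotUTCException(Exception):
    pass

def timestamp_to_datetime(timestamp: float) -> datetime:
    # Unix timestamps are defined relative to UTC.
    return datetime.fromtimestamp(float(timestamp), tz=timezone.utc)

def datetime_to_timestamp(dt: datetime) -> int:
    # Reject naive datetimes and non-UTC timezones, matching the
    # TimezoneNotUTCException cases in the test above.
    if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) != timedelta(0):
        raise TimezoneNotUTCException(f"Datetime {dt} does not have a UTC timezone.")
    # int() truncation drops the .123 fractional seconds, which is why the
    # first loop asserts equality against the whole-second timestamp.
    return int(dt.timestamp())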
def get_presence_backend(request: HttpRequest, user_profile: UserProfile,
                         email: Text) -> HttpResponse:
    try:
        target = get_user(email, user_profile.realm)
    except UserProfile.DoesNotExist:
        return json_error(_('No such user'))
    if not target.is_active:
        return json_error(_('No such user'))
    if target.is_bot:
        return json_error(_('Presence is not supported for bot users.'))

    presence_dict = UserPresence.get_status_dict_by_user(target)
    if len(presence_dict) == 0:
        return json_error(_('No presence data for %s') % (target.email,))

    # For initial version, we just include the status and timestamp keys
    result = dict(presence=presence_dict[target.email])
    aggregated_info = result['presence']['aggregated']
    aggr_status_duration = datetime_to_timestamp(timezone_now()) - aggregated_info['timestamp']
    if aggr_status_duration > settings.OFFLINE_THRESHOLD_SECS:
        aggregated_info['status'] = 'offline'
    for val in result['presence'].values():
        val.pop('client', None)
        val.pop('pushable', None)
    return json_success(result)
def test_start_and_end(self) -> None:
    stat = COUNT_STATS['realm_active_humans::day']
    self.insert_data(stat, [None], [])
    stat = COUNT_STATS['1day_actives::day']
    self.insert_data(stat, [None], [])
    stat = COUNT_STATS['active_users_audit:is_bot:day']
    self.insert_data(stat, ['false'], [])
    end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]

    # valid start and end
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'start': end_time_timestamps[1],
                              'end': end_time_timestamps[2]})
    self.assert_json_success(result)
    data = result.json()
    self.assertEqual(data['end_times'], end_time_timestamps[1:3])
    self.assertEqual(data['everyone'], {'_1day': [0, 100], '_15day': [0, 100], 'all_time': [0, 100]})

    # start later than end
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'start': end_time_timestamps[2],
                              'end': end_time_timestamps[1]})
    self.assert_json_error_contains(result, 'Start time is later than')
def _send_presence_for_aggregated_tests(self, email, status, validate_time):
    # type: (str, str, datetime.datetime) -> Dict[str, Dict[str, Any]]
    self.login(email)
    timezone_util = 'zerver.views.presence.timezone_now'
    with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=5)):
        self.client_post("/json/users/me/presence", {'status': status})
    with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=2)):
        self.client_post("/api/v1/users/me/presence", {'status': status},
                         HTTP_USER_AGENT="ZulipAndroid/1.0", **self.api_auth(email))
    with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=7)):
        latest_result = self.client_post("/api/v1/users/me/presence", {'status': status},
                                         HTTP_USER_AGENT="ZulipIOS/1.0",
                                         **self.api_auth(email))
    latest_result_dict = latest_result.json()
    self.assertDictEqual(
        latest_result_dict['presences'][email]['aggregated'],
        {
            'status': status,
            'timestamp': datetime_to_timestamp(validate_time - datetime.timedelta(seconds=2)),
            'client': 'ZulipAndroid'
        }
    )
    result = self.client_get("/json/users/%s/presence" % (email,))
    return result.json()
def get_gcm_payload(user_profile, message):
    # type: (UserProfile, Message) -> Dict[str, Any]
    content = message.content
    content_truncated = (len(content) > 200)
    if content_truncated:
        content = content[:200] + "..."

    android_data = {
        'user': user_profile.email,
        'event': 'message',
        'alert': get_alert_from_message(message),
        'zulip_message_id': message.id,  # message_id is reserved for CCS
        'time': datetime_to_timestamp(message.pub_date),
        'content': content,
        'content_truncated': content_truncated,
        'sender_email': message.sender.email,
        'sender_full_name': message.sender.full_name,
        'sender_avatar_url': absolute_avatar_url(message.sender),
    }

    if message.recipient.type == Recipient.STREAM:
        android_data['recipient_type'] = "stream"
        android_data['stream'] = get_display_recipient(message.recipient)
        android_data['topic'] = message.subject
    elif message.recipient.type in (Recipient.HUDDLE, Recipient.PERSONAL):
        android_data['recipient_type'] = "private"

    return android_data
def test_messages_sent_by_client(self):
    # type: () -> None
    stat = COUNT_STATS['messages_sent:client:day']
    client1 = Client.objects.create(name='client 1')
    _client1 = Client.objects.create(name='_client 1')
    client2 = Client.objects.create(name='client 2')
    client3 = Client.objects.create(name='client 3')
    _client3 = Client.objects.create(name='_client 3')
    client4 = Client.objects.create(name='client 4')
    self.insert_data(stat, [client4.id, client3.id, client2.id],
                     [client1.id, _client1.id, client4.id, _client3.id])
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'messages_sent_by_client'})
    self.assert_json_success(result)
    data = ujson.loads(result.content)
    self.assertEqual(data, {
        'msg': '',
        'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
        'frequency': CountStat.DAY,
        'interval': CountStat.DAY,
        'realm': {'client 4': self.data(100), 'client 3': self.data(101),
                  'client 2': self.data(102)},
        'user': {'client 1': self.data(401), 'client 4': self.data(202),
                 'client 3': self.data(203)},
        'display_order': ['client 1', 'client 2', 'client 3', 'client 4'],
        'result': 'success',
    })
def archive(request: HttpRequest,
            stream_id: int,
            topic_name: str) -> HttpResponse:
    def get_response(rendered_message_list: List[str],
                     is_web_public: bool,
                     stream_name: str) -> HttpResponse:
        return render(
            request,
            'zerver/archive/index.html',
            context={
                'is_web_public': is_web_public,
                'message_list': rendered_message_list,
                'stream': stream_name,
                'topic': topic_name,
            }
        )

    try:
        stream = get_stream_by_id(stream_id)
    except JsonableError:
        return get_response([], False, '')

    if not stream.is_web_public:
        return get_response([], False, '')

    all_messages = list(Message.objects.select_related(
        'sender').filter(recipient__type_id=stream_id, subject=topic_name).order_by('pub_date'))

    if not all_messages:
        return get_response([], True, stream.name)

    rendered_message_list = []
    prev_sender = None
    for msg in all_messages:
        include_sender = False
        status_message = Message.is_status_message(msg.content, msg.rendered_content)
        if not prev_sender or prev_sender != msg.sender or status_message:
            if status_message:
                prev_sender = None
            else:
                prev_sender = msg.sender
            include_sender = True
        if status_message:
            # Strip the leading '<p>/me ' (3 + 4 characters) and the
            # trailing '</p>' from the rendered status message.
            status_message = msg.rendered_content[4 + 3: -4]
        context = {
            'sender_full_name': msg.sender.full_name,
            'timestampstr': datetime_to_timestamp(msg.last_edit_time
                                                  if msg.last_edit_time
                                                  else msg.pub_date),
            'message_content': msg.rendered_content,
            'avatar_url': get_gravatar_url(msg.sender.email, 1),
            'include_sender': include_sender,
            'status_message': status_message,
        }
        rendered_msg = loader.render_to_string('zerver/archive/single_message.html', context)
        rendered_message_list.append(rendered_msg)
    return get_response(rendered_message_list, True, stream.name)
def to_presence_dict(client_name=None, status=None, timestamp=None, push_enabled=None,
                     has_push_devices=None, is_mirror_dummy=None):
    presence_val = UserPresence.status_to_string(status)
    timestamp = datetime_to_timestamp(timestamp)
    return dict(
        client=client_name,
        status=presence_val,
        timestamp=timestamp,
        pushable=(push_enabled and has_push_devices),
    )
def test_aggregated_presense_idle(self) -> None:
    validate_time = timezone_now()
    result_dict = self._send_presence_for_aggregated_tests(
        str(self.example_email("othello")), 'idle', validate_time)
    self.assertDictEqual(
        result_dict['presence']['aggregated'],
        {
            "status": "idle",
            "timestamp": datetime_to_timestamp(validate_time - datetime.timedelta(seconds=2))
        }
    )
def test_min_length(self) -> None:
    stat = COUNT_STATS['realm_active_humans::day']
    self.insert_data(stat, [None], [])
    # test min_length is too short to change anything
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'min_length': 2})
    self.assert_json_success(result)
    data = result.json()
    self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day])
    self.assertEqual(data['realm'], {'human': self.data(100)})
    # test min_length larger than filled data
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'min_length': 5})
    self.assert_json_success(result)
    data = result.json()
    end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
    self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
    self.assertEqual(data['realm'], {'human': [0] + self.data(100)})
def test_min_length(self):
    # type: () -> None
    stat = COUNT_STATS['active_users:is_bot:day']
    self.insert_data(stat, ['true', 'false'], [])
    # test min_length is too short to change anything
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'min_length': 2})
    self.assert_json_success(result)
    data = ujson.loads(result.content)
    self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day])
    self.assertEqual(data['realm'], {'bot': self.data(100), 'human': self.data(101)})
    # test min_length larger than filled data
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'min_length': 5})
    self.assert_json_success(result)
    data = ujson.loads(result.content)
    end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
    self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
    self.assertEqual(data['realm'], {'bot': [0] + self.data(100), 'human': [0] + self.data(101)})
def fill_edit_history_entries(message_history, message):
    # type: (List[Dict[str, Any]], Message) -> None
    """This fills out the message edit history entries from the database,
    which are designed to have the minimum data possible, to instead
    have the current topic + content as of that time, plus data on
    whatever changed.  This makes it much simpler to do future
    processing.

    Note that this mutates what is passed to it, which is sort of a bad pattern.
    """
    prev_content = message.content
    prev_rendered_content = message.rendered_content
    prev_topic = message.subject
    assert datetime_to_timestamp(message.last_edit_time) == message_history[0]['timestamp']

    for entry in message_history:
        entry['topic'] = prev_topic
        if 'prev_subject' in entry:
            # We replace use of 'subject' with 'topic' for downstream simplicity
            prev_topic = entry['prev_subject']
            entry['prev_topic'] = prev_topic
            del entry['prev_subject']

        entry['content'] = prev_content
        entry['rendered_content'] = prev_rendered_content
        if 'prev_content' in entry:
            del entry['prev_rendered_content_version']
            prev_content = entry['prev_content']
            prev_rendered_content = entry['prev_rendered_content']
            entry['content_html_diff'] = highlight_html_differences(
                prev_rendered_content,
                entry['rendered_content'])

    message_history.append(dict(
        topic = prev_topic,
        content = prev_content,
        rendered_content = prev_rendered_content,
        timestamp = datetime_to_timestamp(message.pub_date),
        user_id = message.sender_id,
    ))
def to_log_dict(self):
    return dict(
        id = self.id,
        sender_email = self.sender.email,
        sender_domain = self.sender.realm.domain,
        sender_full_name = self.sender.full_name,
        sender_short_name = self.sender.short_name,
        sending_client = self.sending_client.name,
        type = self.recipient.type_name(),
        recipient = get_display_recipient(self.recipient),
        subject = self.subject,
        content = self.content,
        timestamp = datetime_to_timestamp(self.pub_date))
def test_aggregated_presense_offline(self) -> None:
    email = self.example_email("othello")
    self.login(email)
    validate_time = timezone_now()
    with self.settings(OFFLINE_THRESHOLD_SECS=1):
        result_dict = self._send_presence_for_aggregated_tests(
            str(email), 'idle', validate_time)
    self.assertDictEqual(
        result_dict['presence']['aggregated'],
        {
            "status": "offline",
            "timestamp": datetime_to_timestamp(validate_time - datetime.timedelta(seconds=2))
        }
    )
def test_number_of_humans(self) -> None:
    stat = COUNT_STATS['realm_active_humans::day']
    self.insert_data(stat, [None], [])
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans'})
    self.assert_json_success(result)
    data = result.json()
    self.assertEqual(data, {
        'msg': '',
        'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
        'frequency': CountStat.DAY,
        'realm': {'human': self.data(100)},
        'display_order': None,
        'result': 'success',
    })
def get_gcm_payload(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    data = get_common_payload(message)
    content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
    data.update({
        'user': user_profile.email,
        'event': 'message',
        'alert': get_gcm_alert(message),
        'zulip_message_id': message.id,  # message_id is reserved for CCS
        'time': datetime_to_timestamp(message.pub_date),
        'content': content,
        'content_truncated': truncated,
        'sender_full_name': message.sender.full_name,
        'sender_avatar_url': absolute_avatar_url(message.sender),
    })
    return data
def update_user_activity(request, user_profile):
    # update_active_status also pushes to rabbitmq, and it seems
    # redundant to log that here as well.
    if request.META["PATH_INFO"] == '/json/users/me/presence':
        return

    if hasattr(request, '_query'):
        query = request._query
    else:
        query = request.META['PATH_INFO']

    event = {'query': query,
             'user_profile_id': user_profile.id,
             'time': datetime_to_timestamp(now()),
             'client': request.client.name}
    queue_json_publish("user_activity", event, lambda event: None)
def test_aggregated_presense_mixed(self) -> None:
    email = self.example_email("othello")
    self.login(email)
    validate_time = timezone_now()
    with mock.patch('zerver.views.presence.timezone_now',
                    return_value=validate_time - datetime.timedelta(seconds=3)):
        self.api_post(email, "/api/v1/users/me/presence", {'status': 'active'},
                      HTTP_USER_AGENT="ZulipTestDev/1.0")
    result_dict = self._send_presence_for_aggregated_tests(str(email), 'idle', validate_time)
    self.assertDictEqual(
        result_dict['presence']['aggregated'],
        {
            "status": "idle",
            "timestamp": datetime_to_timestamp(validate_time - datetime.timedelta(seconds=2))
        }
    )
def test_messages_sent_over_time(self) -> None:
    stat = COUNT_STATS['messages_sent:is_bot:hour']
    self.insert_data(stat, ['true', 'false'], ['false'])
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'messages_sent_over_time'})
    self.assert_json_success(result)
    data = result.json()
    self.assertEqual(data, {
        'msg': '',
        'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
        'frequency': CountStat.HOUR,
        'everyone': {'bot': self.data(100), 'human': self.data(101)},
        'user': {'bot': self.data(0), 'human': self.data(200)},
        'display_order': None,
        'result': 'success',
    })
def test_aggregated_info(self) -> None:
    email = self.example_email("othello")
    validate_time = timezone_now()
    self._send_presence_for_aggregated_tests(str(self.example_email("othello")),
                                             'active', validate_time)
    with mock.patch('zerver.views.presence.timezone_now',
                    return_value=validate_time - datetime.timedelta(seconds=1)):
        result = self.api_post(email, "/api/v1/users/me/presence", {'status': 'active'},
                               HTTP_USER_AGENT="ZulipTestDev/1.0")
    result_dict = result.json()
    self.assertDictEqual(
        result_dict['presences'][email]['aggregated'],
        {
            'status': 'active',
            'timestamp': datetime_to_timestamp(validate_time - datetime.timedelta(seconds=1)),
            'client': 'ZulipTestDev'
        }
    )
def test_number_of_humans(self):
    # type: () -> None
    stat = COUNT_STATS['active_users:is_bot:day']
    self.insert_data(stat, ['true', 'false'], [])
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans'})
    self.assert_json_success(result)
    data = ujson.loads(result.content)
    self.assertEqual(data, {
        'msg': '',
        'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
        'frequency': CountStat.DAY,
        'interval': CountStat.GAUGE,
        'realm': {'bot': self.data(100), 'human': self.data(101)},
        'display_order': None,
        'result': 'success',
    })
def update_user_activity(request, user_profile):
    # type: (HttpRequest, UserProfile) -> None
    # update_active_status also pushes to rabbitmq, and it seems
    # redundant to log that here as well.
    if request.META["PATH_INFO"] == "/json/users/me/presence":
        return

    if hasattr(request, "_query"):
        query = request._query
    else:
        query = request.META["PATH_INFO"]

    event = {
        "query": query,
        "user_profile_id": user_profile.id,
        "time": datetime_to_timestamp(now()),
        "client": request.client.name,
    }
    queue_json_publish("user_activity", event, lambda event: None)
def get_message_payload_gcm(
        user_profile: UserProfile, message: Message,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    '''A `message` payload + options, for Android via GCM/FCM.'''
    data = get_message_payload(message)
    content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
    data.update({
        'user': user_profile.email,
        'event': 'message',
        'alert': get_gcm_alert(message),
        'zulip_message_id': message.id,  # message_id is reserved for CCS
        'time': datetime_to_timestamp(message.pub_date),
        'content': content,
        'content_truncated': truncated,
        'sender_full_name': message.sender.full_name,
        'sender_avatar_url': absolute_avatar_url(message.sender),
    })
    gcm_options = {'priority': 'high'}
    return data, gcm_options
def test_messages_sent_by_message_type(self) -> None:
    stat = COUNT_STATS['messages_sent:message_type:day']
    self.insert_data(stat, ['public_stream', 'private_message'],
                     ['public_stream', 'private_stream'])
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'messages_sent_by_message_type'})
    self.assert_json_success(result)
    data = result.json()
    self.assertEqual(data, {
        'msg': '',
        'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
        'frequency': CountStat.DAY,
        'everyone': {'Public streams': self.data(100), 'Private streams': self.data(0),
                     'Private messages': self.data(101), 'Group private messages': self.data(0)},
        'user': {'Public streams': self.data(200), 'Private streams': self.data(201),
                 'Private messages': self.data(0), 'Group private messages': self.data(0)},
        'display_order': ['Private messages', 'Public streams', 'Private streams',
                          'Group private messages'],
        'result': 'success',
    })
def test_start_and_end(self):
    # type: () -> None
    stat = COUNT_STATS['realm_active_humans::day']
    self.insert_data(stat, [None], [])
    end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]

    # valid start and end
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'start': end_time_timestamps[1],
                              'end': end_time_timestamps[2]})
    self.assert_json_success(result)
    data = ujson.loads(result.content)
    self.assertEqual(data['end_times'], end_time_timestamps[1:3])
    self.assertEqual(data['realm'], {'human': [0, 100]})

    # start later than end
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'number_of_humans',
                              'start': end_time_timestamps[2],
                              'end': end_time_timestamps[1]})
    self.assert_json_error_contains(result, 'Start time is later than')
def process_billing_log_entry(processor: BillingProcessor, log_row: RealmAuditLog) -> None:
    processor.state = BillingProcessor.STARTED
    processor.log_row = log_row
    processor.save()

    customer = Customer.objects.get(realm=log_row.realm)
    timestamp = datetime_to_timestamp(log_row.event_time)
    idempotency_key = 'process_billing_log_entry:%s' % (log_row.id,)
    extra_args = {}  # type: Dict[str, Any]
    if log_row.extra_data is not None:
        extra_args = ujson.loads(log_row.extra_data)
    processing_functions = {
        RealmAuditLog.STRIPE_PLAN_QUANTITY_RESET: do_set_subscription_quantity,
        RealmAuditLog.USER_CREATED: increment_subscription_quantity,
        RealmAuditLog.USER_ACTIVATED: increment_subscription_quantity,
        RealmAuditLog.USER_DEACTIVATED: decrement_subscription_quantity,
        RealmAuditLog.USER_REACTIVATED: increment_subscription_quantity,
    }  # type: Dict[str, Callable[..., None]]
    processing_functions[log_row.event_type](customer, timestamp, idempotency_key, **extra_args)

    processor.state = BillingProcessor.DONE
    processor.save()
def _send_presence_for_aggregated_tests(
        self, user: UserProfile, status: str,
        validate_time: datetime.datetime) -> Dict[str, Dict[str, Any]]:
    self.login_user(user)
    timezone_util = 'zerver.views.presence.timezone_now'
    with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=5)):
        self.client_post("/json/users/me/presence", {'status': status})
    with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=2)):
        self.api_post(user, "/api/v1/users/me/presence", {'status': status},
                      HTTP_USER_AGENT="ZulipAndroid/1.0")
    with mock.patch(timezone_util, return_value=validate_time - datetime.timedelta(seconds=7)):
        latest_result = self.api_post(user, "/api/v1/users/me/presence", {'status': status},
                                      HTTP_USER_AGENT="ZulipIOS/1.0")
    latest_result_dict = latest_result.json()
    self.assertDictEqual(
        latest_result_dict['presences'][user.email]['aggregated'],
        {
            'status': status,
            'timestamp': datetime_to_timestamp(validate_time - datetime.timedelta(seconds=2)),
            'client': 'ZulipAndroid',
        },
    )
    result = self.client_get(f"/json/users/{user.email}/presence")
    return result.json()
def test_add_muted_user_valid_data(self) -> None:
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    cordelia = self.example_user("cordelia")
    mute_time = datetime(2021, 1, 1, tzinfo=timezone.utc)

    with mock.patch("zerver.views.muting.timezone_now", return_value=mute_time):
        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
        result = self.api_post(hamlet, url)
        self.assert_json_success(result)

    self.assertIn(
        {
            "id": cordelia.id,
            "timestamp": datetime_to_timestamp(mute_time),
        },
        get_user_mutes(hamlet),
    )
    self.assertIsNotNone(get_mute_object(hamlet, cordelia))

    audit_log_entries = list(
        RealmAuditLog.objects.filter(acting_user=hamlet, modified_user=hamlet).values_list(
            "event_type", "event_time", "extra_data"))
    self.assert_length(audit_log_entries, 1)
    audit_log_entry = audit_log_entries[0]
    self.assertEqual(
        audit_log_entry,
        (
            RealmAuditLog.USER_MUTED,
            mute_time,
            orjson.dumps({"muted_user_id": cordelia.id}).decode(),
        ),
    )
def test_messages_sent_by_client(self):
    # type: () -> None
    stat = COUNT_STATS['messages_sent:client:day']
    client1 = Client.objects.create(name='client 1')
    client2 = Client.objects.create(name='client 2')
    client3 = Client.objects.create(name='client 3')
    client4 = Client.objects.create(name='client 4')
    self.insert_data(stat, [client4.id, client3.id, client2.id],
                     [client3.id, client1.id])
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'messages_sent_by_client'})
    self.assert_json_success(result)
    data = ujson.loads(result.content)
    self.assertEqual(data, {
        'msg': '',
        'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
        'frequency': CountStat.DAY,
        'realm': {'client 4': self.data(100), 'client 3': self.data(101),
                  'client 2': self.data(102)},
        'user': {'client 3': self.data(200), 'client 1': self.data(201)},
        'display_order': ['client 1', 'client 2', 'client 3', 'client 4'],
        'result': 'success',
    })
def update_user_activity(request: HttpRequest, user_profile: UserProfile,
                         query: Optional[str]) -> None:
    # update_active_status also pushes to RabbitMQ, and it seems
    # redundant to log that here as well.
    if request.META["PATH_INFO"] == "/json/users/me/presence":
        return

    request_notes = RequestNotes.get_notes(request)
    if query is not None:
        pass
    elif request_notes.query is not None:
        query = request_notes.query
    else:
        query = request.META["PATH_INFO"]

    assert request_notes.client is not None
    event = {
        "query": query,
        "user_profile_id": user_profile.id,
        "time": datetime_to_timestamp(timezone_now()),
        "client_id": request_notes.client.id,
    }
    queue_json_publish("user_activity", event, lambda event: None)
def get_presence_backend(request: HttpRequest, user_profile: UserProfile,
                         email: str) -> HttpResponse:
    try:
        target = get_active_user_by_delivery_email(email, user_profile.realm)
    except UserProfile.DoesNotExist:
        return json_error(_('No such user'))
    if target.is_bot:
        return json_error(_('Presence is not supported for bot users.'))

    presence_dict = UserPresence.get_status_dict_by_user(target)
    if len(presence_dict) == 0:
        return json_error(_('No presence data for %s') % (target.email,))

    # For initial version, we just include the status and timestamp keys
    result = dict(presence=presence_dict[target.email])
    aggregated_info = result['presence']['aggregated']
    aggr_status_duration = datetime_to_timestamp(timezone_now()) - aggregated_info['timestamp']
    if aggr_status_duration > settings.OFFLINE_THRESHOLD_SECS:
        aggregated_info['status'] = 'offline'
    for val in result['presence'].values():
        val.pop('client', None)
        val.pop('pushable', None)
    return json_success(result)
def test_get_user_mutes(self) -> None:
    hamlet = self.example_user("hamlet")
    cordelia = self.example_user("cordelia")

    muted_users = get_user_mutes(hamlet)
    self.assertEqual(muted_users, [])
    mute_time = datetime(2021, 1, 1, tzinfo=timezone.utc)

    with mock.patch("zerver.views.muting.timezone_now", return_value=mute_time):
        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
        result = self.api_post(hamlet, url)
        self.assert_json_success(result)

    muted_users = get_user_mutes(hamlet)
    self.assert_length(muted_users, 1)

    self.assertDictEqual(
        muted_users[0],
        {
            "id": cordelia.id,
            "timestamp": datetime_to_timestamp(mute_time),
        },
    )
def test_aggregated_info(self) -> None:
    user = self.example_user("othello")
    validate_time = timezone_now()
    self._send_presence_for_aggregated_tests(user, "active", validate_time)
    with mock.patch(
        "zerver.views.presence.timezone_now",
        return_value=validate_time - datetime.timedelta(seconds=1),
    ):
        result = self.api_post(
            user,
            "/api/v1/users/me/presence",
            {"status": "active"},
            HTTP_USER_AGENT="ZulipTestDev/1.0",
        )
    result_dict = result.json()
    self.assertDictEqual(
        result_dict["presences"][user.email]["aggregated"],
        {
            "status": "active",
            "timestamp": datetime_to_timestamp(validate_time - datetime.timedelta(seconds=1)),
            "client": "ZulipTestDev",
        },
    )
def get_presence_backend(request: HttpRequest, user_profile: UserProfile,
                         user_id_or_email: str) -> HttpResponse:
    # This isn't used by the webapp; it's available for API use by
    # bots and other clients.  We may want to add slim_presence
    # support for it (or just migrate its API wholesale) later.
    try:
        try:
            user_id = int(user_id_or_email)
            target = get_active_user_profile_by_id_in_realm(user_id, user_profile.realm)
        except ValueError:
            email = user_id_or_email
            target = get_active_user(email, user_profile.realm)
    except UserProfile.DoesNotExist:
        return json_error(_("No such user"))

    if target.is_bot:
        return json_error(_("Presence is not supported for bot users."))

    presence_dict = get_presence_for_user(target.id)
    if len(presence_dict) == 0:
        return json_error(_("No presence data for {user_id_or_email}").format(
            user_id_or_email=user_id_or_email))

    # For initial version, we just include the status and timestamp keys
    result = dict(presence=presence_dict[target.email])
    aggregated_info = result["presence"]["aggregated"]
    aggr_status_duration = datetime_to_timestamp(timezone_now()) - aggregated_info["timestamp"]
    if aggr_status_duration > settings.OFFLINE_THRESHOLD_SECS:
        aggregated_info["status"] = "offline"
    for val in result["presence"].values():
        val.pop("client", None)
        val.pop("pushable", None)
    return json_success(result)
def test_messages_sent_by_message_type(self) -> None:
    stat = COUNT_STATS["messages_sent:message_type:day"]
    self.insert_data(
        stat, ["public_stream", "private_message"], ["public_stream", "private_stream"]
    )
    result = self.client_get(
        "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
    )
    self.assert_json_success(result)
    data = result.json()

    self.assertEqual(
        data,
        {
            "msg": "",
            "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
            "frequency": CountStat.DAY,
            "everyone": {
                "Public streams": self.data(100),
                "Private streams": self.data(0),
                "Private messages": self.data(101),
                "Group private messages": self.data(0),
            },
            "user": {
                "Public streams": self.data(200),
                "Private streams": self.data(201),
                "Private messages": self.data(0),
                "Group private messages": self.data(0),
            },
            "display_order": [
                "Private messages",
                "Public streams",
                "Private streams",
                "Group private messages",
            ],
            "result": "success",
        },
    )
def test_messages_sent_by_message_type(self):
    # type: () -> None
    stat = COUNT_STATS['messages_sent:message_type:day']
    self.insert_data(stat, ['public_stream', 'private_message'],
                     ['public_stream', 'private_stream'])
    result = self.client_get('/json/analytics/chart_data',
                             {'chart_name': 'messages_sent_by_message_type'})
    self.assert_json_success(result)
    data = result.json()
    self.assertEqual(data, {
        'msg': '',
        'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
        'frequency': CountStat.DAY,
        'realm': {'Public streams': self.data(100), 'Private streams': self.data(0),
                  'Private messages': self.data(101), 'Group private messages': self.data(0)},
        'user': {'Public streams': self.data(200), 'Private streams': self.data(201),
                 'Private messages': self.data(0), 'Group private messages': self.data(0)},
        'display_order': ['Private messages', 'Public streams', 'Private streams',
                          'Group private messages'],
        'result': 'success',
    })
def test_messages_sent_by_client(self) -> None:
    stat = COUNT_STATS["messages_sent:client:day"]
    client1 = Client.objects.create(name="client 1")
    client2 = Client.objects.create(name="client 2")
    client3 = Client.objects.create(name="client 3")
    client4 = Client.objects.create(name="client 4")
    self.insert_data(stat, [client4.id, client3.id, client2.id], [client3.id, client1.id])
    result = self.client_get("/json/analytics/chart_data",
                             {"chart_name": "messages_sent_by_client"})
    self.assert_json_success(result)
    data = result.json()
    self.assertEqual(
        data,
        {
            "msg": "",
            "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
            "frequency": CountStat.DAY,
            "everyone": {
                "client 4": self.data(100),
                "client 3": self.data(101),
                "client 2": self.data(102),
            },
            "user": {"client 3": self.data(200), "client 1": self.data(201)},
            "display_order": ["client 1", "client 2", "client 3", "client 4"],
            "result": "success",
        },
    )
def test_get_user_mutes(self) -> None:
    othello = self.example_user("othello")
    cordelia = self.example_user("cordelia")

    muted_users = get_user_mutes(othello)
    self.assertEqual(muted_users, [])

    mute_time = datetime(2021, 1, 1, tzinfo=timezone.utc)
    with mock.patch(
        "zerver.lib.user_mutes.timezone_now",
        return_value=mute_time,
    ):
        add_user_mute(user_profile=othello, muted_user=cordelia)

    muted_users = get_user_mutes(othello)
    self.assertEqual(len(muted_users), 1)
    self.assertDictEqual(
        muted_users[0],
        {
            "id": cordelia.id,
            "timestamp": datetime_to_timestamp(mute_time),
        },
    )
def test_number_of_humans(self) -> None:
    stat = COUNT_STATS["realm_active_humans::day"]
    self.insert_data(stat, [None], [])
    stat = COUNT_STATS["1day_actives::day"]
    self.insert_data(stat, [None], [])
    stat = COUNT_STATS["active_users_audit:is_bot:day"]
    self.insert_data(stat, ["false"], [])
    result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
    data = self.assert_json_success(result)
    self.assertEqual(
        data,
        {
            "msg": "",
            "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
            "frequency": CountStat.DAY,
            "everyone": {
                "_1day": self.data(100),
                "_15day": self.data(100),
                "all_time": self.data(100),
            },
            "display_order": None,
            "result": "success",
        },
    )
def invoice_plan(plan: CustomerPlan, event_time: datetime) -> None:
    if plan.invoicing_status == CustomerPlan.STARTED:
        raise NotImplementedError('Plan with invoicing_status==STARTED needs manual resolution.')
    make_end_of_cycle_updates_if_needed(plan, event_time)
    assert plan.invoiced_through is not None
    licenses_base = plan.invoiced_through.licenses
    invoice_item_created = False
    for ledger_entry in LicenseLedger.objects.filter(plan=plan, id__gt=plan.invoiced_through.id,
                                                     event_time__lte=event_time).order_by('id'):
        price_args: Dict[str, int] = {}
        if ledger_entry.is_renewal:
            if plan.fixed_price is not None:
                price_args = {'amount': plan.fixed_price}
            else:
                assert plan.price_per_license is not None  # needed for mypy
                price_args = {'unit_amount': plan.price_per_license,
                              'quantity': ledger_entry.licenses}
            description = "Zulip Standard - renewal"
        elif ledger_entry.licenses != licenses_base:
            assert plan.price_per_license
            last_renewal = LicenseLedger.objects.filter(
                plan=plan, is_renewal=True, event_time__lte=ledger_entry.event_time) \
                .order_by('-id').first().event_time
            period_end = start_of_next_billing_cycle(plan, ledger_entry.event_time)
            proration_fraction = (period_end - ledger_entry.event_time) / (period_end - last_renewal)
            price_args = {'unit_amount': int(plan.price_per_license * proration_fraction + .5),
                          'quantity': ledger_entry.licenses - licenses_base}
            description = "Additional license ({} - {})".format(
                ledger_entry.event_time.strftime('%b %-d, %Y'),
                period_end.strftime('%b %-d, %Y'))

        if price_args:
            plan.invoiced_through = ledger_entry
            plan.invoicing_status = CustomerPlan.STARTED
            plan.save(update_fields=['invoicing_status', 'invoiced_through'])
            idempotency_key: Optional[str] = 'ledger_entry:{}'.format(ledger_entry.id)
            if settings.TEST_SUITE:
                idempotency_key = None
            stripe.InvoiceItem.create(
                currency='usd',
                customer=plan.customer.stripe_customer_id,
                description=description,
                discountable=False,
                period={'start': datetime_to_timestamp(ledger_entry.event_time),
                        'end': datetime_to_timestamp(
                            start_of_next_billing_cycle(plan, ledger_entry.event_time))},
                idempotency_key=idempotency_key,
                **price_args)
            invoice_item_created = True
        plan.invoiced_through = ledger_entry
        plan.invoicing_status = CustomerPlan.DONE
        plan.save(update_fields=['invoicing_status', 'invoiced_through'])
        licenses_base = ledger_entry.licenses

    if invoice_item_created:
        if plan.charge_automatically:
            billing_method = 'charge_automatically'
            days_until_due = None
        else:
            billing_method = 'send_invoice'
            days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE
        stripe_invoice = stripe.Invoice.create(
            auto_advance=True,
            billing=billing_method,
            customer=plan.customer.stripe_customer_id,
            days_until_due=days_until_due,
            statement_descriptor='Zulip Standard')
        stripe.Invoice.finalize_invoice(stripe_invoice)

    plan.next_invoice_date = next_invoice_date(plan)
    plan.save(update_fields=['next_invoice_date'])
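To make the proration arithmetic in invoice_plan concrete, here is a small self-contained sketch with made-up dates and prices; only the fraction formula and the int(... + .5) rounding are taken from the function above.

from datetime import datetime, timezone

# Hypothetical annual cycle renewed on 2020-01-01, with licenses added mid-year.
last_renewal = datetime(2020, 1, 1, tzinfo=timezone.utc)
period_end = datetime(2021, 1, 1, tzinfo=timezone.utc)
event_time = datetime(2020, 7, 2, tzinfo=timezone.utc)  # 183 of 366 days remain

# Same formula as invoice_plan: the fraction of the billing cycle that the
# newly added licenses will actually cover.
proration_fraction = (period_end - event_time) / (period_end - last_renewal)
price_per_license = 8000  # cents; made-up figure
unit_amount = int(price_per_license * proration_fraction + .5)
print(proration_fraction, unit_amount)  # 0.5 4000 -> half a cycle, half the price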
def process_initial_upgrade(user: UserProfile, licenses: int, automanage_licenses: bool,
                            billing_schedule: int, stripe_token: Optional[str]) -> None:
    realm = user.realm
    customer = update_or_create_stripe_customer(user, stripe_token=stripe_token)
    charge_automatically = stripe_token is not None
    free_trial = settings.FREE_TRIAL_DAYS not in (None, 0)

    if get_current_plan_by_customer(customer) is not None:
        # Unlikely race condition from two people upgrading (clicking "Make payment")
        # at exactly the same time. Doesn't fully resolve the race condition, but having
        # a check here reduces the likelihood.
        billing_logger.warning(
            "Customer %s trying to upgrade, but has an active subscription", customer,
        )
        raise BillingError('subscribing with existing subscription', BillingError.TRY_RELOADING)

    billing_cycle_anchor, next_invoice_date, period_end, price_per_license = compute_plan_parameters(
        automanage_licenses, billing_schedule, customer.default_discount, free_trial)

    # The main design constraint in this function is that if you upgrade with a credit card, and the
    # charge fails, everything should be rolled back as if nothing had happened. This is because we
    # expect frequent card failures on initial signup.
    # Hence, if we're going to charge a card, do it at the beginning, even if we later may have to
    # adjust the number of licenses.
    if charge_automatically:
        if not free_trial:
            stripe_charge = stripe.Charge.create(
                amount=price_per_license * licenses,
                currency='usd',
                customer=customer.stripe_customer_id,
                description="Upgrade to Zulip Standard, ${} x {}".format(
                    price_per_license / 100, licenses),
                receipt_email=user.delivery_email,
                statement_descriptor='Zulip Standard')
            # Not setting a period start and end, but maybe we should? Unclear what will make things
            # most similar to the renewal case from an accounting perspective.
            description = "Payment (Card ending in {})".format(
                cast(stripe.Card, stripe_charge.source).last4)
            stripe.InvoiceItem.create(
                amount=price_per_license * licenses * -1,
                currency='usd',
                customer=customer.stripe_customer_id,
                description=description,
                discountable=False)

    # TODO: The correctness of this relies on user creation, deactivation, etc being
    # in a transaction.atomic() with the relevant RealmAuditLog entries
    with transaction.atomic():
        # billed_licenses can be greater than licenses if users are added between the start of
        # this function (process_initial_upgrade) and now
        billed_licenses = max(get_latest_seat_count(realm), licenses)
        plan_params = {
            'automanage_licenses': automanage_licenses,
            'charge_automatically': charge_automatically,
            'price_per_license': price_per_license,
            'discount': customer.default_discount,
            'billing_cycle_anchor': billing_cycle_anchor,
            'billing_schedule': billing_schedule,
            'tier': CustomerPlan.STANDARD}
        if free_trial:
            plan_params['status'] = CustomerPlan.FREE_TRIAL
        plan = CustomerPlan.objects.create(
            customer=customer,
            next_invoice_date=next_invoice_date,
            **plan_params)
        ledger_entry = LicenseLedger.objects.create(
            plan=plan,
            is_renewal=True,
            event_time=billing_cycle_anchor,
            licenses=billed_licenses,
            licenses_at_next_renewal=billed_licenses)
        plan.invoiced_through = ledger_entry
        plan.save(update_fields=['invoiced_through'])
        RealmAuditLog.objects.create(
            realm=realm,
            acting_user=user,
            event_time=billing_cycle_anchor,
            event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED,
            extra_data=ujson.dumps(plan_params))

    if not free_trial:
        stripe.InvoiceItem.create(
            currency='usd',
            customer=customer.stripe_customer_id,
            description='Zulip Standard',
            discountable=False,
            period={'start': datetime_to_timestamp(billing_cycle_anchor),
                    'end': datetime_to_timestamp(period_end)},
            quantity=billed_licenses,
            unit_amount=price_per_license)

        if charge_automatically:
            billing_method = 'charge_automatically'
            days_until_due = None
        else:
            billing_method = 'send_invoice'
            days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE

        stripe_invoice = stripe.Invoice.create(
            auto_advance=True,
            billing=billing_method,
            customer=customer.stripe_customer_id,
            days_until_due=days_until_due,
            statement_descriptor='Zulip Standard')
        stripe.Invoice.finalize_invoice(stripe_invoice)

    from zerver.lib.actions import do_change_plan_type
    do_change_plan_type(realm, Realm.STANDARD)
def check_subscription_save(subscription: stripe.Subscription, idempotency_key: str) -> None:
    self.assertEqual(subscription.quantity, quantity)
    log_row = RealmAuditLog.objects.filter(
        event_type=event_type, requires_billing_update=True).order_by('-id').first()
    self.assertEqual(idempotency_key, 'process_billing_log_entry:%s' % (log_row.id,))
    self.assertEqual(subscription.proration_date, datetime_to_timestamp(log_row.event_time))
def get_web_public_subs(realm: Realm) -> SubscriptionInfo:
    color_idx = 0

    def get_next_color() -> str:
        nonlocal color_idx
        color = STREAM_ASSIGNMENT_COLORS[color_idx]
        color_idx = (color_idx + 1) % len(STREAM_ASSIGNMENT_COLORS)
        return color

    subscribed = []
    for stream in get_web_public_streams_queryset(realm):
        # Add Stream fields.
        date_created = datetime_to_timestamp(stream.date_created)
        description = stream.description
        first_message_id = stream.first_message_id
        history_public_to_subscribers = stream.history_public_to_subscribers
        invite_only = stream.invite_only
        is_announcement_only = stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS
        is_web_public = stream.is_web_public
        message_retention_days = stream.message_retention_days
        name = stream.name
        rendered_description = stream.rendered_description
        stream_id = stream.id
        stream_post_policy = stream.stream_post_policy

        # Add versions of the Subscription fields based on a simulated
        # new user subscription set.
        audible_notifications = True
        color = get_next_color()
        desktop_notifications = True
        email_address = ""
        email_notifications = True
        in_home_view = True
        is_muted = False
        pin_to_top = False
        push_notifications = True
        role = Subscription.ROLE_MEMBER
        stream_weekly_traffic = get_average_weekly_stream_traffic(
            stream.id, stream.date_created, {})
        wildcard_mentions_notify = True

        sub = SubscriptionStreamDict(
            audible_notifications=audible_notifications,
            color=color,
            date_created=date_created,
            description=description,
            desktop_notifications=desktop_notifications,
            email_address=email_address,
            email_notifications=email_notifications,
            first_message_id=first_message_id,
            history_public_to_subscribers=history_public_to_subscribers,
            in_home_view=in_home_view,
            invite_only=invite_only,
            is_announcement_only=is_announcement_only,
            is_muted=is_muted,
            is_web_public=is_web_public,
            message_retention_days=message_retention_days,
            name=name,
            pin_to_top=pin_to_top,
            push_notifications=push_notifications,
            rendered_description=rendered_description,
            role=role,
            stream_id=stream_id,
            stream_post_policy=stream_post_policy,
            stream_weekly_traffic=stream_weekly_traffic,
            wildcard_mentions_notify=wildcard_mentions_notify,
        )
        subscribed.append(sub)

    return SubscriptionInfo(
        subscriptions=subscribed,
        unsubscribed=[],
        never_subscribed=[],
    )
def build_message_dict(
        message: Optional[Message],
        message_id: int,
        last_edit_time: Optional[datetime.datetime],
        edit_history: Optional[str],
        content: str,
        topic_name: str,
        date_sent: datetime.datetime,
        rendered_content: Optional[str],
        rendered_content_version: Optional[int],
        sender_id: int,
        sender_realm_id: int,
        sending_client_name: str,
        recipient_id: int,
        recipient_type: int,
        recipient_type_id: int,
        reactions: List[Dict[str, Any]],
        submessages: List[Dict[str, Any]]) -> Dict[str, Any]:
    obj = dict(
        id=message_id,
        sender_id=sender_id,
        content=content,
        recipient_type_id=recipient_type_id,
        recipient_type=recipient_type,
        recipient_id=recipient_id,
        timestamp=datetime_to_timestamp(date_sent),
        client=sending_client_name)

    obj[TOPIC_NAME] = topic_name
    obj['sender_realm_id'] = sender_realm_id

    # Render topic_links with the stream's realm instead of the
    # user's realm; this is important for messages sent by
    # cross-realm bots like NOTIFICATION_BOT.
    #
    # TODO: We could potentially avoid this database query in
    # common cases by optionally passing through the
    # stream_realm_id through the code path from do_send_messages
    # (where we've already fetched the data).  It would involve
    # somewhat messy plumbing, but would probably be worth it.
    rendering_realm_id = sender_realm_id
    if message and recipient_type == Recipient.STREAM:
        rendering_realm_id = Stream.objects.get(id=recipient_type_id).realm_id
    obj[TOPIC_LINKS] = bugdown.topic_links(rendering_realm_id, topic_name)

    if last_edit_time is not None:
        obj['last_edit_timestamp'] = datetime_to_timestamp(last_edit_time)
        assert edit_history is not None
        obj['edit_history'] = ujson.loads(edit_history)

    if Message.need_to_render_content(rendered_content, rendered_content_version, bugdown.version):
        if message is None:
            # We really shouldn't be rendering objects in this method, but there is
            # a scenario where we upgrade the version of bugdown and fail to run
            # management commands to re-render historical messages, and then we
            # need to have side effects.  This method is optimized to not need full
            # blown ORM objects, but the bugdown renderer is unfortunately highly
            # coupled to Message, and we also need to persist the new rendered content.
            # If we don't have a message object passed in, we get one here.  The cost
            # of going to the DB here should be overshadowed by the cost of rendering
            # and updating the row.
            # TODO: see #1379 to eliminate bugdown dependencies
            message = Message.objects.select_related().get(id=message_id)

        assert message is not None  # Hint for mypy.
        # It's unfortunate that we need to have side effects on the message
        # in some cases.
        rendered_content = save_message_rendered_content(message, content)

    if rendered_content is not None:
        obj['rendered_content'] = rendered_content
    else:
        obj['rendered_content'] = ('<p>[Zulip note: Sorry, we could not ' +
                                   'understand the formatting of your message]</p>')

    if rendered_content is not None:
        obj['is_me_message'] = Message.is_status_message(content, rendered_content)
    else:
        obj['is_me_message'] = False

    obj['reactions'] = [ReactionDict.build_dict_from_raw_db_row(reaction)
                        for reaction in reactions]
    obj['submessages'] = submessages
    return obj
def build_message_dict(
        message: Optional[Message],
        message_id: int,
        last_edit_time: Optional[datetime.datetime],
        edit_history: Optional[str],
        content: str,
        subject: str,
        pub_date: datetime.datetime,
        rendered_content: Optional[str],
        rendered_content_version: Optional[int],
        sender_id: int,
        sender_realm_id: int,
        sending_client_name: str,
        recipient_id: int,
        recipient_type: int,
        recipient_type_id: int,
        reactions: List[Dict[str, Any]],
        submessages: List[Dict[str, Any]]) -> Dict[str, Any]:
    obj = dict(
        id=message_id,
        sender_id=sender_id,
        content=content,
        recipient_type_id=recipient_type_id,
        recipient_type=recipient_type,
        recipient_id=recipient_id,
        subject=subject,
        timestamp=datetime_to_timestamp(pub_date),
        client=sending_client_name)

    obj['sender_realm_id'] = sender_realm_id
    obj['raw_display_recipient'] = get_display_recipient_by_id(
        recipient_id, recipient_type, recipient_type_id)
    obj['subject_links'] = bugdown.topic_links(sender_realm_id, subject)

    if last_edit_time is not None:
        obj['last_edit_timestamp'] = datetime_to_timestamp(last_edit_time)
        assert edit_history is not None
        obj['edit_history'] = ujson.loads(edit_history)

    if Message.need_to_render_content(rendered_content, rendered_content_version, bugdown.version):
        if message is None:
            # We really shouldn't be rendering objects in this method, but there is
            # a scenario where we upgrade the version of bugdown and fail to run
            # management commands to re-render historical messages, and then we
            # need to have side effects.  This method is optimized to not need full
            # blown ORM objects, but the bugdown renderer is unfortunately highly
            # coupled to Message, and we also need to persist the new rendered content.
            # If we don't have a message object passed in, we get one here.  The cost
            # of going to the DB here should be overshadowed by the cost of rendering
            # and updating the row.
            # TODO: see #1379 to eliminate bugdown dependencies
            message = Message.objects.select_related().get(id=message_id)

        assert message is not None  # Hint for mypy.
        # It's unfortunate that we need to have side effects on the message
        # in some cases.
        rendered_content = save_message_rendered_content(message, content)

    if rendered_content is not None:
        obj['rendered_content'] = rendered_content
    else:
        obj['rendered_content'] = ('<p>[Zulip note: Sorry, we could not ' +
                                   'understand the formatting of your message]</p>')

    if rendered_content is not None:
        obj['is_me_message'] = Message.is_status_message(content, rendered_content)
    else:
        obj['is_me_message'] = False

    obj['reactions'] = [ReactionDict.build_dict_from_raw_db_row(reaction)
                        for reaction in reactions]
    obj['submessages'] = submessages
    return obj
def build_message_dict(
        apply_markdown,
        message,
        message_id,
        last_edit_time,
        edit_history,
        content,
        subject,
        pub_date,
        rendered_content,
        rendered_content_version,
        sender_id,
        sender_email,
        sender_realm_domain,
        sender_full_name,
        sender_short_name,
        sender_avatar_source,
        sender_is_mirror_dummy,
        sending_client_name,
        recipient_id,
        recipient_type,
        recipient_type_id,
):
    global bugdown
    if bugdown is None:
        from zerver.lib import bugdown

    avatar_url = get_avatar_url(sender_avatar_source, sender_email)

    display_recipient = get_display_recipient_by_id(
        recipient_id, recipient_type, recipient_type_id
    )

    if recipient_type == Recipient.STREAM:
        display_type = "stream"
    elif recipient_type in (Recipient.HUDDLE, Recipient.PERSONAL):
        display_type = "private"
        if len(display_recipient) == 1:
            # add the sender in if this isn't a message between someone
            # and themselves, preserving ordering
            recip = {'email': sender_email,
                     'domain': sender_realm_domain,
                     'full_name': sender_full_name,
                     'short_name': sender_short_name,
                     'id': sender_id,
                     'is_mirror_dummy': sender_is_mirror_dummy}
            if recip['email'] < display_recipient[0]['email']:
                display_recipient = [recip, display_recipient[0]]
            elif recip['email'] > display_recipient[0]['email']:
                display_recipient = [display_recipient[0], recip]

    obj = dict(
        id = message_id,
        sender_email = sender_email,
        sender_full_name = sender_full_name,
        sender_short_name = sender_short_name,
        sender_domain = sender_realm_domain,
        sender_id = sender_id,
        type = display_type,
        display_recipient = display_recipient,
        recipient_id = recipient_id,
        subject = subject,
        timestamp = datetime_to_timestamp(pub_date),
        gravatar_hash = gravatar_hash(sender_email),  # Deprecated June 2013
        avatar_url = avatar_url,
        client = sending_client_name)

    obj['subject_links'] = bugdown.subject_links(sender_realm_domain.lower(), subject)

    if last_edit_time is not None:
        obj['last_edit_timestamp'] = datetime_to_timestamp(last_edit_time)
        obj['edit_history'] = ujson.loads(edit_history)

    if apply_markdown:
        if Message.need_to_render_content(rendered_content, rendered_content_version):
            if message is None:
                # We really shouldn't be rendering objects in this method, but there is
                # a scenario where we upgrade the version of bugdown and fail to run
                # management commands to re-render historical messages, and then we
                # need to have side effects.  This method is optimized to not need full
                # blown ORM objects, but the bugdown renderer is unfortunately highly
                # coupled to Message, and we also need to persist the new rendered content.
                # If we don't have a message object passed in, we get one here.  The cost
                # of going to the DB here should be overshadowed by the cost of rendering
                # and updating the row.
                message = Message.objects.select_related().get(id=message_id)

            # It's unfortunate that we need to have side effects on the message
            # in some cases.
            rendered_content = message.render_markdown(content, sender_realm_domain)
            message.set_rendered_content(rendered_content, True)

        if rendered_content is not None:
            obj['content'] = rendered_content
        else:
            obj['content'] = '<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>'

        obj['content_type'] = 'text/html'
    else:
        obj['content'] = content
        obj['content_type'] = 'text/x-markdown'

    return obj
def set_expirable_session_var(session: Session, var_name: str, var_value: Any,
                              expiry_seconds: int) -> None:
    expire_at = datetime_to_timestamp(timezone_now() + timedelta(seconds=expiry_seconds))
    session[var_name] = {'value': var_value, 'expire_at': expire_at}
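A plausible companion reader for the setter above, sketched from the {'value', 'expire_at'} shape it stores; the function name and the delete flag are assumptions, not a documented API.

def get_expirable_session_var(session: Session, var_name: str,
                              default_value: Any=None, delete: bool=False) -> Any:
    # Hypothetical counterpart to set_expirable_session_var.
    if var_name not in session:
        return default_value
    wrapped = session[var_name]
    if datetime_to_timestamp(timezone_now()) > wrapped['expire_at']:
        # Expired: drop the stale entry and fall back to the default.
        del session[var_name]
        return default_value
    value = wrapped['value']
    if delete:
        del session[var_name]
    return value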
def import_uploads_s3(bucket_name: str, import_dir: Path, processing_avatars: bool=False,
                      processing_emojis: bool=False) -> None:
    upload_backend = S3UploadBackend()
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)

    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    re_map_foreign_keys_internal(records, 'records', 'realm_id', related_table="realm",
                                 id_field=True)
    timestamp = datetime_to_timestamp(timezone_now())
    if not processing_emojis:
        re_map_foreign_keys_internal(records, 'records', 'user_profile_id',
                                     related_table="user_profile", id_field=True)
    for record in records:
        key = Key(bucket)

        if processing_avatars:
            # For avatars, we need to rehash the user's email with the
            # new server's avatar salt
            avatar_path = user_avatar_path_from_ids(record['user_profile_id'], record['realm_id'])
            key.key = avatar_path
            if record['s3_path'].endswith('.original'):
                key.key += '.original'
        elif processing_emojis:
            # For emojis we follow the function 'upload_emoji_image'
            emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=record['realm_id'],
                                                            emoji_file_name=record['file_name'])
            key.key = emoji_path
            record['last_modified'] = timestamp
        else:
            # Should be kept in sync with its equivalent in zerver/lib/uploads in the
            # function 'upload_message_image'
            s3_file_name = "/".join([
                str(record['realm_id']),
                random_name(18),
                sanitize_name(os.path.basename(record['path']))
            ])
            key.key = s3_file_name
            path_maps['attachment_path'][record['s3_path']] = s3_file_name

        # Exported custom emoji from tools like Slack don't have
        # the data for what user uploaded them in `user_profile_id`.
        if not processing_emojis:
            user_profile_id = int(record['user_profile_id'])
            # Support email gateway bot and other cross-realm messages
            if user_profile_id in id_maps["user_profile"]:
                logging.info("Uploaded by ID mapped user: %s!" % (user_profile_id,))
                user_profile_id = id_maps["user_profile"][user_profile_id]
            user_profile = get_user_profile_by_id(user_profile_id)
            key.set_metadata("user_profile_id", str(user_profile.id))

        key.set_metadata("orig_last_modified", record['last_modified'])
        key.set_metadata("realm_id", str(record['realm_id']))

        # Zulip exports will always have a content-type, but third-party exports might not.
        content_type = record.get("content_type", guess_type(record['s3_path'])[0])
        headers = {'Content-Type': content_type}

        key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers)

    if processing_avatars:
        # Ensure that we have medium-size avatar images for every
        # avatar.  TODO: This implementation is hacky, both in that it
        # does get_user_profile_by_id for each user, and in that it
        # might be better to require the export to just have these.
        upload_backend = S3UploadBackend()
        for record in records:
            if record['s3_path'].endswith('.original'):
                user_profile = get_user_profile_by_id(record['user_profile_id'])
                upload_backend.ensure_medium_avatar_image(user_profile=user_profile)
def build_message_dict(apply_markdown, message, message_id, last_edit_time, edit_history,
                       content, subject, pub_date, rendered_content,
                       rendered_content_version, sender_id, sender_email,
                       sender_realm_id, sender_realm_str, sender_full_name,
                       sender_short_name, sender_avatar_source, sender_avatar_version,
                       sender_is_mirror_dummy, sending_client_name, recipient_id,
                       recipient_type, recipient_type_id, reactions):
    # type: (bool, Optional[Message], int, Optional[datetime.datetime], Optional[Text], Text, Text, datetime.datetime, Optional[Text], Optional[int], int, Text, int, Text, Text, Text, Text, int, bool, Text, int, int, int, List[Dict[str, Any]]) -> Dict[str, Any]

    avatar_url = get_avatar_url(sender_avatar_source, sender_email, sender_avatar_version)

    display_recipient = get_display_recipient_by_id(
        recipient_id,
        recipient_type,
        recipient_type_id
    )

    if recipient_type == Recipient.STREAM:
        display_type = "stream"
    elif recipient_type in (Recipient.HUDDLE, Recipient.PERSONAL):
        assert not isinstance(display_recipient, Text)
        display_type = "private"
        if len(display_recipient) == 1:
            # add the sender in if this isn't a message between
            # someone and themselves, preserving ordering
            recip = {'email': sender_email,
                     'full_name': sender_full_name,
                     'short_name': sender_short_name,
                     'id': sender_id,
                     'is_mirror_dummy': sender_is_mirror_dummy}
            if recip['email'] < display_recipient[0]['email']:
                display_recipient = [recip, display_recipient[0]]
            elif recip['email'] > display_recipient[0]['email']:
                display_recipient = [display_recipient[0], recip]

    obj = dict(
        id=message_id,
        sender_email=sender_email,
        sender_full_name=sender_full_name,
        sender_short_name=sender_short_name,
        sender_realm_str=sender_realm_str,
        sender_id=sender_id,
        type=display_type,
        display_recipient=display_recipient,
        recipient_id=recipient_id,
        subject=subject,
        timestamp=datetime_to_timestamp(pub_date),
        gravatar_hash=gravatar_hash(sender_email),  # Deprecated June 2013
        avatar_url=avatar_url,
        client=sending_client_name)

    if obj['type'] == 'stream':
        obj['stream_id'] = recipient_type_id

    obj['subject_links'] = bugdown.subject_links(sender_realm_id, subject)

    if last_edit_time is not None:
        obj['last_edit_timestamp'] = datetime_to_timestamp(last_edit_time)
        obj['edit_history'] = ujson.loads(edit_history)

    if apply_markdown:
        if Message.need_to_render_content(rendered_content, rendered_content_version, bugdown.version):
            if message is None:
                # We really shouldn't be rendering objects in this method, but there is
                # a scenario where we upgrade the version of bugdown and fail to run
                # management commands to re-render historical messages, and then we
                # need to have side effects.  This method is optimized to not need full
                # blown ORM objects, but the bugdown renderer is unfortunately highly
                # coupled to Message, and we also need to persist the new rendered content.
                # If we don't have a message object passed in, we get one here.  The cost
                # of going to the DB here should be overshadowed by the cost of rendering
                # and updating the row.
                # TODO: see #1379 to eliminate bugdown dependencies
                message = Message.objects.select_related().get(id=message_id)

            # It's unfortunate that we need to have side effects on the message
            # in some cases.
            rendered_content = render_markdown(message, content, realm=message.get_realm())
            message.rendered_content = rendered_content
            message.rendered_content_version = bugdown.version
            message.save_rendered_content()

        if rendered_content is not None:
            obj['content'] = rendered_content
        else:
            obj['content'] = u'<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>'

        obj['content_type'] = 'text/html'
    else:
        obj['content'] = content
        obj['content_type'] = 'text/x-markdown'

    obj['reactions'] = [ReactionDict.build_dict_from_raw_db_row(reaction)
                        for reaction in reactions]
    return obj
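# The re-render branch above hinges on Message.need_to_render_content.  A
# hedged sketch of that check, consistent with how it is called here (the
# exact upstream implementation is an assumption): re-render whenever there
# is no cached rendering or it predates the current renderer version.
from typing import Optional

def need_to_render_content(rendered_content: Optional[str],
                           rendered_content_version: Optional[int],
                           bugdown_version: int) -> bool:
    return (rendered_content is None
            or rendered_content_version is None
            or rendered_content_version < bugdown_version)

assert need_to_render_content(None, None, 2)          # never rendered
assert need_to_render_content('<p>hi</p>', 1, 2)      # stale renderer version
assert not need_to_render_content('<p>hi</p>', 2, 2)  # cache is current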
def build_message_dict(
    message_id: int,
    last_edit_time: Optional[datetime.datetime],
    edit_history: Optional[str],
    content: str,
    topic_name: str,
    date_sent: datetime.datetime,
    rendered_content: Optional[str],
    rendered_content_version: Optional[int],
    sender_id: int,
    sender_realm_id: int,
    sending_client_name: str,
    rendering_realm_id: int,
    recipient_id: int,
    recipient_type: int,
    recipient_type_id: int,
    reactions: List[RawReactionRow],
    submessages: List[Dict[str, Any]],
) -> Dict[str, Any]:
    obj = dict(
        id=message_id,
        sender_id=sender_id,
        content=content,
        recipient_type_id=recipient_type_id,
        recipient_type=recipient_type,
        recipient_id=recipient_id,
        timestamp=datetime_to_timestamp(date_sent),
        client=sending_client_name,
    )

    obj[TOPIC_NAME] = topic_name
    obj["sender_realm_id"] = sender_realm_id

    # Render topic_links with the stream's realm instead of the
    # sender's realm; this is important for messages sent by
    # cross-realm bots like NOTIFICATION_BOT.
    obj[TOPIC_LINKS] = topic_links(rendering_realm_id, topic_name)

    if last_edit_time is not None:
        obj["last_edit_timestamp"] = datetime_to_timestamp(last_edit_time)
        assert edit_history is not None
        obj["edit_history"] = orjson.loads(edit_history)

    if Message.need_to_render_content(rendered_content, rendered_content_version, markdown_version):
        # We really shouldn't be rendering objects in this method, but there is
        # a scenario where we upgrade the version of Markdown and fail to run
        # management commands to re-render historical messages, and then we
        # need to have side effects.  This method is optimized to not need full
        # blown ORM objects, but the Markdown renderer is unfortunately highly
        # coupled to Message, and we also need to persist the new rendered content.
        # If we don't have a message object passed in, we get one here.  The cost
        # of going to the DB here should be overshadowed by the cost of rendering
        # and updating the row.
        # TODO: see #1379 to eliminate Markdown dependencies
        message = Message.objects.select_related().get(id=message_id)

        assert message is not None  # Hint for mypy.
        # It's unfortunate that we need to have side effects on the message
        # in some cases.
        rendered_content = save_message_rendered_content(message, content)

    if rendered_content is not None:
        obj["rendered_content"] = rendered_content
    else:
        obj["rendered_content"] = (
            "<p>[Zulip note: Sorry, we could not "
            + "understand the formatting of your message]</p>"
        )

    if rendered_content is not None:
        obj["is_me_message"] = Message.is_status_message(content, rendered_content)
    else:
        obj["is_me_message"] = False

    obj["reactions"] = [
        ReactionDict.build_dict_from_raw_db_row(reaction) for reaction in reactions
    ]
    obj["submessages"] = submessages
    return obj
def fill_edit_history_entries(
    raw_edit_history: List[EditHistoryEvent], message: Message
) -> List[FormattedEditHistoryEvent]:
    """
    This fills out the message edit history entries from the database
    to have the current topic + content as of that time, plus data on
    whatever changed.  This makes it much simpler to do future
    processing.
    """
    prev_content = message.content
    prev_rendered_content = message.rendered_content
    prev_topic = message.topic_name()

    # Make sure that the latest entry in the history corresponds to the
    # message's last edit time
    if len(raw_edit_history) > 0:
        assert message.last_edit_time is not None
        assert datetime_to_timestamp(message.last_edit_time) == raw_edit_history[0]["timestamp"]

    formatted_edit_history: List[FormattedEditHistoryEvent] = []
    for edit_history_event in raw_edit_history:
        formatted_entry: FormattedEditHistoryEvent = {
            "content": prev_content,
            "rendered_content": prev_rendered_content,
            "timestamp": edit_history_event["timestamp"],
            "topic": prev_topic,
            "user_id": edit_history_event["user_id"],
        }

        if "prev_topic" in edit_history_event:
            prev_topic = edit_history_event["prev_topic"]
            formatted_entry["prev_topic"] = prev_topic

        # Fill current values for content/rendered_content.
        if "prev_content" in edit_history_event:
            formatted_entry["prev_content"] = edit_history_event["prev_content"]
            prev_content = formatted_entry["prev_content"]
            formatted_entry["prev_rendered_content"] = edit_history_event["prev_rendered_content"]
            prev_rendered_content = formatted_entry["prev_rendered_content"]
            assert prev_rendered_content is not None
            rendered_content = formatted_entry["rendered_content"]
            assert rendered_content is not None
            formatted_entry["content_html_diff"] = highlight_html_differences(
                prev_rendered_content, rendered_content, message.id
            )

        if "prev_stream" in edit_history_event:
            formatted_entry["prev_stream"] = edit_history_event["prev_stream"]
            formatted_entry["stream"] = edit_history_event["stream"]

        formatted_edit_history.append(formatted_entry)

    initial_message_history: FormattedEditHistoryEvent = {
        "content": prev_content,
        "rendered_content": prev_rendered_content,
        "timestamp": datetime_to_timestamp(message.date_sent),
        "topic": prev_topic,
        "user_id": message.sender_id,
    }
    formatted_edit_history.append(initial_message_history)
    return formatted_edit_history
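# Self-contained toy walk-through of the reconstruction above: raw edit
# history entries are stored newest-first and carry only prev_* values, so
# starting from the message's current state and stepping through the list
# yields the content/topic "as of" each edit.  All data here is made up.
current_content, current_topic = 'v3', 'final'
raw_history = [
    {'timestamp': 300, 'user_id': 1, 'prev_content': 'v2'},
    {'timestamp': 200, 'user_id': 1, 'prev_topic': 'draft'},
]

prev_content, prev_topic = current_content, current_topic
formatted = []
for event in raw_history:
    entry = {'content': prev_content, 'topic': prev_topic,
             'timestamp': event['timestamp'], 'user_id': event['user_id']}
    if 'prev_topic' in event:
        prev_topic = entry['prev_topic'] = event['prev_topic']
    if 'prev_content' in event:
        prev_content = entry['prev_content'] = event['prev_content']
    formatted.append(entry)
# The original message state closes the list, as in the function above.
formatted.append({'content': prev_content, 'topic': prev_topic,
                  'timestamp': 100, 'user_id': 1})
assert [e['content'] for e in formatted] == ['v3', 'v2', 'v2']
assert [e['topic'] for e in formatted] == ['final', 'final', 'draft']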
def process_initial_upgrade(user: UserProfile, licenses: int, automanage_licenses: bool,
                            billing_schedule: int, stripe_token: Optional[str]) -> None:
    realm = user.realm
    customer = update_or_create_stripe_customer(user, stripe_token=stripe_token)

    if CustomerPlan.objects.filter(customer=customer, status=CustomerPlan.ACTIVE).exists():
        # Unlikely race condition from two people upgrading (clicking "Make payment")
        # at exactly the same time. Doesn't fully resolve the race condition, but having
        # a check here reduces the likelihood.
        billing_logger.warning(
            "Customer {} trying to upgrade, but has an active subscription".format(customer))
        raise BillingError('subscribing with existing subscription', BillingError.TRY_RELOADING)

    billing_cycle_anchor, next_invoice_date, period_end, price_per_license = compute_plan_parameters(
        automanage_licenses, billing_schedule, customer.default_discount)

    # The main design constraint in this function is that if you upgrade with a credit card, and the
    # charge fails, everything should be rolled back as if nothing had happened. This is because we
    # expect frequent card failures on initial signup.
    # Hence, if we're going to charge a card, do it at the beginning, even if we later may have to
    # adjust the number of licenses.
    charge_automatically = stripe_token is not None
    if charge_automatically:
        stripe_charge = stripe.Charge.create(
            amount=price_per_license * licenses,
            currency='usd',
            customer=customer.stripe_customer_id,
            description="Upgrade to Zulip Standard, ${} x {}".format(price_per_license/100, licenses),
            receipt_email=user.email,
            statement_descriptor='Zulip Standard')
        # Not setting a period start and end, but maybe we should? Unclear what will make things
        # most similar to the renewal case from an accounting perspective.
        stripe.InvoiceItem.create(
            amount=price_per_license * licenses * -1,
            currency='usd',
            customer=customer.stripe_customer_id,
            description="Payment (Card ending in {})".format(cast(stripe.Card, stripe_charge.source).last4),
            discountable=False)

    # TODO: The correctness of this relies on user creation, deactivation, etc being
    # in a transaction.atomic() with the relevant RealmAuditLog entries
    with transaction.atomic():
        # billed_licenses can be greater than licenses if users are added between the start of
        # this function (process_initial_upgrade) and now
        billed_licenses = max(get_seat_count(realm), licenses)
        plan_params = {
            'automanage_licenses': automanage_licenses,
            'charge_automatically': charge_automatically,
            'price_per_license': price_per_license,
            'discount': customer.default_discount,
            'billing_cycle_anchor': billing_cycle_anchor,
            'billing_schedule': billing_schedule,
            'tier': CustomerPlan.STANDARD}
        plan = CustomerPlan.objects.create(
            customer=customer,
            next_invoice_date=next_invoice_date,
            **plan_params)
        ledger_entry = LicenseLedger.objects.create(
            plan=plan,
            is_renewal=True,
            event_time=billing_cycle_anchor,
            licenses=billed_licenses,
            licenses_at_next_renewal=billed_licenses)
        plan.invoiced_through = ledger_entry
        plan.save(update_fields=['invoiced_through'])
        RealmAuditLog.objects.create(
            realm=realm,
            acting_user=user,
            event_time=billing_cycle_anchor,
            event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED,
            extra_data=ujson.dumps(plan_params))

    stripe.InvoiceItem.create(
        currency='usd',
        customer=customer.stripe_customer_id,
        description='Zulip Standard',
        discountable=False,
        period={'start': datetime_to_timestamp(billing_cycle_anchor),
                'end': datetime_to_timestamp(period_end)},
        quantity=billed_licenses,
        unit_amount=price_per_license)

    if charge_automatically:
        billing_method = 'charge_automatically'
        days_until_due = None
    else:
        billing_method = 'send_invoice'
        days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE

    stripe_invoice = stripe.Invoice.create(
        auto_advance=True,
        billing=billing_method,
        customer=customer.stripe_customer_id,
        days_until_due=days_until_due,
        statement_descriptor='Zulip Standard')
    stripe.Invoice.finalize_invoice(stripe_invoice)

    from zerver.lib.actions import do_change_plan_type
    do_change_plan_type(realm, Realm.STANDARD)
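# Worked numbers for the card-upgrade path above (all values hypothetical):
# amounts are in cents, and the negative "Payment" InvoiceItem offsets the
# Charge, so the finalized invoice only bills for seats added after the
# charge was made.
price_per_license = 800                       # $8.00 per license, assumed
licenses = 10
charge_amount = price_per_license * licenses  # 8000 cents charged to the card
payment_item = -charge_amount                 # -8000 cent offsetting line item

billed_licenses = max(12, licenses)           # say 2 users joined meanwhile
plan_item = price_per_license * billed_licenses
balance_due = plan_item + payment_item
assert balance_due == 1600                    # only the 2 extra seats are owed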
def invoice_plan(plan: CustomerPlan, event_time: datetime) -> None:
    if plan.invoicing_status == CustomerPlan.STARTED:
        raise NotImplementedError('Plan with invoicing_status==STARTED needs manual resolution.')
    add_plan_renewal_to_license_ledger_if_needed(plan, event_time)
    assert plan.invoiced_through is not None
    licenses_base = plan.invoiced_through.licenses
    invoice_item_created = False
    for ledger_entry in LicenseLedger.objects.filter(plan=plan, id__gt=plan.invoiced_through.id,
                                                     event_time__lte=event_time).order_by('id'):
        price_args = {}  # type: Dict[str, int]
        if ledger_entry.is_renewal:
            if plan.fixed_price is not None:
                price_args = {'amount': plan.fixed_price}
            else:
                assert plan.price_per_license is not None  # needed for mypy
                price_args = {'unit_amount': plan.price_per_license,
                              'quantity': ledger_entry.licenses}
            description = "Zulip Standard - renewal"
        elif ledger_entry.licenses != licenses_base:
            assert plan.price_per_license
            last_renewal = LicenseLedger.objects.filter(
                plan=plan, is_renewal=True, event_time__lte=ledger_entry.event_time) \
                .order_by('-id').first().event_time
            period_end = next_renewal_date(plan, ledger_entry.event_time)
            proration_fraction = (period_end - ledger_entry.event_time) / (period_end - last_renewal)
            price_args = {'unit_amount': int(plan.price_per_license * proration_fraction + .5),
                          'quantity': ledger_entry.licenses - licenses_base}
            description = "Additional license ({} - {})".format(
                ledger_entry.event_time.strftime('%b %-d, %Y'),
                period_end.strftime('%b %-d, %Y'))

        if price_args:
            plan.invoiced_through = ledger_entry
            plan.invoicing_status = CustomerPlan.STARTED
            plan.save(update_fields=['invoicing_status', 'invoiced_through'])
            idempotency_key = 'ledger_entry:{}'.format(ledger_entry.id)  # type: Optional[str]
            if settings.TEST_SUITE:
                idempotency_key = None
            stripe.InvoiceItem.create(
                currency='usd',
                customer=plan.customer.stripe_customer_id,
                description=description,
                discountable=False,
                period={'start': datetime_to_timestamp(ledger_entry.event_time),
                        'end': datetime_to_timestamp(next_renewal_date(plan, ledger_entry.event_time))},
                idempotency_key=idempotency_key,
                **price_args)
            invoice_item_created = True
        plan.invoiced_through = ledger_entry
        plan.invoicing_status = CustomerPlan.DONE
        plan.save(update_fields=['invoicing_status', 'invoiced_through'])
        licenses_base = ledger_entry.licenses

    if invoice_item_created:
        if plan.charge_automatically:
            billing_method = 'charge_automatically'
            days_until_due = None
        else:
            billing_method = 'send_invoice'
            days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE
        stripe_invoice = stripe.Invoice.create(
            auto_advance=True,
            billing=billing_method,
            customer=plan.customer.stripe_customer_id,
            days_until_due=days_until_due,
            statement_descriptor='Zulip Standard')
        stripe.Invoice.finalize_invoice(stripe_invoice)

    plan.next_invoice_date = next_invoice_date(plan)
    plan.save(update_fields=['next_invoice_date'])
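# Worked example of the proration above: licenses added mid-cycle are billed
# for the remaining fraction of the period, using the same int(x + .5)
# rounding.  Dates and the annual per-license price are hypothetical.
from datetime import datetime

price_per_license = 8000                    # cents per license per year, assumed
last_renewal = datetime(2019, 1, 1)
period_end = datetime(2020, 1, 1)
event_time = datetime(2019, 7, 2)           # licenses added mid-cycle

proration_fraction = (period_end - event_time) / (period_end - last_renewal)
unit_amount = int(price_per_license * proration_fraction + .5)
assert unit_amount == 4011                  # 183/365 of the year remains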
def build_stream_dict_for_sub(
    user: UserProfile,
    sub_dict: RawSubscriptionDict,
    raw_stream_dict: RawStreamDict,
    recent_traffic: Dict[int, int],
) -> SubscriptionStreamDict:
    # Handle Stream.API_FIELDS
    date_created = datetime_to_timestamp(raw_stream_dict["date_created"])
    description = raw_stream_dict["description"]
    first_message_id = raw_stream_dict["first_message_id"]
    history_public_to_subscribers = raw_stream_dict["history_public_to_subscribers"]
    invite_only = raw_stream_dict["invite_only"]
    is_web_public = raw_stream_dict["is_web_public"]
    message_retention_days = raw_stream_dict["message_retention_days"]
    name = raw_stream_dict["name"]
    rendered_description = raw_stream_dict["rendered_description"]
    stream_id = raw_stream_dict["id"]
    stream_post_policy = raw_stream_dict["stream_post_policy"]

    # Handle Subscription.API_FIELDS.
    color = sub_dict["color"]
    is_muted = sub_dict["is_muted"]
    pin_to_top = sub_dict["pin_to_top"]
    audible_notifications = sub_dict["audible_notifications"]
    desktop_notifications = sub_dict["desktop_notifications"]
    email_notifications = sub_dict["email_notifications"]
    push_notifications = sub_dict["push_notifications"]
    wildcard_mentions_notify = sub_dict["wildcard_mentions_notify"]
    role = sub_dict["role"]

    # Backwards-compatibility for clients that haven't been
    # updated for the in_home_view => is_muted API migration.
    in_home_view = not is_muted

    # Backwards-compatibility for clients that haven't been
    # updated for the is_announcement_only -> stream_post_policy
    # migration.
    is_announcement_only = raw_stream_dict["stream_post_policy"] == Stream.STREAM_POST_POLICY_ADMINS

    # Add a few computed fields not directly from the data models.
    stream_weekly_traffic = get_average_weekly_stream_traffic(
        raw_stream_dict["id"], raw_stream_dict["date_created"], recent_traffic
    )

    email_address = encode_email_address_helper(
        raw_stream_dict["name"], raw_stream_dict["email_token"], show_sender=True
    )

    # Our caller may add a subscribers field.
    return SubscriptionStreamDict(
        audible_notifications=audible_notifications,
        color=color,
        date_created=date_created,
        description=description,
        desktop_notifications=desktop_notifications,
        email_address=email_address,
        email_notifications=email_notifications,
        first_message_id=first_message_id,
        history_public_to_subscribers=history_public_to_subscribers,
        in_home_view=in_home_view,
        invite_only=invite_only,
        is_announcement_only=is_announcement_only,
        is_muted=is_muted,
        is_web_public=is_web_public,
        message_retention_days=message_retention_days,
        name=name,
        pin_to_top=pin_to_top,
        push_notifications=push_notifications,
        rendered_description=rendered_description,
        role=role,
        stream_id=stream_id,
        stream_post_policy=stream_post_policy,
        stream_weekly_traffic=stream_weekly_traffic,
        wildcard_mentions_notify=wildcard_mentions_notify,
    )
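# Minimal sketch isolating the two backwards-compatibility derivations above.
# The STREAM_POST_POLICY_ADMINS constant value is an assumption for the demo;
# only the comparison matters.
STREAM_POST_POLICY_ADMINS = 2

def legacy_subscription_fields(is_muted: bool, stream_post_policy: int) -> dict:
    return {
        # in_home_view => is_muted migration: the old field is the negation.
        'in_home_view': not is_muted,
        # is_announcement_only -> stream_post_policy migration.
        'is_announcement_only': stream_post_policy == STREAM_POST_POLICY_ADMINS,
    }

assert legacy_subscription_fields(True, STREAM_POST_POLICY_ADMINS) == {
    'in_home_view': False, 'is_announcement_only': True}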
def archive(request: HttpRequest,
            stream_id: int,
            topic_name: str) -> HttpResponse:
    def get_response(rendered_message_list: List[str],
                     is_web_public: bool,
                     stream_name: str) -> HttpResponse:
        return render(
            request,
            'zerver/archive/index.html',
            context={
                'is_web_public': is_web_public,
                'message_list': rendered_message_list,
                'stream': stream_name,
                'topic': topic_name,
            }
        )

    try:
        stream = get_stream_by_id(stream_id)
    except JsonableError:
        return get_response([], False, '')

    if not stream.is_web_public:
        return get_response([], False, '')

    all_messages = list(Message.objects.select_related(
        'sender').filter(recipient__type_id=stream_id, subject=topic_name).order_by('pub_date'))

    if not all_messages:
        return get_response([], True, stream.name)

    rendered_message_list = []
    prev_sender = None
    for msg in all_messages:
        include_sender = False
        status_message = Message.is_status_message(msg.content, msg.rendered_content)
        if not prev_sender or prev_sender != msg.sender or status_message:
            if status_message:
                prev_sender = None
            else:
                prev_sender = msg.sender
            include_sender = True
        if status_message:
            # Strip the fixed '<p>/me ' prefix (4 + 3 = 7 characters) and
            # the trailing '</p>' from the rendered /me message.
            status_message = msg.rendered_content[4 + 3:-4]
        context = {
            'sender_full_name': msg.sender.full_name,
            'timestampstr': datetime_to_timestamp(msg.last_edit_time
                                                  if msg.last_edit_time
                                                  else msg.pub_date),
            'message_content': msg.rendered_content,
            'avatar_url': get_gravatar_url(msg.sender.email, 1),
            'include_sender': include_sender,
            'status_message': status_message,
        }
        rendered_msg = loader.render_to_string('zerver/archive/single_message.html', context)
        rendered_message_list.append(rendered_msg)
    return get_response(rendered_message_list, True, stream.name)
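# One-line check of the slice used for /me messages above: the rendered form
# is '<p>/me <rest></p>', so dropping 4 + 3 = 7 leading characters and 4
# trailing characters leaves just the status text.
assert '<p>/me waves</p>'[4 + 3:-4] == 'waves'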