def test_convert_to_UTC(self) -> None:
    """convert_to_UTC maps aware-UTC, naive, and offset datetimes to the same instant."""
    expected = parser.parse('2017-01-01 00:00:00.123 UTC')
    candidates = (
        # Already aware, explicitly tagged UTC.
        parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc),
        # Naive input; per the expectation below it is interpreted as UTC.
        parser.parse('2017-01-01 00:00:00.123'),
        # Aware with a +05:00 offset; same instant as the expected UTC value.
        parser.parse('2017-01-01 05:00:00.123+05'),
    )
    for candidate in candidates:
        self.assertEqual(convert_to_UTC(candidate), expected)
def handle_deferred_message(sender: UserProfile, client: Client,
                            message_type_name: str,
                            message_to: Union[Sequence[str], Sequence[int]],
                            topic_name: Optional[str],
                            message_content: str, delivery_type: str,
                            defer_until: str, tz_guess: Optional[str],
                            forwarder_user_profile: UserProfile,
                            realm: Optional[Realm]) -> HttpResponse:
    """Schedule a message for delivery at a user-specified future time.

    The ``defer_until`` string is parsed and, if naive, interpreted in the
    client's guessed timezone, falling back to the sender's profile
    timezone and finally to UTC.
    """
    # Timezone preference order: client guess > sender profile > UTC.
    local_tz = tz_guess or sender.timezone or 'UTC'

    try:
        deliver_at = dateparser(defer_until)
    except ValueError:
        return json_error(_("Invalid time format"))

    deliver_at_usertz = deliver_at
    if deliver_at_usertz.tzinfo is None:
        # Naive datetime: attach the chosen local timezone.  The
        # localize()/normalize() pair is the pytz-style API for doing
        # this correctly across DST transitions.
        user_tz = get_timezone(local_tz)
        deliver_at_usertz = user_tz.normalize(user_tz.localize(deliver_at))
    deliver_at = convert_to_UTC(deliver_at_usertz)

    if deliver_at <= timezone_now():
        return json_error(_("Time must be in the future."))

    check_schedule_message(sender, client, message_type_name, message_to,
                           topic_name, message_content, delivery_type,
                           deliver_at, realm=realm,
                           forwarder_user_profile=forwarder_user_profile)
    # Echo the delivery time back in the user's timezone for display.
    return json_success({"deliver_at": str(deliver_at_usertz)})
def test_convert_to_UTC(self) -> None:
    """Aware-UTC, naive, and offset inputs all normalize to one UTC datetime."""
    expected = parser.parse("2017-01-01 00:00:00.123 UTC")
    inputs = (
        parser.parse("2017-01-01 00:00:00.123").replace(tzinfo=timezone.utc),
        parser.parse("2017-01-01 00:00:00.123"),
        parser.parse("2017-01-01 05:00:00.123+05"),
    )
    for value in inputs:
        self.assertEqual(convert_to_UTC(value), expected)
def test_convert_to_UTC(self) -> None:
    """Every representation of the same instant converts to the UTC datetime."""
    utc_datetime = parser.parse('2017-01-01 00:00:00.123 UTC')
    aware = parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc)
    naive = parser.parse('2017-01-01 00:00:00.123')
    offset = parser.parse('2017-01-01 05:00:00.123+05')
    for dt in (aware, naive, offset):
        self.assertEqual(convert_to_UTC(dt), utc_datetime)
def get_chart_data(request: HttpRequest, user_profile: UserProfile,
                   chart_name: Text = REQ(),
                   min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
                   start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
                   end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
                   ) -> HttpResponse:
    """Serve JSON time-series data for one named chart on the /stats page.

    Fix: the "Start time is later than end time" error message was a
    string literal broken across a raw newline (a syntax error); it is
    reconstructed here as a single literal.
    """
    # Map the chart name onto the CountStat to read, the tables to query,
    # and how subgroup keys are labeled in the response.
    if chart_name == 'number_of_humans':
        stat = COUNT_STATS['realm_active_humans::day']
        tables = [RealmCount]
        subgroup_to_label = {None: 'human'}  # type: Dict[Optional[str], str]
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        tables = [RealmCount, UserCount]
        subgroup_to_label = {'false': 'human', 'true': 'bot'}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stat = COUNT_STATS['messages_sent:message_type:day']
        tables = [RealmCount, UserCount]
        subgroup_to_label = {
            'public_stream': 'Public streams',
            'private_stream': 'Private streams',
            'private_message': 'Private messages',
            'huddle_message': 'Group private messages',
        }
        labels_sort_function = lambda data: sort_by_totals(data['realm'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stat = COUNT_STATS['messages_sent:client:day']
        tables = [RealmCount, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {str(id): name for id, name in
                             Client.objects.values_list('id', 'name')}
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name,))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: %(start)s, End: %(end)s") %
            {'start': start, 'end': end})

    realm = user_profile.realm
    if start is None:
        start = realm.date_created
    if end is None:
        # last_successful_fill may return None if analytics has never run.
        end = last_successful_fill(stat.property)
    if end is None or start > end:
        logging.warning(
            "User from realm %s attempted to access /stats, but the computed "
            "start time: %s (creation time of realm) is later than the computed "
            "end time: %s (last successful analytics update). Is the "
            "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(
            _("No analytics data available. Please contact your server administrator."))

    end_times = time_range(start, end, stat.frequency, min_length)
    data = {'end_times': end_times, 'frequency': stat.frequency}
    for table in tables:
        if table == RealmCount:
            data['realm'] = get_time_series_by_subgroup(
                stat, RealmCount, realm.id, end_times,
                subgroup_to_label, include_empty_subgroups)
        if table == UserCount:
            data['user'] = get_time_series_by_subgroup(
                stat, UserCount, user_profile.id, end_times,
                subgroup_to_label, include_empty_subgroups)
    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
def get_chart_data(request: HttpRequest, user_profile: UserProfile, chart_name: str = REQ(),
                   min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
                   start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
                   end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
                   realm: Optional[Realm] = None, for_installation: bool = False) -> HttpResponse:
    """Serve JSON time-series data for one named chart on the /stats page.

    When for_installation is True, aggregates are read from
    InstallationCount (server-wide) instead of RealmCount, and the start
    of the range defaults to the installation epoch rather than the
    realm's creation date.
    """
    aggregate_table = RealmCount
    if for_installation:
        aggregate_table = InstallationCount
    # Map the chart name onto the CountStats to read, the tables to query,
    # and how each stat's subgroup keys are labeled in the response.
    if chart_name == 'number_of_humans':
        stats = [COUNT_STATS['1day_actives::day'],
                 COUNT_STATS['realm_active_humans::day'],
                 COUNT_STATS['active_users_audit:is_bot:day']]
        tables = [aggregate_table]
        subgroup_to_label = {stats[0]: {None: '_1day'},
                            stats[1]: {None: '_15day'},
                            stats[2]: {'false': 'all_time'}}  # type: Dict[CountStat, Dict[Optional[str], str]]
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stats = [COUNT_STATS['messages_sent:is_bot:hour']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'false': 'human', 'true': 'bot'}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stats = [COUNT_STATS['messages_sent:message_type:day']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'public_stream': _('Public streams'),
                                        'private_stream': _('Private streams'),
                                        'private_message': _('Private messages'),
                                        'huddle_message': _('Group private messages')}}
        labels_sort_function = lambda data: sort_by_totals(data['everyone'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stats = [COUNT_STATS['messages_sent:client:day']]
        tables = [aggregate_table, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {stats[0]: {str(id): name for id, name in
                                        Client.objects.values_list('id', 'name')}}
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name, ))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
                            {'start': start, 'end': end})

    if realm is None:
        realm = user_profile.realm
    if start is None:
        if for_installation:
            start = installation_epoch()
        else:
            start = realm.date_created
    if end is None:
        # Use the most recently filled stat; datetime.min stands in for
        # stats that have never been filled.
        end = max(last_successful_fill(stat.property) or
                  datetime.min.replace(tzinfo=timezone_utc) for stat in stats)
    # NOTE(review): `end` cannot actually be None here -- the max(...)
    # above always yields a datetime -- so only `start > end` can trigger.
    if end is None or start > end:
        logging.warning("User from realm %s attempted to access /stats, but the computed "
                        "start time: %s (creation of realm or installation) is later than the computed "
                        "end time: %s (last successful analytics update). Is the "
                        "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(_("No analytics data available. Please contact your server administrator."))

    # All stats for one chart must share a frequency so they fit one x-axis.
    assert len(set([stat.frequency for stat in stats])) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data = {'end_times': end_times, 'frequency': stats[0].frequency}  # type: Dict[str, Any]
    aggregation_level = {InstallationCount: 'everyone', RealmCount: 'everyone', UserCount: 'user'}
    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {InstallationCount: -1, RealmCount: realm.id, UserCount: user_profile.id}
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(get_time_series_by_subgroup(
                stat, table, id_value[table], end_times,
                subgroup_to_label[stat], include_empty_subgroups))

    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
def get_chart_data(request, user_profile, chart_name=REQ(),
                   min_length=REQ(converter=to_non_negative_int, default=None),
                   start=REQ(converter=to_utc_datetime, default=None),
                   end=REQ(converter=to_utc_datetime, default=None)):
    # type: (HttpRequest, UserProfile, Text, Optional[int], Optional[datetime], Optional[datetime]) -> HttpResponse
    """Serve JSON time-series data for one named chart on the /stats page.

    Fix: the "Start time is later than end time" error message was a
    string literal broken across a raw newline (a syntax error); it is
    reconstructed here as a single literal.
    """
    # Map the chart name onto the CountStat to read, the tables to query,
    # and how subgroup keys are labeled in the response.
    if chart_name == 'number_of_humans':
        stat = COUNT_STATS['realm_active_humans::day']
        tables = [RealmCount]
        subgroup_to_label = {None: 'human'}  # type: Dict[Optional[str], str]
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        tables = [RealmCount, UserCount]
        subgroup_to_label = {'false': 'human', 'true': 'bot'}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stat = COUNT_STATS['messages_sent:message_type:day']
        tables = [RealmCount, UserCount]
        subgroup_to_label = {'public_stream': 'Public streams',
                             'private_stream': 'Private streams',
                             'private_message': 'Private messages',
                             'huddle_message': 'Group private messages'}
        labels_sort_function = lambda data: sort_by_totals(data['realm'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stat = COUNT_STATS['messages_sent:client:day']
        tables = [RealmCount, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {str(id): name for id, name in
                             Client.objects.values_list('id', 'name')}
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name,))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
                            {'start': start, 'end': end})

    realm = user_profile.realm
    if start is None:
        start = realm.date_created
    if end is None:
        # last_successful_fill may return None if analytics has never run.
        end = last_successful_fill(stat.property)
    if end is None or start > end:
        logging.warning("User from realm %s attempted to access /stats, but the computed "
                        "start time: %s (creation time of realm) is later than the computed "
                        "end time: %s (last successful analytics update). Is the "
                        "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(_("No analytics data available. Please contact your server administrator."))

    end_times = time_range(start, end, stat.frequency, min_length)
    data = {'end_times': end_times, 'frequency': stat.frequency}
    for table in tables:
        if table == RealmCount:
            data['realm'] = get_time_series_by_subgroup(
                stat, RealmCount, realm.id, end_times,
                subgroup_to_label, include_empty_subgroups)
        if table == UserCount:
            data['user'] = get_time_series_by_subgroup(
                stat, UserCount, user_profile.id, end_times,
                subgroup_to_label, include_empty_subgroups)
    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
def get_chart_data(request: HttpRequest, user_profile: UserProfile, chart_name: str=REQ(),
                   min_length: Optional[int]=REQ(converter=to_non_negative_int, default=None),
                   start: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
                   end: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
                   realm: Optional[Realm]=None, for_installation: bool=False) -> HttpResponse:
    """Serve JSON time-series data for one named chart on the /stats page.

    Fix: the "No analytics data available..." error message was a string
    literal broken across a raw newline (a syntax error); it is
    reconstructed here as a single literal.
    """
    aggregate_table = RealmCount
    if for_installation:
        aggregate_table = InstallationCount
    # Map the chart name onto the CountStats to read, the tables to query,
    # and how each stat's subgroup keys are labeled in the response.
    if chart_name == 'number_of_humans':
        stats = [COUNT_STATS['1day_actives::day'],
                 COUNT_STATS['realm_active_humans::day'],
                 COUNT_STATS['active_users_audit:is_bot:day']]
        tables = [aggregate_table]
        subgroup_to_label = {stats[0]: {None: '_1day'},
                            stats[1]: {None: '_15day'},
                            stats[2]: {'false': 'all_time'}}  # type: Dict[CountStat, Dict[Optional[str], str]]
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stats = [COUNT_STATS['messages_sent:is_bot:hour']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'false': 'human', 'true': 'bot'}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stats = [COUNT_STATS['messages_sent:message_type:day']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'public_stream': _('Public streams'),
                                        'private_stream': _('Private streams'),
                                        'private_message': _('Private messages'),
                                        'huddle_message': _('Group private messages')}}
        labels_sort_function = lambda data: sort_by_totals(data['everyone'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stats = [COUNT_STATS['messages_sent:client:day']]
        tables = [aggregate_table, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {stats[0]: {str(id): name for id, name in
                                        Client.objects.values_list('id', 'name')}}
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name,))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
                            {'start': start, 'end': end})

    if realm is None:
        realm = user_profile.realm
    if start is None:
        if for_installation:
            start = installation_epoch()
        else:
            start = realm.date_created
    if end is None:
        # Use the most recently filled stat; datetime.min stands in for
        # stats that have never been filled.
        end = max(last_successful_fill(stat.property) or
                  datetime.min.replace(tzinfo=timezone_utc) for stat in stats)
    if end is None or start > end:
        logging.warning("User from realm %s attempted to access /stats, but the computed "
                        "start time: %s (creation of realm or installation) is later than the computed "
                        "end time: %s (last successful analytics update). Is the "
                        "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(_("No analytics data available. Please contact your server administrator."))

    # All stats for one chart must share a frequency so they fit one x-axis.
    assert len(set([stat.frequency for stat in stats])) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data = {'end_times': end_times, 'frequency': stats[0].frequency}  # type: Dict[str, Any]
    aggregation_level = {InstallationCount: 'everyone', RealmCount: 'everyone', UserCount: 'user'}
    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {InstallationCount: -1, RealmCount: realm.id, UserCount: user_profile.id}
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(get_time_series_by_subgroup(
                stat, table, id_value[table], end_times,
                subgroup_to_label[stat], include_empty_subgroups))

    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    chart_name: str = REQ(),
    min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
    start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    realm: Optional[Realm] = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: Optional[int] = None,
    server: Optional["RemoteZulipServer"] = None,
) -> HttpResponse:
    """Serve JSON time-series data for one named chart on the /stats page.

    Supports realm-level, installation-level, and (when ZILENCER is
    enabled) remote-server aggregates, selected via the keyword flags.

    Fix: the "No analytics data available..." error message was a string
    literal broken across a raw newline (a syntax error); it is
    reconstructed here as a single literal.
    """
    TableType = Union[Type["RemoteInstallationCount"], Type[InstallationCount],
                      Type["RemoteRealmCount"], Type[RealmCount]]
    # Pick the aggregate table matching the (for_installation, remote) flags.
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table: TableType = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    tables: Union[Tuple[TableType], Tuple[TableType, Type[UserCount]]]
    # Map the chart name onto the CountStats to read, the tables to query,
    # and how each stat's subgroup keys are labeled in the response.
    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = (aggregate_table,)
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {None: "_1day"},
            stats[1]: {None: "_15day"},
            stats[2]: {"false": "all_time"},
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public streams"),
                "private_stream": _("Private streams"),
                "private_message": _("Private messages"),
                "huddle_message": _("Group private messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = (aggregate_table, UserCount)
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {}").format(chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").format(
                start=start, end=end,
            ))

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
        aggregate_table_remote = cast(
            Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
        if not aggregate_table_remote.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator."))
        if start is None:
            first = aggregate_table_remote.objects.filter(server=server).first()
            assert first is not None
            start = first.end_time
        if end is None:
            last = aggregate_table_remote.objects.filter(server=server).last()
            assert last is not None
            end = last.end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            # Use the most recently filled stat; datetime.min stands in
            # for stats that have never been filled.
            end = max(
                stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
                for stat in stats
            )
        if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id, start, end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator."))

    # All stats for one chart must share a frequency so they fit one x-axis.
    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat, table, id_value[table], end_times,
                    subgroup_to_label[stat], include_empty_subgroups,
                ))

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(request, data=data)