Code example #1
File: counts.py  Project: aakash-cr7/zulip
def process_count_stat(stat, fill_to_time):
    # type: (CountStat, datetime) -> None
    fill_state = FillState.objects.filter(property=stat.property).first()
    if fill_state is None:
        currently_filled = installation_epoch()
        fill_state = FillState.objects.create(property=stat.property,
                                              end_time=currently_filled,
                                              state=FillState.DONE)
        logger.info("INITIALIZED %s %s" % (stat.property, currently_filled))
    elif fill_state.state == FillState.STARTED:
        logger.info("UNDO START %s %s" % (stat.property, fill_state.end_time))
        do_delete_counts_at_hour(stat, fill_state.end_time)
        currently_filled = fill_state.end_time - timedelta(hours = 1)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        logger.info("UNDO DONE %s" % (stat.property,))
    elif fill_state.state == FillState.DONE:
        currently_filled = fill_state.end_time
    else:
        raise AssertionError("Unknown value for FillState.state: %s." % (fill_state.state,))

    currently_filled = currently_filled + timedelta(hours = 1)
    while currently_filled <= fill_to_time:
        logger.info("START %s %s %s" % (stat.property, stat.interval, currently_filled))
        start = time.time()
        do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
        do_fill_count_stat_at_hour(stat, currently_filled)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        end = time.time()
        currently_filled = currently_filled + timedelta(hours = 1)
        logger.info("DONE %s %s (%dms)" % (stat.property, stat.interval, (end-start)*1000))
Code example #2
File: test_counts.py  Project: dawran6/zulip
    def test_process_stat(self):
        # type: () -> None
        # process new stat
        current_time = installation_epoch() + self.HOUR
        stat = self.make_dummy_count_stat(current_time)
        property = stat.property
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # dirty stat
        FillState.objects.filter(property=property).update(state=FillState.STARTED)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # clean stat, no update
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # clean stat, with update
        current_time = current_time + self.HOUR
        stat = self.make_dummy_count_stat(current_time)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 2)
Code example #3
File: counts.py  Project: techfreakworm/zulip
def process_count_stat(stat, fill_to_time):
    # type: (CountStat, datetime) -> None
    fill_state = FillState.objects.filter(property=stat.property).first()
    if fill_state is None:
        currently_filled = installation_epoch()
        fill_state = FillState.objects.create(property=stat.property,
                                              end_time=currently_filled,
                                              state=FillState.DONE)
        logger.info("INITIALIZED %s %s" % (stat.property, currently_filled))
    elif fill_state.state == FillState.STARTED:
        logger.info("UNDO START %s %s" % (stat.property, fill_state.end_time))
        do_delete_counts_at_hour(stat, fill_state.end_time)
        currently_filled = fill_state.end_time - timedelta(hours = 1)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        logger.info("UNDO DONE %s" % (stat.property,))
    elif fill_state.state == FillState.DONE:
        currently_filled = fill_state.end_time
    else:
        raise ValueError("Unknown value for FillState.state: %s." % (fill_state.state,))

    currently_filled = currently_filled + timedelta(hours = 1)
    while currently_filled <= fill_to_time:
        logger.info("START %s %s %s" % (stat.property, stat.interval, currently_filled))
        start = time.time()
        do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
        do_fill_count_stat_at_hour(stat, currently_filled)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        end = time.time()
        currently_filled = currently_filled + timedelta(hours = 1)
        logger.info("DONE %s %s (%dms)" % (stat.property, stat.interval, (end-start)*1000))
Code example #4
    def test_process_stat(self):
        # type: () -> None
        # process new stat
        current_time = installation_epoch() + self.HOUR
        stat = self.make_dummy_count_stat(current_time)
        property = stat.property
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # dirty stat
        FillState.objects.filter(property=property).update(state=FillState.STARTED)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # clean stat, no update
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # clean stat, with update
        current_time = current_time + self.HOUR
        stat = self.make_dummy_count_stat(current_time)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 2)
Code example #5
File: counts.py  Project: vineetsgr07/zulip
def process_count_stat(stat: CountStat, fill_to_time: datetime,
                       realm: Optional[Realm]=None) -> None:
    # TODO: The realm argument is not yet supported, in that we don't
    # have a solution for how to update FillState if it is passed.  It
    # exists solely as partial plumbing for when we do fully implement
    # doing single-realm analytics runs for use cases like data import.
    #
    # Also, note that for the realm argument to be properly supported,
    # the CountStat object passed in needs to have come from
    # E.g. get_count_stats(realm), i.e. have the realm_id already
    # entered into the SQL query defined by the CountState object.
    if stat.frequency == CountStat.HOUR:
        time_increment = timedelta(hours=1)
    elif stat.frequency == CountStat.DAY:
        time_increment = timedelta(days=1)
    else:
        raise AssertionError("Unknown frequency: %s" % (stat.frequency,))

    verify_UTC(fill_to_time)
    if floor_to_hour(fill_to_time) != fill_to_time:
        raise ValueError("fill_to_time must be on an hour boundary: %s" % (fill_to_time,))

    fill_state = FillState.objects.filter(property=stat.property).first()
    if fill_state is None:
        currently_filled = installation_epoch()
        fill_state = FillState.objects.create(property=stat.property,
                                              end_time=currently_filled,
                                              state=FillState.DONE)
        logger.info("INITIALIZED %s %s", stat.property, currently_filled)
    elif fill_state.state == FillState.STARTED:
        logger.info("UNDO START %s %s", stat.property, fill_state.end_time)
        do_delete_counts_at_hour(stat, fill_state.end_time)
        currently_filled = fill_state.end_time - time_increment
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        logger.info("UNDO DONE %s", stat.property)
    elif fill_state.state == FillState.DONE:
        currently_filled = fill_state.end_time
    else:
        raise AssertionError("Unknown value for FillState.state: %s." % (fill_state.state,))

    if isinstance(stat, DependentCountStat):
        for dependency in stat.dependencies:
            dependency_fill_time = last_successful_fill(dependency)
            if dependency_fill_time is None:
                logger.warning("DependentCountStat %s run before dependency %s.",
                               stat.property, dependency)
                return
            fill_to_time = min(fill_to_time, dependency_fill_time)

    currently_filled = currently_filled + time_increment
    while currently_filled <= fill_to_time:
        logger.info("START %s %s", stat.property, currently_filled)
        start = time.time()
        do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
        do_fill_count_stat_at_hour(stat, currently_filled, realm)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        end = time.time()
        currently_filled = currently_filled + time_increment
        logger.info("DONE %s (%dms)", stat.property, (end-start)*1000)
Code example #6
def process_count_stat(stat, fill_to_time):
    # type: (CountStat, datetime) -> None
    if stat.frequency == CountStat.HOUR:
        time_increment = timedelta(hours=1)
    elif stat.frequency == CountStat.DAY:
        time_increment = timedelta(days=1)
    else:
        raise AssertionError("Unknown frequency: %s" % (stat.frequency, ))

    if floor_to_hour(fill_to_time) != fill_to_time:
        raise ValueError("fill_to_time must be on an hour boundary: %s" %
                         (fill_to_time, ))
    if fill_to_time.tzinfo is None:
        raise ValueError("fill_to_time must be timezone aware: %s" %
                         (fill_to_time, ))

    fill_state = FillState.objects.filter(property=stat.property).first()
    if fill_state is None:
        currently_filled = installation_epoch()
        fill_state = FillState.objects.create(property=stat.property,
                                              end_time=currently_filled,
                                              state=FillState.DONE)
        logger.info("INITIALIZED %s %s" % (stat.property, currently_filled))
    elif fill_state.state == FillState.STARTED:
        logger.info("UNDO START %s %s" % (stat.property, fill_state.end_time))
        do_delete_counts_at_hour(stat, fill_state.end_time)
        currently_filled = fill_state.end_time - time_increment
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        logger.info("UNDO DONE %s" % (stat.property, ))
    elif fill_state.state == FillState.DONE:
        currently_filled = fill_state.end_time
    else:
        raise AssertionError("Unknown value for FillState.state: %s." %
                             (fill_state.state, ))

    if isinstance(stat, DependentCountStat):
        for dependency in stat.dependencies:
            dependency_fill_time = last_successful_fill(dependency)
            if dependency_fill_time is None:
                logger.warning(
                    "DependentCountStat %s run before dependency %s." %
                    (stat.property, dependency))
                return
            fill_to_time = min(fill_to_time, dependency_fill_time)

    currently_filled = currently_filled + time_increment
    while currently_filled <= fill_to_time:
        logger.info("START %s %s" % (stat.property, currently_filled))
        start = time.time()
        do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
        do_fill_count_stat_at_hour(stat, currently_filled)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        end = time.time()
        currently_filled = currently_filled + time_increment
        logger.info("DONE %s (%dms)" % (stat.property, (end - start) * 1000))
Code example #7
    def get_fill_state(self) -> Dict[str, Any]:
        if not Realm.objects.exists():
            return {"status": 0, "message": "No realms exist, so not checking FillState."}

        warning_unfilled_properties = []
        critical_unfilled_properties = []
        for property, stat in COUNT_STATS.items():
            last_fill = stat.last_successful_fill()
            if last_fill is None:
                last_fill = installation_epoch()
            try:
                verify_UTC(last_fill)
            except TimeZoneNotUTCException:
                return {"status": 2, "message": f"FillState not in UTC for {property}"}

            if stat.frequency == CountStat.DAY:
                floor_function = floor_to_day
                warning_threshold = timedelta(hours=26)
                critical_threshold = timedelta(hours=50)
            else:  # CountStat.HOUR
                floor_function = floor_to_hour
                warning_threshold = timedelta(minutes=90)
                critical_threshold = timedelta(minutes=150)

            if floor_function(last_fill) != last_fill:
                return {
                    "status": 2,
                    "message": f"FillState not on {stat.frequency} boundary for {property}",
                }

            time_to_last_fill = timezone_now() - last_fill
            if time_to_last_fill > critical_threshold:
                critical_unfilled_properties.append(property)
            elif time_to_last_fill > warning_threshold:
                warning_unfilled_properties.append(property)

        if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
            return {"status": 0, "message": "FillState looks fine."}
        if len(critical_unfilled_properties) == 0:
            return {
                "status": 1,
                "message": "Missed filling {} once.".format(
                    ", ".join(warning_unfilled_properties),
                ),
            }
        return {
            "status": 2,
            "message": "Missed filling {} once. Missed filling {} at least twice.".format(
                ", ".join(warning_unfilled_properties),
                ", ".join(critical_unfilled_properties),
            ),
        }
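
The `get_fill_state` check above maps the age of the last successful fill onto Nagios-style status codes (0 = OK, 1 = warning, 2 = critical), with looser thresholds for daily stats than for hourly ones. Below is a self-contained sketch of just that threshold logic, using plain datetimes instead of the Django models; the thresholds are taken directly from the excerpt.

from datetime import datetime, timedelta, timezone

def fill_status(last_fill: datetime, frequency: str, now: datetime) -> int:
    # Daily stats warn after 26h and go critical after 50h;
    # hourly stats warn after 90min and go critical after 150min.
    if frequency == "day":
        warning, critical = timedelta(hours=26), timedelta(hours=50)
    else:
        warning, critical = timedelta(minutes=90), timedelta(minutes=150)
    age = now - last_fill
    if age > critical:
        return 2
    if age > warning:
        return 1
    return 0

now = datetime(2024, 1, 2, tzinfo=timezone.utc)
assert fill_status(now - timedelta(hours=2), "hour", now) == 1
assert fill_status(now - timedelta(hours=3), "hour", now) == 2
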
Code example #8
File: counts.py  Project: christi3k/zulip
def process_count_stat(stat, fill_to_time):
    # type: (CountStat, datetime) -> None
    if stat.frequency == CountStat.HOUR:
        time_increment = timedelta(hours=1)
    elif stat.frequency == CountStat.DAY:
        time_increment = timedelta(days=1)
    else:
        raise AssertionError("Unknown frequency: %s" % (stat.frequency,))

    if floor_to_hour(fill_to_time) != fill_to_time:
        raise ValueError("fill_to_time must be on an hour boundary: %s" % (fill_to_time,))
    if fill_to_time.tzinfo is None:
        raise ValueError("fill_to_time must be timezone aware: %s" % (fill_to_time,))

    fill_state = FillState.objects.filter(property=stat.property).first()
    if fill_state is None:
        currently_filled = installation_epoch()
        fill_state = FillState.objects.create(property=stat.property,
                                              end_time=currently_filled,
                                              state=FillState.DONE)
        logger.info("INITIALIZED %s %s" % (stat.property, currently_filled))
    elif fill_state.state == FillState.STARTED:
        logger.info("UNDO START %s %s" % (stat.property, fill_state.end_time))
        do_delete_counts_at_hour(stat, fill_state.end_time)
        currently_filled = fill_state.end_time - time_increment
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        logger.info("UNDO DONE %s" % (stat.property,))
    elif fill_state.state == FillState.DONE:
        currently_filled = fill_state.end_time
    else:
        raise AssertionError("Unknown value for FillState.state: %s." % (fill_state.state,))

    if isinstance(stat, DependentCountStat):
        for dependency in stat.dependencies:
            dependency_fill_time = last_successful_fill(dependency)
            if dependency_fill_time is None:
                logger.warning("DependentCountStat %s run before dependency %s." %
                               (stat.property, dependency))
                return
            fill_to_time = min(fill_to_time, dependency_fill_time)

    currently_filled = currently_filled + time_increment
    while currently_filled <= fill_to_time:
        logger.info("START %s %s" % (stat.property, currently_filled))
        start = time.time()
        do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
        do_fill_count_stat_at_hour(stat, currently_filled)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        end = time.time()
        currently_filled = currently_filled + time_increment
        logger.info("DONE %s (%dms)" % (stat.property, (end-start)*1000))
Code example #9
    def get_fill_state(self):
        # type: () -> Dict[str, Any]
        if not Realm.objects.exists():
            return {'status': 0, 'message': 'No realms exist, so not checking FillState.'}

        warning_unfilled_properties = []
        critical_unfilled_properties = []
        for property, stat in COUNT_STATS.items():
            last_fill = last_successful_fill(property)
            if last_fill is None:
                last_fill = installation_epoch()
            try:
                verify_UTC(last_fill)
            except TimezoneNotUTCException:
                return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)}

            if stat.frequency == CountStat.DAY:
                floor_function = floor_to_day
                warning_threshold = timedelta(hours=26)
                critical_threshold = timedelta(hours=50)
            else:  # CountStat.HOUR
                floor_function = floor_to_hour
                warning_threshold = timedelta(minutes=90)
                critical_threshold = timedelta(minutes=150)

            if floor_function(last_fill) != last_fill:
                return {'status': 2, 'message': 'FillState not on %s boundary for %s' %
                        (stat.frequency, property)}

            time_to_last_fill = timezone_now() - last_fill
            if time_to_last_fill > critical_threshold:
                critical_unfilled_properties.append(property)
            elif time_to_last_fill > warning_threshold:
                warning_unfilled_properties.append(property)

        if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
            return {'status': 0, 'message': 'FillState looks fine.'}
        if len(critical_unfilled_properties) == 0:
            return {'status': 1, 'message': 'Missed filling %s once.' %
                    (', '.join(warning_unfilled_properties),)}
        return {'status': 2, 'message': 'Missed filling %s once. Missed filling %s at least twice.' %
                (', '.join(warning_unfilled_properties), ', '.join(critical_unfilled_properties))}
Code example #10
    def get_fill_state(self) -> Dict[str, Any]:
        if not Realm.objects.exists():
            return {'status': 0, 'message': 'No realms exist, so not checking FillState.'}

        warning_unfilled_properties = []
        critical_unfilled_properties = []
        for property, stat in COUNT_STATS.items():
            last_fill = last_successful_fill(property)
            if last_fill is None:
                last_fill = installation_epoch()
            try:
                verify_UTC(last_fill)
            except TimezoneNotUTCException:
                return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)}

            if stat.frequency == CountStat.DAY:
                floor_function = floor_to_day
                warning_threshold = timedelta(hours=26)
                critical_threshold = timedelta(hours=50)
            else:  # CountStat.HOUR
                floor_function = floor_to_hour
                warning_threshold = timedelta(minutes=90)
                critical_threshold = timedelta(minutes=150)

            if floor_function(last_fill) != last_fill:
                return {'status': 2, 'message': 'FillState not on %s boundary for %s' %
                        (stat.frequency, property)}

            time_to_last_fill = timezone_now() - last_fill
            if time_to_last_fill > critical_threshold:
                critical_unfilled_properties.append(property)
            elif time_to_last_fill > warning_threshold:
                warning_unfilled_properties.append(property)

        if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
            return {'status': 0, 'message': 'FillState looks fine.'}
        if len(critical_unfilled_properties) == 0:
            return {'status': 1, 'message': 'Missed filling %s once.' %
                    (', '.join(warning_unfilled_properties),)}
        return {'status': 2, 'message': 'Missed filling %s once. Missed filling %s at least twice.' %
                (', '.join(warning_unfilled_properties), ', '.join(critical_unfilled_properties))}
Code example #11
File: views.py  Project: zhangchye/zulip
def get_chart_data(request: HttpRequest,
                   user_profile: UserProfile,
                   chart_name: str = REQ(),
                   min_length: Optional[int] = REQ(
                       converter=to_non_negative_int, default=None),
                   start: Optional[datetime] = REQ(converter=to_utc_datetime,
                                                   default=None),
                   end: Optional[datetime] = REQ(converter=to_utc_datetime,
                                                 default=None),
                   realm: Optional[Realm] = None,
                   for_installation: bool = False) -> HttpResponse:
    aggregate_table = RealmCount
    if for_installation:
        aggregate_table = InstallationCount

    if chart_name == 'number_of_humans':
        stats = [
            COUNT_STATS['1day_actives::day'],
            COUNT_STATS['realm_active_humans::day'],
            COUNT_STATS['active_users_audit:is_bot:day']
        ]
        tables = [aggregate_table]
        subgroup_to_label = {
            stats[0]: {
                None: '_1day'
            },
            stats[1]: {
                None: '_15day'
            },
            stats[2]: {
                'false': 'all_time'
            }
        }  # type: Dict[CountStat, Dict[Optional[str], str]]
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stats = [COUNT_STATS['messages_sent:is_bot:hour']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'false': 'human', 'true': 'bot'}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stats = [COUNT_STATS['messages_sent:message_type:day']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {
            stats[0]: {
                'public_stream': _('Public streams'),
                'private_stream': _('Private streams'),
                'private_message': _('Private messages'),
                'huddle_message': _('Group private messages')
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data['everyone'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stats = [COUNT_STATS['messages_sent:client:day']]
        tables = [aggregate_table, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {
                str(id): name
                for id, name in Client.objects.values_list('id', 'name')
            }
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name, ))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: %(start)s, End: %(end)s"
              ) % {
                  'start': start,
                  'end': end
              })

    if realm is None:
        realm = user_profile.realm
    if start is None:
        if for_installation:
            start = installation_epoch()
        else:
            start = realm.date_created
    if end is None:
        end = max(
            last_successful_fill(stat.property) or datetime.min.replace(
                tzinfo=timezone_utc) for stat in stats)
    if end is None or start > end:
        logging.warning(
            "User from realm %s attempted to access /stats, but the computed "
            "start time: %s (creation of realm or installation) is later than the computed "
            "end time: %s (last successful analytics update). Is the "
            "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(
            _("No analytics data available. Please contact your server administrator."
              ))

    assert len(set([stat.frequency for stat in stats])) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data = {
        'end_times': end_times,
        'frequency': stats[0].frequency
    }  # type: Dict[str, Any]

    aggregation_level = {
        InstallationCount: 'everyone',
        RealmCount: 'everyone',
        UserCount: 'user'
    }
    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id
    }
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(stat, table, id_value[table],
                                            end_times, subgroup_to_label[stat],
                                            include_empty_subgroups))

    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
Code example #12
File: views.py  Project: 284928489/zulip
def get_chart_data(request: HttpRequest, user_profile: UserProfile, chart_name: str=REQ(),
                   min_length: Optional[int]=REQ(converter=to_non_negative_int, default=None),
                   start: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
                   end: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
                   realm: Optional[Realm]=None, for_installation: bool=False) -> HttpResponse:
    aggregate_table = RealmCount
    if for_installation:
        aggregate_table = InstallationCount

    if chart_name == 'number_of_humans':
        stats = [
            COUNT_STATS['1day_actives::day'],
            COUNT_STATS['realm_active_humans::day'],
            COUNT_STATS['active_users_audit:is_bot:day']]
        tables = [aggregate_table]
        subgroup_to_label = {
            stats[0]: {None: '_1day'},
            stats[1]: {None: '_15day'},
            stats[2]: {'false': 'all_time'}}  # type: Dict[CountStat, Dict[Optional[str], str]]
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stats = [COUNT_STATS['messages_sent:is_bot:hour']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'false': 'human', 'true': 'bot'}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stats = [COUNT_STATS['messages_sent:message_type:day']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'public_stream': _('Public streams'),
                                        'private_stream': _('Private streams'),
                                        'private_message': _('Private messages'),
                                        'huddle_message': _('Group private messages')}}
        labels_sort_function = lambda data: sort_by_totals(data['everyone'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stats = [COUNT_STATS['messages_sent:client:day']]
        tables = [aggregate_table, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {stats[0]:
                             {str(id): name for id, name in Client.objects.values_list('id', 'name')}}
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name,))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
                            {'start': start, 'end': end})

    if realm is None:
        realm = user_profile.realm
    if start is None:
        if for_installation:
            start = installation_epoch()
        else:
            start = realm.date_created
    if end is None:
        end = max(last_successful_fill(stat.property) or
                  datetime.min.replace(tzinfo=timezone_utc) for stat in stats)
    if end is None or start > end:
        logging.warning("User from realm %s attempted to access /stats, but the computed "
                        "start time: %s (creation of realm or installation) is later than the computed "
                        "end time: %s (last successful analytics update). Is the "
                        "analytics cron job running?" % (realm.string_id, start, end))
        raise JsonableError(_("No analytics data available. Please contact your server administrator."))

    assert len(set([stat.frequency for stat in stats])) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data = {'end_times': end_times, 'frequency': stats[0].frequency}  # type: Dict[str, Any]

    aggregation_level = {InstallationCount: 'everyone', RealmCount: 'everyone', UserCount: 'user'}
    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {InstallationCount: -1, RealmCount: realm.id, UserCount: user_profile.id}
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(get_time_series_by_subgroup(
                stat, table, id_value[table], end_times, subgroup_to_label[stat], include_empty_subgroups))

    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
Code example #13
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    chart_name: str = REQ(),
    min_length: Optional[int] = REQ(converter=to_non_negative_int,
                                    default=None),
    start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    realm: Optional[Realm] = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: Optional[int] = None,
    server: Optional["RemoteZulipServer"] = None,
) -> HttpResponse:
    TableType = Union[Type["RemoteInstallationCount"], Type[InstallationCount],
                      Type["RemoteRealmCount"], Type[RealmCount], ]
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table: TableType = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    tables: Union[Tuple[TableType], Tuple[TableType, Type[UserCount]]]

    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = (aggregate_table, )
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {
                None: "_1day"
            },
            stats[1]: {
                None: "_15day"
            },
            stats[2]: {
                "false": "all_time"
            },
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public streams"),
                "private_stream": _("Private streams"),
                "private_message": _("Private messages"),
                "huddle_message": _("Group private messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = (aggregate_table, UserCount)
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {
                str(id): name
                for id, name in Client.objects.values_list("id", "name")
            }
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {}").format(chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").
            format(
                start=start,
                end=end,
            ))

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
        aggregate_table_remote = cast(
            Union[Type[RemoteInstallationCount],
                  Type[RemoteRealmCount]], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
        if not aggregate_table_remote.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator."
                  ))
        if start is None:
            first = aggregate_table_remote.objects.filter(
                server=server).first()
            assert first is not None
            start = first.end_time
        if end is None:
            last = aggregate_table_remote.objects.filter(server=server).last()
            assert last is not None
            end = last.end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(stat.last_successful_fill() or datetime.min.replace(
                tzinfo=timezone.utc) for stat in stats)

        if start > end and (timezone_now() - start >
                            MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id,
                start,
                end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator."
                  ))

    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat,
                    table,
                    id_value[table],
                    end_times,
                    subgroup_to_label[stat],
                    include_empty_subgroups,
                ))

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(request, data=data)
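
In the `get_chart_data` views above, `labels_sort_function` (for example `sort_by_totals` applied to `data['everyone']`) determines the `display_order` returned to the frontend. The snippet below is a plausible standalone sketch of that idea, assuming each series is a list of per-bucket counts; it illustrates the approach and is not Zulip's exact `sort_by_totals` implementation.

from typing import Dict, List

def sort_labels_by_totals(series: Dict[str, List[int]]) -> List[str]:
    # Largest total first, so the busiest client or message type leads the chart legend.
    return sorted(series, key=lambda label: sum(series[label]), reverse=True)

everyone = {"human": [10, 12, 9], "bot": [3, 4, 2]}
assert sort_labels_by_totals(everyone) == ["human", "bot"]
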