def json_report_unnarrow_time(request, user_profile,
                              initial_core=REQ(converter=to_non_negative_int),
                              initial_free=REQ(converter=to_non_negative_int)):
    request._log_data["extra"] = "[%sms/%sms]" % (initial_core, initial_free)
    statsd.timing("unnarrow.initial_core.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                  initial_core)
    statsd.timing("unnarrow.initial_free.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                  initial_free)
    return json_success()
def json_report_narrow_time(request, user_profile,
                            initial_core=REQ(converter=to_non_negative_int),
                            initial_free=REQ(converter=to_non_negative_int),
                            network=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int, int, int) -> HttpResponse
    request._log_data["extra"] = "[%sms/%sms/%sms]" % (initial_core, initial_free, network)
    statsd.timing("narrow.initial_core.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                  initial_core)
    statsd.timing("narrow.initial_free.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                  initial_free)
    statsd.timing("narrow.network.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                  network)
    return json_success()
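# Every snippet in this section pipes realm identifiers (and other values)
# through statsd_key() before using them as graphite metric segments. The
# helper itself is not shown here; the following is only a sketch of the
# assumed behavior (coerce to str, replace dashes, and with clean_periods=True
# also replace dots), not the actual implementation.
def statsd_key(val, clean_periods=False):
    key = str(val).replace('-', '_')
    if clean_periods:
        # Periods would otherwise introduce extra nesting levels in graphite.
        key = key.replace('.', '_')
    return key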
def func_with_caching(*args, **kwargs):
    # type: (*Any, **Any) -> Callable[..., Any]
    key = keyfunc(*args, **kwargs)
    val = cache_get(key, cache_name=cache_name)

    extra = ""
    if cache_name == 'database':
        extra = ".dbcache"
    if with_statsd_key is not None:
        metric_key = with_statsd_key
    else:
        metric_key = statsd_key(key)
    status = "hit" if val is not None else "miss"
    statsd.incr("cache%s.%s.%s" % (extra, metric_key, status))

    # Values are singleton tuples so that we can distinguish
    # a result of None from a missing key.
    if val is not None:
        return val[0]

    val = func(*args, **kwargs)
    cache_set(key, val, cache_name=cache_name, timeout=timeout)

    return val
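# func_with_caching above is the inner wrapper produced by a caching decorator
# that closes over keyfunc, func, cache_name, timeout, and with_statsd_key.
# Below is a minimal usage sketch; the decorator name cache_with_key, the key
# format, and the timeout are illustrative assumptions, not taken from this
# snippet.
@cache_with_key(lambda user_id: 'user_profile_by_id:%s' % (user_id,), timeout=3600 * 24 * 7)
def get_user_profile_by_id(user_id):
    return UserProfile.objects.get(id=user_id)

# On a cache hit the wrapper returns val[0] and increments a "cache.<key>.hit"
# counter; on a miss it calls the wrapped function, stores the result via
# cache_set() (assumed to wrap values in singleton tuples, matching the val[0]
# unwrapping on hits), and increments the matching ".miss" counter.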
def report_send_times(request: HttpRequest, user_profile: UserProfile,
                      time: int=REQ(converter=to_non_negative_int),
                      received: int=REQ(converter=to_non_negative_int, default=-1),
                      displayed: int=REQ(converter=to_non_negative_int, default=-1),
                      locally_echoed: bool=REQ(validator=check_bool, default=False),
                      rendered_content_disparity: bool=REQ(validator=check_bool,
                                                           default=False)) -> HttpResponse:
    received_str = "(unknown)"
    if received > 0:
        received_str = str(received)
    displayed_str = "(unknown)"
    if displayed > 0:
        displayed_str = str(displayed)

    request._log_data["extra"] = "[%sms/%sms/%sms/echo:%s/diff:%s]" \
        % (time, received_str, displayed_str, locally_echoed, rendered_content_disparity)

    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("endtoend.send_time.%s" % (base_key,), time)
    if received > 0:
        statsd.timing("endtoend.receive_time.%s" % (base_key,), received)
    if displayed > 0:
        statsd.timing("endtoend.displayed_time.%s" % (base_key,), displayed)
    if locally_echoed:
        statsd.incr('locally_echoed')
    if rendered_content_disparity:
        statsd.incr('render_disparity')
    return json_success()
def report_send_times(
    request: HttpRequest,
    user_profile: UserProfile,
    time: int = REQ(converter=to_non_negative_int),
    received: int = REQ(converter=to_non_negative_int, default=-1),
    displayed: int = REQ(converter=to_non_negative_int, default=-1),
    locally_echoed: bool = REQ(json_validator=check_bool, default=False),
    rendered_content_disparity: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    received_str = "(unknown)"
    if received > 0:
        received_str = str(received)
    displayed_str = "(unknown)"
    if displayed > 0:
        displayed_str = str(displayed)

    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data[
        "extra"
    ] = f"[{time}ms/{received_str}ms/{displayed_str}ms/echo:{locally_echoed}/diff:{rendered_content_disparity}]"

    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing(f"endtoend.send_time.{base_key}", time)
    if received > 0:
        statsd.timing(f"endtoend.receive_time.{base_key}", received)
    if displayed > 0:
        statsd.timing(f"endtoend.displayed_time.{base_key}", displayed)
    if locally_echoed:
        statsd.incr("locally_echoed")
    if rendered_content_disparity:
        statsd.incr("render_disparity")
    return json_success(request)
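# A minimal sketch of exercising report_send_times() with Django's test
# client. The URL path "/json/report/send_times" and the pre-authenticated
# session are assumptions for illustration; only the parameter names come from
# the view signature above.
from django.test import Client

client = Client()
# Assumes the client already carries an authenticated session for some user.
client.post(
    "/json/report/send_times",
    {"time": 153, "received": 278, "displayed": 346, "locally_echoed": "false"},
)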
def report_unnarrow_times(request: HttpRequest, user_profile: UserProfile,
                          initial_core: int=REQ(converter=to_non_negative_int),
                          initial_free: int=REQ(converter=to_non_negative_int)) -> HttpResponse:
    request._log_data["extra"] = "[%sms/%sms]" % (initial_core, initial_free)
    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("unnarrow.initial_core.%s" % (base_key,), initial_core)
    statsd.timing("unnarrow.initial_free.%s" % (base_key,), initial_free)
    return json_success()
def json_report_send_time(request, user_profile,
                          time=REQ(converter=to_non_negative_int),
                          received=REQ(converter=to_non_negative_int, default="(unknown)"),
                          displayed=REQ(converter=to_non_negative_int, default="(unknown)"),
                          locally_echoed=REQ(validator=check_bool, default=False),
                          rendered_content_disparity=REQ(validator=check_bool, default=False)):
    request._log_data["extra"] = "[%sms/%sms/%sms/echo:%s/diff:%s]" \
        % (time, received, displayed, locally_echoed, rendered_content_disparity)
    statsd.timing("endtoend.send_time.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                  time)
    if received != "(unknown)":
        statsd.timing("endtoend.receive_time.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                      received)
    if displayed != "(unknown)":
        statsd.timing("endtoend.displayed_time.%s" % (statsd_key(user_profile.realm.domain, clean_periods=True),),
                      displayed)
    if locally_echoed:
        statsd.incr('locally_echoed')
    if rendered_content_disparity:
        statsd.incr('render_disparity')
    return json_success()
def report_unnarrow_times(request: HttpRequest,
                          user_profile: Union[UserProfile, AnonymousUser],
                          initial_core: int=REQ(converter=to_non_negative_int),
                          initial_free: int=REQ(converter=to_non_negative_int)) -> HttpResponse:
    request._log_data["extra"] = f"[{initial_core}ms/{initial_free}ms]"
    # user_profile may be an AnonymousUser here, so resolve the realm from the
    # request rather than from the user.
    realm = get_valid_realm_from_request(request)
    base_key = statsd_key(realm.string_id, clean_periods=True)
    statsd.timing(f"unnarrow.initial_core.{base_key}", initial_core)
    statsd.timing(f"unnarrow.initial_free.{base_key}", initial_free)
    return json_success()
def handle(self, *args, **options):
    # type: (*Any, **Any) -> None
    # Get list of all active users in the last 1 week
    cutoff = timezone_now() - timedelta(minutes=30, hours=168)

    users = UserPresence.objects.select_related().filter(timestamp__gt=cutoff)

    # Calculate 10min, 2hrs, 12hrs, 1day, 2 business days (TODO business days), 1 week bucket of stats
    hour_buckets = [0.16, 2, 12, 24, 48, 168]
    user_info = defaultdict(dict)  # type: Dict[str, Dict[float, List[str]]]

    for last_presence in users:
        if last_presence.status == UserPresence.IDLE:
            known_active = last_presence.timestamp - timedelta(minutes=30)
        else:
            known_active = last_presence.timestamp

        for bucket in hour_buckets:
            if bucket not in user_info[last_presence.user_profile.realm.string_id]:
                user_info[last_presence.user_profile.realm.string_id][bucket] = []
            if timezone_now() - known_active < timedelta(hours=bucket):
                user_info[last_presence.user_profile.realm.string_id][bucket].append(last_presence.user_profile.email)

    for realm, buckets in user_info.items():
        print("Realm %s" % (realm,))
        for hr, users in sorted(buckets.items()):
            print("\tUsers for %s: %s" % (hr, len(users)))
            statsd.gauge("users.active.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))

    # Also do stats for how many users have been reading the app.
    users_reading = UserActivity.objects.select_related().filter(query="/json/messages/flags")
    user_info = defaultdict(dict)
    for activity in users_reading:
        for bucket in hour_buckets:
            if bucket not in user_info[activity.user_profile.realm.string_id]:
                user_info[activity.user_profile.realm.string_id][bucket] = []
            if timezone_now() - activity.last_visit < timedelta(hours=bucket):
                user_info[activity.user_profile.realm.string_id][bucket].append(activity.user_profile.email)

    for realm, buckets in user_info.items():
        print("Realm %s" % (realm,))
        for hr, users in sorted(buckets.items()):
            print("\tUsers reading for %s: %s" % (hr, len(users)))
            statsd.gauge("users.reading.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))
def handle(self, *args, **options):
    # type: (*Any, **Any) -> None
    # Get list of all active users in the last 1 week
    cutoff = timezone.now() - timedelta(minutes=30, hours=168)

    users = UserPresence.objects.select_related().filter(timestamp__gt=cutoff)

    # Calculate 10min, 2hrs, 12hrs, 1day, 2 business days (TODO business days), 1 week bucket of stats
    hour_buckets = [0.16, 2, 12, 24, 48, 168]
    user_info = defaultdict(dict)  # type: Dict[str, Dict[float, List[str]]]

    for last_presence in users:
        if last_presence.status == UserPresence.IDLE:
            known_active = last_presence.timestamp - timedelta(minutes=30)
        else:
            known_active = last_presence.timestamp

        for bucket in hour_buckets:
            if bucket not in user_info[last_presence.user_profile.realm.string_id]:
                user_info[last_presence.user_profile.realm.string_id][bucket] = []
            if timezone.now() - known_active < timedelta(hours=bucket):
                user_info[last_presence.user_profile.realm.string_id][bucket].append(last_presence.user_profile.email)

    for realm, buckets in user_info.items():
        print("Realm %s" % (realm,))
        for hr, users in sorted(buckets.items()):
            print("\tUsers for %s: %s" % (hr, len(users)))
            statsd.gauge("users.active.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))

    # Also do stats for how many users have been reading the app.
    users_reading = UserActivity.objects.select_related().filter(query="/json/messages/flags")
    user_info = defaultdict(dict)
    for activity in users_reading:
        for bucket in hour_buckets:
            if bucket not in user_info[activity.user_profile.realm.string_id]:
                user_info[activity.user_profile.realm.string_id][bucket] = []
            if timezone.now() - activity.last_visit < timedelta(hours=bucket):
                user_info[activity.user_profile.realm.string_id][bucket].append(activity.user_profile.email)

    for realm, buckets in user_info.items():
        print("Realm %s" % (realm,))
        for hr, users in sorted(buckets.items()):
            print("\tUsers reading for %s: %s" % (hr, len(users)))
            statsd.gauge("users.reading.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))
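# Illustrative only: how the gauge names emitted by handle() above are
# assembled. The realm name is made up, and the exact output assumes
# statsd_key() replaces periods with underscores when clean_periods=True.
example_gauge = "users.active.%s.%shr" % (statsd_key("examplecorp", True), statsd_key(0.16, True))
# -> "users.active.examplecorp.0_16hr"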
def get_data_url(buckets, realm):
    realm_key = statsd_key(realm, True)

    # This is the slightly-cleaned up JSON api version of https://graphiti.zulip.net/graphs/945c7aafc2d
    #
    # Fetches 1 month worth of data
    DATA_URL = "https://stats1.zulip.net:444/render/?from=-1000d&format=json"
    for bucket in buckets:
        if realm != 'all':
            statsd_target = "stats.gauges.staging.users.active.%s.%s" % (realm_key, bucket)
            DATA_URL += "&target=%s" % (statsd_target,)
        else:
            # all means adding up all realms, but exclude the .all. metrics since that would double things
            DATA_URL += "&target=sum(exclude(stats.gauges.staging.users.active.*.%s, 'all'))" % (bucket,)

    return DATA_URL
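# Usage sketch for get_data_url(); the realm name is made up, and the expected
# results are derived only from the string formatting in the function above
# (assuming statsd_key() leaves a plain lowercase name unchanged).
single_realm_url = get_data_url(["12hr"], "examplecorp")
# -> "https://stats1.zulip.net:444/render/?from=-1000d&format=json"
#    "&target=stats.gauges.staging.users.active.examplecorp.12hr"
all_realms_url = get_data_url(["12hr"], "all")
# -> "...&target=sum(exclude(stats.gauges.staging.users.active.*.12hr, 'all'))"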
                  default='today')
parser.add_option('--realm', help='Which realm to query', default='all')
parser.add_option('--bucket', help='Which bucket to query', default='12hr')

if __name__ == '__main__':
    (options, args) = parser.parse_args()

    if not options.user or not options.password:
        parser.error("You must enter a username and password to log into graphite with")

    startfrom = noon_of(day=datetime.now())
    if options.start_from != 'today':
        startfrom = noon_of(day=datetime.fromtimestamp(int(options.start_from)))
    print("Using baseline of today as %s" % (startfrom,))

    realm_key = statsd_key(options.realm, True)

    buckets = [options.bucket]

    # This is the slightly-cleaned up JSON api version of https://graphiti.zulip.net/graphs/945c7aafc2d
    #
    # Fetches 1 month worth of data
    DATA_URL = get_data_url(buckets, options.realm)

    data = get_data(DATA_URL, options.user, options.password)
    parse_data(data, startfrom)
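# noon_of() is used above but not defined in this snippet. A minimal sketch of
# the assumed behavior (normalize a datetime to 12:00 noon on the same day),
# for illustration only:
def noon_of(day):
    return day.replace(hour=12, minute=0, second=0, microsecond=0)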