def run_update_analytics_counts(self, options):
    # type: (Dict[str, Any]) -> None
    """Fill the analytics count tables up through the requested hour.

    Honors --stat (limit the run to one CountStat), --utc (interpret a
    naive --time as UTC), and --verbose (print per-stat timings).
    """
    target = parse_datetime(options['time'])
    if options['utc']:
        target = target.replace(tzinfo=timezone_utc)
    if target.tzinfo is None:
        raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")
    # Counts are filled hour by hour, so snap down to a whole UTC hour.
    target = floor_to_hour(target.astimezone(timezone_utc))

    if options['stat'] is None:
        to_process = list(COUNT_STATS.values())
    else:
        to_process = [COUNT_STATS[options['stat']]]

    logger.info("Starting updating analytics counts through %s" % (target,))
    verbose = options['verbose']
    if verbose:
        overall_start = time.time()
        previous = overall_start

    for stat in to_process:
        process_count_stat(stat, target)
        if verbose:
            print("Updated %s in %.3fs" % (stat.property, time.time() - previous))
            previous = time.time()

    if verbose:
        print("Finished updating analytics counts through %s in %.3fs"
              % (target, time.time() - overall_start))
    logger.info("Finished updating analytics counts through %s" % (target,))
def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
    """Fill the analytics count tables up through the requested hour.

    Honors --stat (limit the run to one CountStat), --utc (interpret a
    naive --time as UTC), and --verbose (print per-stat timings).
    """
    # installation_epoch relies on there being at least one realm; we
    # shouldn't run the analytics code if that condition isn't satisfied
    if not Realm.objects.exists():
        logger.info("No realms, stopping update_analytics_counts")
        return

    target = parse_datetime(options['time'])
    if options['utc']:
        target = target.replace(tzinfo=timezone_utc)
    if target.tzinfo is None:
        raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")
    # Counts are filled hour by hour, so snap down to a whole UTC hour.
    target = floor_to_hour(target.astimezone(timezone_utc))

    if options['stat'] is None:
        to_process = list(COUNT_STATS.values())
    else:
        to_process = [COUNT_STATS[options['stat']]]

    logger.info("Starting updating analytics counts through %s" % (target,))
    verbose = options['verbose']
    if verbose:
        overall_start = time.time()
        previous = overall_start

    for stat in to_process:
        process_count_stat(stat, target)
        if verbose:
            print("Updated %s in %.3fs" % (stat.property, time.time() - previous))
            previous = time.time()

    if verbose:
        print("Finished updating analytics counts through %s in %.3fs"
              % (target, time.time() - overall_start))
    logger.info("Finished updating analytics counts through %s" % (target,))
def test_process_stat(self):
    # type: () -> None
    """process_count_stat handles new, dirty, clean, and advancing fill states."""
    current_time = installation_epoch() + self.HOUR
    stat = self.make_dummy_count_stat(current_time)
    stat_property = stat.property

    def run_and_check(expected_rows):
        # type: (int) -> None
        # Process the (current) stat through current_time and verify both the
        # recorded fill state and the number of InstallationCount rows.
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(
            InstallationCount.objects.filter(property=stat_property).count(),
            expected_rows)

    # process new stat
    run_and_check(1)

    # dirty stat: a STARTED fill state is redone without duplicating rows
    FillState.objects.filter(property=stat_property).update(state=FillState.STARTED)
    run_and_check(1)

    # clean stat, no update: re-running at the same time is a no-op
    run_and_check(1)

    # clean stat, with update: advancing the fill-to time adds one more row
    current_time = current_time + self.HOUR
    stat = self.make_dummy_count_stat(current_time)
    run_and_check(2)
def run_update_analytics_counts(self, options):
    # type: (Dict[str, Any]) -> None
    """Run the requested CountStat(s) through the given fill-to time.

    --stat limits the run to a single stat; --utc interprets a naive
    --time as UTC.
    """
    end_time = parse_datetime(options['time'])
    if options['utc']:
        end_time = end_time.replace(tzinfo=timezone.utc)
    if end_time.tzinfo is None:
        raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")

    logger.info("Starting updating analytics counts through %s" % (end_time,))
    requested = options['stat']
    if requested is not None:
        process_count_stat(COUNT_STATS[requested], end_time)
    else:
        for stat in COUNT_STATS.values():
            process_count_stat(stat, end_time)
    logger.info("Finished updating analytics counts through %s" % (end_time,))
def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
    """Fill analytics tables through the requested hour.

    Honors --stat, --utc, and --verbose; afterwards, submits usage
    statistics to the push-notification bouncer when configured.
    """
    # installation_epoch relies on there being at least one realm; we
    # shouldn't run the analytics code if that condition isn't satisfied
    if not Realm.objects.exists():
        logger.info("No realms, stopping update_analytics_counts")
        return

    target = parse_datetime(options["time"])
    assert target is not None
    if options["utc"]:
        target = target.replace(tzinfo=timezone.utc)
    if target.tzinfo is None:
        raise ValueError(
            "--time must be timezone aware. Maybe you meant to use the --utc option?"
        )
    # Counts are filled hour by hour, so snap down to a whole UTC hour.
    target = floor_to_hour(target.astimezone(timezone.utc))

    if options["stat"] is None:
        to_process = list(COUNT_STATS.values())
    else:
        to_process = [COUNT_STATS[options["stat"]]]

    logger.info("Starting updating analytics counts through %s", target)
    verbose = options["verbose"]
    if verbose:
        overall_start = time.time()
        previous = overall_start

    for stat in to_process:
        process_count_stat(stat, target)
        if verbose:
            print(f"Updated {stat.property} in {time.time() - previous:.3f}s")
            previous = time.time()

    if verbose:
        print(
            f"Finished updating analytics counts through {target} in {time.time() - overall_start:.3f}s"
        )
    logger.info("Finished updating analytics counts through %s", target)

    # Report usage upstream only when both the bouncer URL and the opt-in
    # setting are configured.
    if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
        send_analytics_to_remote_server()
def test_process_logging_stat(self):
    # type: () -> None
    """Logged rows survive processing and re-aggregate after a dirty FillState."""
    end_time = self.TIME_ZERO
    user_stat = LoggingCountStat('user stat', UserCount, CountStat.DAY)
    stream_stat = LoggingCountStat('stream stat', StreamCount, CountStat.DAY)
    realm_stat = LoggingCountStat('realm stat', RealmCount, CountStat.DAY)
    all_stats = [user_stat, stream_stat, realm_stat]

    user = self.create_user()
    stream = self.create_stream_with_recipient()[0]
    realm = self.default_realm
    UserCount.objects.create(
        user=user, realm=realm, property=user_stat.property, end_time=end_time,
        value=5)
    StreamCount.objects.create(
        stream=stream, realm=realm, property=stream_stat.property, end_time=end_time,
        value=5)
    RealmCount.objects.create(
        realm=realm, property=realm_stat.property, end_time=end_time, value=5)

    def check_tables(value):
        # type: (int) -> None
        # Every logged value should appear in its own table and be
        # aggregated up to RealmCount and InstallationCount.
        self.assertTableState(UserCount, ['property', 'value'],
                              [[user_stat.property, value]])
        self.assertTableState(StreamCount, ['property', 'value'],
                              [[stream_stat.property, value]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [[user_stat.property, value],
                               [stream_stat.property, value],
                               [realm_stat.property, value]])
        self.assertTableState(InstallationCount, ['property', 'value'],
                              [[user_stat.property, value],
                               [stream_stat.property, value],
                               [realm_stat.property, value]])

    # Normal run of process_count_stat
    for stat in all_stats:
        process_count_stat(stat, end_time)
    check_tables(5)

    # Change the logged data and mark FillState as dirty
    UserCount.objects.update(value=6)
    StreamCount.objects.update(value=6)
    RealmCount.objects.filter(property=realm_stat.property).update(value=6)
    FillState.objects.update(state=FillState.STARTED)

    # Check that the change propagated (and the collected data wasn't deleted)
    for stat in all_stats:
        process_count_stat(stat, end_time)
    check_tables(6)
def test_aggregation(self):
    # type: () -> None
    """Each logged stat aggregates up to RealmCount and InstallationCount."""
    # NOTE: the creation/processing interleaving below mirrors the intended
    # scenario exactly; the user and stream are created between runs.
    realm_stat = LoggingCountStat('realm test', RealmCount, CountStat.DAY)
    do_increment_logging_stat(self.default_realm, realm_stat, None, self.TIME_ZERO)
    process_count_stat(realm_stat, self.TIME_ZERO)

    user = self.create_user()
    user_stat = LoggingCountStat('user test', UserCount, CountStat.DAY)
    do_increment_logging_stat(user, user_stat, None, self.TIME_ZERO)
    process_count_stat(user_stat, self.TIME_ZERO)

    stream = self.create_stream_with_recipient()[0]
    stream_stat = LoggingCountStat('stream test', StreamCount, CountStat.DAY)
    do_increment_logging_stat(stream, stream_stat, None, self.TIME_ZERO)
    process_count_stat(stream_stat, self.TIME_ZERO)

    self.assertTableState(
        InstallationCount, ['property', 'value'],
        [['realm test', 1], ['user test', 1], ['stream test', 1]])
    self.assertTableState(
        RealmCount, ['property', 'value'],
        [['realm test', 1], ['user test', 1], ['stream test', 1]])
    self.assertTableState(UserCount, ['property', 'value'], [['user test', 1]])
    self.assertTableState(StreamCount, ['property', 'value'], [['stream test', 1]])
def run_update_analytics_counts(self, options):
    # type: (Dict[str, Any]) -> None
    """Run the requested CountStat(s) through the given fill-to time.

    --stat limits the run to a single stat; --utc interprets a naive
    --time as UTC.
    """
    end_time = parse_datetime(options['time'])
    if options['utc']:
        end_time = end_time.replace(tzinfo=timezone_utc)
    if end_time.tzinfo is None:
        raise ValueError(
            "--time must be timezone aware. Maybe you meant to use the --utc option?"
        )

    logger.info("Starting updating analytics counts through %s" % (end_time,))
    requested = options['stat']
    if requested is not None:
        process_count_stat(COUNT_STATS[requested], end_time)
    else:
        for stat in COUNT_STATS.values():
            process_count_stat(stat, end_time)
    logger.info("Finished updating analytics counts through %s" % (end_time,))
def test_aggregation(self):
    # type: () -> None
    """Each logged stat aggregates up to RealmCount and InstallationCount."""
    # NOTE: the creation/processing interleaving below mirrors the intended
    # scenario exactly; the user and stream are created between runs.
    realm_stat = LoggingCountStat('realm test', RealmCount, CountStat.DAY)
    do_increment_logging_stat(self.default_realm, realm_stat, None, self.TIME_ZERO)
    process_count_stat(realm_stat, self.TIME_ZERO)

    user = self.create_user()
    user_stat = LoggingCountStat('user test', UserCount, CountStat.DAY)
    do_increment_logging_stat(user, user_stat, None, self.TIME_ZERO)
    process_count_stat(user_stat, self.TIME_ZERO)

    stream = self.create_stream_with_recipient()[0]
    stream_stat = LoggingCountStat('stream test', StreamCount, CountStat.DAY)
    do_increment_logging_stat(stream, stream_stat, None, self.TIME_ZERO)
    process_count_stat(stream_stat, self.TIME_ZERO)

    self.assertTableState(InstallationCount, ['property', 'value'],
                          [['realm test', 1], ['user test', 1], ['stream test', 1]])
    self.assertTableState(RealmCount, ['property', 'value'],
                          [['realm test', 1], ['user test', 1], ['stream test', 1]])
    self.assertTableState(UserCount, ['property', 'value'], [['user test', 1]])
    self.assertTableState(StreamCount, ['property', 'value'], [['stream test', 1]])