def test_snapshot_to_proto(self):
  s = stats.STATS_HANDLER.stats_minute_cls(
      key=stats.STATS_HANDLER.minute_key(self.now),
      created=self.now,
      values_compressed=stats._Snapshot(
          uploads=1,
          uploads_bytes=2,
          downloads=3,
          downloads_bytes=4,
          contains_requests=5,
          contains_lookups=6,
          requests=7,
          failures=8,
      ))
  p = isolated_pb2.StatsSnapshot()
  stats.snapshot_to_proto(s, p)
  expected = (
      u'start_time {\n'
      u'  seconds: 1262401440\n'
      u'}\n'
      u'uploads: 1\n'
      u'uploads_bytes: 2\n'
      u'downloads: 3\n'
      u'downloads_bytes: 4\n'
      u'contains_requests: 5\n'
      u'contains_lookups: 6\n'
      u'requests: 7\n'
      u'failures: 8\n')
  self.assertEqual(expected, unicode(p))
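# A hedged sanity-check sketch (not part of the original file, method name is
# hypothetical) showing where the 1262401440 constant above comes from,
# assuming self.now is datetime.datetime(2010, 1, 2, 3, 4, 5, 6) as used by
# _gen_stats() below: the minute bucket drops seconds, and
# 2010-01-02 03:04:00 UTC is 1262401440 seconds after the Unix epoch.
def test_start_time_epoch_seconds(self):
  minute = datetime.datetime(2010, 1, 2, 3, 4, 0)
  epoch = datetime.datetime(1970, 1, 1)
  self.assertEqual(1262401440, int((minute - epoch).total_seconds()))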
def _gen_stats(self):
  # Generates data for the last 10 days, last 10 hours and last 10 minutes.
  # TODO(maruel): Stop accessing the DB directly. Use stats_framework_mock to
  # generate it.
  now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
  self.mock_now(now, 0)
  handler = stats.STATS_HANDLER

  for i in xrange(10):
    s = stats._Snapshot(requests=100 + i)
    day = (now - datetime.timedelta(days=i)).date()
    handler.stats_day_cls(key=handler.day_key(day), values_compressed=s).put()

  for i in xrange(10):
    s = stats._Snapshot(requests=10 + i)
    timestamp = now - datetime.timedelta(hours=i)
    handler.stats_hour_cls(
        key=handler.hour_key(timestamp), values_compressed=s).put()

  for i in xrange(10):
    s = stats._Snapshot(requests=1 + i)
    timestamp = now - datetime.timedelta(minutes=i)
    handler.stats_minute_cls(
        key=handler.minute_key(timestamp), values_compressed=s).put()
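# A hypothetical follow-up sketch (not in the original file, method name is
# illustrative) of what _gen_stats() leaves in the datastore: 10 day, 10 hour
# and 10 minute snapshots. It assumes stats_day_cls, stats_hour_cls and
# stats_minute_cls are ndb models, which the .put() calls above suggest.
def test_gen_stats_entity_counts(self):
  self._gen_stats()
  handler = stats.STATS_HANDLER
  self.assertEqual(10, handler.stats_day_cls.query().count())
  self.assertEqual(10, handler.stats_hour_cls.query().count())
  self.assertEqual(10, handler.stats_minute_cls.query().count())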