def _test_handler(self, url, added_data):
  stats_framework_logs_mock.reset_timestamp(stats.STATS_HANDLER, self.now)
  self.assertEqual('Yay', self.app.get(url).body)
  self.assertEqual(1, len(list(stats_logs.yield_entries(None, None))))
  self.mock_now(self.now, 60)
  self.assertEqual(10, stats.cron_generate_stats())
  actual = stats_framework.get_stats(
      stats.STATS_HANDLER, 'minutes', self.now, 1, True)
  expected = [
    {
      'contains_lookups': 0,
      'contains_requests': 0,
      'downloads': 0,
      'downloads_bytes': 0,
      'failures': 0,
      'key': '2010-01-02T03:04',
      'requests': 1,
      'uploads': 0,
      'uploads_bytes': 0,
    },
  ]
  expected[0].update(added_data)
  self.assertEqual(expected, actual)

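# A hypothetical usage sketch (not in the original file): a concrete test
# would call the helper above with the handler's URL and the counters that
# handler is expected to add on top of the baseline snapshot, e.g.:
#
#   def test_upload(self):
#     self._test_handler('/upload', {'uploads': 1, 'uploads_bytes': 42})
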
def test_cron_send_to_bq(self):
  # Generate entities.
  self.assertEqual(120, stats.cron_generate_stats())
  payloads = []

  def json_request(url, method, payload, scopes, deadline):
    self.assertEqual(
        'https://www.googleapis.com/bigquery/v2/projects/sample-app/datasets/'
        'isolated/tables/stats/insertAll',
        url)
    payloads.append(payload)
    self.assertEqual('POST', method)
    self.assertEqual(stats.bqh.INSERT_ROWS_SCOPE, scopes)
    self.assertEqual(600, deadline)
    return {'insertErrors': []}

  self.mock(stats.net, 'json_request', json_request)

  self.assertEqual(120, stats.cron_send_to_bq())
  expected = {
    'failed': [],
    'last': datetime.datetime(2009, 12, 28, 2, 0),
    'ts': datetime.datetime(2010, 1, 2, 3, 4, 5, 6),
  }
  self.assertEqual(
      expected, stats.BqStateStats.get_by_id(1).to_dict())
  expected = [
    {
      'ignoreUnknownValues': False,
      'kind': 'bigquery#tableDataInsertAllRequest',
      'skipInvalidRows': True,
    },
  ]
  actual_rows = payloads[0].pop('rows')
  self.assertEqual(expected, payloads)
  self.assertEqual(120, len(actual_rows))

  # Next cron skips everything that was processed.
  self.assertEqual(0, stats.cron_send_to_bq())

def test_cron_generate_stats(self):
  # It generates empty stats.
  self.assertEqual(120, stats.cron_generate_stats())

def get(self):
  minutes = stats.cron_generate_stats()
  if minutes is not None:
    logging.info('Processed %d minutes', minutes)