def test_main(self):
    """End-to-end check of summarize.main.

    Writes fixture builds.json/tests.json files, runs the summarizer over
    them, and asserts on the exact structure of failure_data.json.
    """
    def smear(deltas):
        """Given a list of dictionary deltas, return a list of dictionaries.

        Each output dict is the cumulative merge of all deltas up to and
        including that position, so fixtures only spell out what changed.
        """
        cur = {}
        out = []
        for delta in deltas:
            cur.update(delta)
            out.append(dict(cur))
        return out

    # Use context managers so the fixture files are flushed and closed
    # before summarize.main() reads them back (the originals leaked the
    # handles and relied on interpreter teardown to flush).
    with open('builds.json', 'w') as buildf:
        json.dump(smear([
            {'started': 1234, 'number': 1, 'tests_failed': 1, 'tests_run': 2,
             'elapsed': 4, 'path': 'gs://logs/some-job/1', 'job': 'some-job',
             'result': 'SUCCESS'},
            {'number': 2, 'path': 'gs://logs/some-job/2'},
            {'number': 3, 'path': 'gs://logs/some-job/3'},
            {'number': 4, 'path': 'gs://logs/some-job/4'},
            {'number': 5, 'path': 'gs://logs/other-job/5', 'job': 'other-job',
             'elapsed': 8},
            {'number': 7, 'path': 'gs://logs/other-job/7', 'result': 'FAILURE'},
        ]), buildf)
    with open('tests.json', 'w') as testf:
        json.dump(smear([
            {'name': 'example test', 'build': 'gs://logs/some-job/1',
             'failure_text': 'some awful stack trace exit 1'},
            {'build': 'gs://logs/some-job/2'},
            {'build': 'gs://logs/some-job/3'},
            {'build': 'gs://logs/some-job/4'},
            {'failure_text': 'some other error message'},
            {'name': 'unrelated test', 'build': 'gs://logs/other-job/5'},
            {'build': 'gs://logs/other-job/7'},
        ]), testf)

    summarize.main(summarize.parse_args(['builds.json', 'tests.json']))

    with open('failure_data.json') as outf:
        output = json_load_byteified(outf)

    # uncomment when output changes
    # import pprint; pprint.pprint(output)

    self.assertEqual(
        output['builds'],
        {'cols': {'elapsed': [8, 8, 4, 4, 4, 4],
                  'executor': [None, None, None, None, None, None],
                  'pr': [None, None, None, None, None, None],
                  'result': ['SUCCESS', 'FAILURE', 'SUCCESS', 'SUCCESS',
                             'SUCCESS', 'SUCCESS'],
                  'started': [1234, 1234, 1234, 1234, 1234, 1234],
                  'tests_failed': [1, 1, 1, 1, 1, 1],
                  'tests_run': [2, 2, 2, 2, 2, 2]},
         'job_paths': {'other-job': 'gs://logs/other-job',
                       'some-job': 'gs://logs/some-job'},
         'jobs': {'other-job': {'5': 0, '7': 1}, 'some-job': [1, 4, 2]}})

    # Cluster ids are content hashes we don't recompute here; grab them
    # from the output and assert everything else around them.
    random_hash_1 = output['clustered'][0][1]
    random_hash_2 = output['clustered'][1][1]

    self.assertEqual(
        output['clustered'],
        [['some awful stack trace exit 1', random_hash_1,
          'some awful stack trace exit 1',
          [['example test', [['some-job', [1, 2, 3, 4]]]]]],
         ['some other error message', random_hash_2,
          'some other error message',
          [['unrelated test', [['other-job', [5, 7]]]],
           ['example test', [['some-job', [4]]]]]]])
def test_main(self):
    """End-to-end check of summarize.main with owners and output slices.

    Writes fixture builds.json (JSON array), tests.json (JSON lines, one
    record per line, including an intentional duplicate) and owners.json,
    runs the summarizer with --output_slices/--owners, then asserts on the
    exact contents of failure_data.json and the per-prefix slice file.
    """
    def smear(deltas):
        """Given a list of dictionary deltas, return a list of dictionaries.

        Each output dict is the cumulative merge of all deltas up to and
        including that position, so fixtures only spell out what changed.
        """
        cur = {}
        out = []
        for delta in deltas:
            cur.update(delta)
            out.append(dict(cur))
        return out

    # Use context managers so the fixture files are flushed and closed
    # before summarize.main() reads them back (the original leaked the
    # builds.json / owners.json handles).
    with open('builds.json', 'w') as buildf:
        json.dump(smear([
            {'started': 1234, 'number': 1, 'tests_failed': 1, 'tests_run': 2,
             'elapsed': 4, 'path': 'gs://logs/some-job/1', 'job': 'some-job',
             'result': 'SUCCESS'},
            {'number': 2, 'path': 'gs://logs/some-job/2'},
            {'number': 3, 'path': 'gs://logs/some-job/3'},
            {'number': 4, 'path': 'gs://logs/some-job/4'},
            {'number': 5, 'path': 'gs://logs/other-job/5', 'job': 'other-job',
             'elapsed': 8},
            {'number': 7, 'path': 'gs://logs/other-job/7', 'result': 'FAILURE'},
        ]), buildf)

    tests = smear([
        {'name': 'example test', 'build': 'gs://logs/some-job/1',
         'failure_text': 'some awful stack trace exit 1'},
        {'build': 'gs://logs/some-job/2'},
        {'build': 'gs://logs/some-job/3'},
        {'build': 'gs://logs/some-job/4'},
        {'name': 'another test', 'failure_text': 'some other error message'},
        {'name': 'unrelated test', 'build': 'gs://logs/other-job/5'},
        {},  # intentional dupe
        {'build': 'gs://logs/other-job/7'},
    ])
    # tests.json is newline-delimited JSON, one test record per line.
    with open('tests.json', 'w') as f:
        for t in tests:
            f.write(json.dumps(t) + '\n')

    with open('owners.json', 'w') as ownerf:
        json.dump({'node': ['example']}, ownerf)

    summarize.main(
        summarize.parse_args([
            'builds.json', 'tests.json',
            '--output_slices=failure_data_PREFIX.json',
            '--owners=owners.json'
        ]))

    with open('failure_data.json') as outf:
        output = json_load_byteified(outf)

    # uncomment when output changes
    # import pprint; pprint.pprint(output)

    self.assertEqual(
        output['builds'],
        {'cols': {'elapsed': [8, 8, 4, 4, 4, 4],
                  'executor': [None, None, None, None, None, None],
                  'pr': [None, None, None, None, None, None],
                  'result': ['SUCCESS', 'FAILURE', 'SUCCESS', 'SUCCESS',
                             'SUCCESS', 'SUCCESS'],
                  'started': [1234, 1234, 1234, 1234, 1234, 1234],
                  'tests_failed': [1, 1, 1, 1, 1, 1],
                  'tests_run': [2, 2, 2, 2, 2, 2]},
         'job_paths': {'other-job': 'gs://logs/other-job',
                       'some-job': 'gs://logs/some-job'},
         'jobs': {'other-job': {'5': 0, '7': 1}, 'some-job': [1, 4, 2]}})

    # Cluster ids are content hashes we don't recompute here; grab them
    # from the output and assert everything else around them.
    random_hash_1 = output['clustered'][0]['id']
    random_hash_2 = output['clustered'][1]['id']

    self.assertEqual(
        output['clustered'],
        [{'id': random_hash_1,
          'key': 'some awful stack trace exit 1',
          'tests': [{'jobs': [{'builds': [4, 3, 2, 1], 'name': 'some-job'}],
                     'name': 'example test'}],
          'spans': [29],
          'owner': 'node',
          'text': 'some awful stack trace exit 1'},
         {'id': random_hash_2,
          'key': 'some other error message',
          'tests': [{'jobs': [{'builds': [7, 5], 'name': 'other-job'}],
                     'name': 'unrelated test'},
                    {'jobs': [{'builds': [4], 'name': 'some-job'}],
                     'name': 'another test'}],
          'spans': [24],
          'owner': 'testing',
          'text': 'some other error message'}])

    # Slice files are named by the first two characters of the cluster id;
    # the slice should hold only that cluster and the builds it touches.
    with open('failure_data_%s.json' % random_hash_1[:2]) as slicef:
        slice_output = json_load_byteified(slicef)
    self.assertEqual(slice_output['clustered'], [output['clustered'][0]])
    self.assertEqual(slice_output['builds']['cols']['started'],
                     [1234, 1234, 1234, 1234])
def test_main(self):
    """End-to-end check of summarize.main with owners and output slices.

    Writes fixture builds.json (JSON array), tests.json (JSON lines, one
    record per line, including an intentional duplicate) and owners.json,
    runs the summarizer with --output_slices/--owners, then asserts on the
    exact contents of failure_data.json and the per-prefix slice file.
    """
    def smear(deltas):
        """Given a list of dictionary deltas, return a list of dictionaries.

        Each output dict is the cumulative merge of all deltas up to and
        including that position, so fixtures only spell out what changed.
        """
        cur = {}
        out = []
        for delta in deltas:
            cur.update(delta)
            out.append(dict(cur))
        return out

    # Use context managers so the fixture files are flushed and closed
    # before summarize.main() reads them back (the original leaked the
    # builds.json / owners.json handles).
    with open('builds.json', 'w') as buildf:
        json.dump(smear([
            {'started': 1234, 'number': 1, 'tests_failed': 1, 'tests_run': 2,
             'elapsed': 4, 'path': 'gs://logs/some-job/1', 'job': 'some-job',
             'result': 'SUCCESS'},
            {'number': 2, 'path': 'gs://logs/some-job/2'},
            {'number': 3, 'path': 'gs://logs/some-job/3'},
            {'number': 4, 'path': 'gs://logs/some-job/4'},
            {'number': 5, 'path': 'gs://logs/other-job/5', 'job': 'other-job',
             'elapsed': 8},
            {'number': 7, 'path': 'gs://logs/other-job/7', 'result': 'FAILURE'},
        ]), buildf)

    tests = smear([
        {'name': 'example test', 'build': 'gs://logs/some-job/1',
         'failure_text': 'some awful stack trace exit 1'},
        {'build': 'gs://logs/some-job/2'},
        {'build': 'gs://logs/some-job/3'},
        {'build': 'gs://logs/some-job/4'},
        {'name': 'another test', 'failure_text': 'some other error message'},
        {'name': 'unrelated test', 'build': 'gs://logs/other-job/5'},
        {},  # intentional dupe
        {'build': 'gs://logs/other-job/7'},
    ])
    # tests.json is newline-delimited JSON, one test record per line.
    with open('tests.json', 'w') as f:
        for t in tests:
            f.write(json.dumps(t) + '\n')

    with open('owners.json', 'w') as ownerf:
        json.dump({'node': ['example']}, ownerf)

    summarize.main(summarize.parse_args(
        ['builds.json', 'tests.json',
         '--output_slices=failure_data_PREFIX.json',
         '--owners=owners.json']))

    with open('failure_data.json') as outf:
        output = json_load_byteified(outf)

    # uncomment when output changes
    # import pprint; pprint.pprint(output)

    self.assertEqual(
        output['builds'],
        {'cols': {'elapsed': [8, 8, 4, 4, 4, 4],
                  'executor': [None, None, None, None, None, None],
                  'pr': [None, None, None, None, None, None],
                  'result': ['SUCCESS', 'FAILURE', 'SUCCESS', 'SUCCESS',
                             'SUCCESS', 'SUCCESS'],
                  'started': [1234, 1234, 1234, 1234, 1234, 1234],
                  'tests_failed': [1, 1, 1, 1, 1, 1],
                  'tests_run': [2, 2, 2, 2, 2, 2]},
         'job_paths': {'other-job': 'gs://logs/other-job',
                       'some-job': 'gs://logs/some-job'},
         'jobs': {'other-job': {'5': 0, '7': 1}, 'some-job': [1, 4, 2]}})

    # Cluster ids are content hashes we don't recompute here; grab them
    # from the output and assert everything else around them.
    random_hash_1 = output['clustered'][0]['id']
    random_hash_2 = output['clustered'][1]['id']

    self.assertEqual(
        output['clustered'],
        [{'id': random_hash_1,
          'key': 'some awful stack trace exit 1',
          'tests': [{'jobs': [{'builds': [4, 3, 2, 1], 'name': 'some-job'}],
                     'name': 'example test'}],
          'spans': [29],
          'owner': 'node',
          'text': 'some awful stack trace exit 1'},
         {'id': random_hash_2,
          'key': 'some other error message',
          'tests': [{'jobs': [{'builds': [7, 5], 'name': 'other-job'}],
                     'name': 'unrelated test'},
                    {'jobs': [{'builds': [4], 'name': 'some-job'}],
                     'name': 'another test'}],
          'spans': [24],
          'owner': 'testing',
          'text': 'some other error message'}])

    # Slice files are named by the first two characters of the cluster id;
    # the slice should hold only that cluster and the builds it touches.
    with open('failure_data_%s.json' % random_hash_1[:2]) as slicef:
        slice_output = json_load_byteified(slicef)
    self.assertEqual(slice_output['clustered'], [output['clustered'][0]])
    self.assertEqual(slice_output['builds']['cols']['started'],
                     [1234, 1234, 1234, 1234])