def testTraceValue(self):
  """FindAllTraceValues returns every trace value added across pages."""
  results = page_test_results.PageTestResults()
  for index, page in enumerate(self.pages[:2]):
    results.WillRunPage(page)
    results.AddValue(
        trace.TraceValue(None, trace_data.TraceData({'test': index + 1})))
    results.DidRunPage(page)
  results.PrintSummary()
  self.assertEquals(2, len(results.FindAllTraceValues()))
def testNoTracesLeftAfterCleanUp(self):
  """After CleanUp, no trace values remain in the results."""
  results = page_test_results.PageTestResults()
  traces = [trace.TraceValue(None, trace_data.TraceData({'test': 1})),
            trace.TraceValue(None, trace_data.TraceData({'test': 2}))]
  for page, value in zip(self.pages[:2], traces):
    results.WillRunPage(page)
    results.AddValue(value)
    results.DidRunPage(page)
  results.CleanUp()
  self.assertFalse(results.FindAllTraceValues())
def testMultiplePagesAndValues(self, cs_insert_mock):
  """CSV output covers scalar values and an uploaded trace on two pages."""
  cs_insert_mock.return_value = 'https://cloud_storage_url/foo'
  trace_value = trace.TraceValue(
      None, trace_data.TraceData('{"events": 0}'))
  trace_value.UploadToCloud(bucket='foo')
  self.SimulateBenchmarkRun([
      (self._story_set[0], [
          scalar.ScalarValue(
              None, 'foo', 'seconds', 4,
              improvement_direction=improvement_direction.DOWN)]),
      (self._story_set[1], [
          scalar.ScalarValue(
              None, 'foo', 'seconds', 3.4,
              improvement_direction=improvement_direction.DOWN),
          trace_value,
          scalar.ScalarValue(
              None, 'bar', 'km', 10,
              improvement_direction=improvement_direction.DOWN),
          scalar.ScalarValue(
              None, 'baz', 'count', 5,
              improvement_direction=improvement_direction.DOWN)])])
  # Parse CSV output into a list of rows, dropping header and trailer lines.
  lines = self.Format().split(self._LINE_SEPARATOR)
  rows = [line.split(',') for line in lines[1:-1]]
  self.assertEquals(len(rows), 5)  # We expect 5 values in total.
  self.assertEquals(len(set(row[1] for row in rows)), 2)  # 2 pages.
  self.assertEquals(len(set(row[2] for row in rows)), 4)  # 4 value names.
  self.assertEquals(
      rows[2],
      ['story_set', 'http://www.bar.com/', 'trace',
       'https://cloud_storage_url/foo', '', '1'])
def testStartFinishOneSliceOneThread(self):
  """An S/F async event pair on one thread imports as one async slice."""
  # The finish (F) event deliberately precedes the start (S) event to check
  # that import does not depend on timestamp order.
  events = [
      {'name': 'a', 'args': {}, 'pid': 52, 'ts': 560, 'cat': 'cat',
       'tid': 53, 'ph': 'F', 'id': 72},
      {'name': 'a', 'pid': 52, 'ts': 524, 'cat': 'cat',
       'tid': 53, 'ph': 'S', 'id': 72, 'args': {'foo': 'bar'}},
  ]
  model = timeline_model.TimelineModel(trace_data_module.TraceData(events))
  self.assertEqual(2, len(list(model.IterAllEvents())))
  thread = model.GetAllProcesses()[0].threads[53]
  slices = thread.async_slices
  self.assertEqual(1, len(slices))
  async_slice = slices[0]
  self.assertEqual('a', async_slice.name)
  self.assertEqual('cat', async_slice.category)
  self.assertEqual(72, async_slice.id)
  self.assertEqual('bar', async_slice.args['foo'])
  self.assertEqual(0, async_slice.start)
  self.assertAlmostEqual((60 - 24) / 1000.0, async_slice.duration)
  self.assertEqual(thread, async_slice.start_thread)
  self.assertEqual(thread, async_slice.end_thread)
def testNestedParsing(self):
  """A 'b' B/E pair nested inside an 'a' pair keeps both slices' timings."""
  events = [
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 1, 'tts': 2, 'cat': 'foo',
       'tid': 1, 'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 1, 'ts': 3, 'tts': 3, 'cat': 'bar',
       'tid': 1, 'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 1, 'ts': 5, 'tts': 4, 'cat': 'bar',
       'tid': 1, 'ph': 'E'},
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 7, 'tts': 5, 'cat': 'foo',
       'tid': 1, 'ph': 'E'},
  ]
  model = timeline_model.TimelineModel(
      trace_data_module.TraceData(events), shift_world_to_zero=False)
  thread = model.GetAllProcesses()[0].threads[1]
  # (name, category, start, duration, thread_start, thread_duration) in ms.
  expectations = [
      ('a', 'foo', 0.001, 0.006, 0.002, 0.003),
      ('b', 'bar', 0.003, 0.002, 0.003, 0.001),
  ]
  for name, category, start, duration, tstart, tduration in expectations:
    found = FindEventNamed(thread.all_slices, name)
    self.assertEqual(name, found.name)
    self.assertEqual(category, found.category)
    self.assertAlmostEqual(start, found.start)
    self.assertAlmostEqual(duration, found.duration)
    self.assertAlmostEqual(tstart, found.thread_start)
    self.assertAlmostEqual(tduration, found.thread_duration)
def testCleanUpCleansUpTraceValues(self):
  """CleanUp calls through to every added trace value's cleanup."""
  results = page_test_results.PageTestResults()
  values = [trace.TraceValue(None, trace_data.TraceData({'test': 1})),
            trace.TraceValue(None, trace_data.TraceData({'test': 2}))]
  for page, value in zip(self.pages[:2], values):
    results.WillRunPage(page)
    results.AddValue(value)
    results.DidRunPage(page)
  results.CleanUp()
  for value in values:
    self.assertTrue(value.cleaned_up)
def testRepr(self):
  """str() of a TraceValue names the page URL and the 'trace' metric only."""
  value = trace.TraceValue(
      self.pages[0], trace_data.TraceData({'test': 1}),
      important=True, description='desc')
  self.assertEquals('TraceValue(http://www.bar.com/, trace)', str(value))
def testFindTraceParts(self):
  """Each part of a multi-part trace appears exactly once in the HTML file."""
  raw_data = {
      'powerTraceAsString': 'Battor Data',
      'traceEvents': 'Chrome Data',
      'tabIds': 'Tab Data',
  }
  value = trace.TraceValue(None, trace_data.TraceData(raw_data))
  tempdir = tempfile.mkdtemp()
  temp_path = os.path.join(tempdir, 'test.json')
  # Map each expected payload to whether we have seen it yet.
  seen = dict((payload, False) for payload in raw_data.values())
  try:
    trace_files = html2trace.CopyTraceDataFromHTMLFilePath(
        value.filename, temp_path)
    for trace_file_path in trace_files:
      with open(trace_file_path, 'r') as trace_file:
        contents = trace_file.read()
      if contents in seen:
        self.assertFalse(seen[contents])  # No payload may appear twice.
        seen[contents] = True
    # All three payloads must have been extracted.
    self.assertTrue(all(seen.values()))
  finally:
    shutil.rmtree(tempdir)
    os.remove(value.filename)
def testNestedAutoclosing(self):
  """Unfinished nested B slices are closed at the model's overall end time."""
  events = [
      # Two nested tasks on thread 1 that never finish.
      {'name': 'a1', 'args': {}, 'pid': 1, 'ts': 1, 'cat': 'foo', 'tid': 1,
       'ph': 'B'},
      {'name': 'a2', 'args': {}, 'pid': 1, 'ts': 1.5, 'cat': 'foo',
       'tid': 1, 'ph': 'B'},
      # A finished slice on thread 2 supplies the end time that autoclosing
      # uses.
      {'name': 'b', 'args': {}, 'pid': 1, 'ts': 1, 'cat': 'foo', 'tid': 2,
       'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 1, 'ts': 2, 'cat': 'foo', 'tid': 2,
       'ph': 'E'},
  ]
  model = timeline_model.TimelineModel(
      trace_data_module.TraceData(events), shift_world_to_zero=False)
  process = model.GetAllProcesses()[0]
  thread1 = process.threads[1]
  thread2 = process.threads[2]
  FindEventNamed(thread2.all_slices, 'b')
  for name in ('a1', 'a2'):
    unfinished = FindEventNamed(thread1.all_slices, name)
    self.assertAlmostEqual(0.002, unfinished.end)
def testMultiCounter(self):
  """Counter (C) events with two series merge into one multi-series counter.

  Fix: the original asserted `ctr.name` twice in a row; the duplicate
  assertion is removed.
  """
  events = [
      {'name': 'ctr', 'args': {'value1': 0, 'value2': 7}, 'pid': 1, 'ts': 0,
       'cat': 'foo', 'tid': 1, 'ph': 'C'},
      {'name': 'ctr', 'args': {'value1': 10, 'value2': 4}, 'pid': 1,
       'ts': 10, 'cat': 'foo', 'tid': 1, 'ph': 'C'},
      {'name': 'ctr', 'args': {'value1': 0, 'value2': 1}, 'pid': 1,
       'ts': 20, 'cat': 'foo', 'tid': 1, 'ph': 'C'}
  ]
  trace_data = trace_data_module.TraceData(events)
  m = timeline_model.TimelineModel(trace_data)
  p = m.GetAllProcesses()[0]
  ctr = p.counters['foo.ctr']
  self.assertEqual('ctr', ctr.name)
  self.assertEqual('foo', ctr.category)
  self.assertEqual(3, ctr.num_samples)
  self.assertEqual(2, ctr.num_series)
  self.assertEqual(sorted(['value1', 'value2']), sorted(ctr.series_names))
  # Timestamps are converted from microseconds to milliseconds.
  self.assertEqual(sorted([0, 0.01, 0.02]), sorted(ctr.timestamps))
  self.assertEqual(sorted([0, 7, 10, 4, 0, 1]), sorted(ctr.samples))
  # We can't check ctr.totals here because it can change depending on
  # the order in which the series names are added.
  self.assertEqual(14, ctr.max_total)
def testBasicSingleThreadNonnestedParsing(self):
  """Three sequential B/E pairs on one thread become three sibling slices.

  Slices 'a' and 'b' carry thread timestamps (tts); 'c' has none, so its
  thread_* fields must come back as None.
  """
  events = [
      {'name': 'a', 'args': {}, 'pid': 52, 'ts': 520, 'tts': 280,
       'cat': 'foo', 'tid': 53, 'ph': 'B'},
      {'name': 'a', 'args': {}, 'pid': 52, 'ts': 560, 'tts': 310,
       'cat': 'foo', 'tid': 53, 'ph': 'E'},
      {'name': 'b', 'args': {}, 'pid': 52, 'ts': 629, 'tts': 356,
       'cat': 'bar', 'tid': 53, 'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 52, 'ts': 631, 'tts': 357,
       'cat': 'bar', 'tid': 53, 'ph': 'E'},
      {'name': 'c', 'args': {}, 'pid': 52, 'ts': 633, 'cat': 'baz',
       'tid': 53, 'ph': 'B'},
      {'name': 'c', 'args': {}, 'pid': 52, 'ts': 637, 'cat': 'baz',
       'tid': 53, 'ph': 'E'}
  ]
  trace_data = trace_data_module.TraceData(events)
  m = timeline_model.TimelineModel(trace_data)
  processes = m.GetAllProcesses()
  self.assertEqual(1, len(processes))
  p = processes[0]
  self.assertEqual(52, p.pid)
  self.assertEqual(1, len(p.threads))
  t = p.threads[53]
  self.assertEqual(3, len(t.all_slices))
  self.assertEqual(53, t.tid)
  # Slice 'a': wall-clock times are shifted so the first event starts at 0
  # and converted from microseconds to milliseconds; thread times (tts) are
  # converted but not shifted.
  slice_event = t.all_slices[0]
  self.assertEqual('a', slice_event.name)
  self.assertEqual('foo', slice_event.category)
  self.assertAlmostEqual(0, slice_event.start)
  self.assertAlmostEqual((560 - 520) / 1000.0, slice_event.duration)
  self.assertAlmostEqual((560 - 520) / 1000.0, slice_event.end)
  self.assertAlmostEqual(280 / 1000.0, slice_event.thread_start)
  self.assertAlmostEqual((310 - 280) / 1000.0, slice_event.thread_duration)
  self.assertAlmostEqual(310 / 1000.0, slice_event.thread_end)
  self.assertEqual(0, len(slice_event.sub_slices))
  # Slice 'b'.
  slice_event = t.all_slices[1]
  self.assertEqual('b', slice_event.name)
  self.assertEqual('bar', slice_event.category)
  self.assertAlmostEqual((629 - 520) / 1000.0, slice_event.start)
  self.assertAlmostEqual((631 - 629) / 1000.0, slice_event.duration)
  self.assertAlmostEqual((631 - 520) / 1000.0, slice_event.end)
  self.assertAlmostEqual(356 / 1000.0, slice_event.thread_start)
  self.assertAlmostEqual((357 - 356) / 1000.0, slice_event.thread_duration)
  self.assertAlmostEqual(357 / 1000.0, slice_event.thread_end)
  self.assertEqual(0, len(slice_event.sub_slices))
  # Slice 'c': no tts on its events, so all thread_* fields are None.
  slice_event = t.all_slices[2]
  self.assertEqual('c', slice_event.name)
  self.assertEqual('baz', slice_event.category)
  self.assertAlmostEqual((633 - 520) / 1000.0, slice_event.start)
  self.assertAlmostEqual((637 - 633) / 1000.0, slice_event.duration)
  self.assertEqual(None, slice_event.thread_start)
  self.assertEqual(None, slice_event.thread_duration)
  self.assertEqual(None, slice_event.thread_end)
  self.assertEqual(0, len(slice_event.sub_slices))
def testTraceSerializationContainStoryName(self):
  """Serialized trace filenames start with the sanitized story URL."""
  tempdir = tempfile.mkdtemp()
  try:
    value = trace.TraceValue(
        self.pages[0], trace_data.TraceData({'test': 1}))
    file_handle = value.Serialize(tempdir)
    basename = os.path.basename(file_handle.GetAbsPath())
    self.assertTrue(basename.startswith('http___www_bar_com'))
  finally:
    shutil.rmtree(tempdir)
def testSerialize(self):
  """Serialize writes valid JSON that contains the trace events."""
  data = trace_data.TraceData({'traceEvents': [1, 2, 3]})
  buf = cStringIO.StringIO()
  data.Serialize(buf)
  serialized = buf.getvalue()
  self.assertIn('traceEvents', serialized)
  self.assertIn('[1, 2, 3]', serialized)
  json.loads(serialized)  # Must parse cleanly; raises on invalid JSON.
def testNoLeakedTempFileWhenTraceSerialize(self):
  # Serializing a trace into an explicit directory must not leave stray
  # temp files behind in the shared test temp dir.
  tempdir = tempfile.mkdtemp()
  v = trace.TraceValue(None, trace_data.TraceData({'test': 1}))
  fh = v.Serialize(tempdir)
  try:
    # NOTE(review): GetAbsPath() names a file, not a directory; rmtree only
    # "succeeds" because ignore_errors=True swallows the failure. Confirm
    # whether os.remove() was intended here.
    shutil.rmtree(fh.GetAbsPath(), ignore_errors=True)
    # NOTE(review): a non-empty string is always truthy, so this assertion
    # can never fail — was a filesystem-existence check intended?
    self.assertTrue(tempdir)
  finally:
    shutil.rmtree(tempdir)
  # The real check: nothing leaked into the test's temp directory.
  self.assertTrue(_IsEmptyDir(self.temp_test_dir))
def testMultiplePidParsing(self):
  """B/E pairs in two pids produce two processes with one thread each, and
  GetAllThreads enumerates both threads."""
  events = [
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 2, 'tts': 1, 'cat': 'foo',
       'tid': 1, 'ph': 'B'},
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 4, 'tts': 2, 'cat': 'foo',
       'tid': 1, 'ph': 'E'},
      {'name': 'b', 'args': {}, 'pid': 2, 'ts': 6, 'tts': 3, 'cat': 'bar',
       'tid': 2, 'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 2, 'ts': 8, 'tts': 4, 'cat': 'bar',
       'tid': 2, 'ph': 'E'}
  ]
  trace_data = trace_data_module.TraceData(events)
  m = timeline_model.TimelineModel(trace_data)
  processes = m.GetAllProcesses()
  self.assertEqual(2, len(processes))
  p = processes[0]
  self.assertEqual(1, p.pid)
  self.assertEqual(1, len(p.threads))
  # Check process 1 thread 1. Wall-clock times are shifted to start at
  # zero and converted to milliseconds; thread times (tts) are converted
  # but not shifted.
  t = p.threads[1]
  self.assertEqual(1, len(t.all_slices))
  self.assertEqual(1, t.tid)
  slice_event = t.all_slices[0]
  self.assertEqual('a', slice_event.name)
  self.assertEqual('foo', slice_event.category)
  self.assertAlmostEqual(0, slice_event.start)
  self.assertAlmostEqual((4 - 2) / 1000.0, slice_event.duration)
  self.assertAlmostEqual(1 / 1000.0, slice_event.thread_start)
  self.assertAlmostEqual((2 - 1) / 1000.0, slice_event.thread_duration)
  # Check process 2 thread 2.
  # TODO: will this be in deterministic order?
  p = processes[1]
  self.assertEqual(2, p.pid)
  self.assertEqual(1, len(p.threads))
  t = p.threads[2]
  self.assertEqual(1, len(t.all_slices))
  self.assertEqual(2, t.tid)
  slice_event = t.all_slices[0]
  self.assertEqual('b', slice_event.name)
  self.assertEqual('bar', slice_event.category)
  self.assertAlmostEqual((6 - 2) / 1000.0, slice_event.start)
  self.assertAlmostEqual((8 - 6) / 1000.0, slice_event.duration)
  self.assertAlmostEqual(3 / 1000.0, slice_event.thread_start)
  self.assertAlmostEqual((4 - 3) / 1000.0, slice_event.thread_duration)
  # Check getAllThreads.
  self.assertEqual([processes[0].threads[1], processes[1].threads[2]],
                   m.GetAllThreads())
def testAsChartDictWithTraceValuesThatHasTirLabel(self):
  """A trace value with a tir_label still lands under the 'trace' chart."""
  value = trace.TraceValue(
      self._story_set[0], trace_data.TraceData({'test': 1}))
  value.tir_label = 'background'
  chart_dict = chart_json_output_formatter.ResultsAsChartDict(
      self._benchmark_metadata,
      page_specific_values=[value],
      summary_values=[value])
  self.assertTrue('trace' in chart_dict['charts'])
  self.assertTrue('http://www.foo.com/' in chart_dict['charts']['trace'],
                  msg=chart_dict['charts']['trace'])
def testSliceHierarchy(self):
  """The slice hierarchy should look something like this:

        [           a           ]
           [    b    ]    [ d ]
           [ c ]  [ e ]
  """
  events = [
      {'name': 'a', 'args': {}, 'pid': 52, 'ts': 100, 'cat': 'foo',
       'tid': 53, 'ph': 'B'},
      {'name': 'a', 'args': {}, 'pid': 52, 'ts': 200, 'cat': 'foo',
       'tid': 53, 'ph': 'E'},
      {'name': 'b', 'args': {}, 'pid': 52, 'ts': 125, 'cat': 'foo',
       'tid': 53, 'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 52, 'ts': 165, 'cat': 'foo',
       'tid': 53, 'ph': 'E'},
      {'name': 'c', 'args': {}, 'pid': 52, 'ts': 125, 'cat': 'foo',
       'tid': 53, 'ph': 'B'},
      {'name': 'c', 'args': {}, 'pid': 52, 'ts': 135, 'cat': 'foo',
       'tid': 53, 'ph': 'E'},
      {'name': 'd', 'args': {}, 'pid': 52, 'ts': 175, 'cat': 'foo',
       'tid': 53, 'ph': 'B'},
      {'name': 'd', 'args': {}, 'pid': 52, 'ts': 190, 'cat': 'foo',
       'tid': 53, 'ph': 'E'},
      {'name': 'e', 'args': {}, 'pid': 52, 'ts': 155, 'cat': 'foo',
       'tid': 53, 'ph': 'B'},
      {'name': 'e', 'args': {}, 'pid': 52, 'ts': 165, 'cat': 'foo',
       'tid': 53, 'ph': 'E'}
  ]
  trace_data = trace_data_module.TraceData(events)
  m = timeline_model.TimelineModel(trace_data, shift_world_to_zero=False)
  processes = m.GetAllProcesses()
  self.assertEqual(1, len(processes))
  p = processes[0]
  self.assertEqual(1, len(p.threads))
  t = p.threads[53]
  # 'a' is the single top-level slice; b, c, d, e nest beneath it.
  slice_a = t.all_slices[0]
  self.assertEqual(4, len(slice_a.GetAllSubSlices()))
  self.assertEqual('a', slice_a.name)
  self.assertEqual(100 / 1000.0, slice_a.start)
  self.assertEqual(200 / 1000.0, slice_a.end)
  self.assertEqual(2, len(slice_a.sub_slices))
  # 'b' directly contains 'c' and 'e'.
  slice_b = slice_a.sub_slices[0]
  self.assertEqual('b', slice_b.name)
  self.assertEqual(2, len(slice_b.sub_slices))
  self.assertEqual('c', slice_b.sub_slices[0].name)
  self.assertEqual('e', slice_b.sub_slices[1].name)
  # 'd' is a leaf directly under 'a'.
  slice_d = slice_a.sub_slices[1]
  self.assertEqual('d', slice_d.name)
  self.assertEqual(0, len(slice_d.sub_slices))
class TraceProcessor():
  """Imports uploaded trace files and records the outcome in the datastore.

  Fixes: `project == None` -> `is None`; Python-2-only `except Exception, e`
  (with the exception never used) -> `except Exception:`; loose
  `re.search('json$', ...)` extension checks (which also matched names like
  'foojson') -> explicit str.endswith checks.
  """

  __temp_actions = {}
  __js_blame = {}

  def log(self, project, trace_info, extended_info, status,
          records_imported=0):
    """Stores a Log entity recording the processing status for a trace.

    No-op when project is None (there is nothing to parent the entry to).
    """
    if project is None:
      return
    log = Log(parent=project.key,
              filename=trace_info.filename,
              date=datetime.today(),
              status=status,
              records_imported=records_imported)
    log.put()

  def process(self, project, trace_string, trace_info, extended_info):
    """Parses a .json or .json.gz trace upload and builds a timeline model.

    Any failure is logged via self.log() and aborts processing; the broad
    `except Exception` is deliberate best-effort handling so one bad upload
    cannot crash the processor.
    """
    try:
      if trace_info.filename.endswith('.json'):
        # Re-encode to ISO-8859-1, dropping characters that do not fit, so
        # the JSON parser sees a clean byte string.
        trace_string = trace_string.decode('UTF-8', 'ignore')
        trace_string = trace_string.encode('ISO-8859-1', 'ignore')
        trace_json = json.loads(trace_string)
      elif trace_info.filename.endswith('.json.gz'):
        gzip_trace_string = gzip.GzipFile(
            fileobj=StringIO(trace_string)).read()
        trace_json = json.loads(gzip_trace_string)
      else:
        self.log(project, trace_info, extended_info,
                 'Error reading file: neither .json nor .json.gz')
        return
    except Exception:
      self.log(project, trace_info, extended_info, 'JSON parse error')
      return
    try:
      parsed_data = trace_data_module.TraceData(trace_json)
      model = model_module.TimelineModel(parsed_data)
      processes = model.GetAllProcesses()
    except Exception:
      self.log(project, trace_info, extended_info,
               'Error processing the file.')
      return
def testAsyncStepsMissingFinish(self):
  """Importing a T step whose S start arrives later must not crash."""
  # Timestamps are intentionally out of order.
  events = [
      {'name': 'a', 'args': {'step': 's1', 'y': 2}, 'pid': 52, 'ts': 548,
       'cat': 'foo', 'tid': 53, 'ph': 'T', 'id': 72},
      {'name': 'a', 'args': {'z': 3}, 'pid': 52, 'ts': 560, 'cat': 'foo',
       'tid': 53, 'ph': 'S', 'id': 72},
  ]
  model = timeline_model.TimelineModel(trace_data_module.TraceData(events))
  thread = model.GetAllProcesses()[0].threads[53]
  self.assertIsNotNone(thread)
def testAsDictWhenTraceSerializedAndUploaded(self):
  """AsDict reports both the file id and the cloud URL after upload."""
  tempdir = tempfile.mkdtemp()
  try:
    value = trace.TraceValue(None, trace_data.TraceData({'test': 1}))
    file_handle = value.Serialize(tempdir)
    trace.cloud_storage.SetCalculatedHashesForTesting(
        {file_handle.GetAbsPath(): 123})
    cloud_url = value.UploadToCloud(trace.cloud_storage.PUBLIC_BUCKET)
    value_dict = value.AsDict()
    self.assertEqual(value_dict['file_id'], file_handle.id)
    self.assertEqual(value_dict['cloud_url'], cloud_url)
  finally:
    shutil.rmtree(tempdir)
def testAsDictWhenTraceIsNotSerializedAndUploaded(self):
  """AsDict carries the cloud URL even without a prior Serialize call."""
  test_temp_file = tempfile.NamedTemporaryFile(delete=False)
  try:
    value = trace.TraceValue(None, trace_data.TraceData({'test': 1}))
    trace.cloud_storage.SetCalculatedHashesForTesting(TestDefaultDict(123))
    cloud_url = value.UploadToCloud(trace.cloud_storage.PUBLIC_BUCKET)
    self.assertEqual(value.AsDict()['cloud_url'], cloud_url)
  finally:
    if os.path.exists(test_temp_file.name):
      test_temp_file.close()
      os.remove(test_temp_file.name)
def testImportErrornousFlowEvent(self):
  """Malformed flow event sequences are dropped rather than imported."""
  events = [
      {'name': 'a', 'cat': 'foo', 'id': 70, 'pid': 52, 'tid': 53,
       'ts': 548, 'ph': 's', 'args': {}},
      {'name': 'a2', 'cat': 'foo', 'id': 70, 'pid': 52, 'tid': 53,
       'ts': 550, 'ph': 's', 'args': {}},
      {'name': 'b', 'cat': 'foo', 'id': 73, 'pid': 52, 'tid': 53,
       'ts': 570, 'ph': 'f', 'args': {}},
      {'name': 'a', 'cat': 'foo', 'id': 72, 'pid': 52, 'tid': 53,
       'ts': 560, 'ph': 't', 'args': {}},
  ]
  model = timeline_model.TimelineModel(trace_data_module.TraceData(events))
  self.assertEqual(0, len(model.flow_events))
def testImportSamplesMissingArgs(self):
  """Sample (P) events without 'args' still import, with no errors."""
  events = [{'name': name, 'pid': 52, 'ts': ts, 'cat': 'test', 'tid': 53,
             'ph': 'P'}
            for name, ts in (('a', 548), ('b', 548), ('c', 549))]
  model = timeline_model.TimelineModel(trace_data_module.TraceData(events))
  thread = model.GetAllProcesses()[0].threads[53]
  self.assertEqual(3, len(thread.samples))
  self.assertEqual(0, len(model.import_errors))
def testArgumentDupeCreatesNonFailingImportError(self):
  """A duplicated arg key across B/E keeps the E value and logs one error."""
  events = [
      {'name': 'a', 'args': {'x': 1}, 'pid': 1, 'ts': 520, 'cat': 'foo',
       'tid': 1, 'ph': 'B'},
      {'name': 'a', 'args': {'x': 2}, 'pid': 1, 'ts': 560, 'cat': 'foo',
       'tid': 1, 'ph': 'E'},
  ]
  model = timeline_model.TimelineModel(trace_data_module.TraceData(events))
  thread = model.GetAllProcesses()[0].threads[1]
  duped_slice = FindEventNamed(thread.all_slices, 'a')
  self.assertEqual(2, duped_slice.args['x'])
  self.assertEqual(1, len(model.import_errors))
def testInstanceCounter(self):
  """Counter (C) events with an 'id' create one counter instance per id,
  keyed '<cat>.<name>[<id>]'; samples are grouped by instance."""
  events = [
      {'name': 'ctr', 'args': {'value': 0}, 'pid': 1, 'ts': 0,
       'cat': 'foo', 'tid': 1, 'ph': 'C', 'id': 0},
      {'name': 'ctr', 'args': {'value': 10}, 'pid': 1, 'ts': 10,
       'cat': 'foo', 'tid': 1, 'ph': 'C', 'id': 0},
      {'name': 'ctr', 'args': {'value': 10}, 'pid': 1, 'ts': 10,
       'cat': 'foo', 'tid': 1, 'ph': 'C', 'id': 1},
      {'name': 'ctr', 'args': {'value': 20}, 'pid': 1, 'ts': 15,
       'cat': 'foo', 'tid': 1, 'ph': 'C', 'id': 1},
      {'name': 'ctr', 'args': {'value': 30}, 'pid': 1, 'ts': 18,
       'cat': 'foo', 'tid': 1, 'ph': 'C', 'id': 1},
      {'name': 'ctr', 'args': {'value': 40}, 'pid': 1, 'ts': 20,
       'cat': 'bar', 'tid': 1, 'ph': 'C', 'id': 2}
  ]
  trace_data = trace_data_module.TraceData(events)
  m = timeline_model.TimelineModel(trace_data)
  p = m.GetAllProcesses()[0]
  # Instance 0: two samples. Timestamps are converted from us to ms.
  ctr = p.counters['foo.ctr[0]']
  self.assertEqual('ctr[0]', ctr.name)
  self.assertEqual('foo', ctr.category)
  self.assertEqual(2, ctr.num_samples)
  self.assertEqual(1, ctr.num_series)
  self.assertEqual([0, 0.01], ctr.timestamps)
  self.assertEqual([0, 10], ctr.samples)
  # Instance 1: three samples.
  ctr = m.GetAllProcesses()[0].counters['foo.ctr[1]']
  self.assertEqual('ctr[1]', ctr.name)
  self.assertEqual('foo', ctr.category)
  self.assertEqual(3, ctr.num_samples)
  self.assertEqual(1, ctr.num_series)
  self.assertEqual([0.01, 0.015, 0.018], ctr.timestamps)
  self.assertEqual([10, 20, 30], ctr.samples)
  # Instance 2: different category ('bar'), one sample.
  ctr = m.GetAllProcesses()[0].counters['bar.ctr[2]']
  self.assertEqual('ctr[2]', ctr.name)
  self.assertEqual('bar', ctr.category)
  self.assertEqual(1, ctr.num_samples)
  self.assertEqual(1, ctr.num_series)
  self.assertEqual([0.02], ctr.timestamps)
  self.assertEqual([40], ctr.samples)
def testAutoclosingWithEventsOutsideBounds(self):
  """Unfinished slices spanning past other threads' bounds are auto-closed
  at the model's overall maximum time."""
  events = [
      # Slice that begins before min and ends after max of the other threads.
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 0, 'tts': 0, 'cat': 'foo',
       'tid': 1, 'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 1, 'ts': 6, 'tts': 3, 'cat': 'foo',
       'tid': 1, 'ph': 'B'},
      # Slice that does finish to give an 'end time' to establish a basis
      {'name': 'c', 'args': {}, 'pid': 1, 'ts': 2, 'tts': 1, 'cat': 'bar',
       'tid': 2, 'ph': 'B'},
      {'name': 'c', 'args': {}, 'pid': 1, 'ts': 4, 'tts': 2, 'cat': 'bar',
       'tid': 2, 'ph': 'E'}
  ]
  trace_data = trace_data_module.TraceData(events)
  m = timeline_model.TimelineModel(trace_data, shift_world_to_zero=False)
  p = m.GetAllProcesses()[0]
  # Thread 1's thread-time bounds come from its own tts values (0..3 us).
  # Reaching into the private member is deliberate here.
  t1 = p.threads[1]
  t1_thread_time_bounds = m._thread_time_bounds[t1]  # pylint: disable=W0212
  self.assertAlmostEqual(0.000, t1_thread_time_bounds.min)
  self.assertAlmostEqual(0.003, t1_thread_time_bounds.max)
  self.assertEqual(2, len(t1.all_slices))
  # 'a' never finished, so it is closed at the model's max time (0.006 ms).
  slice_event = FindEventNamed(t1.all_slices, 'a')
  self.assertEqual('a', slice_event.name)
  self.assertEqual('foo', slice_event.category)
  self.assertAlmostEqual(0, slice_event.start)
  self.assertAlmostEqual(0.006, slice_event.duration)
  self.assertAlmostEqual(0, slice_event.thread_start)
  self.assertAlmostEqual(0.003, slice_event.thread_duration)
  t2 = p.threads[2]
  t2_thread_time_bounds = m._thread_time_bounds[t2]  # pylint: disable=W0212
  self.assertAlmostEqual(0.001, t2_thread_time_bounds.min)
  self.assertAlmostEqual(0.002, t2_thread_time_bounds.max)
  # 'c' finished normally and keeps its own B/E timings.
  slice2 = FindEventNamed(t2.all_slices, 'c')
  self.assertEqual('c', slice2.name)
  self.assertEqual('bar', slice2.category)
  self.assertAlmostEqual(0.002, slice2.start)
  self.assertAlmostEqual(0.002, slice2.duration)
  self.assertAlmostEqual(0.001, slice2.thread_start)
  self.assertAlmostEqual(0.001, slice2.thread_duration)
  # Overall model bounds span from 'a' begin to its auto-closed end.
  self.assertAlmostEqual(0.000, m.bounds.min)
  self.assertAlmostEqual(0.006, m.bounds.max)
def testAutoclosingLoneBegin(self):
  """A lone B event is auto-closed with zero duration and flagged."""
  events = [
      # Slice that never finishes.
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 1, 'tts': 1, 'cat': 'foo',
       'tid': 1, 'ph': 'B'},
  ]
  model = timeline_model.TimelineModel(trace_data_module.TraceData(events))
  lone_slice = model.GetAllProcesses()[0].threads[1].all_slices[0]
  self.assertEqual('a', lone_slice.name)
  self.assertEqual('foo', lone_slice.category)
  self.assertTrue(lone_slice.did_not_finish)
  self.assertAlmostEqual(0, lone_slice.start)
  self.assertAlmostEqual(0, lone_slice.duration)
  self.assertAlmostEqual(1 / 1000.0, lone_slice.thread_start)
  self.assertAlmostEqual(0, lone_slice.thread_duration)
def testSerializeZip(self):
  """gzip_result=True produces a zip archive with one entry per part."""
  data = trace_data.TraceData({
      'traceEvents': [1, 2, 3],
      'battor': ['battor_data'],
  })
  handle = tempfile.NamedTemporaryFile(delete=False)
  temp_name = handle.name
  handle.close()
  try:
    data.Serialize(temp_name, gzip_result=True)
    self.assertTrue(zipfile.is_zipfile(temp_name))
    with zipfile.ZipFile(temp_name, 'r') as archive:
      entries = archive.namelist()
      self.assertIn('battor', entries)
      self.assertIn('traceEvents', entries)
  finally:
    os.remove(temp_name)
def testImportCompleteEvent(self):
  """Complete (X) events import with dur/tdur; an X event missing 'dur'
  is auto-closed and flagged did_not_finish."""
  events = [
      {'name': 'a', 'args': {}, 'pid': 52, 'ts': 629, 'tts': 538, 'dur': 1,
       'tdur': 1, 'cat': 'baz', 'tid': 53, 'ph': 'X'},
      {'name': 'b', 'args': {}, 'pid': 52, 'ts': 730, 'tts': 620, 'dur': 20,
       'tdur': 14, 'cat': 'foo', 'tid': 53, 'ph': 'X'},
      {'name': 'c', 'args': {}, 'pid': 52, 'ts': 740, 'tts': 625,
       'cat': 'baz', 'tid': 53, 'ph': 'X'},
  ]
  trace_data = trace_data_module.TraceData(events)
  m = timeline_model.TimelineModel(trace_data)
  p = m.GetAllProcesses()[0]
  t = p.threads[53]
  self.assertEqual(3, len(t.all_slices))
  # 'a': explicit dur/tdur; times shifted to start at zero and converted
  # from microseconds to milliseconds (thread times converted only).
  slice_event = t.all_slices[0]
  self.assertEqual('a', slice_event.name)
  self.assertAlmostEqual(0.0, slice_event.start)
  self.assertAlmostEqual(1 / 1000.0, slice_event.duration)
  self.assertAlmostEqual(538 / 1000.0, slice_event.thread_start)
  self.assertAlmostEqual(1 / 1000.0, slice_event.thread_duration)
  self.assertFalse(slice_event.did_not_finish)
  self.assertEqual(0, len(slice_event.sub_slices))
  # 'b': contains 'c' as a sub-slice (740..750 falls inside 730..750).
  slice_event = t.all_slices[1]
  self.assertEqual('b', slice_event.name)
  self.assertAlmostEqual((730 - 629) / 1000.0, slice_event.start)
  self.assertAlmostEqual(20 / 1000.0, slice_event.duration)
  self.assertAlmostEqual(620 / 1000.0, slice_event.thread_start)
  self.assertAlmostEqual(14 / 1000.0, slice_event.thread_duration)
  self.assertFalse(slice_event.did_not_finish)
  self.assertEqual(1, len(slice_event.sub_slices))
  self.assertEqual(t.all_slices[2], slice_event.sub_slices[0])
  # 'c': no dur — its end (duration 10, thread_duration 9) is consistent
  # with being closed at enclosing slice 'b''s end, and it is flagged as
  # unfinished.
  slice_event = t.all_slices[2]
  self.assertEqual('c', slice_event.name)
  self.assertAlmostEqual((740 - 629) / 1000.0, slice_event.start)
  self.assertAlmostEqual(10 / 1000.0, slice_event.duration)
  self.assertAlmostEqual(625 / 1000.0, slice_event.thread_start)
  self.assertAlmostEqual(9 / 1000.0, slice_event.thread_duration)
  self.assertTrue(slice_event.did_not_finish)
  self.assertEqual(0, len(slice_event.sub_slices))
def testThreadNames(self):
  """thread_name metadata (M) events set thread names, before or after the
  slices they describe."""
  events = [
      {'name': 'thread_name', 'args': {'name': 'Thread 1'}, 'pid': 1,
       'ts': 0, 'tid': 1, 'ph': 'M'},
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 1, 'cat': 'foo', 'tid': 1,
       'ph': 'B'},
      {'name': 'a', 'args': {}, 'pid': 1, 'ts': 2, 'cat': 'foo', 'tid': 1,
       'ph': 'E'},
      {'name': 'b', 'args': {}, 'pid': 2, 'ts': 3, 'cat': 'foo', 'tid': 2,
       'ph': 'B'},
      {'name': 'b', 'args': {}, 'pid': 2, 'ts': 4, 'cat': 'foo', 'tid': 2,
       'ph': 'E'},
      {'name': 'thread_name', 'args': {'name': 'Thread 2'}, 'pid': 2,
       'ts': 0, 'tid': 2, 'ph': 'M'},
  ]
  model = timeline_model.TimelineModel(trace_data_module.TraceData(events))
  processes = model.GetAllProcesses()
  self.assertEqual('Thread 1', processes[0].threads[1].name)
  self.assertEqual('Thread 2', processes[1].threads[2].name)