def finish(self, intermediate=False):
    self.remove_last(2)  # remove trailing ,\n
    if not intermediate:
        if self.samples:
            self.file.write('], "stackFrames": {\n')
            for (id, frame) in iteritems(self.frames):
                self.file.write('"%s": %s,\n' % (id, json.dumps(frame)))
            if self.frames:
                # deleting last two symbols from the file as we can't leave comma at the end due to json restrictions
                self.remove_last(2)
            self.file.write('\n}, "samples": [\n')
            for sample in self.samples:
                self.file.write(json.dumps(sample) + ',\n')
            if self.samples:
                # deleting last two symbols from the file as we can't leave comma at the end due to json restrictions
                self.remove_last(2)
            self.samples = []
            self.frames = {}
        if self.metadata:
            self.file.write('\n],\n')
            for (key, value) in iteritems(self.metadata):
                self.file.write('"%s": %s,\n' % (key, json.dumps(value[0] if len(value) == 1 else value)))
            self.remove_last(2)  # remove trailing ,\n
            self.file.write('\n}')
            self.file.close()
            return
    self.file.write('\n]}')
    self.file.close()
def finalize(self):
    for (tid, (name, pid)) in iteritems(self.thread_names):
        for callback in self.callbacks.callbacks:
            thread_name = name.replace('\\"', '').replace('"', '')
            callback("metadata_add", {
                'domain': 'IntelSEAPI',
                'str': '__thread__',
                'pid': pid,
                'tid': tid,
                'data': '%s (%d)' % (thread_name, tid)
            })
    for (pid, name) in iteritems(self.pid_names):
        self.callbacks.set_process_name(pid, name)
        self.callbacks.set_process_name(-pid, 'Sampling: ' + name)
    for (context, name) in iteritems(self.contexts):
        self.callbacks.set_thread_name(-1, int(context, 16), name)
    for (pid, proc) in iteritems(self.callbacks.processes):
        name = None
        for (tid, thread) in iteritems(proc.threads):
            if tid in self.pid_names:
                name = self.pid_names[tid]
                break
        if name:
            self.callbacks.set_process_name(pid, name)
            self.callbacks.set_process_name(-pid, 'Sampling: ' + name)
def finish(self):
    self.handle_event()
    self.current = {}
    for callback in self.callbacks.callbacks:
        for (pid, proc_data) in iteritems(self.proc_map):
            proc_name = proc_data['name'].replace('\\"', '').replace('"', '')
            callback("metadata_add", {
                'domain': 'IntelSEAPI',
                'str': '__process__',
                'pid': pid,
                'tid': -1,
                'data': proc_name
            })
            for (tid, thread_data) in iteritems(proc_data['threads']):
                thread_name = thread_data['name'].replace('\\"', '').replace('"', '')
                callback("metadata_add", {
                    'domain': 'IntelSEAPI',
                    'str': '__thread__',
                    'pid': pid,
                    'tid': tid,
                    'data': '%s (tid %d)' % (thread_name, tid)
                })
def finish(self):
    GraphCombiner.finish(self)
    for (domain, data) in iteritems(self.per_domain):
        for (task_name, task_data) in iteritems(data['tasks']):
            time = task_data['time']
            self.file.write('%s,%s,%s,%s,%s,%d\n' % (
                cgi.escape(domain), cgi.escape(task_name),
                min(time), max(time), sum(time) / len(time), len(time)))
    self.file.close()
def complete_stage(self, ring_type, channel, latest_stamp, data):
    stamps = self.event_tracker.setdefault((ring_type, channel), {})
    latest_stamp = int(latest_stamp, 16)
    to_del = set(stamp for stamp in stamps if stamp <= latest_stamp)
    if len(to_del) < 100:  # in old drivers CompleteExecute may be called so rarely that it is not reliable at all
        for (stamp, stages) in iteritems(stamps):
            if stamp <= latest_stamp:
                verbose = ['%s(%s) %d:' % (ring_type, channel, stamp)]
                ctx_type = None
                old_ctx_id = None
                changed_context = False
                for stage in stages:
                    verbose.append(stage['cmd'])
                    if 'ctx_id' in stage:
                        changed_context = old_ctx_id and old_ctx_id != stage['ctx_id']
                        verbose.append('(%s)' % (('!' if changed_context else '') + stage['ctx_id']))
                        old_ctx_id = stage['ctx_id']
                    if 'ctx_type' in stage:
                        assert ctx_type == stage['ctx_type'] or not ctx_type or changed_context
                        ctx_type = stage['ctx_type']
                if not ctx_type and old_ctx_id:
                    ctx_type = self.contexts[old_ctx_id] if old_ctx_id in self.contexts else None
                if ctx_type:
                    verbose.append('%s - %s' % (data['cmd'], ctx_type))
                else:
                    verbose.append(data['cmd'])
                if self.args.verbose:
                    print('verbose:', ' '.join(verbose))
                if not changed_context:  # not sure yet what to do with context changes
                    task = self.complete_gpu(stages[-1], data, ctx_type, old_ctx_id)
                    found_submit = False
                    for stage in stages:
                        if stage['cmd'] in ['SubmitQueueKMD', 'WriteStamp']:
                            found_submit = True
                            task = self.complete_cpu(stage, data, ctx_type, old_ctx_id, task)
                            if stages[0]['cmd'] == 'PrepareQueueKMD':
                                self.complete_prepare(stages[0], stage, ctx_type, old_ctx_id, task)
                            break
                    if not found_submit:
                        self.complete_cpu(stages[0], data, ctx_type, old_ctx_id, task)
    for stamp in to_del:
        del stamps[stamp]
def write_footer(self, file):
    file.write('</profilerDataModel><noteData>\n')
    for note in self.notes:
        args = "\n".join([
            str(key) + " = " + str(val).replace("{", "").replace("}", "")
            for (key, val) in iteritems(note[3])
        ])
        file.write('<note startTime="%d" duration="%d" eventIndex="%d">%s</note>\n' % (note[0], note[1], note[2], cgi.escape(args)))
    file.write('</noteData><v8profile totalTime="0"/></trace>\n')
def start_new_trace(self):
    self.targets.append("%s-%d.json" % (self.args.output, self.trace_number))
    self.trace_number += 1
    self.file = codecs.open(self.targets[-1], "wb+", 'utf-8')
    # the second \n is for the rare case when there are no events, and finish cuts the last two symbols
    self.file.write('{\n"traceEvents": [\n\n')
    for (key, value) in iteritems(self.tree["threads"]):
        pid_tid = key.split(',')
        self.file.write('{"name": "thread_name", "ph":"M", "pid":%s, "tid":%s, "args": {"name":"%s(%s)"}},\n' % (pid_tid[0], pid_tid[1], value, pid_tid[1]))
def finalize(self):
    for (target, receiver) in iteritems(self.tcpip):
        self.end_receiver(receiver)
        packets = receiver['packets']
        if packets:
            if 'pid' in receiver:
                pid = receiver['pid']
                now = self.parser.convert_time(max(packet['time'] for packet in packets)) + 1000
                # all packets to this target are considered to be from the same source
                packet = receiver['packets'][0]
                self.on_receive(0, now, pid, target, packet['source'])
                self.end_receiver(receiver)
            else:
                pass  # not sure what to do with these remnants yet...
def convert_numbers(obj):
    # recursively turn strings of digits into ints; dicts and lists are updated in place
    if isinstance(obj, dict):
        for (k, v) in iteritems(obj):
            obj[k] = convert_numbers(v)
    elif isinstance(obj, list):
        new = [convert_numbers(v) for v in obj]
        del obj[:]
        obj.extend(new)
    elif hasattr(obj, '__iter__'):
        for v in obj:
            convert_numbers(v)
    elif isinstance(obj, basestring):
        if obj.isdigit():
            return int(obj)
    return obj
def format_args(self, arg):
    # convert numeric strings to int/float and strip real strings; recurses into dictionaries
    if type(arg) == type({}):
        return dict([(key, self.format_args(value)) for (key, value) in iteritems(arg)])
    try:
        val = float(arg)
        if float('inf') != val:
            if val.is_integer():
                return int(val)
            else:
                return val
    except:
        pass
    return arg.strip()
def format_value(self, arg):
    # quote values that are strings rather than numbers/floats; recurses into dictionaries
    if type(arg) == type({}):
        return "{" + ", ".join([
            '"%s":%s' % (key, self.format_value(value))
            for (key, value) in iteritems(arg)
        ]) + "}"
    try:
        val = float(arg)
        if float('inf') != val:
            if val.is_integer():
                return int(val)
            else:
                return val
    except:
        pass
    return '"%s"' % unicode(arg).encode('ascii', 'ignore').strip().replace("\\", "\\\\").replace('"', '\\"').replace('\n', '\\n')
def format_task(self, phase, type, begin, end):
    res = []
    res.append('{"ph":"%s"' % phase)
    res.append(', "pid":%(pid)d' % begin)
    if 'tid' in begin:
        res.append(', "tid":%(tid)d' % begin)
    if GT_FLOAT_TIME:
        res.append(', "ts":%.3f' % (self.convert_time(begin['time'])))
    else:
        res.append(', "ts":%d' % (self.convert_time(begin['time'])))
    if "counter" == type:  # workaround of chrome issue with forgetting the last counter value
        self.counters.setdefault(begin['domain'], {})[begin['str']] = begin  # remember the last counter value
    if "marker" == type:
        name = begin['str']
        res.append(', "s":"%s"' % (GoogleTrace.Markers[begin['data']]))
    elif "object_" in type:
        if 'str' in begin:
            name = begin['str']
        else:
            name = ""
    elif "frame" == type:
        if 'str' in begin:
            name = begin['str']
        else:
            name = begin['domain']
    else:
        if type not in ["counter", "task", "overlapped"]:
            name = type + ":"
        else:
            name = ""
        if 'parent' in begin:
            name += to_hex(begin['parent']) + "->"
        if 'str' in begin:
            name += begin['str'] + ":"
        if 'pointer' in begin:
            name += "func<" + to_hex(begin['pointer']) + ">:"
        else:
            name = name.rstrip(":")
    assert (name or "object_" in type)
    res.append(', "name":"%s"' % name)
    res.append(', "cat":"%s"' % (begin['domain']))
    if 'id' in begin:
        res.append(', "id":"%s"' % str(begin['id']))
    if type in ['task']:
        dur = self.convert_time(end['time']) - self.convert_time(begin['time'])
        if dur < self.args.min_dur:
            return []
        if GT_FLOAT_TIME:
            res.append(', "dur":%.3f' % dur)
        else:
            res.append(', "dur":%d' % dur)
    args = {}
    if 'args' in begin:
        args = begin['args'].copy()
    if 'args' in end:
        args.update(end['args'])
    if '__file__' in begin:
        args["__file__"] = begin["__file__"]
        args["__line__"] = begin["__line__"]
    if 'counter' == type:
        if 'delta' in begin:  # multi-counter is passed as named sub-counters dict
            args[name] = begin['delta']
    if 'memory' in begin:
        total = 0
        breakdown = {}
        children = 0
        for (size, values) in iteritems(begin['memory']):
            if size is None:  # special case for children attribution
                children = values
            else:
                all = sum(values)
                total += size * all
                if all:
                    breakdown[size] = all
        breakdown['TOTAL'] = total
        breakdown['CHILDREN'] = children
        args['CRT:Memory(size,count)'] = breakdown
    if args:
        res.append(', "args":')
        res.append(json.dumps(self.format_args(args), ensure_ascii=False))
    res.append('}')
    return res
def preprocess(self, profile):
    '''
    postDecode takes the unexported fields populated by decode (with suffix X)
    and populates the corresponding exported fields.
    The unexported fields are cleared to facilitate testing.
    :param profile: protobuf-generated pprof_profile.Profile
    :return:
    '''
    self.period = profile.Period

    mappings = {}
    for mapping in profile.Mapping:
        mapping.File = self.get_string(profile.stringTable, mapping.fileX)
        mapping.BuildID = self.get_string(profile.stringTable, mapping.buildIDX)
        mappings[mapping.ID] = mapping

    functions = {}
    for function in profile.Function:
        function.Name = self.get_string(profile.stringTable, function.nameX)
        function.SystemName = self.get_string(profile.stringTable, function.systemNameX)
        function.Filename = self.get_string(profile.stringTable, function.filenameX)
        functions[function.ID] = function

    locations = {}
    for location in profile.Location:
        if mappings.get(location.mappingIDX) is not None:
            location.Mapping.CopyFrom(mappings.get(location.mappingIDX))
            location.mappingIDX = 0
        for i in range(0, len(location.Line)):
            line = location.Line[i]
            uid = line.functionIDX
            if uid != 0:
                if functions.get(uid) is not None:
                    location.Line[i].Function.CopyFrom(functions.get(uid))
                if location.Line[i].Function is None:
                    raise Exception("Function ID %d not found" % uid)
                location.Line[i].functionIDX = 0
        locations[location.ID] = location

    for sample_type in profile.SampleType:
        sample_type.Type = self.get_string(profile.stringTable, sample_type.typeX)
        sample_type.Unit = self.get_string(profile.stringTable, sample_type.unitX)

    for sample in profile.Sample:
        labels = {}
        num_labels = {}
        for label in sample.labelX:
            key = self.get_string(profile.stringTable, label.keyX)
            if label.strX != 0:
                value = self.get_string(profile.stringTable, label.strX)
                if key not in labels:
                    labels[key] = []
                labels[key].append(value)
            else:
                if key not in num_labels:
                    num_labels[key] = []
                num_labels[key].append(label.numX)
        if len(labels) > 0:
            for (key, value) in iteritems(labels):
                label = sample.Label.add()
                label.key = key
                for val in value:
                    label.value.append(val)
        if len(num_labels) > 0:
            for (key, value) in iteritems(num_labels):
                label = sample.NumLabel.add()
                label.key = key
                for val in value:
                    label.value.append(val)
        for location_id in sample.locationIDX:
            if locations.get(location_id) is not None:
                location = sample.Location.add()
                location.CopyFrom(locations.get(location_id))

    period_type = profile.PeriodType
    if period_type is None:
        profile.PeriodType = profile.ValueType()
    else:
        period_type.Type = self.get_string(profile.stringTable, period_type.typeX)
        period_type.Unit = self.get_string(profile.stringTable, period_type.unitX)