def fill_line(self, r, func, cc, nc, tt, ct, strip=True):
    """Store one row of profiler statistics into self.itemDataMap.

    Args:
        r: row index, used as the key into self.itemDataMap.
        func: pstats function key, a (filename, lineno, funcname) tuple.
        cc: primitive (non-recursive) call count.
        nc: total call count.
        tt: total (own) time.
        ct: cumulative time.
        strip: when True (default), strip the directory prefix from the
            displayed function name.

    Note: assumes nc and cc are non-zero (true for any entry the
    profiler actually recorded); a zero count raises ZeroDivisionError.
    """
    # Compute the display name exactly once, stripping the path first if
    # requested. (The original computed the unstripped name and then
    # immediately overwrote it when strip was True.)
    name = func_std_string(func_strip_path(func) if strip else func)
    # Render the call count as "total/primitive" when the function recursed.
    c = str(nc)
    if nc != cc:
        c = c + '/' + str(cc)
    # Columns: calls, tottime, tottime per call, cumtime, cumtime per
    # primitive call, display name, and the raw func key (kept so other
    # code, e.g. strip_dirs(), can re-render the name later).
    self.itemDataMap[r] = (c, tt, float(tt) / nc, ct, float(ct) / cc, name, func)
def calc_profiler_results(middleware): import pstats # Make sure nothing is printed to stdout output = StringIO.StringIO() stats = pstats.Stats(middleware.prof, stream=output) stats.sort_stats("cumulative") results = { "total_call_count": stats.total_calls, "total_time": RequestStats.seconds_fmt(stats.total_tt), "calls": [] } width, list_func_names = stats.get_print_list([80]) for func_name in list_func_names: primitive_call_count, total_call_count, total_time, cumulative_time, callers = stats.stats[ func_name] func_desc = pstats.func_std_string(func_name) callers_names = map( lambda func_name: pstats.func_std_string(func_name), callers.keys()) callers_desc = map( lambda name: { "func_desc": name, "func_desc_short": RequestStats.short_method_fmt(name) }, callers_names) results["calls"].append({ "primitive_call_count": primitive_call_count, "total_call_count": total_call_count, "total_time": RequestStats.seconds_fmt(total_time), "per_call": RequestStats.seconds_fmt(total_time / total_call_count) if total_call_count else "", "cumulative_time": RequestStats.seconds_fmt(cumulative_time), "per_call_cumulative": RequestStats.seconds_fmt(cumulative_time / primitive_call_count) if primitive_call_count else "", "func_desc": func_desc, "func_desc_short": RequestStats.short_method_fmt(func_desc), "callers_desc": callers_desc, }) output.close() return results
def results(self): """Return cProfile results in a dictionary for template context.""" # Make sure nothing is printed to stdout output = StringIO.StringIO() stats = pstats.Stats(self.c_profile, stream=output) stats.sort_stats("cumulative") self.c_profile.create_stats() results = { "raw_stats": base64.b64encode(marshal.dumps(self.c_profile.stats)), "total_call_count": stats.total_calls, "total_time": util.seconds_fmt(stats.total_tt), "calls": [] } width, list_func_names = stats.get_print_list([80]) for func_name in list_func_names: primitive_call_count, total_call_count, total_time, cumulative_time, callers = stats.stats[ func_name] func_desc = pstats.func_std_string(func_name) callers_names = map( lambda func_name: pstats.func_std_string(func_name), callers.keys()) callers_desc = map( lambda name: { "func_desc": name, "func_desc_short": util.short_method_fmt(name) }, callers_names) results["calls"].append({ "primitive_call_count": primitive_call_count, "total_call_count": total_call_count, "cumulative_time": util.seconds_fmt(cumulative_time, 2), "total_time": util.seconds_fmt(total_time, 2), "per_call_cumulative": util.seconds_fmt(cumulative_time / primitive_call_count, 2) if primitive_call_count else "", "func_desc": func_desc, "func_desc_short": util.short_method_fmt(func_desc), "callers_desc": callers_desc, }) output.close() return results
def calc_profiler_results(middleware):
    """Build a template-friendly dict from the middleware's profiler.

    In simple-timing mode only the wall-clock total is reported;
    otherwise the full per-function cProfile breakdown is returned,
    sorted by cumulative time. (Python 2 code: StringIO module,
    list-returning map().)
    """
    if middleware.simple_timing:
        # Lightweight mode: no cProfile data, just elapsed wall time.
        return {"total_time": RequestStats.seconds_fmt(middleware.end - middleware.start)}
    import pstats
    # Make sure nothing is printed to stdout
    output = StringIO.StringIO()
    stats = pstats.Stats(middleware.prof, stream=output)
    stats.sort_stats("cumulative")
    results = {
        "total_call_count": stats.total_calls,
        "total_time": RequestStats.seconds_fmt(stats.total_tt),
        "calls": [],
    }
    # Limit the report to the first 80 entries in cumulative order.
    width, list_func_names = stats.get_print_list([80])
    for func_name in list_func_names:
        # Each stats entry is (primitive calls, total calls, total time,
        # cumulative time, callers dict).
        primitive_call_count, total_call_count, total_time, cumulative_time, callers = stats.stats[func_name]
        func_desc = pstats.func_std_string(func_name)
        callers_names = map(lambda func_name: pstats.func_std_string(func_name), callers.keys())
        callers_desc = map(
            lambda name: {"func_desc": name, "func_desc_short": RequestStats.short_method_fmt(name)},
            callers_names
        )
        results["calls"].append(
            {
                "primitive_call_count": primitive_call_count,
                "total_call_count": total_call_count,
                "total_time": RequestStats.seconds_fmt(total_time),
                # Per-call averages are blank when the divisor is zero.
                "per_call": RequestStats.seconds_fmt(total_time / total_call_count)
                if total_call_count else "",
                "cumulative_time": RequestStats.seconds_fmt(cumulative_time),
                "per_call_cumulative": RequestStats.seconds_fmt(cumulative_time / primitive_call_count)
                if primitive_call_count else "",
                "func_desc": func_desc,
                "func_desc_short": RequestStats.short_method_fmt(func_desc),
                "callers_desc": callers_desc,
            }
        )
    output.close()
    return results
def process_response(self, request, response):
    """Collect profiler data (if profiling ran) and stash it on self.

    Builds self.function_calls, a list of rows
    [ncalls, tottime_ms, percall_ms, cumtime_ms, percall_cum_ms, name],
    sorted by internal time with directory prefixes stripped, then
    returns *response* unchanged.
    """
    if self.profiler is not None:
        stats = pstats.Stats(self.profiler)
        function_calls = []
        # sort_stats(1) is the legacy numeric key for "internal time".
        for func in stats.strip_dirs().sort_stats(1).fcn_list:
            current = []
            # stats.stats[func] is (cc, nc, tt, ct, callers):
            # cc = primitive calls, nc = total calls.
            if stats.stats[func][0] != stats.stats[func][1]:
                # Recursive: show "total/primitive".
                current.append('%d/%d' % (stats.stats[func][1],
                                          stats.stats[func][0]))
            else:
                current.append(stats.stats[func][1])
            # Total (own) time, in milliseconds.
            current.append(stats.stats[func][2]*1000)
            # Per-call total time; 0 when there were no calls.
            if stats.stats[func][1]:
                current.append(stats.stats[func][2]*1000/stats.stats[func][1])
            else:
                current.append(0)
            # Cumulative time, in milliseconds.
            current.append(stats.stats[func][3]*1000)
            # Per-primitive-call cumulative time; 0 when cc is zero.
            if stats.stats[func][0]:
                current.append(stats.stats[func][3]*1000/stats.stats[func][0])
            else:
                current.append(0)
            current.append(pstats.func_std_string(func))
            function_calls.append(current)
        self.stats = stats
        self.function_calls = function_calls
    # destroy the profiler just in case
    # NOTE(review): despite the comment above, nothing is destroyed here —
    # self.profiler is left untouched; confirm whether cleanup was intended.
    return response
def func_stats(self, func):
    """Build a dict of display fields for one profiled function.

    Reads the (cc, nc, tt, ct, callers) entry for *func* from
    self.stats and returns a dict with call counts, times, per-call
    averages ('inf' on division by zero) and the standard name.
    """
    cc, nc, tt, ct, callers = self.stats[func]
    recursed = nc != cc
    info = {
        # Extra primitive-call count shown only for recursive functions.
        'rcalls': cc if recursed else '',
        # "total/primitive" for recursion, plain total otherwise.
        'call_str': '%s/%s' % (nc, cc) if recursed else str(nc),
        'ncalls': nc,
        'tottime': tt,
        'cumtime': ct,
        'func_name': pstats.func_std_string(func),
    }
    try:
        info['percall'] = tt / nc
    except ZeroDivisionError:
        info['percall'] = 'inf'
    try:
        info['cumpercall'] = ct / cc
    except ZeroDivisionError:
        info['cumpercall'] = 'inf'
    return info
def print_csv(stats):
    """Print profiler *stats* as tab-separated values on stdout.

    Args:
        stats: a pstats-style dict mapping a (filename, lineno, name)
            key to (cc, nc, tt, ct, callers).
    """
    # this is a modified version of what's in pstats.Stats.print_title(...)
    print(
        'ncalls\ttottime\tpercall\tcumtime\tpercall\tfilename:lineno(function)'
    )

    def f(x):
        # this replaces pstats.f8(...)
        return f'{x:.3f}'

    for func, (cc, nc, tt, ct, callers) in stats.items():
        # the content of this loop is a modified version of what's in
        # pstats.Stats.print_line(...): tab-separated, always on stdout.
        c = str(nc)
        if nc != cc:
            c = c + '/' + str(cc)
        print(c, end='\t')
        print(f(tt), end='\t')
        if nc == 0:
            print(' ', end='\t')
        else:
            print(f(tt / nc), end='\t')
        print(f(ct), end='\t')
        if cc == 0:
            print(' ', end='\t')
        else:
            # BUG FIX: this used end=' ', so the separator before the last
            # column was a space (not a tab) whenever cc != 0, breaking
            # the tab-separated format.
            print(f(ct / cc), end='\t')
        print(func_std_string(func))
def sort_stats(self, *field):
    """Sort the collected stats and (re)build self.fcn_list.

    Mirrors pstats.Stats.sort_stats(), but fills self.fcn_list with
    plain (filename, line, funcname) tuples. With no arguments, sorting
    is disabled (fcn_list = 0). (Python 2 code: iteritems(), cmp-based
    list.sort().)
    """
    if not field:
        self.fcn_list = 0
        return self
    if len(field) == 1 and type(field[0]) == type(1):
        # Be compatible with old profiler
        field = [ {-1: "stdname",
                   0:"calls",
                   1:"time",
                   2: "cumulative" } [ field[0] ] ]
    sort_arg_defs = self.get_sort_arg_defs()
    sort_tuple = ()
    self.sort_type = ""
    connector = ""
    for word in field:
        # Accumulate the composite sort key and a readable description.
        sort_tuple = sort_tuple + sort_arg_defs[word][0]
        self.sort_type += connector + sort_arg_defs[word][1]
        connector = ", "
    stats_list = []
    for func, (cc, nc, tt, ct, callers) in self.stats.iteritems():
        # Append the printable name and the raw key so TupleComp can
        # sort on any field, including "stdname".
        stats_list.append((cc, nc, tt, ct) + func +
                          (pstats.func_std_string(func), func))
    stats_list.sort(pstats.TupleComp(sort_tuple).compare)
    self.fcn_list = fcn_list = []
    counter = 0  # NOTE(review): counter is incremented but never used.
    for tup in stats_list:
        counter += 1
        # The raw func key was appended last; unpack it back out.
        filnam, line, fn = tup[-1]
        new_tup = (filnam, line, fn)
        fcn_list.append(new_tup)
    return self
def strip_dirs(self, strip=True):
    """Re-render the function-name column of the list control.

    For every row, rebuilds column 5 from the raw func key stored as the
    last element of the itemDataMap entry, stripping directory prefixes
    when *strip* is True (restoring full paths when False).
    (wxPython ListCtrl code: GetItem/GetData/SetStringItem.)
    """
    for r in range(self.GetItemCount()):
        item = self.GetItem(r)
        i = item.GetData()
        # Raw (filename, line, name) key kept as the last tuple element.
        func = self.itemDataMap[i][-1]
        if strip:
            func = func_strip_path(func)
        self.SetStringItem(r, 5, func_std_string(func))
def get_profile_stats(self):
    '''
    return profile statistics to client.

    :return: Tuple, (breakdown, profile_result)
             breakdown: dict; server main thread's cProfile stats; {} if not enabled.
             profile_result: dict; end-to-end time of each phase

    data format of breakdown dict:
        key: function name including file path, like /root/zmq.py:send
        value: dict{
            'ncall': int, number of function call profiled,
            'tot_avg': average time of the function, not including sub-func call
            'cum_avg': average time of the function, including sub-func call
        }

    format of profile_result:
        dict{
            'keys': list, keys in time sequence, like [start, step1, step2, step3]
            'start': [t_rpc1, t_rpc2, ...]  # t_rpc is float() from time.time()
            'step1': [t_rpc1, t_rpc2, ...]
            ...
        }
        User software could use this dict to do further calculation, like avg, rms, etc.
    '''
    stats_server = []
    breakdown = {}
    if self.rpc_server.profiler:
        try:
            stats_server = pstats.Stats(self.rpc_server.profiler).stats
            # profile breakdown: stats values are
            # (cc, nc, tt, ct, callers) per function key.
            breakdown = {
                pstats.func_std_string(k): {
                    'ncall': v[1],
                    'tot_avg': float(v[2]) / v[1],
                    'cum_avg': float(v[3]) / v[0]
                }
                for k, v in stats_server.items()
            }
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; best-effort behavior is kept, but only
        # for ordinary exceptions.
        except Exception:
            self.logger.info(traceback.format_exc())
            stats_server = []
            breakdown = {}
    profile_result = self.rpc_server.generate_profile_result()
    # overall server handling time
    return breakdown, profile_result
def normalize_paths(self, stats):
    """Rewrite filenames in *stats* relative to sys.path entries.

    The longest matching sys.path prefix wins (paths are scanned in
    reverse-sorted order); files outside sys.path are reduced to their
    basename. Keys that collide after rewriting are merged with
    pstats.add_func_stats. (Python 2 code: iteritems().)
    """
    import os.path
    from pstats import add_func_stats, func_std_string
    # Reverse-sort so longer (more specific) prefixes are tried first.
    python_paths = sorted(sys.path, reverse=True)
    def rel_filename(filename):
        for path in python_paths:
            if filename.startswith(path):
                # +1 drops the path separator after the prefix.
                return filename[len(path) + 1:]
        return os.path.basename(filename)
    def func_strip_path(func_name):
        filename, line, name = func_name
        return rel_filename(filename), line, name
    oldstats = stats.stats
    stats.stats = newstats = {}
    max_name_len = 0
    for func, (cc, nc, tt, ct, callers) in oldstats.iteritems():
        newfunc = func_strip_path(func)
        # Track the widest display name, as pstats.Stats does.
        if len(func_std_string(newfunc)) > max_name_len:
            max_name_len = len(func_std_string(newfunc))
        newcallers = {}
        for func2, caller in callers.iteritems():
            newcallers[func_strip_path(func2)] = caller
        if newfunc in newstats:
            # Two distinct paths collapsed onto the same key: merge them.
            newstats[newfunc] = add_func_stats(
                newstats[newfunc],
                (cc, nc, tt, ct, newcallers))
        else:
            newstats[newfunc] = (cc, nc, tt, ct, newcallers)
    old_top = stats.top_level
    stats.top_level = new_top = {}
    for func in old_top:
        new_top[func_strip_path(func)] = None
    stats.max_name_len = max_name_len
    # Invalidate caches derived from the old keys.
    stats.fcn_list = None
    stats.all_callees = None
    return self
def wrapped_controller(*args, **kw):
    """Run the wrapped controller under the profiler and attach results
    to the request.

    Sets request.tgdb_call_time (ms), request.tgdb_profiling_stats and
    request.tgdb_profiling_function_calls (per-function dicts with times
    in milliseconds, sorted by cumulative time).
    """
    profiler = profile.Profile()
    try:
        request.tgdb_call_start_time = time.time()
        result = profiler.runcall(controller, *args, **kw)
        request.tgdb_call_time = (
            time.time() - request.tgdb_call_start_time) * 1000
    except:
        # Re-raised unchanged; the finally block still records stats.
        raise
    finally:
        stats = pstats.Stats(profiler)
        function_calls = []
        flist = stats.sort_stats('cumulative').fcn_list
        for func in flist:
            current = {}
            # info is (cc, nc, tt, ct, callers).
            info = stats.stats[func]
            # Number of calls
            if info[0] != info[1]:
                current['ncalls'] = '%d/%d' % (info[1], info[0])
            else:
                current['ncalls'] = info[1]
            # Total time
            current['tottime'] = info[2] * 1000
            # Quotient of total time divided by number of calls
            if info[1]:
                current['percall'] = info[2] * 1000 / info[1]
            else:
                current['percall'] = 0
            # Cumulative time
            current['cumtime'] = info[3] * 1000
            # Quotient of the cumulative time divided by the number
            # of primitive calls.
            if info[0]:
                current['percall_cum'] = info[3] * 1000 / info[0]
            else:
                current['percall_cum'] = 0
            # Filename
            filename = pstats.func_std_string(func)
            current['filename_long'] = filename
            current['filename'] = format_fname(filename)
            function_calls.append(current)
        request.tgdb_profiling_stats = stats
        request.tgdb_profiling_function_calls = function_calls
    return result
def profile_function(to_profile, filename_stats=None):
    """largely from pyramid_debugtoolbar

    Profile *to_profile* (a zero-argument callable) with the profiler
    and return a list of per-function dicts (times in milliseconds,
    sorted by cumulative time). When *filename_stats* is given, the rows
    are also written there as CSV.
    """
    profiler = profile.Profile()
    result = profiler.runcall(to_profile)
    stats = pstats.Stats(profiler)
    function_calls = []
    flist = stats.sort_stats('cumulative').fcn_list
    for func in flist:
        current = {}
        # info is (cc, nc, tt, ct, callers).
        info = stats.stats[func]
        # Number of calls
        if info[0] != info[1]:
            current['ncalls'] = '%d/%d' % (info[1], info[0])
        else:
            current['ncalls'] = info[1]
        # Total time
        current['tottime'] = info[2] * 1000
        # Quotient of total time divided by number of calls
        if info[1]:
            current['percall'] = info[2] * 1000 / info[1]
        else:
            current['percall'] = 0
        # Cumulative time
        current['cumtime'] = info[3] * 1000
        # Quotient of the cumulative time divided by the number
        # of primitive calls.
        if info[0]:
            current['percall_cum'] = info[3] * 1000 / info[0]
        else:
            current['percall_cum'] = 0
        # Filename
        filename = pstats.func_std_string(func)
        current['filename_long'] = filename
        current['filename'] = format_fname(filename)
        function_calls.append(current)
    # NOTE(review): raises IndexError if the profile produced no entries
    # — confirm callers always profile real work.
    keys = function_calls[0].keys()
    if filename_stats:
        with open(filename_stats, 'w') as output_file:
            dict_writer = csv.DictWriter(output_file, keys)
            dict_writer.writeheader()
            dict_writer.writerows(function_calls)
        print("wrote to %s" % filename_stats)
    else:
        print("returning (function_calls)")
    return function_calls
def results(self): """Return cProfile results in a dictionary for template context.""" # Make sure nothing is printed to stdout output = StringIO.StringIO() stats = pstats.Stats(self.c_profile, stream=output) stats.sort_stats("cumulative") self.c_profile.create_stats() results = { "raw_stats": base64.b64encode(marshal.dumps(self.c_profile.stats)), "total_call_count": stats.total_calls, "total_time": util.seconds_fmt(stats.total_tt), "calls": [] } width, list_func_names = stats.get_print_list([80]) for func_name in list_func_names: primitive_call_count, total_call_count, total_time, cumulative_time, callers = stats.stats[func_name] func_desc = pstats.func_std_string(func_name) callers_names = map(lambda func_name: pstats.func_std_string(func_name), callers.keys()) callers_desc = map( lambda name: {"func_desc": name, "func_desc_short": util.short_method_fmt(name)}, callers_names) results["calls"].append({ "primitive_call_count": primitive_call_count, "total_call_count": total_call_count, "cumulative_time": util.seconds_fmt(cumulative_time, 2), "total_time": util.seconds_fmt(total_time, 2), "per_call_cumulative": util.seconds_fmt(cumulative_time / primitive_call_count, 2) if primitive_call_count else "", "func_desc": func_desc, "func_desc_short": util.short_method_fmt(func_desc), "callers_desc": callers_desc, }) output.close() return results
def _collect_callers_data(self, source, call_dict):
    """collect to similar of pstats.Stats.print_callers() data in
    self.callers_data. It refers to pstats.Stats.print_call_line()
    method.

    *source* is the function key whose callers are listed; *call_dict*
    maps each caller key either to a (nc, cc, tt, ct) tuple or, in the
    older format, to a bare call count.
    """
    clist = list(call_dict.keys())
    clist.sort()
    tempdata = []
    for cnt, func in enumerate(clist):
        name = func_std_string(func)
        value = call_dict[func]
        if isinstance(value, tuple):
            # Per-edge stats: (calls, primitive calls, tt, ct).
            nc, cc, tt, ct = value
            del (cc)
        else:
            # Old format: bare call count; fall back to the function's
            # own tt/ct from the full stats table.
            nc = value
            tt = self.prof.stats[func][2]
            ct = self.prof.stats[func][3]
        # Map raw numbers onto discrete display levels.
        ncl = mapping_table(nc, self.prof.total_calls)
        ttl = mapping_table(tt, self.prof.total_tt)
        ctl = mapping_table(ct, self.prof.total_tt)
        if cnt:
            # Only the first row carries the destination function name.
            dst_function = ""
        else:
            dst_function = func_std_string(source)
        callers_link = hashlib.md5(
            func_std_string(source).encode()).hexdigest()
        tempdata.append({
            'dst_function': dst_function,
            'callers_link': callers_link,
            'ncalls': nc,
            'ncallslevel': ncl,
            'tottime': "%8.4f" % (tt),
            'tottimelevel': ttl,
            'cumtime': "%8.4f" % (ct),
            'cumtimelevel': ctl,
            'org_function': name
        })
    self.callers_data.append(tempdata)
def profile_handler(request):
    """Profile the wrapped handler (pre-async/await "yield from"
    coroutine style) and store the stats table on self.

    Sets self.stats and self.function_calls (per-function dicts with
    times in milliseconds, sorted by cumulative time), then returns the
    handler's result.
    """
    try:
        self.profiler.enable()
        try:
            result = yield from handler(request)
        finally:
            # Always stop profiling, even if the handler raised.
            self.profiler.disable()
    except:
        # NOTE(review): bare except + raise is a no-op wrapper; the
        # finally below runs either way.
        raise
    finally:
        stats = pstats.Stats(self.profiler)
        function_calls = []
        flist = stats.sort_stats('cumulative').fcn_list
        for func in flist:
            current = {}
            # info is (cc, nc, tt, ct, callers).
            info = stats.stats[func]
            # Number of calls
            if info[0] != info[1]:
                current['ncalls'] = '%d/%d' % (info[1], info[0])
            else:
                current['ncalls'] = info[1]
            # Total time
            current['tottime'] = info[2] * 1000
            # Quotient of total time divided by number of calls
            if info[1]:
                current['percall'] = info[2] * 1000 / info[1]
            else:
                current['percall'] = 0
            # Cumulative time
            current['cumtime'] = info[3] * 1000
            # Quotient of the cumulative time divided by the number
            # of primitive calls.
            if info[0]:
                current['percall_cum'] = info[3] * 1000 / info[0]
            else:
                current['percall_cum'] = 0
            # Filename
            filename = pstats.func_std_string(func)
            current['filename_long'] = filename
            current['filename'] = format_fname(filename)
            function_calls.append(current)
        self.stats = stats
        self.function_calls = function_calls
    return result
async def profile_handler(request):
    """Profile the wrapped async handler and store the stats table on
    self.

    Sets self.stats and self.function_calls (per-function dicts with
    times in milliseconds, sorted by cumulative time), then returns the
    handler's result.
    """
    try:
        self.profiler.enable()
        try:
            result = await handler(request)
        finally:
            # Always stop profiling, even if the handler raised.
            self.profiler.disable()
    except BaseException:
        # Re-raised unchanged; the finally block still records stats.
        raise
    finally:
        stats = pstats.Stats(self.profiler)
        function_calls = []
        flist = stats.sort_stats('cumulative').fcn_list
        for func in flist:
            current = {}
            # info is (cc, nc, tt, ct, callers).
            info = stats.stats[func]
            # Number of calls
            if info[0] != info[1]:
                current['ncalls'] = '%d/%d' % (info[1], info[0])
            else:
                current['ncalls'] = info[1]
            # Total time
            current['tottime'] = info[2] * 1000
            # Quotient of total time divided by number of calls
            if info[1]:
                current['percall'] = info[2] * 1000 / info[1]
            else:
                current['percall'] = 0
            # Cumulative time
            current['cumtime'] = info[3] * 1000
            # Quotient of the cumulative time divided by the number
            # of primitive calls.
            if info[0]:
                current['percall_cum'] = info[3] * 1000 / info[0]
            else:
                current['percall_cum'] = 0
            # Filename
            filename = pstats.func_std_string(func)
            current['filename_long'] = filename
            current['filename'] = format_fname(filename)
            function_calls.append(current)
        self.stats = stats
        self.function_calls = function_calls
    return result
def process_response(self, request, response):
    """Disable the profiler and build self.function_calls from its data.

    Returns False when the panel is inactive or stats creation fails
    with TypeError; otherwise stores stats on self and returns
    *response* unchanged.
    """
    if not self.is_active:
        return False
    if self.profiler is not None:
        self.profiler.disable()
        try:
            stats = pstats.Stats(self.profiler)
        except TypeError:
            # Profiler produced no usable data: deactivate the panel.
            self.is_active = False
            return False
        function_calls = []
        # sort_stats(1): legacy numeric key for "internal time".
        for func in stats.sort_stats(1).fcn_list:
            current = {}
            # info is (cc, nc, tt, ct, callers).
            info = stats.stats[func]
            # Number of calls
            if info[0] != info[1]:
                current['ncalls'] = '%d/%d' % (info[1], info[0])
            else:
                current['ncalls'] = info[1]
            # Total time
            current['tottime'] = info[2] * 1000
            # Quotient of total time divided by number of calls
            if info[1]:
                current['percall'] = info[2] * 1000 / info[1]
            else:
                current['percall'] = 0
            # Cumulative time
            current['cumtime'] = info[3] * 1000
            # Quotient of the cumulative time divided by the number of
            # primitive calls.
            if info[0]:
                current['percall_cum'] = info[3] * 1000 / info[0]
            else:
                current['percall_cum'] = 0
            # Filename
            filename = pstats.func_std_string(func)
            current['filename_long'] = filename
            current['filename'] = format_fname(filename)
            function_calls.append(current)
        self.stats = stats
        self.function_calls = function_calls
    # destroy the profiler just in case
    # NOTE(review): despite the comment, self.profiler is not destroyed
    # here — confirm whether cleanup was intended.
    return response
def strip_dirs(self, additional_system_paths=()):
    """Strip system-path prefixes from every function key in self.stats.

    Like pstats.Stats.strip_dirs(), but uses create_strip_func() (built
    from *additional_system_paths*) to rewrite keys. Keys that collide
    after stripping are merged with add_func_stats. (Python 2 code:
    iteritems().)
    """
    # def strip_func(func_name):
    #     return strip_dir_from_module_regex(func_name, pattern)
    # path_prefixes = set()
    # for path in chain(sys.path, additional_system_paths):
    #     if not path:
    #         continue
    #     if path[-1] != '/':
    #         path = path + '/'
    #     path_prefixes.add(path)
    strip_func = create_strip_func(additional_system_paths)
    oldstats = self.stats
    self.stats = newstats = {}
    max_name_len = 0
    for func, (cc, nc, tt, ct, callers) in oldstats.iteritems():
        newfunc = strip_func(func)
        # Track the widest display name, as pstats.Stats does.
        if len(func_std_string(newfunc)) > max_name_len:
            max_name_len = len(func_std_string(newfunc))
        newcallers = {}
        for func2, caller in callers.iteritems():
            newcallers[strip_func(func2)] = caller
        if newfunc in newstats:
            # Two paths collapsed onto the same stripped key: merge.
            newstats[newfunc] = add_func_stats(
                newstats[newfunc],
                (cc, nc, tt, ct, newcallers))
        else:
            newstats[newfunc] = (cc, nc, tt, ct, newcallers)
    old_top = self.top_level
    self.top_level = new_top = {}
    for func in old_top:
        new_top[strip_func(func)] = None
    self.max_name_len = max_name_len
    # Invalidate caches keyed on the old function names.
    self.fcn_list = None
    self.all_callees = None
    return self
def render_result(self, profile, time_elapsed, environ):
    """Render the profiler report page.

    Builds a per-function table from *profile* and passes it to the
    template together with toggle URLs derived from the WSGI *environ*.
    """
    profile.create_stats()
    stats = profile.stats
    fmt = "{:.2f}".format
    function_calls = []
    # Each stats entry is func -> (cc, nc, tt, ct, callers).
    for func, info in iteritems(stats):
        current = {}
        filename = pstats.func_std_string(func)
        # hide our hook functions
        if filename.startswith(_file_path):
            continue
        # col0: filename
        if filename.startswith(("{", "<")):
            # built-in functions
            name, name_full = filename, "n/a"
        else:
            # functions from library and our project
            name, name_full = shorten_filename(filename), filename
        current["filename"], current["filename_full"] = name, name_full
        # skip functions that is in library or built-in
        if self.simple_output and name.startswith(("{", "<")):
            continue
        # col1: number of calls
        if info[0] != info[1]:
            current["ncalls"] = "%d/%d" % (info[1], info[0])
        else:
            current["ncalls"] = info[1]
        # col2: total time
        current["tottime"] = fmt(info[2] * 1000)
        # col3: quotient of total time divided by number of calls
        current["percall"] = fmt(info[2] * 1000 / info[1]) if info[1] else 0
        # col4: cumulative time
        current["cumtime"] = fmt(info[3] * 1000)
        # col5: quotient of the cumulative time divided by the number of
        # primitive calls.
        current["percall_cum"] = fmt(info[3] * 1000 / info[0]) if info[0] else 0
        function_calls.append(current)
    # Extend the current URL with "&"/"?" so toggle params can be appended.
    path = reconstruct_path(environ) + ("&" if environ.get("QUERY_STRING") else "?")
    return _template.render(
        ms_elapsed="{:.1f}".format(time_elapsed * 1000),
        function_calls=function_calls,
        disable_url=path + "%s=" % self.toggle_key,
        toggle_simple_output_url=path + "%s=%s" %
        (self.SIMPLE_OUTPUT_TOGGLE_KEY, not self.simple_output),
        simple_output=self.simple_output,
    )
def print_line(self, func):
    # hack : should print percentages
    """Print one stats line prefixed with its percentage of total time,
    then (when self.withcalls is set) an indented list of the entries in
    its call dict."""
    cc, nc, tt, ct, call_dict= self.stats[func]
    # Leading "<nn>%" column: share of total internal time.
    print('%2d%%' % (100*tt/self.total_tt), end=' ')
    # Delegate the standard columns to the pstats.Stats base class.
    Stats.print_line( self, func )
    if not self.withcalls:
        return
    name_size = 12 #9+8+8+8+8 +5 +1
    if not call_dict:
        print("--")
        return
    clist = list(call_dict.keys())
    clist.sort()
    indent = " "*name_size
    # NOTE: this loop rebinds `func`, shadowing the parameter.
    for func in clist:
        name = pstats.func_std_string(func)
        print(indent + name + '('+ repr(call_dict[func])+')')
def print_line(self, func):
    """Write one pstats-style stats row for *func* to self.stream.

    Columns: call count (shown as total/primitive when recursive),
    total time, total time per call, cumulative time, cumulative time
    per primitive call, and the function's standard name. Per-call
    columns are blanked when the corresponding count is zero.
    """
    # hack: should print percentages
    cc, nc, tt, ct, callers = self.stats[func]

    def emit(text):
        # Every column is written space-terminated to self.stream.
        print(text, end=' ', file=self.stream)

    calls = '{}/{}'.format(nc, cc) if nc != cc else str(nc)
    emit(' ')
    emit(calls.rjust(15))
    emit(f8(tt))
    emit(f8(tt / nc) if nc else ' ' * 8)
    emit(f8(ct))
    emit(f8(ct / cc) if cc else ' ' * 8)
    print(func_std_string(func), file=self.stream)
def print_line(self, func):
    # hack : should print percentages
    """Print one pstats-style row for *func* to self.stream.

    (Python 2 code: uses `print >> stream` syntax.) Per-call columns
    are blanked when the corresponding count is zero.
    """
    cc, nc, tt, ct, callers = self.stats[func]
    c = str(nc)
    if nc != cc:
        # Recursive calls: show "total/primitive".
        c = c + '/' + str(cc)
    print >> self.stream, c.rjust(12), # Rob, was 9
    print >> self.stream, pstats.f8(tt),
    if nc == 0:
        print >> self.stream, ' '*8,
    else:
        print >> self.stream, pstats.f8(tt/nc),
    print >> self.stream, pstats.f8(ct),
    if cc == 0:
        print >> self.stream, ' '*8,
    else:
        print >> self.stream, pstats.f8(ct/cc),
    print >> self.stream, ' ' + pstats.func_std_string(func)
def stats_to_markdownstr(p: pstats.Stats, prtRowNum: int) -> str:
    """convert pstats result to markdown table

    Args:
        p (pstats.Stats): pstats result
        prtRowNum (int): only print several functions

    Returns:
        str: the markdown string with table
    """
    # Capture print_stats() output so the summary lines can be reused.
    s = io.StringIO()
    p.stream = s
    p.print_stats(prtRowNum)
    sumlines = []
    _line = ''
    flag = True
    fp = 0
    s.seek(0)
    # Collect the non-empty summary lines that precede the table,
    # stopping at the " ncalls tottime ..." title row (or EOF).
    while flag:
        fp = s.tell()
        _line = s.readline()
        _pl = _line.rstrip()
        flag = _line.find(" ncalls tottime") != 0 and _line
        if _pl and flag:
            sumlines.append(_pl)
    s.seek(fp) # rewind before table
    width, fncs = p.get_print_list((prtRowNum, ))
    results = []
    for func in fncs:
        # Each stats entry is (cc, nc, tt, ct, callers).
        # NOTE(review): the per-call columns divide by nc without a zero
        # guard, and "ct percall" uses ct/nc where pstats itself uses
        # ct/cc — confirm both are intended.
        cc, nc, tt, ct, callers = p.stats[func]
        results.append(
            (nc, cc, tt, tt / nc, ct, ct / nc, pstats.func_std_string(func)))
    headers = [
        'ncalls', 'pcalls', 'tottime', 'tt percall', 'cumtime', 'ct percall',
        'filename:lineno(function)'
    ]
    sumstr = '\n'.join(sumlines)
    tablestr = tabulate.tabulate(results,
                                 headers=headers,
                                 floatfmt='.3g',
                                 tablefmt='pipe') # for markdown
    resultstr = sumstr + '\n' + tablestr
    return resultstr
def func_calls(self):
    """Get collected profiling data.

    Returns a list of per-function dicts (times in milliseconds),
    sorted by internal time; empty when no stats were collected.
    """
    if not self.stats:
        return []
    rows = []
    # sort_stats(1): legacy numeric key for "internal time".
    for func in self.stats.sort_stats(1).fcn_list:
        cc, nc, tt, ct = self.stats.stats[func][:4]
        rows.append({
            # "total/primitive" when the function recursed.
            'ncalls': "%d/%d" % (nc, cc) if cc != nc else nc,
            'tottime': tt * 1000,
            'percall': tt * 1000 / nc if nc else 0,
            'cumtime': ct * 1000,
            'percall_cum': ct * 1000 / cc if cc else 0,
            'filename': filename_format(pstats.func_std_string(func)),
        })
    return rows
def get_func_calls_from_stats(stats):
    """Flatten a pstats.Stats object into a list of per-function dicts.

    Rows are ordered by internal time; all times are converted to
    milliseconds. Returns an empty list for a falsy *stats*.
    """
    if not stats:
        return []
    calls = []
    # sort_stats(1): legacy numeric key for "internal time".
    for func in stats.sort_stats(1).fcn_list:
        cc, nc, tt, ct = stats.stats[func][:4]
        calls.append({
            # "total/primitive" when the function recursed.
            'ncalls': "%d/%d" % (nc, cc) if cc != nc else nc,
            'tottime': tt * 1000,
            'percall': tt * 1000 / nc if nc else 0,
            'cumtime': ct * 1000,
            'percall_cum': ct * 1000 / cc if cc else 0,
            'filename': pstats.func_std_string(func),
        })
    return calls
def print_line(self, func):
    """Print one stats row (times in milliseconds) for *func* to
    self.stream, skipping functions called fewer than 100 times.

    (Python 2 code: uses `print >> stream` syntax.)
    """
    cc, nc, tt, ct, callers = self.stats[func]
    if nc < 100:
        # Filter out rarely-called functions entirely.
        return
    # what is the relationship between cc and nc?
    c = str(nc)
    # if nc != cc:
    #     c = c + '/' + str(cc)
    print >> self.stream, c.rjust(9),
    print >> self.stream, f8(tt * 1000), # unit: ms
    if nc == 0:
        print >> self.stream, ' '*8,
    else:
        print >> self.stream, f8(float(tt)/nc * 1000),
    print >> self.stream, f8(ct * 1000),
    if cc == 0:
        print >> self.stream, ' '*8,
    else:
        print >> self.stream, f8(float(ct)/cc * 1000),
    print >> self.stream, func_std_string(func)
def func_std_string(self, func):
    """Return the standard "file:line(name)" rendering of *func*."""
    rendered = pstats.func_std_string(func)
    return rendered
def func(self):
    """Standard "file:line(name)" string for self._function."""
    return pstats.func_std_string(self._function)
def main():
    """Command-line entry point: print several pstats reports
    (cumulative, callees, time, and a callee breakdown sorted by
    cumulative time) for each profile file given on the command line.
    (Python 2 code: print statements.)
    """
    import optparse
    parser = optparse.OptionParser('%prog: [opts] <profile>...')
    opt,args = parser.parse_args()
    if len(args) == 0:
        parser.print_help()
        sys.exit(0)
    for fn in args:
        print
        print 'Cumulative time'
        print
        p = pstats.Stats(fn)
        p = p.strip_dirs()
        p.sort_stats('cumulative').print_stats(100)
        print
        print 'Callees ordered by cumulative time:'
        print
        p.print_callees(40)
        print
        print 'Time'
        print
        p = pstats.Stats(fn)
        p.sort_stats('time').print_stats(40)
        p.print_callees()
        width,lst = p.get_print_list([40])
        #print 'lst', lst
        if lst:
            p.calc_callees()
            name_size = width
            arrow = '->'
            print 'lst length:', len(lst)
            for func in lst:
                #print 'func', func
                if func in p.all_callees:
                    p.print_call_heading(width, "called...")
                    print pstats.func_std_string(func).ljust(name_size) + arrow,
                    print
                    #p.print_call_line(width, func, p.all_callees[func])
                    cc = p.all_callees[func]
                    #print 'Callees:', cc
                    # Gather per-callee times so they can be rank-ordered.
                    TT = []
                    CT = []
                    for func,value in cc.items():
                        #print 'func,value', func, value
                        if isinstance(value, tuple):
                            nc, ccx, tt, ct = value
                            TT.append(tt)
                            CT.append(ct)
                            #print ' ', func, ct, tt
                        else:
                            print 'NON-TUPLE', value
                    # Sort callees by cumulative time; print the top 40.
                    I = np.argsort(CT)
                    FV = list(cc.items())
                    for i in reversed(I[-40:]):
                        func,value = FV[i]
                        name = pstats.func_std_string(func)
                        if isinstance(value, tuple):
                            nc, ccx, tt, ct = value
                            if nc != ccx:
                                substats = '%d/%d' % (nc, ccx)
                            else:
                                substats = '%d' % (nc,)
                            substats = '%-20s %s %s %s' % (substats, pstats.f8(tt), pstats.f8(ct), name)
                            print ' ' + substats
                else:
                    p.print_call_line(width, func, {})
                print
                print
def main():
    """Command-line entry point: print several pstats reports
    (cumulative, callees, time, and a callee breakdown sorted by
    cumulative time) for each profile file; with --merge, all inputs
    are first combined into one profile.
    """
    import optparse
    parser = optparse.OptionParser('%prog: [opts] <profile>...')
    parser.add_option('--merge',
                      action='store_true',
                      help='Merge input files into one big profile')
    opt, args = parser.parse_args()
    if len(args) == 0:
        parser.print_help()
        sys.exit(0)
    if opt.merge:
        # Load the first profile and fold the rest into it.
        p = pstats.Stats(args[0])
        #p.add(*args[1:])
        for fn in args[1:]:
            p.add(fn)
        P = [p]
    else:
        P = [pstats.Stats(fn) for fn in args]
    for p in P:
        print()
        print('Cumulative time')
        print()
        #p = pstats.Stats(fn)
        p = p.strip_dirs()
        p.sort_stats('cumulative').print_stats(100)
        print()
        print('Callees ordered by cumulative time:')
        print()
        p.print_callees(40)
        print()
        print('Time')
        print()
        #p = pstats.Stats(fn)
        p.sort_stats('time').print_stats(40)
        p.print_callees()
        width, lst = p.get_print_list([40])
        #print 'lst', lst
        if lst:
            p.calc_callees()
            name_size = width
            arrow = '->'
            print('lst length:', len(lst))
            for func in lst:
                #print 'func', func
                if func in p.all_callees:
                    p.print_call_heading(width, "called...")
                    print(pstats.func_std_string(func).ljust(name_size) + arrow, end=' ')
                    print()
                    #p.print_call_line(width, func, p.all_callees[func])
                    cc = p.all_callees[func]
                    #print 'Callees:', cc
                    # Gather per-callee times so they can be rank-ordered.
                    TT = []
                    CT = []
                    for func, value in cc.items():
                        #print 'func,value', func, value
                        if isinstance(value, tuple):
                            nc, ccx, tt, ct = value
                            TT.append(tt)
                            CT.append(ct)
                            #print ' ', func, ct, tt
                        else:
                            print('NON-TUPLE', value)
                    # Sort callees by cumulative time; print the top 40.
                    I = np.argsort(CT)
                    FV = list(cc.items())
                    for i in reversed(I[-40:]):
                        func, value = FV[i]
                        name = pstats.func_std_string(func)
                        if isinstance(value, tuple):
                            nc, ccx, tt, ct = value
                            if nc != ccx:
                                substats = '%d/%d' % (nc, ccx)
                            else:
                                substats = '%d' % (nc, )
                            substats = '%-20s %s %s %s' % (
                                substats, pstats.f8(tt), pstats.f8(ct), name)
                            print(' ' + substats)
                else:
                    p.print_call_line(width, func, {})
                print()
                print()
def _analyzed_prof(self):
    """analyzed to not linetimings profile data.

    Builds the per-function report dict for the top
    self.functions_number entries (sorted by time then calls), skipping
    zero-call entries, and returns {'totaltime', 'totalcalls', 'data'}.
    """
    self.prof.sort_stats('time', 'calls')
    # Silence get_print_list() by temporarily pointing the stats stream
    # at os.devnull, then restoring the caller's stream.
    # BUG FIX: the original saved the stream *after* replacing it, so
    # the real stream was lost and the "restore" re-installed the
    # devnull handle (which was then closed).
    backstream = self.prof.stream
    self.prof.stream = open(os.devnull, 'w')  # dirty hack
    try:
        funclist = self.prof.get_print_list(())[1]
    finally:
        self.prof.stream.close()
        self.prof.stream = backstream
    datalist = list()
    self.profiledata_count = len(funclist)
    for cnt, func in enumerate(funclist):
        if cnt >= self.functions_number:
            break
        # stat is (cc, nc, tt, ct, callers).
        stat = self.prof.stats[func]
        ncalls = stat[0]
        if not int(ncalls):
            ## skip 0-call functions
            self.profiledata_count -= 1
            continue
        self._collect_callers_data(func, self.prof.stats[func][4])
        #tottime = convert_unit(stat[2])
        tottime = "%8.4lf" % stat[2]
        try:
            totpercall = "%8.4lf" % float(stat[2] / stat[0])
        except ZeroDivisionError:
            totpercall = "0.0000"
        cumtime = "%8.4lf" % stat[3]
        try:
            cumpercall = "%8.4lf" % float(stat[3] / stat[0])
        except ZeroDivisionError:
            cumpercall = "0.0000"
        # Map raw numbers onto discrete display levels.
        ncallslevel = mapping_table(ncalls, self.prof.total_calls)
        tottlevel = mapping_table(stat[2], self.prof.total_tt)
        totclevel = mapping_table(float(totpercall), self.prof.total_tt)
        cumtlevel = mapping_table(stat[3], self.prof.total_tt)
        cumclevel = mapping_table(float(cumpercall), self.prof.total_tt)
        # Per-file detail page link; slashes are not path-safe in a name.
        linelink = "%s.html" % func[0].replace('/', '_')
        callers_link = hashlib.md5(
            func_std_string(func).encode()).hexdigest()
        data = {
            'func': func,
            'linelink': linelink,
            'ncalls': ncalls,
            'tottime': tottime,
            'cumtime': cumtime,
            'totpercall': totpercall,
            'cumpercall': cumpercall,
            'ncallslevel': ncallslevel,
            'cumtimelevel': cumtlevel,
            'tottimelevel': tottlevel,
            'totcalllevel': totclevel,
            'cumcalllevel': cumclevel,
            'callers_link': callers_link,
        }
        datalist.append(data)
    profdata = {
        'totaltime': "%8.4lf" % self.prof.total_tt,
        'totalcalls': self.prof.total_calls,
        'data': datalist,
    }
    return profdata