def example_one():
    """Profile ``insertion_sort`` on random integers and print pstats output.

    Reference run (20003 calls in 0.778s) showed the cumulative CPU time
    dominated by ``insert_value`` -- note for Nick: that is the hot spot.
    """
    size = 10 ** 4
    samples = [randint(0, size) for _ in range(size)]
    runner = lambda: insertion_sort(samples)

    prof = Profile()
    prof.runcall(runner)

    # pstats turns the raw profile into a readable, sorted report.
    report = Stats(prof)
    report.strip_dirs()
    report.sort_stats('cumulative')
    report.print_stats()
def write_profile(pfile='./logs/profile.out'):
    """Finish the global BUBBLE_PROFILE run and persist its results.

    Writes a binary ``.pstats`` dump plus an appended text report under
    ./logs, then prints the output locations. No-op when profiling is off.
    """
    global BUBBLE_PROFILE
    if not BUBBLE_PROFILE:
        return
    BUBBLE_PROFILE.disable()

    buf = StringIO()
    Stats(BUBBLE_PROFILE, stream=buf).sort_stats('cumulative').print_stats()

    pstats_file = './logs/profiling.pstats'
    profile_text = './logs/profile.txt'
    BUBBLE_PROFILE.dump_stats(pstats_file)
    with open(profile_text, 'a+') as handle:
        handle.write(buf.getvalue())

    print("end_profile")
    print('BUBBLE_PROFILE:pstats_file:' + pstats_file)
    print('BUBBLE_PROFILE:profile_text:' + profile_text)
def __run(self, *args, **kwargs): __start = time.time() # notify if we don't process quickly if __start - self.__time_submitted > 0.05: self.log.warning(f'Starting of {self.name} took too long: {__start - self.__time_submitted:.2f}s. ' f'Maybe there are not enough threads?') # start profiler pr = Profile() pr.enable() # Execute the function try: self._func(*args, **kwargs) except Exception as e: self.__format_traceback(e, *args, **kwargs) # disable profiler pr.disable() # log warning if execution takes too long __dur = time.time() - __start if self.__warn_too_long and __dur > 0.8: self.log.warning(f'Execution of {self.name} took too long: {__dur:.2f}s') s = io.StringIO() ps = Stats(pr, stream=s).sort_stats(SortKey.CUMULATIVE) ps.print_stats(0.1) # limit to output to 10% of the lines for line in s.getvalue().splitlines()[4:]: # skip the amount of calls and "Ordered by:" if line: self.log.warning(line)
def write_profile(pfile='./logs/profile.out'):
    """Stop the global BUBBLE_PROFILE and save stats (binary + text).

    Appends a cumulative-time text report to ./logs/profile.txt and dumps
    raw stats to ./logs/profiling.pstats. Does nothing if profiling is off.
    """
    global BUBBLE_PROFILE
    if not BUBBLE_PROFILE:
        return
    BUBBLE_PROFILE.disable()

    report_buffer = StringIO()
    stats = Stats(BUBBLE_PROFILE, stream=report_buffer)
    stats.sort_stats('cumulative')
    stats.print_stats()

    pstats_file = './logs/profiling.pstats'
    profile_text = './logs/profile.txt'
    BUBBLE_PROFILE.dump_stats(pstats_file)
    with open(profile_text, 'a+') as out:
        out.write(report_buffer.getvalue())

    print("end_profile")
    print('BUBBLE_PROFILE:pstats_file:' + pstats_file)
    print('BUBBLE_PROFILE:profile_text:' + profile_text)
def _execute(self, func, phase_name, n, *args):
    """Run ``func(*args)``, profiling it when ``self.profile_dir`` is set.

    When profiling, writes both a binary ``.prof`` dump and a text report
    named after the contender/phase/repetition. Returns func's result.
    """
    if not self.profile_dir:
        # Profiling disabled: just forward the call.
        return func(*args)

    basename = '%s-%s-%d-%02d-%d' % (
        self.contender_name, phase_name, self.objects_per_txn, n, self.rep)
    txt_fn = os.path.join(self.profile_dir, basename + ".txt")
    prof_fn = os.path.join(self.profile_dir, basename + ".prof")

    profiler = cProfile.Profile()
    profiler.enable()
    try:
        result = func(*args)
    finally:
        profiler.disable()

    profiler.dump_stats(prof_fn)
    with open(txt_fn, 'w') as report:
        stats = Stats(profiler, stream=report)
        stats.strip_dirs()
        stats.sort_stats('cumulative')
        stats.print_stats()
    return result
def example_two():
    """Profile the bisect-based insertion sort variant and print stats.

    Reference run: 30003 calls in 0.018s -- with ``insert_value_better``
    the time shifts into ``list.insert`` and ``bisect_left`` themselves.
    """
    size = 10 ** 4
    samples = [randint(0, size) for _ in range(size)]
    runner = lambda: insertion_sort(samples)

    prof = Profile()
    prof.runcall(runner)

    # Summarize with pstats, most expensive cumulative entries first.
    report = Stats(prof)
    report.strip_dirs()
    report.sort_stats('cumulative')
    report.print_stats()
def to_txt(self, filename):
    """Save every recorded profile into one file.

    @param filename filename where to save the profiles, can be a stream
    """
    if len(self) == 0:
        raise ValueError(  # pragma: no cover
            "No profile was done.")
    if isinstance(filename, str):
        # Open the path ourselves, then recurse with the open stream.
        with open(filename, "w") as handle:  # pylint: disable=W1514
            self.to_txt(handle)
        return

    stream = filename
    stream.write(self.name + "\n")
    for index, (prof, kw) in enumerate(self):
        stream.write("------------------------------------------------------\n")
        stream.write("profile %d\n" % index)
        if kw:
            for key, value in sorted(kw.items()):
                stream.write("%s=%s\n" % (key, str(value).replace('\n', '\\n')))
            stream.write("--\n")
        if hasattr(prof, 'output_text'):
            # pyinstrument-style profilers render themselves.
            stream.write(prof.output_text(unicode=False, color=False))
        else:
            # cProfile-style profilers go through pstats.
            buf = StringIO()
            Stats(prof, stream=buf).sort_stats(SortKey.CUMULATIVE).print_stats()
            stream.write(buf.getvalue())
        stream.write("\n")
def new_func(*args, **kwargs):
    """Decorator wrapper: profile one call to ``func``, print the stats to
    stdout with a header naming the function, and return its result."""
    # create a Profile and enable tracking
    prof = Profile()
    prof.enable()
    # call the profiled function
    result = func(*args, **kwargs)
    # disable tracking
    prof.disable()
    # simulate writing to file via string buffer
    buff = StringIO()
    # write stats to string buffer
    prof_stats = Stats(prof, stream=buff)
    prof_stats.print_stats()
    # print the output from string buffer
    print('-' * 79)
    # Trim the source path so it starts at the package directory.
    # NOTE(review): assumes ``package`` appears in the path components --
    # .index() raises ValueError otherwise; confirm at the decorator site.
    path = func.__code__.co_filename.split(sep)
    path = path[len(path) - list(reversed(path)).index(package) - 1:]
    print(f'{".".join(path)} :: '
          f'{func.__name__}')
    print(buff.getvalue())
    # propagate back the real func's output
    return result
def main():
    """Conduct a peridynamics simulation.

    With ``--profile`` the whole run is wrapped in cProfile and a
    cumulative-time report is printed at the end.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--profile', action='store_const', const=True)
    args = parser.parse_args()

    if args.profile:
        profile = cProfile.Profile()
        profile.enable()

    model = Model(mesh_file, horizon=0.1, critical_strain=0.005,
                  elastic_modulus=0.05, initial_crack=is_crack)

    # Set left-hand side and right-hand side of boundary
    indices = np.arange(model.nnodes)
    model.lhs = indices[model.coords[:, 0] < 1.5*model.horizon]
    model.rhs = indices[model.coords[:, 0] > 1.0 - 1.5*model.horizon]

    integrator = Euler(dt=1e-3)

    u, damage, *_ = model.simulate(
        steps=100,
        integrator=integrator,
        boundary_function=boundary_function,
        write=1000
        )

    if args.profile:
        profile.disable()
        s = StringIO()
        stats = Stats(profile, stream=s).sort_stats(SortKey.CUMULATIVE)
        stats.print_stats()
        print(s.getvalue())
def __call__(self, environ, start_response):
    """WSGI entry point: run the wrapped app under cProfile and write a
    per-request report (with PATH header) to ``self._stream``."""
    response_body = []

    def catching_start_response(status, headers, exc_info=None):
        start_response(status, headers, exc_info)
        return response_body.append

    def runapp():
        appiter = self._app(environ, catching_start_response)
        response_body.extend(appiter)
        if hasattr(appiter, 'close'):
            appiter.close()

    profiler = Profile()
    profiler.runcall(runapp)
    body = ''.join(response_body)

    report = Stats(profiler, stream=self._stream)
    report.sort_stats(*self._sort_by)

    self._stream.write('-' * 80)
    self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO'))
    report.print_stats(*self._restrictions)
    self._stream.write('-' * 80 + '\n\n')

    return [body]
def stopTest(self, test):
    """After each test: stop the profiler and print its stats when the
    benchmark mode is active."""
    super(BenchTestResult, self).stopTest(test)
    if self._benchmark:
        self._profiler.disable()
        stats = Stats(self._profiler)
        stats.sort_stats(self._sort)
        stats.print_stats(self._limit)
def profile():
    """Profile one call to ``f1`` and print cumulative-time stats."""
    prof = Profile()
    prof.runcall(f1)

    report = Stats(prof)
    report.strip_dirs()
    report.sort_stats('cumulative')
    report.print_stats()
def expose(self, widget, event): context = widget.window.cairo_create() #r = (event.area.x, event.area.y, event.area.width, event.area.height) #context.rectangle(r[0]-.5, r[1]-.5, r[2]+1, r[3]+1) #context.clip() if False: import profile profile.runctx("self.draw(context, event.area)", locals(), globals(), "/tmp/pychessprofile") from pstats import Stats s = Stats("/tmp/pychessprofile") s.sort_stats('cumulative') s.print_stats() else: self.drawcount += 1 start = time() self.animationLock.acquire() self.draw(context, event.area) self.animationLock.release() self.drawtime += time() - start #if self.drawcount % 100 == 0: # print "Average FPS: %0.3f - %d / %d" % \ # (self.drawcount/self.drawtime, self.drawcount, self.drawtime) return False
def __analyze2 ():
    """Profile ``self.__analyze2()`` into /tmp and print cumulative stats.

    NOTE(review): this function takes no ``self`` parameter yet references
    ``self`` inside the profiled statement -- it only works when ``self``
    is in scope at the call site; confirm the intent.
    """
    import profile
    profile.runctx("self.__analyze2()", locals(), globals(), "/tmp/pychessprofile")
    from pstats import Stats
    s = Stats("/tmp/pychessprofile")
    s.sort_stats('cumulative')
    s.print_stats()
def main():
    """Main sequence: warm up the analyser, then report CPU time (cProfile)
    and memory growth (tracemalloc) for the ``test`` workload."""
    analyser = Analyser(config=ProfilingConfig)
    data = import_all(config=ProfilingConfig)
    analyser.run(data)
    # Free the warm-up objects before the measured run.
    del analyser
    del data

    profiler = Profile()
    tracemalloc.start(10)
    snapshot_before = tracemalloc.take_snapshot()
    profiler.runcall(test)
    snapshot_after = tracemalloc.take_snapshot()

    cpu_stats = Stats(profiler)
    cpu_stats.strip_dirs()
    cpu_stats.sort_stats('cumulative')
    print("\n===Time Profiler Stats===\n")
    cpu_stats.print_stats(TOP_STATS)
    print("\n===Time Profiler Callers===\n")
    cpu_stats.print_callers(TOP_STATS)

    memory_stats = snapshot_after.compare_to(snapshot_before, 'lineno')
    print("\n===Memory Profiler Callers===\n")
    for stat in memory_stats[:3]:
        print(stat)
    print("\n===Top Memory Consumer===\n")
    top = memory_stats[0]
    print('\n'.join(top.traceback.format()))
def tearDown(self):
    """Report profiling results"""
    # Build a report from the cProfile instance started elsewhere (self.pr).
    p = Stats(self.pr)
    p.strip_dirs()
    p.sort_stats('cumtime')
    p.print_stats()
    # Python 2 print statement: visual separator between test reports.
    print "\n--->>>"
def tearDown(self):
    """Print the top 50 cumulative-time entries when profiling is enabled,
    then run the normal teardown."""
    if self.should_profile:
        results = Stats(self.profile)
        results.strip_dirs()
        results.sort_stats('cumulative')
        results.print_stats(50)
    super().tearDown()
def profile(to=None, sort_by='cumtime'):
    '''Profiles a chunk of code, use with the ``with`` statement::

        from halonctl.debug import profile

        with profile('~/Desktop/stats'):
            pass # Do something performance-critical here...

    Results for individual runs are collected into ``to``. The specifics of
    how reports are done varies depending on what type ``to`` is.

    * **File-like objects**: Stats are dumped, according to ``sort_by``,
      into the stream, separated by newlines - watch out, the file/buffer
      may grow very big when used in loops.
    * **List-like objects**: A number of pstats.Stats objects are appended.
    * **str and unicode**: Treated as a path and opened for appending.
      Tildes (~) will be expanded, and intermediary directories created
      if possible.
    * **None or omitted**: Results are printed to sys.stderr.
    '''
    # A string is treated as a path: expand it and open for appending.
    if isinstance(to, six.string_types):
        to = open_fuzzy(to, 'a')

    to_is_stream = hasattr(to, 'write')
    to_is_list = hasattr(to, 'append')

    p = Profile()
    p.enable()
    yield  # the body of the ``with`` block runs here, under the profiler
    p.disable()

    ps = Stats(p, stream=to if to_is_stream else sys.stderr)
    ps.sort_stats('cumtime')

    if to_is_stream or to is None:
        ps.print_stats()
    elif to_is_list:
        to.append(ps)
def __call__(self, environ, start_response):
    """WSGI entry point: run the wrapped application under cProfile and
    write the per-request report to ``self._stream``.

    Fix: ``Stats`` was created without ``stream=self._stream``, so
    ``print_stats`` went to sys.stdout while the '-' separators and PATH
    header went to the configured stream, splitting the report.
    """
    response_body = []

    def catching_start_response(status, headers, exc_info=None):
        start_response(status, headers, exc_info)
        return response_body.append

    def runapp():
        appiter = self._app(environ, catching_start_response)
        response_body.extend(appiter)
        if hasattr(appiter, 'close'):
            appiter.close()

    p = Profile()
    p.runcall(runapp)
    body = ''.join(response_body)

    # Direct the stats output to the same stream as the separators.
    stats = Stats(p, stream=self._stream)
    stats.sort_stats(*self._sort_by)

    self._stream.write('-' * 80)
    self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO'))
    stats.print_stats(*self._restrictions)
    self._stream.write('-' * 80 + '\n\n')

    return [body]
def tearDown(self):
    '''Disconnect from statseg'''
    self.stat.disconnect()
    # Print the cumulative-time profile gathered for the test that just ran.
    profile = Stats(self.profile)
    profile.strip_dirs()
    profile.sort_stats('cumtime')
    profile.print_stats()
    print("\n--->>>")
def print_stats(statsfile, statstext):
    """Render a binary pstats dump as text and open the result.

    statsfile: path to a dump produced by cProfile.
    statstext: destination text file, opened afterwards via ``startfile``.
    """
    with open(statstext, 'w') as out:
        report = Stats(statsfile, stream=out)
        report.strip_dirs()
        report.sort_stats('cumtime')
        report.print_stats()
    startfile(statstext)
def tearDown(self):
    """finish any test"""
    # Only report when the test set up a profiler on self.prof.
    if hasattr(self, "prof"):
        p = Stats(self.prof)
        p.strip_dirs()
        p.sort_stats('cumtime')
        p.print_stats()
        print("\n--->>>")
def tearDown(self):
    """Report profiling results, optionally dumping and printing them
    when ``self.verbose`` is True.

    Fix: ``strip_dirs()`` was the last statement, after printing, where it
    has no effect; it now runs before sorting/printing so reported paths
    are actually stripped.
    """
    p = Stats(self.prof)
    p.strip_dirs()
    p.sort_stats("cumtime")
    if self.verbose is True:
        p.dump_stats("profiles/test_graphsearcher.py.prof")
        p.print_stats()
def wrap(*args, **kwargs):
    """Profile a single call to ``func`` and print the 20 costliest
    entries by internal time; return the call's result."""
    prof = Profile()
    result = prof.runcall(func, *args, **kwargs)

    report = Stats(prof)
    report.strip_dirs()
    report.sort_stats('tottime')
    report.print_stats(20)
    return result
def tearDown(self):
    """Per-test teardown: print profile stats and/or a debug separator,
    gated by the module-level PROFILE and DEBUG switches."""
    if PROFILE:
        p = Stats(self.pr)
        p.strip_dirs()
        p.sort_stats('cumtime')
        p.print_stats()
    if DEBUG:
        print('\n{}>>>'.format('-' * 77))
def tearDown(self):
    # NOTE(review): this bare ``return`` short-circuits the whole method,
    # leaving the profiling report below unreachable -- presumably a quick
    # way to disable it; remove the return to re-enable the report.
    return
    """finish any test"""
    p = Stats(self.pr)
    p.strip_dirs()
    p.sort_stats('cumtime')
    p.print_stats()
    print("\n--->>>")
def tearDown(self):
    """Disconnect from statseg"""
    self.stat.disconnect()
    # Print the cumulative-time profile for the finished test.
    profile = Stats(self.profile)
    profile.strip_dirs()
    profile.sort_stats("cumtime")
    profile.print_stats()
    print("\n--->>>")
def _run(self): """ Do the work """ # Init with the very first library section. This will block! section = self.queue.get() self.queue.task_done() if section is None: return while not self.isCanceled(): if section is None: break LOG.debug('Start processing section %s (%ss)', section.name, section.plex_type) self.current = 1 self.processed = 0 self.total = section.total self.section_name = section.name self.section_type_text = utils.lang( v.TRANSLATION_FROM_PLEXTYPE[section.plex_type]) profile = Profile() profile.enable() with section.context(self.last_sync) as context: while not self.isCanceled(): # grabs item from queue. This will block! item = self.queue.get() if isinstance(item, dict): context.add_update(item['xml'][0], section_name=section.name, section_id=section.id, children=item['children']) self.title = item['xml'][0].get('title') self.processed += 1 elif isinstance(item, UpdateLastSyncAndPlaystate): context.plexdb.update_last_sync( item.plex_id, section.plex_type, self.last_sync) if section.plex_type != v.PLEX_TYPE_ARTIST: context.update_userdata(item.xml_item, section.plex_type) elif isinstance(item, InitNewSection) or item is None: section = item break else: context.remove(item.plex_id, plex_type=section.plex_type) self.update_progressbar() self.current += 1 if self.processed == 500: self.processed = 0 context.commit() self.queue.task_done() self.queue.task_done() profile.disable() string_io = StringIO() stats = Stats(profile, stream=string_io).sort_stats('cumulative') stats.print_stats() LOG.info('cProfile result: ') LOG.info(string_io.getvalue())
def profileit(fun, *args, **kwargs):
    """Run ``fun`` under cProfile, print tottime-sorted stats to stdout,
    and return ``(result, Stats)`` so callers can inspect both."""
    profiler = Profile()
    profiler.enable()
    result = fun(*args, **kwargs)
    profiler.disable()

    report = Stats(profiler).sort_stats('tottime')
    report.print_stats()
    return result, report
def profile(func, file_path):
    """Profile a zero-argument callable and write cumulative-time stats
    to ``file_path``.

    Fix: the output file was opened with ``open`` and never closed,
    leaking the handle; a ``with`` block now closes it deterministically.
    """
    pr = Profile()
    pr.enable()
    func()
    pr.disable()
    with open(file_path, "w") as stream:
        ps = Stats(pr, stream=stream).sort_stats("cumulative")
        ps.print_stats()
def profile(func, file_path):
    """Profile ``func()`` and save a cumulative-time report to ``file_path``.

    Fix: the report file handle was leaked (opened, never closed); it is
    now managed by a ``with`` statement.
    """
    pr = Profile()
    pr.enable()
    func()
    pr.disable()
    with open(file_path, 'w') as out:
        ps = Stats(pr, stream=out).sort_stats('cumulative')
        ps.print_stats()
def wrapper(*args, **kwargs):
    """Call ``func`` under cProfile, print the stats (sorted by the
    enclosing ``sortby``) to stdout, and return func's result."""
    profiler = cProfile.Profile()
    profiler.enable()
    retval = func(*args, **kwargs)

    buf = StringIO()
    report = Stats(profiler, stream=buf).sort_stats(sortby)
    report.print_stats()
    print(buf.getvalue())
    return retval
def print_profile_data():
    """
    Print the collected profile data.
    """
    buf = StringIO()
    report = Stats(profiler, stream=buf)
    report.sort_stats('cumulative')
    report.print_stats()
    print(buf.getvalue())
def run():
    """Parse CLI options, build both players, and run the requested games
    (optionally profiled, otherwise in a process pool); print the win ratio."""
    if sys.version_info < (3, 0, 0):
        sys.stderr.write("You need python 3.0 or later to run this script\n")
        sys.exit(1)

    arg_parser = get_arg_parser()
    args = arg_parser.parse_args()

    # Each side needs either a native agent path or both draft+battle agents.
    if not args.p1_path and (not args.p1_draft or not args.p1_battle):
        arg_parser.error("You should use either p1-path or both "
                         "p1-draft and p1-battle.\n")
    elif not args.p2_path and (not args.p2_draft or not args.p2_battle):
        arg_parser.error("You should use either p2-path or both "
                         "p2-draft and p2-battle.\n")

    if args.p1_path is not None:
        # A native agent serves both the draft and the battle phases.
        player_1 = agents.NativeAgent(args.p1_path)
        player_1 = (player_1, player_1)
    else:
        player_1 = parse_draft_agent(args.p1_draft)(), \
            parse_battle_agent(args.p1_battle)()

    if args.p2_path is not None:
        player_2 = agents.NativeAgent(args.p2_path)
        player_2 = (player_2, player_2)
    else:
        player_2 = parse_draft_agent(args.p2_draft)(), \
            parse_battle_agent(args.p2_battle)()

    if args.profile:
        # Profiled runs execute sequentially so cProfile sees every game.
        profiler = cProfile.Profile()
        result = io.StringIO()
        profiler.enable()
        for i in range(args.games):
            evaluate((i, player_1, player_2, args.seed))
        profiler.disable()
        profiler_stats = Stats(profiler, stream=result)
        profiler_stats.sort_stats('cumulative')
        profiler_stats.print_stats()
        print(result.getvalue())
    else:
        params = ((j, player_1, player_2, args.seed, args.silent)
                  for j in range(args.games))
        with Pool(args.processes) as pool:
            pool.map(evaluate, params)

    # NOTE(review): wins_by_p0 comes from module scope -- presumably
    # updated by evaluate(); confirm where it is maintained.
    wins, games = wins_by_p0
    ratio = 100 * wins / games
    print(f"{'%.2f' % ratio}% {'%.2f' % (100 - ratio)}%")
def inner(*args, **kwargs):
    """Profile one call to ``func``; print stats (sorted by the enclosing
    ``field``) and their callers; return the call's result.

    Fix: the wrapped function's return value was discarded, so any
    decorated callable silently returned None.
    """
    pro = Profile()
    result = pro.runcall(func, *args, **kwargs)
    stats = Stats(pro)
    stats.strip_dirs()
    stats.sort_stats(field)
    print("Profile for {}()".format(func.__name__))
    stats.print_stats()
    stats.print_callers()
    return result
def search_method():
    """Match for applicable methods and their arguments.

    Input:
    * username: username.
    * theory_name: name of the theory.
    * thm_name: name of the theorem.

    Returns:
    * search_res: list of search results.
    * ctxt: current proof context.
    """
    data = json.loads(request.get_data().decode("utf-8"))
    # Optional request-scoped profiling, reported at the end.
    if data['profile']:
        pr = cProfile.Profile()
        pr.enable()

    # Build the proof cache on first use for this request payload.
    if not proof_cache.check_cache(data):
        start_time = time.perf_counter()
        proof_cache.create_cache(data)
        print("Load: %f" % (time.perf_counter() - start_time))

    # Limit theory loading to everything up to thm_name, when given.
    if data['thm_name'] != '':
        limit = ('thm', data['thm_name'])
    else:
        limit = None
    basic.load_theory(data['theory_name'], limit=limit,
                      username=data['username'])

    start_time = time.perf_counter()
    state = proof_cache.states[data['index']]
    fact_ids = data['step']['fact_ids']
    goal_id = data['step']['goal_id']

    search_res = state.search_method(goal_id, fact_ids)
    # Pretty-print goals/facts for the response payload.
    with settings.global_setting(unicode=True):
        for res in search_res:
            if '_goal' in res:
                res['_goal'] = [printer.print_term(t) for t in res['_goal']]
            if '_fact' in res:
                res['_fact'] = [printer.print_term(t) for t in res['_fact']]

    vars = state.get_vars(goal_id)
    with settings.global_setting(unicode=True, highlight=True):
        print_vars = dict((k, printer.print_type(v)) for k, v in vars.items())
    print("Response:", time.perf_counter() - start_time)

    if data['profile']:
        p = Stats(pr)
        p.strip_dirs()
        p.sort_stats('cumtime')
        p.print_stats()

    return jsonify({'search_res': search_res, 'ctxt': print_vars})
def profile_call(_func, *args, **kwargs):
    """Profile one call to ``_func``: dump raw stats to a timestamped file
    in /tmp, print a time/calls-sorted report to stderr, and return the
    call's result."""
    profiler = Profile()
    captured = []
    profiler.runcall(lambda: captured.append(_func(*args, **kwargs)))
    profiler.dump_stats('/tmp/sentry-%s-%s.prof' % (time.time(),
                                                    _func.__name__))

    report = Stats(profiler, stream=sys.stderr)
    report.sort_stats('time', 'calls')
    report.print_stats()
    return captured[0]
def tearDown(self):
    """Stop and join all driver workers, clear shared CVX state, then
    print profile stats when the profiler switch is on."""
    for worker in self.driver._workers:
        worker.stop()
        worker.wait()
    self.cvx.endpoint_data.clear()
    super(MechTestBase, self).tearDown()
    if ENABLE_PROFILER:
        p = Stats(self.pr)
        p.strip_dirs()
        p.sort_stats('cumtime')
        p.print_stats()
def show_time_profiler_results(pr, top_records):
    """
    Show results of timed profiling.
    :param pr: profiler instance; a falsy value makes this a no-op.
    :param top_records: how many top function calls to show.
    """
    if not pr:
        return
    report = Stats(pr)
    report.strip_dirs()
    report.sort_stats('cumulative')
    report.print_stats(top_records)
def wrapper(*args, **kwg):
    """Profile one call to ``func``; dump/print its stats; return the
    real result.

    Fix: the original ran ``"res = f(*args, **kwg)"`` through
    ``cProfile.runctx`` -- under Python 3 exec semantics that assignment
    only lands in the locals *dict*, so the wrapper always returned None.
    ``Profile.runcall`` propagates the actual return value (and any
    exception) correctly.
    """
    prof = cProfile.Profile()
    try:
        return prof.runcall(func, *args, **kwg)
    finally:
        if filename:
            # Match runctx(filename=...): persist stats, then report them.
            prof.dump_stats(filename)
            pstats = Stats(filename)
            pstats.sort_stats(*sort_fields)
            pstats.print_stats(*p_amount)
        else:
            # Match runctx without a filename: print stats directly.
            prof.print_stats()
def profile(func, args=None, kwargs=None, sort="time"):
    """Run ``func(*args, **kwargs)`` under the profiler, print stats
    sorted by ``sort``, and return the call's result."""
    args = () if args is None else args
    kwargs = {} if kwargs is None else kwargs

    prof = profile_.Profile()
    ret = prof.runcall(func, *args, **kwargs)

    report = Stats(prof)
    report.sort_stats(sort)
    report.print_stats()
    return ret
def print_stats(profiler, printCallers=False):
    """Print a cumulative-time report for ``profiler``.

    When ``printCallers`` is exactly True, show caller relationships
    instead of the flat per-function statistics.
    """
    from pstats import Stats
    report = Stats(profiler)
    report.strip_dirs()
    report.sort_stats('cumulative')
    if printCallers is True:
        report.print_callers()
    else:
        report.print_stats()
def tearDown(self):
    """Emit profiling output when enabled (optionally persisting raw stats
    first), then run the base-class teardown."""
    if ENABLE_PROFILE:
        if DUMP_PROFILE:
            self.pr.dump_stats('profile.out')
        p = Stats(self.pr)
        p.strip_dirs()
        p.sort_stats('time')
        p.print_stats(40)
        # Drill into known hot spots (arguments are regex patterns
        # matching file:line(function) entries).
        p.print_callees('types.py:846\(validate_value', 20)
        p.print_callees('types.py:828\(_validate_primitive_value', 20)
        p.print_callees('uploadsession.py:185\(write', 20)
    TestBase.teardown(self)
def tearDownClass(cls):
    """Stop every swarm node, remove the test storage directory, and print
    the class-level profile gathered during the run."""
    # stop swarm
    print("TEST: stopping swarm")
    for node in cls.swarm:
        node.stop()
    shutil.rmtree(STORAGE_DIR)
    # get profiler stats
    stats = Stats(cls.profile)
    stats.strip_dirs()
    stats.sort_stats('cumtime')
    stats.print_stats()
def run(self):
    """method calling cProfile and printing the output"""
    tests=self.tests()
    for test in tests:
        tmpBuffer=StringIO.StringIO()
        profile=cProfile.Profile()
        # Run the named test method (test[0]) under the profiler.
        profile.runctx('self.'+str(test[0])+"()",globals(),locals())
        stats=Stats(profile,stream=tmpBuffer)
        stats.sort_stats('time','calls')
        stats.print_stats(1)
        # Scrape the "... in N.NNN CPU seconds" header for the total time.
        match=re.findall(r'\bin\b(.*?)\bCPU\b',tmpBuffer.getvalue())
        # Python 2 print statement: test docstring plus measured time.
        print str(test[1].__doc__ )+":"+str(match[0])+" CPU Time"
def profile_func(func):
    """Profile a zero-argument callable: dump raw stats to /tmp, print a
    time/calls-sorted report to stderr, and return the callable's result."""
    from cProfile import Profile
    from pstats import Stats

    prof = Profile()
    captured = []
    prof.runcall(lambda: captured.append(func()))
    prof.dump_stats('/tmp/lektor-%s.prof' % func.__name__)

    report = Stats(prof, stream=sys.stderr)
    report.sort_stats('time', 'calls')
    report.print_stats()
    return captured[0]
def process_response(self, request, response):
    """Django middleware hook: when profiling is active for this request,
    replace the response body with a <pre>-wrapped profile report."""
    if self.is_on(request):
        self.prof.create_stats()
        out = StringIO.StringIO()
        stats = Stats(self.prof, stream=out)
        stats.sort_stats(*sort_tuple)
        stats.print_stats()
        stats_str = out.getvalue()
        # Only swap the body when there is real content to replace.
        if response and response.content and stats_str:
            response.content = "<pre>" + stats_str + "</pre>"
    return response
def _wrap(*kl, **kw):
    """Profile ``func``; print cumulative-time stats once the call
    finishes (even if it raised) and propagate the return value."""
    prof = Profile()
    prof.enable()
    try:
        return func(*kl, **kw)
    finally:
        # Report runs on both success and exception paths.
        prof.disable()
        result = StringIO()
        ps = Stats(prof, stream = result).sort_stats('cumulative')
        ps.print_stats()
        # Python 2 print statement.
        print result.getvalue()
def tearDownClass(cls):
    """Stop the swarm and the UNL test peer, clean up storage, and print
    profile stats when the PROFILE switch is on."""
    # stop swarm
    print("TEST: stopping swarm")
    for node in cls.swarm:
        node.stop()
        time.sleep(0.1)  # not to fast
    cls.test_get_unl_peer.stop()
    shutil.rmtree(STORAGE_DIR)
    # get profiler stats
    if PROFILE:
        stats = Stats(cls.profile)
        stats.strip_dirs()
        stats.sort_stats('cumtime')
        stats.print_stats()
def main():
    """Profiling main function: run ``run_app()`` under cProfile and log
    a time-sorted report."""
    profiler = Profile()
    # Profile.runctx returns the profiler itself, so the rebind is safe.
    profiler = profiler.runctx("run_app()", globals(), locals())

    buf = StringIO()
    report = Stats(profiler, stream=buf)
    report.sort_stats("time")  # or cumulative
    report.print_stats(80)  # 80 == how many to print
    # optional:
    # report.print_callees()
    # report.print_callers()
    logging.info("Profile data:\n%s", buf.getvalue())
def test_api_me_post_success(self):
    """Create 100 bills through the API under cProfile and print the
    cumulative-time report."""
    self.login(self.get_default_test_username(), self.get_default_test_password())
    self.pr = cProfile.Profile()
    self.pr.enable()
    #CREATING y BILLS
    for x in range(0, 100):
        self.apiCreateNewBill(testUtils.random_name_generator(), testUtils.random_number_generator())
    # Report the profile of the 100 API calls above.
    p = Stats (self.pr)
    p.strip_dirs()
    p.sort_stats ('cumtime')
    p.print_stats ()
    self.logout()
def run(self):
    """method calling cProfile and printing the output"""
    tests=self.tests()
    for test in tests:
        tmpBuffer=StringIO.StringIO()
        profile=cProfile.Profile()
        # Run the named test method (test[0]) under the profiler.
        profile.runctx('self.'+str(test[0])+"()",globals(),locals())
        stats=Stats(profile,stream=tmpBuffer)
        stats.sort_stats('time','calls')
        stats.print_stats(1)
        #match=re.findall(r'\bin\b(.*?)\bCPU\b',tmpBuffer.getvalue())
        match=re.findall(r'\bin\b(.*?)\bseconds\b',tmpBuffer.getvalue())
        #There is some difference between 2.6 and 2.7, re with seconds fits both version
        #but in 2.6 it returns CPU string, the filter will clean it
        print str(test[1].__doc__ )+":"+filter(lambda x: x.isdigit() or x==".", str(match[0]))+" CPU Time"
def profiling():
    """Profile a single impulse-response computation (``get_ir``) and
    print the ten costliest entries by internal time."""
    from cProfile import Profile
    from pstats import Stats

    ir_params = {
        'ev_params': {'space': 'e3', 'F': [0.1, 0.1, 0.1], 'j_max': 30},
        'duration': 1,
        'nu': 1.7e-5,
        'sampling_rate': 8000,
    }

    prof = Profile()
    prof.runcall(lambda: get_ir(ir_params))

    report = Stats(prof, stream=sys.stdout)
    report.sort_stats('time')
    report.print_stats(10)
def __profile_code():
    """
    @return: None
    @rtype: None

    Edit this function to do all profiling.
    """
    import cProfile
    from pstats import Stats
    # Hard-coded workstation paths -- adjust before running elsewhere.
    profile_file = "C:\\Users\\PBS Biotech\\Documents\\Personal\\PBS_Office\\MSOffice\\officelib\\pbslib\\test\\profile.txt"
    cProfile.run('full_scan(manyfile3, manyrecipesteps)', filename=profile_file)
    with open("C:\\Users\\Public\\Documents\\PBSSS\\Functional Testing\\tpid.txt", 'w') as f:
        # Only report entries matching 'MSOffice', sorted by internal time.
        stats = Stats(profile_file, stream=f)
        # stats.strip_dirs()
        stats.sort_stats('time')
        stats.print_stats('MSOffice')
def profile():
    """Profile ``solver_profiling()``: print the top 20 entries by internal
    time plus a percentage breakdown of per-function times."""
    import cProfile
    from pstats import Stats
    path_to_stat_file = '/tmp/_profiling_'
    cProfile.run('solver_profiling()', path_to_stat_file)
    s = Stats(path_to_stat_file)
    # s.strip_dirs()
    s.sort_stats('time')
    # s.sort_stats('cum')
    s.print_stats(20)
    # pstats entries are (cc, nc, tt, ct, callers); index 2 is internal time.
    times = [v[2] for v in s.stats.values()]
    times.sort(reverse=True)
    # times = times[:N]
    sum_of_times = sum(times)
    # Python 2 print statement: total time and top-10 percentage shares.
    print sum_of_times, [round(t / sum_of_times * 100, 2) for t in times][:10]