def admin_menu():
    """Show the admin Debug/Release menu.

    Debug profiles the debug menu (writing test/profiling.prof and a text
    report to test/profiling.txt); Release quits pygame and returns to the
    start menu.  Returns None.
    """
    from pygame.display import set_mode, list_modes, set_caption
    from pygame import init, quit
    init()
    screen = set_mode(list_modes()[0])
    set_caption("Hero Misadventures")
    menu = Menu(("Debug", "Release"), screen, text_color=color("White"),
                surface=color("Black"), selection_color=color("Slate Gray"))
    while True:
        choose = menu.update()
        if choose == -1:
            continue
        if choose == 0:
            from cProfile import runctx
            from pstats import Stats
            runctx("from bin.Interaction import debug_menu; debug_menu(screen)",
                   {"screen": screen}, {}, "test/profiling.prof")
            # FIX: the report file was opened but never closed (resource leak,
            # and buffered output could be lost); `file` also shadowed the
            # builtin.  A with-block closes it deterministically.
            with open("test/profiling.txt", "w") as report:
                info = Stats("test/profiling.prof", stream=report)
                info.strip_dirs().sort_stats("cumulative").print_stats()
        elif choose == 1:
            quit()
            start_menu()
        return
def admin_menu():
    """Show the admin Debug/Release menu (duplicate of the sibling definition).

    Debug profiles the debug menu into test/profiling.prof and writes a text
    report; Release quits pygame and returns to the start menu.
    """
    from pygame.display import set_mode, list_modes, set_caption
    from pygame import init, quit
    init()
    screen = set_mode(list_modes()[0])
    set_caption("Hero Misadventures")
    menu = Menu(("Debug", "Release"), screen, text_color=color("White"),
                surface=color("Black"), selection_color=color("Slate Gray"))
    while True:
        choose = menu.update()
        if choose == -1:
            continue
        if choose == 0:
            from cProfile import runctx
            from pstats import Stats
            runctx(
                "from bin.Interaction import debug_menu; debug_menu(screen)",
                {"screen": screen}, {}, "test/profiling.prof")
            # FIX: close the report file deterministically (it was previously
            # left open; `file` also shadowed the builtin).
            with open("test/profiling.txt", "w") as report:
                info = Stats("test/profiling.prof", stream=report)
                info.strip_dirs().sort_stats("cumulative").print_stats()
        elif choose == 1:
            quit()
            start_menu()
        return
def profile():
    """Profile a call to f1() and print cumulative-time statistics."""
    profiler = Profile()
    profiler.runcall(f1)
    Stats(profiler).strip_dirs().sort_stats('cumulative').print_stats()
def main():
    """Main sequence"""
    # Run the analysis once outside any profiler, then drop the big objects
    # so they do not pollute the memory snapshots taken below.
    analyser = Analyser(config=ProfilingConfig)
    data = import_all(config=ProfilingConfig)
    analyser.run(data)
    del analyser
    del data

    profiler = Profile()
    tracemalloc.start(10)
    snap_before = tracemalloc.take_snapshot()
    profiler.runcall(test)
    snap_after = tracemalloc.take_snapshot()

    # CPU-time report.
    cpu_stats = Stats(profiler)
    cpu_stats.strip_dirs()
    cpu_stats.sort_stats('cumulative')
    print("\n===Time Profiler Stats===\n")
    cpu_stats.print_stats(TOP_STATS)
    print("\n===Time Profiler Callers===\n")
    cpu_stats.print_callers(TOP_STATS)

    # Memory-allocation diff between the two snapshots.
    mem_diff = snap_after.compare_to(snap_before, 'lineno')
    print("\n===Memory Profiler Callers===\n")
    for entry in mem_diff[:3]:
        print(entry)
    print("\n===Top Memory Consumer===\n")
    top = mem_diff[0]
    print('\n'.join(top.traceback.format()))
def print_stats(limit=limit, sort=sort, strip_dirs=strip_dirs):
    """Print collected profile statistics, if any.

    Defaults are captured from the enclosing scope's `limit`, `sort` and
    `strip_dirs` at definition time; `sort` and `limit` are argument tuples
    forwarded to Stats.sort_stats / Stats.print_stats.
    """
    if _have_stats:
        stats = Stats(_profile)
        if strip_dirs:
            stats.strip_dirs()
        # FIX: apply() was Python-2-only (removed in Python 3); use the
        # equivalent *-unpacking call instead.
        stats.sort_stats(*sort)
        stats.print_stats(*limit)
def run(number=100000):
    # Benchmark each WSGI framework in `frameworks` (module global): time
    # `number` requests and count profiler calls/functions for one request.
    sys.path[0] = '.'
    path = os.getcwd()
    print(" msec rps tcalls funcs")
    for framework in frameworks:
        # Each framework lives in a subdirectory containing an `app` module.
        os.chdir(os.path.join(path, framework))
        try:
            main = __import__('app', None, None, ['main']).main
            # One full WSGI request; environ/start_response are module globals.
            f = lambda: list(main(environ.copy(), start_response))
            time = timeit(f, number=number)
            # Profile a single request to count total calls and distinct funcs.
            st = Stats(profile.Profile().runctx(
                'f()', globals(), locals()))
            print("%-11s %6.0f %6.0f %7d %6d" % (framework, 1000 * time,
                                                 number / time, st.total_calls, len(st.stats)))
            if 0:  # flip to 1 to dump a detailed top-10 time profile
                st = Stats(profile.Profile().runctx(
                    'timeit(f, number=number)', globals(), locals()))
                st.strip_dirs().sort_stats('time').print_stats(10)
            del sys.modules['app']
        except ImportError:
            print("%-15s not installed" % framework)
        # Evict cached template modules so the next framework imports fresh.
        # NOTE(review): placement inside the per-framework loop assumed from
        # the flattened source — confirm against the original layout.
        modules = [m for m in sys.modules.keys() if m.endswith('helloworld')]
        for m in modules:
            del sys.modules[m]
def tearDown(self):
    """Print the top-50 cumulative-time profile entries when profiling is on."""
    if self.should_profile:
        stats = Stats(self.profile)
        stats.strip_dirs()
        stats.sort_stats('cumulative')
        stats.print_stats(50)
    super().tearDown()
def tearDown(self):
    """Sort profile stats by cumulative time; dump them to disk when verbose."""
    stats = Stats(self.prof)
    stats.sort_stats("cumtime")
    if self.verbose:
        stats.dump_stats("profiles/test_graph.py.prof")
    stats.strip_dirs()
def example_two():
    """Profile insertion_sort (bisect-based insert) on 10**4 random ints.

    Typical output: ~30003 calls in ~0.018s, dominated by list.insert inside
    insert_value_better, with bisect_left doing the O(log n) search.
    """
    max_size = 10**4
    data = [randint(0, max_size) for _ in range(max_size)]
    profiler = Profile()
    profiler.runcall(lambda: insertion_sort(data))
    # Extract statistics about the profiled run via pstats.
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('cumulative').print_stats()
def tearDown(self):
    """Report profiling results"""
    p = Stats(self.pr)
    p.strip_dirs()
    p.sort_stats('cumtime')
    p.print_stats()
    # FIX: `print "..."` is Python-2-only syntax (SyntaxError on Python 3);
    # use the print() function.
    print("\n--->>>")
def profilingStats():
    """Print the profile saved in the 'stats' file, then delete the file.

    Note: p.print_stats is monkey-patched to the module-level print_stats
    callable, so the chained call below invokes that function with (p, 100).
    """
    print(f"# Profiling\n")
    p = Stats('stats')
    p.print_stats = print_stats
    prepared = p.strip_dirs().sort_stats('cumulative')
    prepared.print_stats(p, 100)
    os.remove('stats')
    print(f"# Other prints\n")
def _execute(self, func, phase_name, n, *args): if not self.profile_dir: return func(*args) basename = '%s-%s-%d-%02d-%d' % ( self.contender_name, phase_name, self.objects_per_txn, n, self.rep) txt_fn = os.path.join(self.profile_dir, basename + ".txt") prof_fn = os.path.join(self.profile_dir, basename + ".prof") profiler = cProfile.Profile() profiler.enable() try: res = func(*args) finally: profiler.disable() profiler.dump_stats(prof_fn) with open(txt_fn, 'w') as f: st = Stats(profiler, stream=f) st.strip_dirs() st.sort_stats('cumulative') st.print_stats() return res
def run(name, ctx, number=100000):
    # Benchmark template rendering across `frameworks` (module global):
    # render template `name` with context `ctx` `number` times per framework.
    # NOTE(review): reads module global `path` (set elsewhere in the file).
    sys.path[0] = '.'
    print("\n%-16s msec rps tcalls funcs" % name)
    for framework in frameworks:
        os.chdir(os.path.join(path, framework))
        # Skip frameworks that do not ship this template.
        if not os.path.exists(name):
            print("%-22s not available" % framework)
            continue
        try:
            main = __import__('app', None, None, ['main']).main
            render = main(name)
            f = lambda: render(ctx)
            f()  # warm up first call
            time = timeit(f, number=number)
            # Profile a single render to count calls / distinct functions.
            st = Stats(profile.Profile().runctx('f()', globals(), locals()))
            print("%-16s %6.0f %6.0f %7d %6d" % (framework, 1000 * time,
                                                 number / time, st.total_calls, len(st.stats)))
            if 0:  # flip to 1 for a detailed top-10 time profile
                st = Stats(profile.Profile().runctx('timeit(f, number=number)',
                                                    globals(), locals()))
                st.strip_dirs().sort_stats('time').print_stats(10)
            del sys.modules['app']
        except ImportError:
            print("%-22s not installed" % framework)
def stats_for_fib(type_, fib):
    """Profile fib(30), dump raw stats to '<type_>.stats', and print a report."""
    profiler = Profile()
    profiler.runcall(fib, 30)
    profiler.dump_stats(type_.lower().replace(' ', '_') + '.stats')
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('time', 'cumulative')
    print_stats(type_, stats)
def example_one():
    """Profile insertion_sort (linear-scan insert) on 10**4 random ints.

    Typical output: ~20003 calls in ~0.78s; the cumulative time is dominated
    by insert_value, whose per-insertion linear scan is the bottleneck.
    """
    max_size = 10**4
    data = [randint(0, max_size) for _ in range(max_size)]
    profiler = Profile()
    profiler.runcall(lambda: insertion_sort(data))
    # Extract statistics about the profiled run via pstats.
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('cumulative').print_stats()
def wrap(*args, **kwargs):
    """Run the wrapped callable under cProfile; print top 20 by total time."""
    profiler = Profile()
    result = profiler.runcall(func, *args, **kwargs)
    stats = Stats(profiler)
    stats.strip_dirs()
    stats.sort_stats('tottime')
    stats.print_stats(20)
    return result
def tearDown(self):
    # NOTE(review): this bare `return` makes everything below unreachable —
    # profiling output appears to have been deliberately disabled; confirm
    # intent before removing the dead code.
    return
    """finish any test"""
    p = Stats(self.pr)
    p.strip_dirs()
    p.sort_stats('cumtime')
    p.print_stats()
    print("\n--->>>")
def profile():
    """Profile main(50); print the ten most expensive functions by time."""
    import cProfile
    from pstats import Stats
    cProfile.run('main(50)', 'pstats')
    stats = Stats('pstats')
    stats.strip_dirs().sort_stats('time').print_stats(10)
def tearDownClass(cls):
    # Shut down the HTTP test server and close the client, then prepare
    # profiler statistics.
    # NOTE(review): the early return fires when the server IS running, so the
    # quit request below is only sent when `is_running` is falsy — this looks
    # inverted; verify how `is_running` is maintained.
    if cls.is_running:
        return
    urlopen('http://localhost:8000/quit')
    cls.cli.close()
    p = Stats(cls.profiler)
    p.strip_dirs()
    p.sort_stats('cumtime')
    # NOTE(review): stats are sorted but never printed or dumped — the output
    # step may be missing.
def tearDown(self):
    '''Disconnect from statseg'''
    self.stat.disconnect()
    Stats(self.profile).strip_dirs().sort_stats('cumtime').print_stats()
    print("\n--->>>")
def tearDown(self):
    """finish any test"""
    # Only report when a profiler was actually attached by the test.
    if not hasattr(self, "prof"):
        return
    stats = Stats(self.prof)
    stats.strip_dirs()
    stats.sort_stats('cumtime')
    stats.print_stats()
    print("\n--->>>")
def tearDown(self):
    """Disconnect from statseg"""
    self.stat.disconnect()
    stats = Stats(self.profile)
    stats.strip_dirs()
    stats.sort_stats("cumtime")
    stats.print_stats()
    print("\n--->>>")
def print_stats(statsfile, statstext):
    """Render a pstats dump to a text file and open it in the OS viewer."""
    with open(statstext, 'w') as out:
        stats = Stats(statsfile, stream=out)
        stats.strip_dirs()
        stats.sort_stats('cumtime')
        # mystats.print_callers('_strptime')
        stats.print_stats()
    startfile(statstext)
def tearDown(self):
    """Report cumulative-time profile stats when PROFILE is enabled."""
    if PROFILE:
        stats = Stats(self.pr)
        stats.strip_dirs()
        stats.sort_stats('cumtime')
        stats.print_stats()
    if DEBUG:
        print('\n{}>>>'.format('-' * 77))
def inner(*args, **kwargs):
    """Profile one call of the wrapped function; print stats and callers."""
    profiler = Profile()
    profiler.runcall(func, *args, **kwargs)
    stats = Stats(profiler)
    stats.strip_dirs()
    stats.sort_stats(field)
    print("Profile for {}()".format(func.__name__))
    stats.print_stats()
    stats.print_callers()
def search_method():
    """Match for applicable methods and their arguments.

    Input:
    * username: username.
    * theory_name: name of the theory.
    * thm_name: name of the theorem.

    Returns:
    * search_res: list of search results.
    * ctxt: current proof context.
    """
    data = json.loads(request.get_data().decode("utf-8"))
    # Optional cProfile capture, controlled per-request by the client.
    if data['profile']:
        pr = cProfile.Profile()
        pr.enable()
    # Build the proof-state cache on a miss (timed to stdout).
    if not proof_cache.check_cache(data):
        start_time = time.perf_counter()
        proof_cache.create_cache(data)
        print("Load: %f" % (time.perf_counter() - start_time))
    # Limit theory loading to everything before the target theorem, if given.
    if data['thm_name'] != '':
        limit = ('thm', data['thm_name'])
    else:
        limit = None
    basic.load_theory(data['theory_name'], limit=limit, username=data['username'])
    start_time = time.perf_counter()
    state = proof_cache.states[data['index']]
    fact_ids = data['step']['fact_ids']
    goal_id = data['step']['goal_id']
    search_res = state.search_method(goal_id, fact_ids)
    # Pretty-print goals/facts in the results using unicode output.
    with settings.global_setting(unicode=True):
        for res in search_res:
            if '_goal' in res:
                res['_goal'] = [printer.print_term(t) for t in res['_goal']]
            if '_fact' in res:
                res['_fact'] = [printer.print_term(t) for t in res['_fact']]
    vars = state.get_vars(goal_id)
    # Context variables get highlighted unicode rendering.
    with settings.global_setting(unicode=True, highlight=True):
        print_vars = dict((k, printer.print_type(v)) for k, v in vars.items())
    print("Response:", time.perf_counter() - start_time)
    if data['profile']:
        p = Stats(pr)
        p.strip_dirs()
        p.sort_stats('cumtime')
        p.print_stats()
    return jsonify({'search_res': search_res, 'ctxt': print_vars})
def stats_for_main():
    """Profile main(), dump raw stats to 'main.stats', and print 3 reports."""
    profiler = Profile()
    profiler.runcall(main)
    profiler.dump_stats('main.stats')
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('time', 'cumulative')
    print_stats('MAIN - ALL STATS', stats)
    print_stats('MAIN - CALLERS', stats, 'sleep')
    print_stats('MAIN - CALLEES', stats, 'heavy')
def build_document(self, file_name): """This is the entry point for the NetcfBuilders from the ingestManager. These documents are id'd by fcstValidEpoch. The data section is an array each element of which contains variable data and a station name. To process this file we need to itterate the document by recNum and process the station name along with all the other variables in the variableList. Args: file_name (str): the name of the file being processed Returns: [dict]: document """ # noinspection PyBroadException try: # stash the file_name so that it can be used later self.file_name = os.path.basename(file_name) # pylint: disable=no-member self.ncdf_data_set = nc.Dataset(file_name) if len(self.station_names) == 0: result = self.cluster.query("""SELECT raw name FROM mdata WHERE type = 'MD' AND docType = 'station' AND subset = 'METAR' AND version = 'V01'; """) self.station_names = list(result) self.initialize_document_map() logging.info("%s building documents for file %s", self.__class__.__name__, file_name) if self.do_profiling: with cProfile.Profile() as _pr: self.handle_document() with open("profiling_stats.txt", "w") as stream: stats = Stats(_pr, stream=stream) stats.strip_dirs() stats.sort_stats("time") stats.dump_stats("profiling_stats.prof") stats.print_stats() else: self.handle_document() # pylint: disable=assignment-from-no-return document_map = self.get_document_map() data_file_id = self.create_data_file_id(file_name=file_name) data_file_doc = self.build_datafile_doc( file_name=file_name, data_file_id=data_file_id, ) document_map[data_file_doc["id"]] = data_file_doc return document_map except Exception as _e: # pylint:disable=broad-except logging.error( "%s: Exception with builder build_document: error: %s", self.__class__.__name__, str(_e), ) return {}
def tearDown(self):
    """Drop the test collections (unless debugging) and report profiling."""
    if not DEBUG_MODE:
        self.test_elasticity.collection.drop()
        self.test_tasks.collection.drop()
    if PROFILE_MODE:
        stats = Stats(self.pr)
        stats.strip_dirs()
        stats.sort_stats('cumtime')
        stats.print_stats()
        print("\n--->>>")
def print_stats(profiler, printCallers=False):
    """Print cumulative-time stats for *profiler*.

    When printCallers is exactly True, show caller relationships instead of
    the flat per-function listing.
    """
    from pstats import Stats
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('cumulative')
    # Dispatch on the requested report style (identity check preserved).
    reporter = stats.print_callers if printCallers is True else stats.print_stats
    reporter()
def show_time_profiler_results(pr, top_records):
    """
    Show results of timed profiling.
    :param pr: profiler instance
    :param top_records: how many top function calls to show.
    """
    if not pr:
        return
    Stats(pr).strip_dirs().sort_stats('cumulative').print_stats(top_records)
def profile_func(function):
    """ Profile execution of a function """
    profiler = Profile()
    profiler.runcall(function)
    Stats(profiler).strip_dirs().sort_stats('cumulative').print_stats()
def tearDown(self):
    """Optionally report compression-count debug info and profiling stats."""
    if DEBUG:
        print("\nCompression was run on a total of %d times\n" % self.c._compress_count)
    if PROFILE:
        stats = Stats(self.pr)
        stats.strip_dirs()
        stats.sort_stats('cumtime')
        stats.print_stats()
    if DEBUG:
        print('\n{}>>>'.format('-' * 77))
def example_three():
    """Profile my_program and print both the flat listing and its callers.

    The flat profile shows my_utility dominating execution time without
    explaining why; print_callers() reveals it is invoked ~20000x from
    first_func and ~200x from second_func — the context the flat view hides.
    """
    profiler = Profile()
    profiler.runcall(my_program)
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('cumulative').print_stats()
    print('\n---- Callers -----\n')
    # Show how often each function was reached from each call site.
    stats.print_callers()
def tearDown(self):
    """Stop workers, clear endpoint data, then report profiling if enabled."""
    for worker in self.driver._workers:
        worker.stop()
        worker.wait()
    self.cvx.endpoint_data.clear()
    super(MechTestBase, self).tearDown()
    if ENABLE_PROFILER:
        stats = Stats(self.pr)
        stats.strip_dirs()
        stats.sort_stats('cumtime')
        stats.print_stats()
def test_2():
    """Profile insertion_sort on 10**4 random ints; print cumulative stats."""
    size = 10**4
    sample = [randint(0, size) for _ in range(size)]
    profiler = Profile()
    profiler.runcall(lambda: insertion_sort(sample))
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('cumulative').print_stats()
def test_profile_get_combination_depth(self):
    """Profile get_permutation_depth() and print cumulative-time stats."""
    self.pr = cProfile.Profile()
    self.pr.enable()
    self.depth = self.permutations.get_permutation_depth()
    p = Stats(self.pr)
    p.strip_dirs()
    p.sort_stats('cumtime')
    p.print_stats()
    # FIX: `print "..."` is Python-2-only syntax (SyntaxError on Python 3).
    print("\n--->>>")
def tearDown(self):
    """When profiling is enabled, optionally dump raw stats and print reports."""
    if ENABLE_PROFILE:
        if DUMP_PROFILE:
            self.pr.dump_stats('profile.out')
        stats = Stats(self.pr)
        stats.strip_dirs()
        stats.sort_stats('time')
        stats.print_stats(40)
        # Drill into specific hot spots (regex patterns, parens escaped).
        stats.print_callees('types.py:846\(validate_value', 20)
        stats.print_callees('types.py:828\(_validate_primitive_value', 20)
        stats.print_callees('uploadsession.py:185\(write', 20)
    TestBase.teardown(self)
def tearDownClass(cls):
    """Stop every swarm node, remove storage, and print profiler stats."""
    print("TEST: stopping swarm")
    for node in cls.swarm:
        node.stop()
    shutil.rmtree(STORAGE_DIR)
    stats = Stats(cls.profile)
    stats.strip_dirs()
    stats.sort_stats('cumtime')
    stats.print_stats()
def profiling(print_stats):
    """
    Run the model for the given set of parameters and additionally
    perform some profiling.
    """
    # Profile one sample_runs() invocation; the names in the statement are
    # resolved from the caller's globals/locals.
    cProfile.runctx(
        "sample_runs(n_runs, single_steps, N, link_density,"
        "rewiring_prob, update_timescale, adj_mats)",
        globals(),
        locals(),
        "profile.prof",
    )
    stats = Stats("profile.prof")
    if print_stats:
        stats.strip_dirs().sort_stats("time").print_stats()
    system("rm profile.prof")
def tearDownClass(cls):
    """Stop the swarm and peer, remove storage, and report profiling."""
    print("TEST: stopping swarm")
    for node in cls.swarm:
        node.stop()
        time.sleep(0.1)  # not to fast
    cls.test_get_unl_peer.stop()
    shutil.rmtree(STORAGE_DIR)
    if PROFILE:
        stats = Stats(cls.profile)
        stats.strip_dirs()
        stats.sort_stats('cumtime')
        stats.print_stats()
def test_api_me_post_success(self):
    """Create 100 bills through the API under cProfile; print cumtime stats."""
    self.login(self.get_default_test_username(), self.get_default_test_password())
    self.pr = cProfile.Profile()
    self.pr.enable()
    # CREATING y BILLS
    for _ in range(100):
        self.apiCreateNewBill(testUtils.random_name_generator(),
                              testUtils.random_number_generator())
    stats = Stats(self.pr)
    stats.strip_dirs()
    stats.sort_stats('cumtime')
    stats.print_stats()
    self.logout()
def process_view(self, request, view_func, view_args, view_kwargs):
    """Django middleware hook: profile views whose dotted name matches self.regex.

    Returns the profiled view's response for matching views; returns None for
    non-matching views so Django continues normal request handling.  Stats
    accumulate across runs into self.filename.
    """
    from cProfile import Profile
    from pstats import Stats
    # FIX: `func_name` is Python-2-only; `__name__` works on both 2 and 3.
    full_name = "{v.__module__}.{v.__name__}".format(v=view_func)
    if self.regex.match(full_name):
        profile = Profile()
        response = profile.runcall(view_func, request, *view_args, **view_kwargs)
        stats = Stats(profile)
        if os.path.exists(self.filename):
            stats.add(self.filename)  # merge with previously saved runs
        stats.strip_dirs()
        stats.dump_stats(self.filename)
        return response
def do(impl, count):
    """Call impl() `count` times under cProfile; print top-10 by call count.

    Returns (warm-up results, profiled results) so callers can compare.
    """
    # warm-up
    warm = [impl() for _ in _range(count)]
    profiler = Profile()
    profiler.enable()
    timed = [impl() for _ in _range(count)]
    profiler.disable()
    buf = StringIO()
    stats = Stats(profiler, stream=buf)
    stats.strip_dirs()
    stats.sort_stats('calls').print_stats(10)
    print(buf.getvalue().lstrip())
    buf.close()
    return warm, timed
def run(frameworks, number, do_profile):
    # Benchmark the given WSGI frameworks: time `number` requests each and
    # count profiler calls/functions for a single request.
    print("Benchmarking frameworks:", ', '.join(frameworks))
    sys.path[0] = '.'
    path = os.getcwd()
    print(" ms rps tcalls funcs")
    for framework in frameworks:
        # Each framework lives in its own subdirectory with an `app` module.
        os.chdir(os.path.join(path, framework))
        try:
            main = __import__('app', None, None, ['main']).main
            # One full request; environ/start_response are module globals.
            f = lambda: list(main(environ.copy(), start_response))
            time = timeit(f, number=number)
            st = Stats(profile.Profile().runctx(
                'f()', globals(), locals()))
            print("%-11s %6.0f %7.0f %7d %6d" % (framework, 1000 * time,
                                                 number / time, st.total_calls, len(st.stats)))
            if do_profile:
                # Detailed top-10 profile of the whole timing loop.
                st = Stats(profile.Profile().runctx(
                    'timeit(f, number=number)', globals(), locals()))
                st.strip_dirs().sort_stats('time').print_stats(10)
            del sys.modules['app']
        except ImportError:
            print("%-15s not installed" % framework)
def tearDown(self):
    """Print the top-40 cumulative-time entries, then run the base teardown."""
    stats = Stats(self.pr)
    stats.strip_dirs()
    stats.sort_stats('cumtime')
    stats.print_stats(40)
    TestBase.teardown(self)
pr.enable() markup = exporter.render(content_state) pr.disable() p = Stats(pr) def prettify(markup): return re.sub(r'</?(body|html|head)>', '', BeautifulSoup(markup, 'html5lib').prettify()).strip() pretty = prettify(markup) # Display in console. print(pretty) p.strip_dirs().sort_stats('cumulative').print_stats(0) styles = """ /* Tacit CSS framework https://yegor256.github.io/tacit/ */ input,textarea,select,button,html,body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:18px;font-stretch:normal;font-style:normal;font-weight:300;line-height:29.7px}input,textarea,select,button,html,body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:18px;font-stretch:normal;font-style:normal;font-weight:300;line-height:29.7px}th{font-weight:600}td,th{border-bottom:1.08px solid #ccc;padding:14.85px 18px}thead th{border-bottom-width:2.16px;padding-bottom:6.3px}table{display:block;max-width:100%;overflow-x:auto}input,textarea,select,button,html,body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:18px;font-stretch:normal;font-style:normal;font-weight:300;line-height:29.7px}input,textarea,select,button{display:block;max-width:100%;padding:9.9px}label{display:block;margin-bottom:14.76px}input[type="submit"],input[type="reset"],button{background:#f2f2f2;border-radius:3.6px;color:#8c8c8c;cursor:pointer;display:inline;margin-bottom:18px;margin-right:7.2px;padding:6.525px 
23.4px;text-align:center}input[type="submit"]:hover,input[type="reset"]:hover,button:hover{background:#d9d9d9;color:#000}input[type="submit"][disabled],input[type="reset"][disabled],button[disabled]{background:#e6e6e6;color:#b3b3b3;cursor:not-allowed}input[type="submit"],button[type="submit"]{background:#367ac3;color:#fff}input[type="submit"]:hover,button[type="submit"]:hover{background:#255587;color:#bfbfbf}input[type="text"],input[type="password"],input[type="email"],input[type="url"],input[type="phone"],input[type="tel"],input[type="number"],input[type="datetime"],input[type="date"],input[type="month"],input[type="week"],input[type="color"],input[type="time"],input[type="search"],input[type="range"],input[type="file"],input[type="datetime-local"],select,textarea{border:1px solid #ccc;margin-bottom:18px;padding:5.4px 6.3px}input[type="checkbox"],input[type="radio"]{float:left;line-height:36px;margin-right:9px;margin-top:8.1px}input,textarea,select,button,html,body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:18px;font-stretch:normal;font-style:normal;font-weight:300;line-height:29.7px}pre,code,kbd,samp,var,output{font-family:Menlo,Monaco,Consolas,"Courier New",monospace;font-size:16.2px}pre{border-left:1.8px solid #96bbe2;line-height:25.2px;margin-top:29.7px;overflow:auto;padding-left:18px}pre code{background:none;border:0;line-height:29.7px;padding:0}code{background:#ededed;border:1.8px solid #ccc;border-radius:3.6px;display:inline-block;line-height:18px;padding:3px 6px 2px}input,textarea,select,button,html,body{font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif;font-size:18px;font-stretch:normal;font-style:normal;font-weight:300;line-height:29.7px}h1,h2,h3,h4,h5,h6{color:#000;margin-bottom:18px}h1{font-size:36px;font-weight:500;margin-top:36px}h2{font-size:25.2px;font-weight:400;margin-top:27px}h3{font-size:21.6px;margin-top:21.6px}h4{font-size:18px;margin-top:18px}h5{font-size:14.4px;font-weight:bold;margin-top:18px;text-transform:uppercase}h6{color:#ccc;font-size:14.4px;font-weight:bold;margin-top:18px;text-transform:uppercase}input,textarea,select,button,html,body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:18px;font-stretch:normal;font-style:normal;font-weight:300;line-height:29.7px}a{color:#367ac3;text-decoration:none}a:hover{text-decoration:underline}hr{border-bottom:1px solid #ccc}small{font-size:15.3px}em,i{font-style:italic}strong,b{font-weight:600}*{border:0;border-collapse:separate;border-spacing:0;box-sizing:border-box;margin:0;outline:0;padding:0;text-align:left;vertical-align:baseline}html,body{height:100%;width:100%}body{background:#f5f5f5;color:#1a1a1a;padding:36px}p,ul,ol,dl,blockquote,hr,pre,table,form,fieldset,figure,address{margin-bottom:29.7px}section{margin-left:auto;margin-right:auto;max-width:100%;width:900px}article{background:#fff;border:1.8px solid #d9d9d9;border-radius:7.2px;padding:43.2px}header{margin-bottom:36px}footer{margin-top:36px}nav{text-align:center}nav ul{list-style:none;margin-left:0;text-align:center}nav ul li{display:inline;margin-left:9px;margin-right:9px}nav ul li:first-child{margin-left:0}nav ul li:last-child{margin-right:0}ol,ul{margin-left:29.7px}li ol,li ul{margin-bottom:0}@media (max-width: 767px){body{padding:18px}article{border-radius:0;margin:-18px;padding:18px}textarea,input,select{max-width:100%}fieldset{min-width:0}section{width:auto}fieldset,x:-moz-any-link{display:table-cell}} /* Custom styles to help with debugging */ blockquote { border-left: 0.25rem solid #aaa; padding-left: 1rem; font-style: italic; } 
.u-text-center { text-align: center; } a:hover, a:focus { outline: 1px solid red; } .hashtag { color: pink; } .list-item--depth-1 { margin-left: 5rem; } """ # Output to a styled HTML file for development. with codecs.open('example.html', 'w', 'utf-8') as file: file.write("""
nx2tot = max(nx2list) ny1tot = min(ny1list) ny2tot = max(ny2list) nxtot = nx2tot-nx1tot; print nxtot nytot = ny2tot-ny1tot; print nytot ymaxtot = (nytot-1)*dy; xmaxtot = (nxtot-1)*dx surf_ytot = linspace(0,ymaxtot,nytot); #print surf_ytot[1]-surf_ytot[0]; surf_xtot = linspace(0,xmaxtot,nxtot); #print surf_xtot[1]-surf_xtot[0]; surf_xgridtot, surf_ygridtot = meshgrid(surf_xtot,surf_ytot) settemp = solution[ny1tot:ny2tot,nx1tot:nx2tot] print("Output size is "+str(settemp.shape)+".") #fig = plt.figure(retfig) #ax = fig.add_subplot(111, projection='3d') #ax.plot_surface(surf_xgridtot, surf_ygridtot, settemp,rstride=5,cstride=5) #ax.set_xlabel('x') #ax.set_ylabel('y') #ax.set_title('total') #retfig +=1 # Save it savetxt('solution.gz',solution) import cProfile cProfile.run('run()', 'pstats_bsc5') from pstats import Stats p = Stats('pstats_bsc5') p.strip_dirs().sort_stats('time').print_stats(10)
def profile():
    """Profile parse_replays(); print the 30 costliest functions by time."""
    cProfile.run("parse_replays()", "replay_profile")
    Stats("replay_profile").strip_dirs().sort_stats("time").print_stats(30)
from random import randint
from cProfile import Profile
from pstats import Stats


def insertion_sort(data):
    """Return a new sorted list built by inserting each value in order."""
    result = []
    for value in data:
        insert_value(result, value)
    return result


def insert_value(array, value):
    """Insert *value* into the sorted list *array*, keeping it sorted."""
    for i, existing in enumerate(array):
        if existing > value:
            array.insert(i, value)
            break
    else:
        # Larger than everything present: append at the end.
        array.append(value)


max_size = 10 ** 4
data = [randint(0, max_size) for _ in range(10)]
print(data)
result = insertion_sort(data)
print(result)

# Profile the sort and print cumulative-time statistics.
profiler = Profile()
profiler.runcall(lambda: insertion_sort(data))
stats = Stats(profiler)
stats.strip_dirs()
stats.sort_stats('cumulative')
stats.print_stats()
def __init__(self, filename, strip_dirs=True, remote_js=False):
    """Load a pstats dump and precompute frame timing / caller tables."""
    self.name = os.path.basename(filename)
    stats = Stats(filename)
    if strip_dirs:
        self.stats = stats.strip_dirs()
    else:
        self.stats = stats
    self.timings, self.callers = _calc_frames(stats)
    self.remote_js = remote_js
""" Created on Jan 13, 2014 @Company: PBS Biotech @Author: Nathan Starkweather """ from pstats import Stats stats_file = "C:\\Users\\PBS Biotech\\Documents\\Personal\\PBS_Office\\MSOffice\\officelib\\pbslib\\test\\profile2.txt" from datetime import datetime s = Stats(stats_file) s.strip_dirs() s.sort_stats('time') s.print_callers(0.1)
def test_performance(self):
    # Compare the call count of Construct._from_row hydration against an
    # equivalent hand-written builder; both are profiled over 5000 rounds and
    # must perform the same total number of calls.

    @define
    def test_func(a, b):
        def body(a_id, a_name, b_id, b_name):
            pass
        return body, [a.id, a.name, b.id, b.name]

    struct = Construct({
        'r1': if_(self.a_cls.id, then_=test_func.defn(self.a_cls, self.b_cls)),
        'r2': if_(self.a_cls.name, then_=test_func.defn(self.a_cls, self.b_cls)),
        'r3': if_(self.b_cls.id, then_=test_func.defn(self.a_cls, self.b_cls)),
        'r4': if_(self.b_cls.name, then_=test_func.defn(self.a_cls, self.b_cls)),
    })
    row = (
        self.session.query(*struct._columns)
        .join(self.b_cls.a)
        .first()
    )
    # warm-up
    for _ in _range(5000):
        struct._from_row(row)
    # Profiled run #1: Construct-based hydration.
    profile1 = Profile()
    profile1.enable()
    for _ in _range(5000):
        struct._from_row(row)
    profile1.disable()
    out1 = StringIO()
    stats1 = Stats(profile1, stream=out1)
    stats1.strip_dirs()
    stats1.sort_stats('calls').print_stats(10)
    print(out1.getvalue().lstrip())
    out1.close()
    # Re-query the same row with explicit labels for the manual builder.
    row = (
        self.session.query(
            self.a_cls.id.label('a_id'),
            self.a_cls.name.label('a_name'),
            self.b_cls.id.label('b_id'),
            self.b_cls.name.label('b_name'),
        )
        .join(self.b_cls.a)
        .first()
    )

    def make_object(row):
        # Hand-written equivalent of the Construct above.
        Object(dict(
            r1=(
                test_func.func(row.a_id, row.a_name, row.b_id, row.b_name)
                if row.a_id else None
            ),
            r2=(
                test_func.func(row.a_id, row.a_name, row.b_id, row.b_name)
                if row.a_name else None
            ),
            r3=(
                test_func.func(row.a_id, row.a_name, row.b_id, row.b_name)
                if row.b_id else None
            ),
            r4=(
                test_func.func(row.a_id, row.a_name, row.b_id, row.b_name)
                if row.b_name else None
            ),
        ))

    # warm-up
    for _ in _range(5000):
        make_object(row)
    # Profiled run #2: manual hydration.
    profile2 = Profile()
    profile2.enable()
    for _ in _range(5000):
        make_object(row)
    profile2.disable()
    out2 = StringIO()
    stats2 = Stats(profile2, stream=out2)
    stats2.strip_dirs()
    stats2.sort_stats('calls').print_stats(10)
    print(out2.getvalue().lstrip())
    out2.close()
    # The Construct implementation must not add any extra Python calls.
    self.assertEqual(stats1.total_calls, stats2.total_calls)
def tearDown(self):
    """If profiling is enabled, print cumulative-time stats for the test."""
    if not use_profiler:
        return
    stats = Stats(self.pr)
    stats.strip_dirs()
    stats.sort_stats('cumtime')
    stats.print_stats()