def raw_sql_profiler():
    profiler = Profile()
    for _ in range(10):
        profiler.runctx(
            "list(HttcOrder.objects.raw('''SELECT * FROM httc_order WHERE mk_contract_id = 1000 ORDER BY price'''))",
            locals(), globals())
    convert(profiler.getstats(), 'raw_sql_profiler.kgrind')
def generate_profiler_entry():
    def func():
        a = 1 + 2
        return a

    prof = Profile()
    prof.runctx("func()", locals(), globals())
    return prof.getstats()
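# A minimal sketch of how the raw entries returned by getstats() can be
# inspected; the _demo workload below is only an illustration, not part of
# the original source.
from cProfile import Profile


def _demo():
    return sum(i * i for i in range(1000))


prof = Profile()
prof.runctx("_demo()", globals(), locals())
for entry in prof.getstats():
    # Each entry exposes the code object (or builtin name), call counts
    # and cumulative/inline timings.
    print(entry.code, entry.callcount, entry.totaltime, entry.inlinetime)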
def profile():
    # cProfile.run('main()', 'stats')
    # Tweede test met RGB topo coloring zonder schaduw.py
    from cProfile import Profile
    from pyprof2calltree import convert, visualize
    profiler = Profile()
    profiler.runctx('main()', locals(), globals())
    visualize(profiler.getstats())
def cursor_execute_profiler():
    cursor = connection.cursor()
    profiler = Profile()
    for _ in range(10):
        profiler.runctx(
            "cursor.execute('''SELECT * FROM httc_order WHERE mk_contract_id = 1000 ORDER BY price''')",
            locals(), globals())
    convert(profiler.getstats(), 'cursor_execute_profiler.kgrind')
def profile_create_offices_for_departement(departement):
    """
    Run create_offices_for_departement with profiling.
    """
    profiler = Profile()
    command = "create_offices_for_departement('%s')" % departement
    profiler.runctx(command, locals(), globals())
    relative_filename = 'profiling_results/create_index_dpt%s.kgrind' % departement
    filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            relative_filename)
    convert(profiler.getstats(), filename)
def __call__(self, environ, start_response):
    """
    This will be called when the application is being run.  This can be
    either:

        - a request to the __profiler__ framework to display profiled
          information, or
        - a normal request that will be profiled.

    Returns the WSGI application.
    """
    # If we're not accessing the profiler, profile the request.
    req = Request(environ)
    self.profiling_enabled = os.path.exists(ENABLED_FLAG_FILE)
    if req.path_info_peek() != self.profiler_path.strip('/'):
        if not self.profiling_enabled:
            return self.app(environ, start_response)
        _locals = locals()
        prof = Profile()
        start_timestamp = datetime.now()
        prof.runctx("app = self.app(environ, start_response)",
                    globals(), _locals)
        stats = prof.getstats()
        session = ProfilingSession(stats, environ, start_timestamp)
        self._backend.add(session)
        return _locals['app']

    req.path_info_pop()

    # We could import `routes` and use something like that here, but since
    # not all frameworks use this, it might become an external dependency
    # that isn't needed.  So parse the URL manually using :class:`webob`.
    query_param = req.path_info_pop()
    if not query_param:
        wsgi_app = self.list_profiles(req)
    elif query_param == "graph":
        wsgi_app = self.render_graph(req)
    elif query_param == "media":
        wsgi_app = self.media(req)
    elif query_param == "profiles":
        wsgi_app = self.show_profile(req)
    elif query_param == "delete":
        wsgi_app = self.delete_profile(req)
    else:
        wsgi_app = HTTPNotFound()

    return wsgi_app(environ, start_response)
def analyze_profiler(profiler: Profile):
    methods = [
        '__add__', '__sub__', '__mul__', '__div__', '__pow__',
        'invert', '_convert'
    ]
    stats = profiler.getstats()
    # Built-in entries expose a plain string as `code`, so guard the attribute access.
    stats = filter(
        lambda row: re.match('.*modularinteger',
                             getattr(row.code, 'co_filename', '')) is not None,
        stats)
    stats = filter(lambda row: row.code.co_name in methods, stats)
    stats = map(lambda row: (row.code.co_name, row.callcount), stats)
    # Materialise the lazy filter/map pipeline so it can be both summed and returned.
    stats = [(name.strip('_'), count) for name, count in stats]
    total_count = sum((entry[1] for entry in stats), 0)
    return {'total_count': total_count, 'detailed_usage': stats}
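# A minimal usage sketch for analyze_profiler; the profiled expression is an
# arbitrary stand-in, so no '.*modularinteger' frames are matched here and
# the resulting report is simply empty.
from cProfile import Profile

profiler = Profile()
profiler.runctx("sum(i * i for i in range(1000))", globals(), locals())
report = analyze_profiler(profiler)
print(report['total_count'])      # 0 for this toy workload
print(report['detailed_usage'])   # []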
def test_create_graph(self):
    """ Test that a graph gets generated for a test function """
    def test_func():
        pass

    prof = Profile()
    prof.runctx("test_func()", locals(), globals())
    graph = linesman.create_graph(prof.getstats())

    # We should only ever have three items here
    assert_equals(len(graph), 3)

    # Assert that the three items we have are as expected
    assert_equals(graph.nodes(), [
        '<string>.<module>',
        'linesman.tests.test_graphs.test_func',
        "<method 'disable' of '_lsprof.Profiler' objects>"])

    # Assert that the correct edges are set-up
    assert_equals(
        [('<string>.<module>', 'linesman.tests.test_graphs.test_func')],
        graph.edges())
def update_data_profiling_wrapper(create_full, create_partial,
                                  disable_parallel_computing=False):
    if Profiling.ACTIVATED:
        logger.info("STARTED run with profiling")
        profiler = Profile()
        profiler.runctx(
            "update_data(create_full, create_partial, disable_parallel_computing)",
            locals(), globals())
        relative_filename = 'profiling_results/create_index_run.kgrind'
        filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                relative_filename)
        convert(profiler.getstats(), filename)
        logger.info(
            "COMPLETED run with profiling: exported profiling result as %s",
            filename)
    else:
        logger.info("STARTED run without profiling")
        update_data(create_full, create_partial, disable_parallel_computing)
        logger.info("COMPLETED run without profiling")
def main():
    N = 100000  # Number of tests

    # Different argument types
    variants = [
        (1, 3),
        ((1, 3), ),
        ("fox", ),
    ]
    args = []
    for i in range(N):
        args.append(variants[randint(0, len(variants) - 1)])

    def run_ovl():
        for arg in args:
            func_ovl(*arg)

    def run_normal():
        for arg in args:
            func_normal(*arg)

    print("Running benchmark...")
    time_ovl = timeit.timeit(run_ovl, number=1) / N
    time_normal = timeit.timeit(run_normal, number=1) / N

    profiler = Profile()
    profiler.runctx("run_ovl()", globals(), locals())
    convert(profiler.getstats(), "C:/Users/andreasxp/Desktop/callgrind.profile")

    print(f"Average over {N} runs:")
    print(f"Overloaded function:     {time_ovl * 1000000:.2f} mcs "
          f"({time_ovl / time_normal:.2f}x)")
    print(f"Non-overloaded function: {time_normal * 1000000:.2f} mcs")
class Measure:
    time_factor = 1000

    def __init__(self, measure_session=None, measure_point_name=None,
                 save_queue=None, data=None):
        if data is None:
            self.measure_session = measure_session
            self.point_name = measure_point_name
            self._save_queue = save_queue
            self._profile = Profile()
        else:
            self.measure_session = data[0]
            self.point_name = data[1]
            self.load(data[2])

    def load(self, data):
        self.timings = data

    def start(self):
        self._profile.enable()
        return self

    def stop(self):
        self._profile.disable()
        self.stats = self._profile.getstats()
        self.timings = list()
        self.timestamp = datetime.utcnow()
        for stat in self.stats:
            if stat.calls is not None:
                calls = list()
                for call in stat.calls:
                    calls.append(TimingStat(
                        self.timestamp, str(self.measure_session),
                        str(self.point_name), str(call.code),
                        call.callcount, call.reccallcount,
                        call.totaltime * self.time_factor,
                        call.inlinetime * self.time_factor, None))
            else:
                calls = None
            self.timings.append(TimingStat(
                self.timestamp, str(self.measure_session),
                str(self.point_name), str(stat.code),
                stat.callcount, stat.reccallcount,
                stat.totaltime * self.time_factor,
                stat.inlinetime * self.time_factor, calls))
        return self

    def save(self):
        self._save_queue.put_nowait(
            (self.measure_session, self.point_name, self.timings))
        return self

    def __str__(self):
        buf = "<{0}: measure_session='{1}' point_name='{2}'".format(
            self.__class__, self.measure_session, self.point_name)
        if hasattr(self, "stats"):
            buf += ", stat='{}'".format(self.stats)
        if hasattr(self, "timings"):
            buf += ", timing='{}'".format(self.timings)
        buf += ">"
        return buf
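# A minimal usage sketch for Measure, assuming the surrounding module provides
# TimingStat and the Profile/datetime imports the class relies on; the queue
# and the workload here are illustrative only.
from queue import Queue

save_queue = Queue()
measure = Measure(measure_session="session-1",
                  measure_point_name="example-point",
                  save_queue=save_queue)
measure.start()
sum(i * i for i in range(10000))  # work being measured
measure.stop()                    # converts profiler stats into TimingStat rows
measure.save()                    # enqueues (session, point_name, timings)
print(measure)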
class GProfiler(gtk.Window):
    """An interactive graphical profiler.

    It is not visible by default; you will need to call .show() on the
    returned window.
    """

    def __init__(self):
        super(GProfiler, self).__init__()
        self.set_title(_("Layer Profiler"))
        self.set_default_size(400, 300)
        self.profile = Profile()

        self.add(gtk.VBox())
        self.ui = gtk.UIManager()
        actions = gtk.ActionGroup("ProfilerActions")
        actions.add_actions([
            ('Profiles', None, _("_Profiles")),
            ('OpenProfile', gtk.STOCK_OPEN, None, "<control>O", None, None),
            ('SaveProfile', gtk.STOCK_SAVE, None, "<control>S", None, None),
            ('ExportProfile', gtk.STOCK_CONVERT, _("_Export as Text"),
             "<control>E", None, None),
            ("Quit", gtk.STOCK_QUIT, None, None, None,
             lambda a: self.destroy()),
            ("Help", None, _("_Help")),
            ("About", gtk.STOCK_ABOUT, None, None, None, None),
        ])
        self.ui.insert_action_group(actions, -1)
        self.ui.add_ui_from_string(UI)
        self.add_accel_group(self.ui.get_accel_group())
        self.child.pack_start(self.ui.get_widget("/Menu"), expand=False)

        toolbar = gtk.Toolbar()
        self.child.pack_start(toolbar, expand=False, fill=True)

        self.model = ProfileModel()
        filtered = self.model.filter_new()

        align = gtk.Alignment(xscale=1.0, yscale=1.0)
        align.set_padding(0, 0, 6, 0)
        label = gtk.Label()
        self.entry = FilterEntry(filtered)
        label.set_mnemonic_widget(self.entry)
        label.set_text_with_mnemonic(_("_Filter:"))

        self.view = gtk.TreeView(gtk.TreeModelSort(filtered))
        self.view.connect('row-activated', self._open_file)
        self.view.set_enable_search(True)
        self.view.set_search_equal_func(self.model.matches)

        sw = gtk.ScrolledWindow()
        sw.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
        sw.set_shadow_type(gtk.SHADOW_IN)
        sw.add(self.view)

        box = gtk.HBox(spacing=3)
        box.pack_start(label, expand=False)
        box.pack_start(self.entry)
        align.add(box)
        self.child.pack_start(align, expand=False)
        self.child.pack_start(sw, expand=True)

        cell = gtk.CellRendererText()
        cell.props.ellipsize = pango.ELLIPSIZE_MIDDLE
        column = gtk.TreeViewColumn(_("Filename"), cell)
        column.add_attribute(cell, 'text', 1)
        column.set_sort_column_id(2)
        column.set_resizable(True)
        column.set_expand(True)
        self.view.append_column(column)

        column = gtk.TreeViewColumn(_("Function"), cell)
        column.add_attribute(cell, 'text', 2)
        column.set_sort_column_id(2)
        column.set_resizable(True)
        column.set_expand(True)
        self.view.append_column(column)

        cell = gtk.CellRendererText()
        column = gtk.TreeViewColumn(_("Call #"), cell)
        column.add_attribute(cell, 'text', 4)
        column.set_sort_column_id(4)
        self.view.append_column(column)

        column = gtk.TreeViewColumn(_("Total"), cell)
        column.add_attribute(cell, 'text', 6)
        column.set_sort_column_id(6)
        self.view.append_column(column)

        column = gtk.TreeViewColumn(_("Inline"), cell)
        column.add_attribute(cell, 'text', 7)
        column.set_sort_column_id(7)
        self.view.append_column(column)

        self.totalstats = gtk.Statusbar()
        self.child.pack_start(self.totalstats, expand=False)

        toggle = gtk.ToggleToolButton(gtk.STOCK_MEDIA_RECORD)
        toggle.set_active(False)
        toggle.connect('toggled', self.toggle)
        toolbar.insert(toggle, 0)

        self.entry.grab_focus()
        self.running = False
        self.child.show_all()

    def _open_file(self, view, path, column):
        code = view.get_model().model[path][0].code
        try:
            lineno = "+%d" % code.co_firstlineno
            filename = code.co_filename
        except AttributeError:
            pass
        else:
            args = ['sensible-editor', lineno, filename]
            gobject.spawn_async(args, flags=gobject.SPAWN_SEARCH_PATH)

    def toggle(self, button):
        """Turn profiling on if off and vice versa."""
        if button.get_active():
            self.start()
        else:
            self.stop()

    def snapshot(self):
        """Update the UI with a snapshot of the current profile state."""
        self.model.stats = self.profile.getstats()
        totalcalls = 0
        totaltime = 0
        for entry in self.model.stats:
            totalcalls += entry.callcount
            totaltime += entry.inlinetime
        text = _("%(calls)d calls in %(time)f CPU seconds.") % dict(
            calls=totalcalls, time=totaltime)
        self.totalstats.pop(0)
        self.totalstats.push(0, text)

    def start(self):
        """Start profiling (adding to existing stats)."""
        if not self.running:
            self.profile.enable()
            self.running = True

    def stop(self):
        """Stop profiling (but retain stats)."""
        if self.running:
            self.profile.disable()
            self.running = False
            self.snapshot()
                            env_init_list=env_init_list)

    print "Trying to solve..."
    nsprof = Profile()
    nsprof.run("aut = gen_navobs_soln(init_list=init_list, goal_list=goal_list, W=W, num_obs=len(env_init_list), env_init_list=env_init_list, restrict_radius=1)")
    if aut is None:
        print "Nominal spec not feasible."
        print "#"*60
        continue
    print "Resulting solution automaton M has %d nodes." % aut.size()
    aut.trimDeadStates()
    print "After trimming dead nodes, M has size %d" % aut.size()

    # Walk backwards through the profiler stats to find the gen_navobs_soln entry.
    ind = -1
    while (not hasattr(nsprof.getstats()[ind].code, "co_name")
           or nsprof.getstats()[ind].code.co_name != "gen_navobs_soln"):
        ind -= 1
    nom_time = nsprof.getstats()[ind].totaltime

    aut.writeDotFileCoord("tempsyn-ORIG.dot")
    with open("tempsyn-ORIG.gexf", "w") as f:
        f.write(cg.dumpGexf(aut, use_viz=True, use_clusters=True))

    # Place block randomly in way of nominal plan (so that
    # patching is indeed necessary).
    W_actual = W.copy()
    block_try_count = 0
    while True:
        block_try_count += 1
        if block_try_count > max_blocking_tries:
            break
"C_lambda": C_LAMBDA_TRUE, "delta_S": DELTA_S_TRUE } price_path = preisSim(param_true) # p_true_SS = all_summary_stats(price_path, price_path) return (price_path) p_true_SS = main() p_true_SS.to_csv("new_test_case.csv", index=False, header=False) original = pd.read_csv("original_test_case.csv", header=None) new_case = pd.read_csv("new_test_case.csv", header=None) print( all( round(original.apply(float, axis=1), 0) == round( new_case.apply(float, axis=1), 0))) profiler = Profile() profiler.runctx("main()", locals(), globals()) # from pyprof2calltree import convert, visualize visualize(profiler.getstats()) # run kcachegrind convert(profiler.getstats(), 'profiling_results.kgrind') # save for later
import logging
from cProfile import Profile

from atpase import atpase, bgt

FILENAME = "atpase_profile.kgrind"
LOG = "profile.log"

profiler = Profile()

logger = bgt.logger
logger.setLevel(logging.INFO)
handler = logging.FileHandler(filename=LOG, mode='w')
logger.addHandler(handler)

try:
    profiler.runcall(atpase)
except KeyboardInterrupt as ex:
    handler.flush()
    raise ex

from pyprof2calltree import convert, visualize
convert(profiler.getstats(), FILENAME)
visualize(profiler.getstats())