def on_epoch_end(self, epoch, log={}):
    x = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    web_browser_debug = True
    print(x)

    if x > 40000:
        if web_browser_debug:
            if epoch == 0:
                start_in_background()
                tr = tracker.SummaryTracker()
                tr.print_diff()
        else:
            global memlist
            all_objects = muppy.get_objects(include_frames=True)
            # print(len(all_objects))
            sum1 = summary.summarize(all_objects)
            memlist.append(sum1)
            summary.print_(sum1)
            if len(memlist) > 1:
                # compare with last - prints the difference per epoch
                diff = summary.get_diff(memlist[-2], memlist[-1])
                summary.print_(diff)
            my_types = muppy.filter(all_objects, Type=types.ClassType)
            for t in my_types:
                print(t)
def printMemorySummary(self):
    all_objects = muppy.get_objects()
    out = "Total_NumObjects=" + str(len(all_objects)) + "\n"
    self.write(out)

    # filter out certain types of objects
    out = None
    types1 = muppy.filter(all_objects, Type=types.ClassType)
    out = "Num_Type=" + str(len(types1)) + "\n"
    for t in types1:
        out += str(t)
        out += "\n"
    self.write(out)
    out = None

    # compare summary of memory
    # sumCurr = summary.summarize(all_objects)
    # if (self.sumPrev):
    #     diff = summary.get_diff(sumCurr, self.sumPrev)
    #     summary.print_(diff)
    #     # self.write(str(diff))
    # self.sumPrev = sumCurr
    # self.tr.print_diff()

    print "memory.summary.done"
def heapdump():
    def yield_heapdump(heap_summary):
        for line in summary.format_(heap_summary):
            yield f'{line}\n'

    all_objects = muppy.get_objects()
    filter_type = request.args.get('filter')
    if filter_type is None:
        sum1 = summary.summarize(all_objects)
    else:
        sum1 = summary.summarize(muppy.filter(all_objects, Type=bytes))
    return Response(yield_heapdump(sum1), mimetype='text/plain')
def print_memory(count=30):
    '''Print statistics of the objects in memory. Requires pympler.'''
    from pympler import muppy, summary
    gc.collect()
    all_objects = muppy.get_objects()
    my_types = muppy.filter(all_objects, Type=wx.Object)
    sum1 = summary.summarize(my_types)
    # sum1 = summary.summarize(all_objects)
    summary.print_(sum1, limit=count)
def collect_and_dump_root(self):
    log.msg('Profiling memory for OmsRoot objects...', system=self.__name__)
    try:
        import inspect
        from sys import getsizeof
        from BTrees.OOBTree import OOBucket
        from ZEO.Exceptions import ClientDisconnected
        from opennode.oms.model.model.root import OmsRoot

        data = []
        all_objects = muppy.get_objects()
        roots = muppy.filter(all_objects, Type=OmsRoot)
        logger.info('Root profile follows (%s rows)' % len(roots))
        gc.collect()

        for ue in roots:
            referrers = []
            for ref in gc.get_referrers(ue):
                try:
                    if inspect.isframe(ref):
                        continue  # local object ref
                    elif isinstance(ref, list):
                        referrers.append('list len=%s id=%x' % (len(ref), id(ref)))
                    elif isinstance(ref, OOBucket):
                        referrers.append('OOBucket len=%s id=%x' % (len(ref), id(ref)))
                    else:
                        sref = repr(ref)
                        referrers.append(sref)
                except ClientDisconnected:
                    referrers.append('ClientDisconnected')
            data.append((referrers, str(ue), repr(ue), str(getsizeof(ue))))

        # header order matches the tuples appended to data above
        rrows = [('referrers', 'object', 'raw', 'size')] + data
        rows = _format_table(rrows)
        for row in rows:
            logger.info(row)

        log.msg('Profiling OmsRoot memory done', system=self.__name__)
        del all_objects
        gc.collect()
        return defer.succeed(None)
    except Exception, e:
        import traceback
        logger.error(traceback.format_exc(e))
        return defer.fail(None)
def request(ctx, flow):
    global step, ssl
    print("==========")
    print(f"GC: {gc.collect()}")
    print(f"Threads: {threading.active_count()}")

    step += 1
    if step == 1:
        all_objects = muppy.get_objects()
        ssl = muppy.filter(all_objects, SSL.Connection)[0]
    if step == 2:
        ib = refbrowser.InteractiveBrowser(ssl, 2, str_fun, repeat=False)
        del ssl  # do this to unpollute view
        ib.main(True)
def request(ctx, flow):
    global step, ssl
    print("==========")
    print("GC: {}".format(gc.collect()))
    print("Threads: {}".format(threading.active_count()))

    step += 1
    if step == 1:
        all_objects = muppy.get_objects()
        ssl = muppy.filter(all_objects, SSL.Connection)[0]
    if step == 2:
        ib = refbrowser.InteractiveBrowser(ssl, 2, str_fun, repeat=False)
        del ssl  # do this to unpollute view
        ib.main(True)
def test_filter_by_type(self):
    """Test that only elements of a certain type are included,
    no elements are removed which belong to this type and
    no elements are added."""
    s = (s1, s2, s3, s4) = ('', 'a', 'b', 'a')
    t = (t1, t2) = (dict, str)
    i1 = 1
    l1 = []
    objects = [s1, s2, i1, l1, t1, t2, s3, s4]
    objects = muppy.filter(objects, Type=str)
    self.assertEqual(len(objects), len(s))
    for element in s:
        self.assertEqual(element in objects, True)
def test_filter_by_type(self):
    """Test that only elements of a certain type are included,
    no elements are removed which belong to this type and
    no elements are added."""
    s = (s1, s2, s3, s4) = ("", "a", "b", "a")
    t = (t1, t2) = (dict, str)
    i1 = 1
    l1 = []
    objects = [s1, s2, i1, l1, t1, t2, s3, s4]
    objects = muppy.filter(objects, Type=str)
    self.assertEqual(len(objects), len(s))
    for element in s:
        self.assertEqual(element in objects, True)
def test_filter_by_size(self):
    """Test that only elements within the specified size boundaries
    are returned. Also verify that if minimum is larger than maximum
    an exception is raised."""
    minimum = 42
    maximum = 958
    objects = []
    for i in range(1000):
        objects.append(' ' * i)
    objects = muppy.filter(objects, min=minimum, max=maximum)
    self.assert_(len(objects) != 0)
    for o in objects:
        self.assert_(minimum <= getsizeof(o) <= maximum)
    self.assertRaises(ValueError, muppy.filter, objects, min=17, max=16)
def test_filter_by_size(self):
    """Test that only elements within the specified size boundaries
    are returned. Also verify that if minimum is larger than maximum
    an exception is raised."""
    minimum = 42
    maximum = 958
    objects = []
    for i in range(1000):
        rand = random.randint(0, 1000)
        objects.append(" " * rand)
    objects = muppy.filter(objects, min=minimum, max=maximum)
    for o in objects:
        self.assert_(minimum <= _getsizeof(o) <= maximum)
    self.assertRaises(ValueError, muppy.filter, objects, min=17, max=16)
node_options = {}

# trck.track_class(core.Agent, resolution_level=5)
# trck.track_class(Node, resolution_level=5)
st.print_diff()

g = nx.powerlaw_cluster_graph(5000, 20, 0.1)
# trck.track_object(g, resolution_level=5)
# trck.create_snapshot('Graph only')
st.print_diff()

sim = Simulation()
sim.run(starting_graph=g)
st.print_diff()
# trck.create_snapshot('With agents')
# trck.stats.print_summary()

from pympler import refbrowser
import types


def output_function(o):
    return str(type(o))


for root in muppy.filter(muppy.get_objects(), types.MethodType):
    refbrowser.ConsoleBrowser(root, maxdepth=2, str_func=output_function).print_tree()
# trck.track_class(core.Agent, resolution_level=5)
# trck.track_class(Node, resolution_level=5)
st.print_diff()

g = nx.powerlaw_cluster_graph(5000, 20, 0.1)
# trck.track_object(g, resolution_level=5)
# trck.create_snapshot('Graph only')
st.print_diff()

sim = Simulation()
sim.run(starting_graph=g)
st.print_diff()
# trck.create_snapshot('With agents')
# trck.stats.print_summary()

from pympler import refbrowser
import types


def output_function(o):
    return str(type(o))


for root in muppy.filter(muppy.get_objects(), types.MethodType):
    refbrowser.ConsoleBrowser(root, maxdepth=2, str_func=output_function).print_tree()
from pympler import muppy
all_objects = muppy.get_objects()
len(all_objects)

import types
my_types = muppy.filter(all_objects, Type=types.ClassType)
len(my_types)
for t in my_types:
    print t

from pympler import summary
sum1 = summary.summarize(all_objects)
summary.print_(sum1)
sum2 = summary.summarize(muppy.get_objects())
diff = summary.get_diff(sum1, sum2)
summary.print_(diff)

from pympler import refbrowser
root = "some root object"
root_ref1 = [root]
root_ref2 = (root, )


def output_function(o):
    return str(type(o))


cb = refbrowser.ConsoleBrowser(root, maxdepth=2, str_func=output_function)
# ib = refbrowser.InteractiveBrowser(root)
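# A minimal sketch of the same walk-through for Python 3, assuming only that
# pympler is installed: types.ClassType is Python 2 only, and on Python 3 every
# class is an instance of `type`, so classes can be filtered with Type=type.
from pympler import muppy, summary

all_objects = muppy.get_objects()
print(len(all_objects))

# keep only class objects
my_types = muppy.filter(all_objects, Type=type)
print(len(my_types))
for t in my_types[:10]:
    print(t)

# summarize twice and print only what changed in between
sum1 = summary.summarize(all_objects)
summary.print_(sum1)
sum2 = summary.summarize(muppy.get_objects())
summary.print_(summary.get_diff(sum1, sum2))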
from operator import itemgetter
from pympler import muppy, summary

max_results = 3
all_objects = muppy.get_objects()


def format_size(size):
    for unit in ["B", "KiB", "MiB", "GiB"]:
        if size < 1024.0 or unit == "GiB":
            break
        size /= 1024.0
    return f"{size:.2f} {unit}"


# each summary row is (type name, object count, total size); sort by total size
profile = sorted(summary.summarize(all_objects), key=itemgetter(2), reverse=True)

for object_ in profile[:max_results]:
    print(f"Name: {object_[0]}")
    print(f"Number of objects: {object_[1]}")
    print(f"Total size: {format_size(object_[2])}", end="\n\n")

# use obj_type rather than shadowing the built-in `type`
for obj_type in (str, dict):
    print(f"Last {max_results} {obj_type} objects in memory", end="\n\n")
    latest_objects = muppy.filter(all_objects, Type=obj_type)[-max_results:]
    print("\n".join(map(str, latest_objects)))
def mem_analysis():
    all_objects = muppy.get_objects()
    my_types = muppy.filter(all_objects, Type=Node)
    print(len(my_types))