def _get_tree(self, root, maxdepth):
    """Workhorse of the get_tree implementation.

    This is a recursive method, which is why we have a wrapper method.
    root is the current root object of the tree which should be returned.
    Note that root is not of the type _Node.
    maxdepth defines how much further down from the root the tree should
    be built.

    """
    objects = gc.get_referrers(root)
    res = _Node(root, self.str_func)
    self.already_included.add(id(root))
    if maxdepth == 0:
        return res
    self.ignore.append(inspect.currentframe())
    self.ignore.append(objects)
    for o in objects:
        # Ignore dict of _Node and RefBrowser objects
        if isinstance(o, dict):
            if any(isinstance(ref, (_Node, RefBrowser)) for ref in gc.get_referrers(o)):
                continue
        _id = id(o)
        if not self.repeat and (_id in self.already_included):
            s = self.str_func(o)
            res.children.append("%s (already included, id %s)" % (s, _id))
            continue
        if (not isinstance(o, _Node)) and (o not in self.ignore):
            res.children.append(self._get_tree(o, maxdepth - 1))
    return res
def dump_references(log, instances, exclude=[]):
    import gc
    import inspect
    gc.collect()
    frame = inspect.currentframe()
    try:
        exclude.append(instances)
        exclude.append([frame])
        for instance in instances:
            referrers = [x for x in gc.get_referrers(instance)
                         if (x not in exclude and len([y for y in exclude if x in y]) == 0)]
            log.info("referrers for %s: %s", instance, len(referrers))
            for i in range(len(referrers)):
                r = referrers[i]
                log.info("[%s] in %s", i, type(r))
                if inspect.isframe(r):
                    log.info(" frame info: %s", str(inspect.getframeinfo(r))[:1024])
                elif type(r) == list:
                    listref = gc.get_referrers(r)
                    log.info(" list: %s.. %s referrers: %s",
                             str(r[:32])[:1024], len(listref), str(listref[:32])[:1024])
                elif type(r) == dict:
                    if len(r) > 64:
                        log.info(" %s items: %s", len(r), str(r)[:1024])
                        continue
                    for k, v in r.items():
                        if k is instance:
                            log.info(" key with value=%s", v)
                        elif v is instance:
                            log.info(" for key=%s", k)
                else:
                    log.info(" %s : %s", type(r), r)
    finally:
        del frame
def treeinit(request):
    myopen = request.GET.get('open')
    mylayers = request.GET.get('layers')
    if myopen and not mylayers:
        qs = models.layertreenode.objects.all().filter(parent_id__in=myopen.split(",")).distinct('id')
    elif mylayers and not myopen:
        qs = models.layertreenode.objects.all().filter(id__in=mylayers.split(",")).distinct('id')
    elif mylayers and myopen:
        qs = (models.layertreenode.objects.all().filter(parent_id__in=myopen.split(","))
              | models.layertreenode.objects.all().filter(id__in=mylayers.split(",")).distinct('id'))
    else:
        qs = models.layertreenode.objects.all().filter(id=1)
    print gc.get_referrers(models.layertreenode.objects)
    print 'qs'
    print qs
    layers = models.layertreenode.objects.all().get_queryset_ancestors(qs, include_self=True).distinct('id')
    print 'layers'
    print layers
    newlayers = subtc(qs)
    serializer = layertreenodeSerializer(newlayers)
    return JSONResponse(serializer.data)
def get_parent(code):
    """Given a code object, find the class that uses it as a method.

    First we find the function that wraps the code, then from there we find
    the class dict or method object.  Note: get_referrers will often return
    both forms.  We escape on whichever we hit first.

    Also, this is a really slow function and relies on the gc.
    """
    funcs = [f for f in gc.get_referrers(code) if inspect.isfunction(f)]
    if len(funcs) != 1:
        return None
    refs = [f for f in gc.get_referrers(funcs[0])]
    for ref in refs:
        # assume that if a dict is pointed to by a class,
        # that dict is the __dict__
        if isinstance(ref, dict):
            parents = [p for p in gc.get_referrers(ref) if isinstance(p, type)]
            if len(parents) == 1:
                return parents[0].__name__
        if inspect.ismethod(ref):
            return ref.__qualname__.rsplit('.', 1)[0]
    return None
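# Hedged usage sketch (not from the original source): resolving the owning
# class name from the code object of a running method, assuming get_parent()
# above is importable.  The Widget class below is made up for illustration.
import inspect


class Widget(object):
    def render(self):
        # A frame's f_code is the code object of the method being executed.
        return get_parent(inspect.currentframe().f_code)


print(Widget().render())  # expected: 'Widget'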
def test(sample_path):
    detection_params = {'w_s': 11,
                        'peak_radius': 4.,
                        'threshold': 40.,
                        'max_peaks': 4}
    sample = TiffFile(sample_path)
    curr_dir = os.path.dirname(__file__)
    fname = os.path.join(curr_dir, os.path.join(sample.fpath, sample.fname))
    arr = sample.asarray()
    peaks = detect_peaks(arr,
                         shape_label=('t', 'z', 'x', 'y'),
                         verbose=True,
                         show_progress=False,
                         parallel=True,
                         **detection_params)
    # del sample
    # sample = None
    gc.get_referrers(arr)
    del arr
    gc.collect()
def testAmountOfInstancesPerPoolName(self):
    idle = oop.IOPROC_IDLE_TIME
    try:
        oop.IOPROC_IDLE_TIME = 5
        poolA = "A"
        poolB = "B"
        wrapper = ref(oop.getProcessPool(poolA))
        ioproc = ref(oop.getProcessPool(poolA)._ioproc)
        oop.getProcessPool(poolA)
        time.sleep(oop.IOPROC_IDLE_TIME + 1)
        oop.getProcessPool(poolB)
        self.assertEquals(wrapper(), None)
        gc.collect()
        time.sleep(1)
        gc.collect()
        try:
            self.assertEquals(ioproc(), None)
        except AssertionError:
            logging.info("GARBAGE: %s", gc.garbage)
            refs = gc.get_referrers(ioproc())
            logging.info(refs)
            logging.info(gc.get_referrers(*refs))
            raise
    finally:
        oop.IOPROC_IDLE_TIME = idle
def debug(what):
    from sys import getrefcount
    from gc import get_referrers
    for o in what:
        if getrefcount(o) - 3 != 1 or \
           len(get_referrers(o)) - 2 != 1:
            print 'GD DEBUG LEAK:', o, hex(id(o)), type(o), getrefcount(o) - 3, len(get_referrers(o)) - 2
def snapshot():
    canary = object()
    survivor = weaksurvivor()
    if survivor is None:
        return None
    all_referrers = gc.get_referrers(survivor)
    canary_referrers = gc.get_referrers(canary)
    referrers = [r for r in all_referrers if r not in canary_referrers]
    assert len(all_referrers) == len(referrers) + 1, \
        "Canary to filter out the debugging tool's reference did not work"

    def _format_frame(frame, survivor_id):
        return "%s as %s in %s" % (
            frame,
            " / ".join(k for (k, v) in frame.f_locals.items() if id(v) == survivor_id),
            frame.f_code)

    # can't use survivor in list comprehension, or it would be moved
    # into a builtins.cell rather than a frame, and that won't spew out
    # the details _format_frame can extract
    survivor_id = id(survivor)
    referrer_strings = [
        _format_frame(x, survivor_id) if str(type(x)) == "<class 'frame'>"
        else pprint.pformat(x)
        for x in referrers]
    formatted_survivor = pprint.pformat(vars(survivor))
    return "Survivor found: %r\nReferrers of the survivor:\n*"\
           " %s\n\nSurvivor properties: %s" % (
               survivor,
               "\n* ".join(referrer_strings),
               formatted_survivor)
def file_module_function_of(self, frame):
    code = frame.f_code
    filename = code.co_filename
    if filename:
        modulename = modname(filename)
    else:
        modulename = None

    funcname = code.co_name
    clsname = None
    if code in self._caller_cache:
        if self._caller_cache[code] is not None:
            clsname = self._caller_cache[code]
    else:
        self._caller_cache[code] = None
        funcs = [f for f in gc.get_referrers(code) if inspect.isfunction(f)]
        if len(funcs) == 1:
            dicts = [d for d in gc.get_referrers(funcs[0]) if isinstance(d, dict)]
            if len(dicts) == 1:
                classes = [c for c in gc.get_referrers(dicts[0]) if hasattr(c, "__bases__")]
                if len(classes) == 1:
                    clsname = classes[0].__name__
                    self._caller_cache[code] = clsname
    if clsname is not None:
        funcname = "%s.%s" % (clsname, funcname)

    return (filename, modulename, funcname)
def _get_usage(function, *args):
    """Get the usage of a function call.

    This function is to be used only internally. The 'real' get_usage
    function is a wrapper around _get_usage, but the workload is done
    here.

    """
    res = []
    # init before calling
    (s_before, s_after) = _get_summaries(function, *args)
    # ignore all objects used for the measurement
    ignore = []
    if s_before != s_after:
        ignore.append(s_before)
    for row in s_before:
        # ignore refs from summary and frame (loop)
        if len(gc.get_referrers(row)) == 2:
            ignore.append(row)
        for item in row:
            # ignore refs from summary and frame (loop)
            if len(gc.get_referrers(item)) == 2:
                ignore.append(item)
    for o in ignore:
        s_after = summary._subtract(s_after, o)
    res = summary.get_diff(s_before, s_after)
    return summary._sweep(res)
def fn_scope():
    '''Function providing the scope for instances.
    '''
    vanishing_object = OrdinaryClass(a=1)
    print 'referrers to vanishing', gc.get_referrers(vanishing_object)
    persisting_object = ClassWithProperty(a=1)
    print 'referrers to persisting', gc.get_referrers(persisting_object)
def dump_state(self):
    """Dump the state of the application to the output, this method is
    triggered by pressing :kbd:`Ctrl-Alt-D` in the GUI"""
    from camelot.view.model_thread import post
    from camelot.view.register import dump_register
    from camelot.view.proxy.collection_proxy import CollectionProxy
    import gc
    gc.collect()
    dump_register()

    def dump_session_state():
        import collections
        from camelot.model.authentication import Person
        print '======= begin session =============='
        type_counter = collections.defaultdict(int)
        for o in Person.query.session:
            type_counter[type(o).__name__] += 1
        for k, v in type_counter.items():
            print k, v
        print '====== end session =============='

    post(dump_session_state)
    for o in gc.get_objects():
        if isinstance(o, CollectionProxy):
            print o
            for r in gc.get_referrers(o):
                print ' ', type(r).__name__
                for rr in gc.get_referrers(r):
                    print '  ', type(rr).__name__
def _leak_tst_yb():
    print "Doing YumBase leak test. "
    out_mem(os.getpid())
    while True:
        yb = yum.YumBase()
        yb.preconf.debuglevel = 0
        yb.preconf.errorlevel = 0
        yb.repos.setCacheDir(yum.misc.getCacheDir())
        yb.rpmdb.returnPackages()
        yb.pkgSack.returnPackages()
        yb.tsInfo
        yb.ts
        yb.up
        yb.comps
        yb.history
        yb.igroups
        yb.pkgtags
        out_mem(os.getpid())
        time.sleep(4)
        if False:
            del yb
            print len(gc.garbage)
            if gc.garbage:
                print gc.garbage[0]
                print gc.get_referrers(gc.garbage[0])
def get_referrers(obj, path=None):
    if not path:
        path = []
    referrers = gc.get_referrers(obj)
    for edge in path:
        referrers = gc.get_referrers(referrers[edge])
    return referrers
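# Hedged usage sketch (not from the original source): the helper above follows
# gc.get_referrers() repeatedly, selecting one referrer by index at each step,
# so a path of indices walks "upward" through the reference graph.  The names
# below are made up.
import gc

leaked = {"payload": "x" * 100}
holder = [leaked]                      # something keeping `leaked` alive

level0 = get_referrers(leaked)         # direct referrers of `leaked`
idx = level0.index(holder)
level1 = get_referrers(leaked, [idx])  # referrers of that particular referrer
print(len(level0), len(level1))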
def fini(self):
    gc.collect()
    print "self", len(gc.get_referrers(self))
    print "learner", len(gc.get_referrers(self.getLearner()))
    print "testRec", len(gc.get_referrers(self.testRec))
    print "recTest", len(gc.get_referrers(self.recTest))
    print "1010", len(gc.get_referrers(1010))
    sys.stdout.flush()
def test_no_new_reference_cycles(self):
    # Similar to https://github.com/mgedmin/objgraph/pull/22 but for
    # typestats()
    gc.disable()
    x = type('MyClass', (), {})()
    self.assertEqual(len(gc.get_referrers(x)), 1)
    objgraph.typestats()
    self.assertEqual(len(gc.get_referrers(x)), 1)
def searchRefs(obj, *args):
    """Pseudo-interactive function for tracing references backward.

    **Arguments:**

        obj:    The initial object from which to start searching
        args:   A set of string or int arguments.
                each integer selects one of obj's referrers to be the new 'obj'
                each string indicates an action to take on the current 'obj':
                    t:  print the types of obj's referrers
                    l:  print the lengths of obj's referrers (if they have __len__)
                    i:  print the IDs of obj's referrers
                    o:  print obj
                    ro: return obj
                    rr: return list of obj's referrers

    Examples::

       searchRefs(obj, 't')                ## Print types of all objects referring to obj
       searchRefs(obj, 't', 0, 't')        ##  ..then select the first referrer and print the types of its referrers
       searchRefs(obj, 't', 0, 't', 'l')   ##  ..also print lengths of the last set of referrers
       searchRefs(obj, 0, 1, 'ro')         ## Select index 0 from obj's referrers, then select index 1
                                           ## from the next set of referrers, then return that object
    """
    ignore = {id(sys._getframe()): None}
    gc.collect()
    refs = gc.get_referrers(obj)
    ignore[id(refs)] = None
    refs = [r for r in refs if id(r) not in ignore]
    for a in args:
        #fo = allFrameObjs()
        #refs = [r for r in refs if r not in fo]
        if type(a) is int:
            obj = refs[a]
            gc.collect()
            refs = gc.get_referrers(obj)
            ignore[id(refs)] = None
            refs = [r for r in refs if id(r) not in ignore]
        elif a == "t":
            print(list(map(typeStr, refs)))
        elif a == "i":
            print(list(map(id, refs)))
        elif a == "l":
            def slen(o):
                if hasattr(o, "__len__"):
                    return len(o)
                else:
                    return None
            print(list(map(slen, refs)))
        elif a == "o":
            print(obj)
        elif a == "ro":
            return obj
        elif a == "rr":
            return refs
def run_and_check(run_client):
    w = run_interaction(run_client=run_client)
    if w():
        print(pformat(gc.get_referrers(w())))
        for x in gc.get_referrers(w()):
            print(pformat(x))
            for y in gc.get_referrers(x):
                print('-', pformat(y))
        raise AssertionError('server should be dead by now')
def do():
    print "do"
    import gc
    print gc.collect()
    print gc.collect()
    print gc.collect()
    print gc.garbage
    print len(gc.get_referrers(L()))
    for i in gc.get_referrers(L()):
        print i
def test_no_new_reference_cycles(self):
    # Similar to https://github.com/mgedmin/objgraph/pull/22 but for
    # count()
    gc.disable()
    x = type('MyClass', (), {})()
    before = len(gc.get_referrers(x))
    objgraph.count('MyClass')
    after = len(gc.get_referrers(x))
    self.assertEqual(before, after)
def _get_referring_modules(self, obj):
    """
    Find all modules that *directly* refer to object `obj`
    """
    for ref_obj in gc.get_referrers(obj):
        if isinstance(ref_obj, dict) and '__name__' in ref_obj:
            for mod in gc.get_referrers(ref_obj):
                if isinstance(mod, types.ModuleType):
                    yield mod.__name__
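# Hedged, standalone adaptation (not from the original source) of the method
# above: it treats any referring dict with a '__name__' key as a candidate
# module namespace, then looks for the module that owns that dict.
import gc
import sys
import types


def modules_referring_to(obj):
    for ref_obj in gc.get_referrers(obj):
        if isinstance(ref_obj, dict) and '__name__' in ref_obj:
            for mod in gc.get_referrers(ref_obj):
                if isinstance(mod, types.ModuleType):
                    yield mod.__name__


print(sorted(set(modules_referring_to(sys.path))))  # typically includes 'sys'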
def test_code_tree_not_in_cache_can_be_garbage_collected(self):
    code_tree = CodeTree(None)
    self.manager.remember_code_tree(code_tree, "module.py")

    # Referred from the test and from the CodeTreesManager.
    assert_length(gc.get_referrers(code_tree), 2)

    self.manager.clear_cache()

    # No longer referred from the CodeTreesManager.
    assert_length(gc.get_referrers(code_tree), 1)
def _debug_referrents(self):
    import gc
    gc.collect()
    print "referrers to", self
    g = gc.get_referrers(self)
    for ob in g:
        print " ", ob
        b = gc.get_referrers(ob)
        for ob2 in b:
            print "  ", ob2
def update_method_referrers(methodName, oldClassMethod, newClassMethod):
    """
    The instance methods we need to update are stored in slots on
    instances of socket._socketobject (actually our replacement subclass
    _socketobject_new).
    """
    for referrer1 in gc.get_referrers(oldClassMethod):
        if isinstance(referrer1, types.MethodType):
            for referrer2 in gc.get_referrers(referrer1):
                if isinstance(referrer2, _socketobject_new):
                    setattr(referrer2, methodName,
                            types.MethodType(newClassMethod,
                                             referrer1.im_self,
                                             referrer1.im_class))
def create_summary(self):
    """Return a summary.

    See also the notes on ignore_self in the class as well as the
    initializer documentation.

    """
    if not self.ignore_self:
        res = summary.summarize(muppy.get_objects())
    else:
        # If the user requested the data required to store summaries to be
        # ignored in the summaries, we need to identify all objects which
        # are related to each summary stored.
        # Thus we build a list of all objects used for summary storage as
        # well as a dictionary which tells us how often an object is
        # referenced by the summaries.
        # During this identification process, more objects are referenced,
        # namely int objects identifying referenced objects as well as the
        # corresponding count.
        # For all these objects it will be checked whether they are
        # referenced from outside the monitor's scope. If not, they will be
        # subtracted from the snapshot summary, otherwise they are
        # included (as this indicates that they are relevant to the
        # application).

        all_of_them = []  # every single object
        ref_counter = {}  # how often it is referenced; (id(o), o) pairs

        def store_info(o):
            all_of_them.append(o)
            if id(o) in ref_counter:
                ref_counter[id(o)] += 1
            else:
                ref_counter[id(o)] = 1

        # store infos on every single object related to the summaries
        store_info(self.summaries)
        for k, v in self.summaries.items():
            store_info(k)
            summary._traverse(v, store_info)

        # do the summary
        res = summary.summarize(muppy.get_objects())

        # remove ids stored in the ref_counter
        for _id in ref_counter:
            # referenced in frame, ref_counter, ref_counter.keys()
            if len(gc.get_referrers(_id)) == (3):
                summary._subtract(res, _id)
        for o in all_of_them:
            # referenced in frame, summary, all_of_them
            if len(gc.get_referrers(o)) == (ref_counter[id(o)] + 2):
                summary._subtract(res, o)
    return res
def run_and_check(self, run_client):
    wref_to_hidden_server_socket = self.run_interaction(run_client=run_client)
    greentest.gc_collect_if_needed()
    if wref_to_hidden_server_socket():
        from pprint import pformat
        print(pformat(gc.get_referrers(wref_to_hidden_server_socket())))
        for x in gc.get_referrers(wref_to_hidden_server_socket()):
            print(pformat(x))
            for y in gc.get_referrers(x):
                print('-', pformat(y))
        self.fail('server socket should be dead by now')
def test_get_referrers(self):
    if is_cli:
        self.assertRaises(NotImplementedError, gc.get_referrers, 1, "hello", True)
        self.assertRaises(NotImplementedError, gc.get_referrers)
    else:
        gc.get_referrers(1, "hello", True)
        gc.get_referrers()

        class TempClass: pass
        tc = TempClass()
        self.assertEqual(gc.get_referrers(TempClass).count(tc), 1)
def run_and_check(run_client):
    w = run_interaction(run_client=run_client)
    clear_sys_exc_info()
    gc.collect()
    if w():
        print(pformat(gc.get_referrers(w())))
        for x in gc.get_referrers(w()):
            print(pformat(x))
            for y in gc.get_referrers(x):
                print('- {0}'.format(pformat(y)))
        raise AssertionError('server should be dead by now')
def test_get_referrers():
    if is_cli or is_silverlight:
        AssertError(NotImplementedError, gc.get_referrers, 1, "hello", True)
        AssertError(NotImplementedError, gc.get_referrers)
    else:
        gc.get_referrers(1, "hello", True)
        gc.get_referrers()

        class TempClass: pass
        tc = TempClass()
        AreEqual(gc.get_referrers(TempClass).count(tc), 1)
def nameof(obj):
    '''
    if obj is referenced strongly in another object's __dict__,
    give its attribute name.
    '''
    for r in gc.get_referrers(obj):
        for r2 in gc.get_referrers(r):
            if r is getattr(r2, '__dict__', object()):
                for k, v in r.iteritems():
                    if v is obj:
                        return '[%r attribute of %r]' % (k, r2)
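# Hedged usage sketch (not from the original source): nameof() walks from the
# object to a referring __dict__ and then to that dict's owner, returning the
# attribute name.  It uses dict.iteritems(), so this assumes Python 2; on
# Python 3 it would need items() instead.  The Holder class is made up.
class Holder(object):
    def __init__(self):
        self.payload = object()


h = Holder()
print(nameof(h.payload))  # expected: something like "['payload' attribute of <Holder ...>]"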
def _check_activity_death(cls, activity_ref: ReferenceType[Activity], counter: List[int]) -> None: """Sanity check to make sure an Activity was destroyed properly. Receives a weakref to a ba.Activity which should have torn itself down due to no longer being referenced anywhere. Will complain and/or print debugging info if the Activity still exists. """ try: import gc import types activity = activity_ref() print('ERROR: Activity is not dying when expected:', activity, '(warning ' + str(counter[0] + 1) + ')') print('This means something is still strong-referencing it.') counter[0] += 1 # FIXME: Running the code below shows us references but winds up # keeping the object alive; need to figure out why. # For now we just print refs if the count gets to 3, and then we # kill the app at 4 so it doesn't matter anyway. if counter[0] == 3: print('Activity references for', activity, ':') refs = list(gc.get_referrers(activity)) i = 1 for ref in refs: if isinstance(ref, types.FrameType): continue print(' reference', i, ':', ref) i += 1 if counter[0] == 4: print('Killing app due to stuck activity... :-(') _ba.quit() except Exception: print_exception('Error on _check_activity_death/')
def testCleanup(self, cacheMemoryManager):
    try:
        cacheMemoryManager.disable()

        op = OpBlockedArrayCache(graph=self.opProvider.graph)
        op.Input.connect(self.opProvider.Output)
        s = self.opProvider.Output.meta.shape
        op.BlockShape.setValue(s)
        op.fixAtCurrent.setValue(False)
        x = op.Output[...].wait()
        op.Input.disconnect()
        op.cleanUp()

        r = weakref.ref(op)
        del op
        gc.collect()
        ref = r()
        if ref is not None:
            for i, o in enumerate(gc.get_referrers(ref)):
                print("Object", i, ":", type(o), ":", o)
        assert r() is None, "OpBlockedArrayCache was not cleaned up correctly"
    finally:
        cacheMemoryManager.enable()
def get_args():
    """
    Retrieves the arguments of the calling function.
    """
    # Get the parent frame and obtain the calling function
    frame = inspect.currentframe().f_back
    func = None
    for referrer in gc.get_referrers(frame.f_code):
        if callable(referrer) and referrer.__code__ is frame.f_code:
            func = referrer
    assert func, "could not resolve function"

    # Extract the arguments from the frame
    argspec = inspect.getfullargspec(func)
    args = {
        key: frame.f_locals[key]
        for key in argspec.args + argspec.kwonlyargs
    }
    if argspec.varargs:
        args['*varargs'] = frame.f_locals[argspec.varargs]
    if argspec.varkw:
        args.update(frame.f_locals[argspec.varkw])
    return args
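# Hedged usage sketch (not from the original source): calling get_args() from
# inside a function returns that function's own call arguments, resolved via
# the referrers of the caller's code object.  configure() is a made-up example
# and assumes get_args() above is importable.
def configure(host, port=8080, *extras, **options):
    return get_args()


print(configure("localhost", 9000, "tls", debug=True))
# roughly: {'host': 'localhost', 'port': 9000, '*varargs': ('tls',), 'debug': True}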
def update(self, verbose=0): obs = sys.getobjects(0) type2count = {} type2all = {} for o in obs: all = sys.getrefcount(o) t = type(o) if verbose: # if t == types.TupleType: if isinstance(o, tb.Group): # if isinstance(o, MetaIsDescription): print("-->", o, "refs:", all) refrs = gc.get_referrers(o) trefrs = [] for refr in refrs: trefrs.append(type(refr)) print("Referrers -->", refrs) print("Referrers types -->", trefrs) # if t == types.StringType: print "-->",o if t in type2count: type2count[t] += 1 type2all[t] += all else: type2count[t] = 1 type2all[t] = all ct = sorted([(type2count[t] - self.type2count.get(t, 0), type2all[t] - self.type2all.get(t, 0), t) for t in type2count.keys()]) ct.reverse() for delta1, delta2, t in ct: if delta1 or delta2: print("%-55s %8d %8d" % (t, delta1, delta2)) self.type2count = type2count self.type2all = type2all
def clear(self, pattern=None): """ Clear one or more buffers. Args: pattern (str): a regular expression to match against the buffer names when determining what should be cleared. If None, then all buffers are cleared. """ if pattern is None: # free all buffers self._aliases.clear() if not self._pymem: keylist = list(self._refs.keys()) for k in keylist: #gc.collect() referrers = gc.get_referrers(self._refs[k]) #print("clear {} referrers for {} are: ".format(len(referrers), k), referrers) #print("clear refcount for {} is ".format(k), sys.getrefcount(self._refs[k]) ) if sys.getrefcount(self._refs[k]) > 2: warnings.warn( "Cache object {} has external references and will not be freed." .format(k), RuntimeWarning) del self._refs[k] self._refs.clear() else: pat = re.compile(pattern) names = [] for n, r in self._refs.items(): mat = pat.match(n) if mat is not None: names.append(n) del r for n in names: self.destroy(n) return
def record_memory_leak_status(print_diff=False):
    run_garbage_collector()

    logger.info('MEM: collecting info (can take some time)...')
    new_stats = []
    for obj in gc.get_objects():
        if 'A' <= getattr(obj, '__name__', ' ')[0] <= 'Z':
            cnt = len(gc.get_referrers(obj))
            new_stats.append((obj.__name__ + ' ' + str(obj), cnt))
    new_stats.sort()

    logger.info('MEM: ...done collecting.')
    global old_stats
    if old_stats:
        if print_diff:
            d = {}
            for obj, cnt in old_stats:
                d[obj] = cnt
            for obj, cnt in new_stats:
                cnt_old = d.get(obj, 0)
                if cnt != cnt_old:
                    logger.info('MEM: DELTA %+d %s', cnt - cnt_old, obj)
    else:
        logger.info('MEM: Stored stats to compare with the next '
                    'info collection.')
    old_stats = new_stats
def checkfn() -> None:
    import gc
    time.sleep(2)
    gc.collect()
    obj = w_obj()
    if not obj:
        return
    # TODO: If you see the message below, it means that "something" has kept
    # ..... a reference to a form (or similar) which prevents it from being
    # ..... destroyed when it is no longer used.  This also keeps the connects
    # ..... from being destroyed, so they get called referencing the old code
    # ..... and fail.
    # print("HINT: Objects referencing %r::%r (%r) :" % (typename, obj, name))
    for ref in gc.get_referrers(obj):
        if isinstance(ref, dict):
            x: List[str] = []
            for k, v in ref.items():
                if v is obj:
                    k = "(**)" + k
                x.insert(0, k)
            # print("    - dict:", repr(x), gc.get_referrers(ref))
        else:
            if "<frame" in str(repr(ref)):
                continue
def delete_modules(self): ''' Clean up after any modules created by this Document when its session is destroyed. ''' from gc import get_referrers from types import FrameType log.debug("Deleting %s modules for %s" % (len(self._modules), self)) for module in self._modules: # Modules created for a Document should have three referrers at this point: # # - sys.modules # - self._modules # - a frame object # # This function will take care of removing these expected references. # # If there are any additional referrers, this probably means the module will be # leaked. Here we perform a detailed check that the only referrers are expected # ones. Otherwise issue an error log message with details. referrers = get_referrers(module) referrers = [x for x in referrers if x is not sys.modules] # lgtm [py/comparison-using-is] referrers = [x for x in referrers if x is not self._modules] # lgtm [py/comparison-using-is] referrers = [x for x in referrers if not isinstance(x, FrameType)] if len(referrers) != 0: log.error("Module %r has extra unexpected referrers! This could indicate a serious memory leak. Extra referrers: %r" % (module, referrers)) # remove the reference from sys.modules if module.__name__ in sys.modules: del sys.modules[module.__name__] # remove the reference from self._modules self._modules = None
def execute(self, client, recv): self.ircd.dnsblCache = {} self.ircd.throttle = {} self.ircd.hostcache = {} # for w in dict(self.ircd.whowas): # del self.ircd.whowas[w] gc.collect() del gc.garbage[:] if self.ircd.forked: return try: print('-' * 25) objgraph.show_most_common_types(limit=20) print('-' * 25) if len(recv) > 1: type = recv[1] print('Showing type: {}'.format(type)) obj = objgraph.by_type(type) print('Amount: {}'.format(len(obj))) # ref = objgraph.show_backrefs([obj], max_depth=10) for r in obj: # print('-'*10) # print(r) # if ref: # print('Ref: {}'.format(ref)) # objgraph.show_refs([obj], filename='/home/y4kuzi/Desktop/NewIRCd/sample-graph.png') # objgraph.show_backrefs([obj], filename='/home/y4kuzi/Desktop/NewIRCd/sample-backref-graph.png') ### Socket debugger. if type == 'socket': inuse = list( filter(lambda s: s.socket == r, self.ircd.users + self.ircd.servers)) # print('Socket is in use? {}'.format(bool(inuse))) if not inuse and r not in self.ircd.listen_socks: with open('ref.txt', 'a') as f: f.write('-' * 10 + '\n') f.write(str(r) + '\n') f.write('Ref:\n') for ref in gc.get_referrers(r): f.write(str(ref) + '\n') try: r.close() except: pass del r ### List debugger ''' if type == 'list': if len(r) == 0: continue with open('ref.txt', 'a') as f: f.write('-'*10+'\n') f.write(str(r)+'\n') f.write('Ref:\n') for ref in gc.get_referrers(r): f.write(str(ref)+'\n') ''' # if type == 'module': # print('-'*10) # print(r) print('Growth (if any):') objgraph.show_growth(limit=10) except: pass
def snapshot(): # This object is created locally and held by the same referrers # that also hold the now-recreated survivor. # # By comparing its referrers to the surviver's referrers, we can # filter out this tool's entry in the already hard to read list of # objects that kept the survivor alive. canary = object() survivor = weaksurvivor() if survivor is None: return None all_referrers = gc.get_referrers(survivor) canary_referrers = gc.get_referrers(canary) if canary_referrers: referrers = [ r for r in all_referrers if r not in canary_referrers ] assert len(all_referrers) == len( referrers ) + 1, "Canary to filter out the debugging tool's reference did not work.\nReferrers:\n%s\ncanary_referrers:\n%s" % ( pprint.pformat(all_referrers), pprint.pformat(canary_referrers)) else: # There is probably an optimization around that makes the # current locals not show up as referrers. It is hoped (and # least with the current Python it works) that this also works # for the survivor, so it's already not in the list. referrers = all_referrers def _format_any(frame, survivor_id): if str(type(frame)) == "<class 'frame'>": return _format_frame(frame, survivor_id) if isinstance(frame, dict): # If it's a __dict__, it'd be really handy to know whose dict that is framerefs = gc.get_referrers(frame) owners = [ o for o in framerefs if getattr(o, "__dict__", None) is frame ] if owners: return pprint.pformat( frame) + "\n ... which is the __dict__ of %s" % ( owners, ) return pprint.pformat(frame) def _format_frame(frame, survivor_id): return "%s as %s in %s" % (frame, " / ".join( k for (k, v) in frame.f_locals.items() if id(v) == survivor_id), frame.f_code) # can't use survivor in list comprehension, or it would be moved # into a builtins.cell rather than a frame, and that won't spew out # the details _format_frame can extract survivor_id = id(survivor) referrer_strings = [_format_any(x, survivor_id) for x in referrers] formatted_survivor = pprint.pformat(vars(survivor)) return "Survivor found: %r\nReferrers of the survivor:\n*"\ " %s\n\nSurvivor properties: %s" % ( survivor, "\n* ".join(referrer_strings), formatted_survivor)
def __getitem__(self, key, table=None): # # Construct a weakref handler for events. # # If the referent doesn't exist, raise the # exception to remove the handler from the # chain. # def wr_handler(wr, fname, *argv): try: return getattr(wr(), fname)(*argv) except: # check if the weakref became invalid if wr() is None: raise InvalidateHandlerException() raise iclass = self.classes[table or self.table] if self.match_src: match_src = [x for x in self.match_src] match_pairs = dict(self.match_pairs) else: match_src = [] match_pairs = {} if self.constraints: match_src.insert(0, self.constraints) for cskey, csvalue in self.constraints.items(): match_pairs[cskey] = cskey ret = iclass(self, key, match_src=match_src, match_pairs=match_pairs) # rtnl_object.key() returns a dcitionary that can not # be used as a cache key. Create here a tuple from it. # The key order guaranteed by the dictionary. cache_key = tuple(ret.key.items()) # Iterate all the cache to remove unused and clean # (without any started transaction) objects. for ckey in tuple(self.cache): # Skip the current cache_key to avoid extra # cache del/add records in the logs if ckey == cache_key: continue # The number of referrers must be > 1, the first # one is the cache itself rcount = len(gc.get_referrers(self.cache[ckey])) # The number of changed rtnl_object fields must # be 0 which means that no transaction is started ccount = len(self.cache[ckey].changed) if rcount == 1 and ccount == 0: self.log.debug('cache del %s' % (ckey, )) del self.cache[ckey] # Cache only existing objects if ret.state == 'system': if cache_key in self.cache: self.log.debug('cache hit %s' % (cache_key, )) # Explicitly get rid of the created object del ret # The object from the cache has already # registered callbacks, simply return it ret = self.cache[cache_key] return ret else: self.log.debug('cache add %s' % (cache_key, )) # Otherwise create a cache entry self.cache[cache_key] = ret wr = weakref.ref(ret) for event, fname in ret.event_map.items(): # # Do not trust the implicit scope and pass the # weakref explicitly via partial # (self.ndb.register_handler(event, partial(wr_handler, wr, fname))) return ret
def dump_references(log, instances, exclude=[]): import gc import inspect frame = inspect.currentframe() exclude.append(instances) exclude.append([frame]) exclude = [ [frame], ] rexclude = exclude np = sys.modules.get("numpy") if np: rexclude = [] skip_types = (np.ndarray, np.generic) for v in exclude: rexclude.append( tuple(x for x in v if not isinstance(x, skip_types))) del exclude gc.collect() try: log.info("dump references for %i instances:", len(instances)) for j, instance in enumerate(instances): referrers = tuple(x for x in gc.get_referrers(instance) if not any(y for y in rexclude if x in y)) log.info("* %i : %s, type=%s, with %i referers", j, repr_ellipsized(str(instance)), type(instance), len(referrers)) j += 1 for i, r in enumerate(referrers): log.info(" [%s] in %s", i, type(r)) if inspect.isframe(r): log.info(" frame info: %s", str(inspect.getframeinfo(r))[:1024]) elif isinstance(r, (list, tuple)): listref = gc.get_referrers(r) lr = len(r) if lr <= 128: log.info(" %i %s items: %s", lr, type(r), csv(repr_ellipsized(str(x)) for x in r)) elif lr < 512: log.info(" %i %s items: %s..", lr, type(r), repr_ellipsized(csv(r))) else: log.info(" %i %s items", lr, type(r)) ll = len(listref) if ll < 128: log.info(" %i referrers: %s", ll, csv(repr_ellipsized(str(x)) for x in listref)) elif ll < 512: log.info(" %i referrers: %s", ll, repr_ellipsized(csv(listref))) else: log.info(" %i referrers", ll) elif isinstance(r, dict): if len(r) > 64: log.info(" %s items: %s", len(r), repr_ellipsized(str(r))) continue for k, v in r.items(): if k is instance: log.info(" key with value=%s", repr_ellipsized(str(v))) elif v is instance: log.info(" for key=%s", repr_ellipsized(str(k))) else: log.info(" %s", repr_ellipsized(str(r))) finally: del frame
def run(self): # set of ids of objects that we know are always attached to builtin; # if an object is attached to one of these, it's attached to builtin # this cuts down on the amount of searching that needs to be done builtinIds = set() builtinIds.add(id(builtins.__dict__)) try: builtinIds.add(id(base)) builtinIds.add(id(base.cr)) builtinIds.add(id(base.cr.doId2do)) except: pass try: builtinIds.add(id(simbase)) builtinIds.add(id(simbase.air)) builtinIds.add(id(simbase.air.doId2do)) except: pass try: builtinIds.add(id(uber)) builtinIds.add(id(uber.air)) builtinIds.add(id(uber.air.doId2do)) except: pass while True: yield None objects = list(messenger._Messenger__objectEvents.keys()) assert self.notify.debug('%s objects in the messenger' % len(objects)) for object in objects: yield None assert self.notify.debug('---> new object: %s' % itype(object)) # try to find a path to builtin that doesn't involve the messenger # lists of objects for breadth-first search # iterate through one list while populating other list objList1 = [] objList2 = [] curObjList = objList1 nextObjList = objList2 visitedObjIds = set() # add the id of the object, and the messenger containers so that # the search for builtin will stop at the messenger; we're looking # for any path to builtin that don't involve the messenger visitedObjIds.add(id(object)) visitedObjIds.add(id(messenger._Messenger__objectEvents)) visitedObjIds.add(id(messenger._Messenger__callbacks)) nextObjList.append(object) foundBuiltin = False # breadth-first search, go until you run out of new objects or you find __builtin__ while len(nextObjList): if foundBuiltin: break # swap the lists, prepare for the next pass curObjList = nextObjList nextObjList = [] assert self.notify.debug( 'next search iteration, num objects: %s' % len(curObjList)) for curObj in curObjList: if foundBuiltin: break yield None referrers = gc.get_referrers(curObj) assert self.notify.debug( 'curObj: %s @ %s, %s referrers, repr=%s' % (itype(curObj), hex(id(curObj)), len(referrers), fastRepr(curObj, maxLen=2))) for referrer in referrers: #assert self.notify.debug('referrer: %s' % itype(curObj)) yield None refId = id(referrer) # don't go in a loop if refId in visitedObjIds: #assert self.notify.debug('already visited') continue # don't self-reference if referrer is curObjList or referrer is nextObjList: continue if refId in builtinIds: # not a leak, there is a path to builtin that does not involve the messenger #assert self.notify.debug('object has another path to __builtin__, it\'s not a messenger leak') foundBuiltin = True break else: visitedObjIds.add(refId) nextObjList.append(referrer) if not foundBuiltin: self.notify.warning( '%s is referenced only by the messenger' % (itype(object)))
def objectSize(obj, ignore=None, verbose=False, depth=0, recursive=False): """Guess how much memory an object is using""" ignoreTypes = [ 'MethodType', 'UnboundMethodType', 'BuiltinMethodType', 'FunctionType', 'BuiltinFunctionType' ] ignoreTypes = [ getattr(types, key) for key in ignoreTypes if hasattr(types, key) ] ignoreRegex = re.compile('(method-wrapper|Flag|ItemChange|Option|Mode)') if ignore is None: ignore = {} indent = ' ' * depth try: hash(obj) hsh = obj except: hsh = "%s:%d" % (str(type(obj)), id(obj)) if hsh in ignore: return 0 ignore[hsh] = 1 try: size = sys.getsizeof(obj) except TypeError: size = 0 if isinstance(obj, ndarray): try: size += len(obj.data) except: pass if recursive: if type(obj) in [list, tuple]: if verbose: print(indent + "list:") for o in obj: s = objectSize(o, ignore=ignore, verbose=verbose, depth=depth + 1) if verbose: print(indent + ' +', s) size += s elif isinstance(obj, dict): if verbose: print(indent + "list:") for k in obj: s = objectSize(obj[k], ignore=ignore, verbose=verbose, depth=depth + 1) if verbose: print(indent + ' +', k, s) size += s #elif isinstance(obj, QtCore.QObject): #try: #childs = obj.children() #if verbose: #print indent+"Qt children:" #for ch in childs: #s = objectSize(obj, ignore=ignore, verbose=verbose, depth=depth+1) #size += s #if verbose: #print indent + ' +', ch.objectName(), s #except: #pass #if isinstance(obj, types.InstanceType): gc.collect() if verbose: print(indent + 'attrs:') for k in dir(obj): if k in ['__dict__']: continue o = getattr(obj, k) if type(o) in ignoreTypes: continue strtyp = str(type(o)) if ignoreRegex.search(strtyp): continue #if isinstance(o, types.ObjectType) and strtyp == "<type 'method-wrapper'>": #continue #if verbose: #print indent, k, '?' refs = [ r for r in gc.get_referrers(o) if type(r) != types.FrameType ] if len(refs) == 1: s = objectSize(o, ignore=ignore, verbose=verbose, depth=depth + 1) size += s if verbose: print(indent + " +", k, s) #else: #if verbose: #print indent + ' -', k, len(refs) return size
def rebuild(module, doLog=1): """ Reload a module and do as much as possible to replace its references. """ global lastRebuild lastRebuild = time.time() if hasattr(module, "ALLOW_TWISTED_REBUILD"): # Is this module allowed to be rebuilt? if not module.ALLOW_TWISTED_REBUILD: raise RuntimeError("I am not allowed to be rebuilt.") if doLog: log.msg(f"Rebuilding {str(module.__name__)}...") # Safely handle adapter re-registration from twisted.python import components components.ALLOW_DUPLICATES = True d = module.__dict__ _modDictIDMap[id(d)] = module newclasses = {} classes = {} functions = {} values = {} if doLog: log.msg(f" (scanning {str(module.__name__)}): ") for k, v in d.items(): if issubclass(type(v), types.FunctionType): if v.__globals__ is module.__dict__: functions[v] = 1 if doLog: log.logfile.write("f") log.logfile.flush() elif isinstance(v, type): if v.__module__ == module.__name__: newclasses[v] = 1 if doLog: log.logfile.write("o") log.logfile.flush() values.update(classes) values.update(functions) fromOldModule = values.__contains__ newclasses = newclasses.keys() classes = classes.keys() functions = functions.keys() if doLog: log.msg("") log.msg(f" (reload {str(module.__name__)})") # Boom. reload(module) # Make sure that my traceback printing will at least be recent... linecache.clearcache() if doLog: log.msg(f" (cleaning {str(module.__name__)}): ") for clazz in classes: if getattr(module, clazz.__name__) is clazz: log.msg( f"WARNING: class {reflect.qual(clazz)} not replaced by reload!" ) else: if doLog: log.logfile.write("x") log.logfile.flush() clazz.__bases__ = () clazz.__dict__.clear() clazz.__getattr__ = __injectedgetattr__ clazz.__module__ = module.__name__ if newclasses: import gc for nclass in newclasses: ga = getattr(module, nclass.__name__) if ga is nclass: log.msg("WARNING: new-class {} not replaced by reload!".format( reflect.qual(nclass))) else: for r in gc.get_referrers(nclass): if getattr(r, "__class__", None) is nclass: r.__class__ = ga if doLog: log.msg("") log.msg(f" (fixing {str(module.__name__)}): ") modcount = 0 for mk, mod in sys.modules.items(): modcount = modcount + 1 if mod == module or mod is None: continue if not hasattr(mod, "__file__"): # It's a builtin module; nothing to replace here. continue if hasattr(mod, "__bundle__"): # PyObjC has a few buggy objects which segfault if you hash() them. # It doesn't make sense to try rebuilding extension modules like # this anyway, so don't try. continue changed = 0 for k, v in mod.__dict__.items(): try: hash(v) except Exception: continue if fromOldModule(v): if doLog: log.logfile.write("f") log.logfile.flush() nv = latestFunction(v) changed = 1 setattr(mod, k, nv) if doLog and not changed and ((modcount % 10) == 0): log.logfile.write(".") log.logfile.flush() components.ALLOW_DUPLICATES = False if doLog: log.msg("") log.msg(f" Rebuilt {str(module.__name__)}.") return module
def actionListobj(self): import gc import sys self.sendHeader() if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local: yield "This function is disabled on this proxy" return # No more if not in debug mode if not config.debug: yield "Not in debug mode" return type_filter = self.get.get("type") yield """ <style> * { font-family: monospace; white-space: pre } table * { text-align: right; padding: 0px 10px } </style> """ yield "Listing all %s objects in memory...<br>" % html.escape(type_filter) ref_count = {} objs = gc.get_objects() for obj in objs: obj_type = str(type(obj)) if obj_type != type_filter: continue refs = [ ref for ref in gc.get_referrers(obj) if hasattr(ref, "__class__") and ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"] ] if not refs: continue try: yield "%.1fkb <span title=\"%s\">%s</span>... " % ( float(sys.getsizeof(obj)) / 1024, html.escape(str(obj)), html.escape(str(obj)[0:100].ljust(100)) ) except: continue for ref in refs: yield " [" if "object at" in str(ref) or len(str(ref)) > 100: yield str(ref.__class__.__name__) else: yield str(ref.__class__.__name__) + ":" + html.escape(str(ref)) yield "] " ref_type = ref.__class__.__name__ if ref_type not in ref_count: ref_count[ref_type] = [0, 0] ref_count[ref_type][0] += 1 # Count ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024 # Size yield "<br>" yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (len(ref_count), sum([stat[1] for stat in list(ref_count.values())])) for obj, stat in sorted(list(ref_count.items()), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0], html.escape(str(obj))) gc.collect() # Implicit grabage collection
updates = {grad: W}
function([], updates=updates)

gc.collect()
gc.collect()
gc.collect()
#after = theano.sandbox.cuda.cuda_ndarray.cuda_ndarray.mem_info()

print 'references to initial array: ', sys.getrefcount(init_array)
print "ALL DEALLOCS AFTER HERE ARE TOO LATE"
print '--------------------------------'
culprits = gc.get_referrers(init_array)
for culprit in culprits:
    if culprit is locals():
        print '\t<locals()>'
    else:
        print '\t', culprit
print '--------------------------------'

skip = {
    '__call__': 'would cause infinite loop (property?)',
    '__class__': 'would cause infinite loop (property?)',
    '__cmp__': 'would cause infinite loop (property?)',
    '__reduce__': 'would cause infinite loop (property?)',
    '__reduce_ex__': 'would cause infinite loop (property?)',
def test_no_cycle_refs(self):
    vmgr = VersionMgr(W3AF_LOCAL_PATH, MagicMock(return_value=None))
    self.assertEqual(len(gc.get_referrers(vmgr)), 1)
def __del__(self):
    # When the code is about to end, the class Employee is no longer required
    # and is ready to be destroyed.  Before the instance is destroyed, the
    # __del__ method is called automatically.
    # print(self)
    class_name = self.__class__.__name__
    print('{} уничтожен'.format(self.name))


emp1 = Employee("Андрей", 2000)
emp2 = Employee("Мария", 5000)
emp3 = Employee("Фаина", 7000)

print(sys.getrefcount(emp2))
print(len(gc.get_referrers(emp2)))
print(gc.get_referrers(emp2))
print('----')
print(locals())
print('****----')
d = dict(globals(), **locals())
print([ref for ref in d if d[ref] is emp2])
print(sys.getrefcount(emp2))  # getrefcount also creates a reference
print('----')
print(sys.getrefcount(emp2))
del emp2
# del statement removes a variable and its reference (not the object itself).
# This is often useful when working in Jupyter notebooks because all cell
# variables use the global scope.
print('====')
def make_patches(self): """Set self.patches to a list of mock._patch objects which wrap our target function.""" primary_patch = mock.patch(self.target) # Replace the target with a wrapper. original, local = primary_patch.get_original() if isinstance(original, types.FunctionType): base = original elif isinstance(original, (staticmethod, classmethod)): base = original.__func__ elif isinstance(original, property): base = original.fget else: raise TypeError( "Cannot probe: %s is not a function." % (repr(self.target),) ) varnames = self.maybe_unwrap(base).__code__.co_varnames @functools.wraps(base) def probe_wrapper(*args, **kwargs): now = datetime.datetime.utcnow() hotspots = HotspotsFinder() instruments_by_event = {"call": [], "return": [], "end": []} for I in self.instruments.itervalues(): if I.expires and now > I.expires: continue if I.check_call(self, *args, **kwargs): instruments_by_event[I.event].append(I) if I.event in ("call", "return"): if "hotspots" in I.value or "hotspots" in ( I.custom.get("tags") or "" ): hotspots.enabled = True target_obj, target_func_name = self.target.rsplit(".", 1) is_unwrapped = base.__code__.co_name == target_func_name if instruments_by_event["end"]: # We have instruments that require evaluation in the local # context of the function. Call sys.settrace() to gain access. predicate = hunter.When( hunter.Query( # Only trace returns (this will include exceptions!)... kind="return", # ...and only in the given function... function=target_func_name, # ...(no deeper). depth=0, ) if is_unwrapped else hunter.Query( # Only trace returns (this will include exceptions!)... kind="return", # ...and only in the given function... function=target_func_name, # ...but we don't know how many times it's been wrapped. # Use the module instead as an approximate match. # This may catch other functions with the same name # in the same module, but not much we can do about # that without a custom Cython Query. module_in=target_obj, ), TraceHandler(self, instruments_by_event["end"]), ) tracer = hunter.Tracer( # There's no need to call threading.settrace() because # a) we're targeting a function we're about to call # in the same thread, # b) we're going to undo it immediately after, and # c) it would collide with other threads if they did # the same concurrently. threading_support=False ).trace(predicate) elif hotspots.enabled: # We have instruments that require timing internal lines. # Call sys.settrace() to gain access. predicate = hunter.When( hunter.Query( # Only trace lines... kind="line", # ...and only in the given function... function=target_func_name, # ...(no deeper). depth=1, ) if is_unwrapped else hunter.Query( # Only trace lines... kind="line", # ...and only in the given function... function=target_func_name, # ...but we don't know how many times it's been wrapped. # Use the module instead as an approximate match. # This may catch other functions with the same name # in the same module, but not much we can do about # that without a custom Cython Query. module_in=target_obj, ), hotspots, ) tracer = hunter.Tracer( # There's no need to call threading.settrace() because # a) we're targeting a function we're about to call # in the same thread, # b) we're going to undo it immediately after, and # c) it would collide with other threads if they did # the same concurrently. 
threading_support=False ).trace(predicate) else: tracer = None try: if instruments_by_event["call"] or instruments_by_event["return"]: start = time.time() _locals = { "start": start, "now": now, "args": args, "kwargs": kwargs, "frame": sys._getframe(), } # Add positional args to locals by name. for i, argname in enumerate(varnames[: len(args)]): if argname not in ("args", "kwargs"): _locals[argname] = args[i] # Add kwargs to locals _locals.update(kwargs) for instrument in instruments_by_event["call"]: try: instrument.fire(instrument.mgr.global_namespace, _locals) except: try: instrument.handle_error(self) except: traceback.print_exc() # Execute the base function and obtain its result. try: result = base(*args, **kwargs) except: result = sys.exc_info()[1] raise finally: if hotspots.enabled: hotspots.finish() _locals["hotspots"] = hotspots if instruments_by_event["return"]: end = time.time() elapsed = end - start _locals.update( {"result": result, "end": end, "elapsed": elapsed} ) for instrument in instruments_by_event["return"]: try: instrument.fire(instrument.mgr.global_namespace, _locals) except: try: instrument.handle_error(self) except: traceback.print_exc() return result finally: if tracer is not None: tracer.stop() if isinstance(original, property): # We can't patch original.fget directly because it's read-only, # so we replace the whole property with a new one, passing our # probe_wrapper as its fget. # At this time, we only patch fget. If there's enough demand, # we could do all three in the future, but then that would take # three probe_wrapper functions, and what the instruments do # with three instead of one could be very confusing. primary_patch.new = property( probe_wrapper, original.fset, original.fdel, original.__doc__ ) else: if isinstance(original, staticmethod): probe_wrapper = staticmethod(probe_wrapper) elif isinstance(original, classmethod): probe_wrapper = classmethod(probe_wrapper) primary_patch.new = probe_wrapper patches = {0: primary_patch} # Add patches for any other modules/classes which have # the target as an attribute, or "registry" dicts which have # the target as a value. _resolved_target = primary_patch.getter() for ref in gc.get_referrers(original): if not isinstance(ref, dict): continue names = [k for k, v in ref.items() if v is original] for parent in gc.get_referrers(ref): if parent is _resolved_target or parent is primary_patch: continue if getattr(parent, "__dict__", None) is ref: # An attribute of a module or class or instance. for name in names: patch_id = len(patches) patch = WeakMethodPatch( self.make_getter(patch_id, parent), name, probe_wrapper ) patches[patch_id] = patch else: for gpa in gc.get_referrers(parent): if getattr(gpa, "__dict__", None) is parent: # A member of a dict which is # an attribute of a module or class or instance. for name in names: patch_id = len(patches) patch = DictPatch(ref, name, probe_wrapper) patches[patch_id] = patch break self.patches = patches
def print_path(o): prefix = "" seen = set() queue = [] objects = [] for _i in range(30): objects.append(o) print(prefix + str(id(o)), type(o), end=' ') try: if isinstance(o, dict) and "__name__" in o: print("with name", o["__name__"]) else: print(repr(o)) # [:1000] except: print("Bad repr.") found = False if isinstance(o, types.ModuleType): if not queue: break o, prefix = queue.pop() continue if isinstance(o, weakref.WeakKeyDictionary): for k, v in o.data.items(): if v is objects[-4]: k = k() seen.add(id(k)) queue.append((k, prefix + " (key) ")) for i in gc.get_referrers(o): if i is objs or i is objects: continue if id(i) in seen: continue if isinstance(i, types.FrameType): continue seen.add(id(i)) queue.append((i, prefix + " ")) found = True break if not queue: break if not found: print("<no parent, popping>") o, prefix = queue.pop()
def test_engine_reference_cycle(self, simple_index):
    # GH27585
    index = simple_index
    nrefs_pre = len(gc.get_referrers(index))
    index._engine
    assert len(gc.get_referrers(index)) == nrefs_pre
def findRefPath(startObj, endObj, maxLen=8, restart=True, seen=None, path=None, ignore=None): """Determine all paths of object references from startObj to endObj""" refs = [] if path is None: path = [endObj] if ignore is None: ignore = {} if seen is None: seen = {} ignore[id(sys._getframe())] = None ignore[id(path)] = None ignore[id(seen)] = None prefix = " " * (8 - maxLen) #print prefix + str(map(type, path)) prefix += " " if restart: #gc.collect() seen.clear() gc.collect() newRefs = [r for r in gc.get_referrers(endObj) if id(r) not in ignore] ignore[id(newRefs)] = None #fo = allFrameObjs() #newRefs = [] #for r in gc.get_referrers(endObj): #try: #if r not in fo: #newRefs.append(r) #except: #newRefs.append(r) for r in newRefs: #print prefix+"->"+str(type(r)) if type(r).__name__ in ['frame', 'function', 'listiterator']: #print prefix+" FRAME" continue try: if any(r is x for x in path): #print prefix+" LOOP", objChainString([r]+path) continue except: print(r) print(path) raise if r is startObj: refs.append([r]) print(refPathString([startObj] + path)) continue if maxLen == 0: #print prefix+" END:", objChainString([r]+path) continue ## See if we have already searched this node. ## If not, recurse. tree = None try: cache = seen[id(r)] if cache[0] >= maxLen: tree = cache[1] for p in tree: print(refPathString(p + path)) except KeyError: pass ignore[id(tree)] = None if tree is None: tree = findRefPath(startObj, r, maxLen - 1, restart=False, path=[r] + path, ignore=ignore) seen[id(r)] = [maxLen, tree] ## integrate any returned results if len(tree) == 0: #print prefix+" EMPTY TREE" continue else: for p in tree: refs.append(p + [r]) #seen[id(r)] = [maxLen, refs] return refs
def run_test(args): if args.generate: if args.dir == None: print('Please specify the desired data directory using --data_dir') return generate_h5_file(args.n_samples, args.dir) generate_indices(floor(args.n_samples * 0.9), floor(args.n_samples * 0.1), args.n_samples, args.dir) if args.use_triumf_path: trainval_path = [ '/fast_scratch/WatChMaL/data/IWCDmPMT_4pi_fulltank_9M_splits_CNN/IWCDmPMT_4pi_fulltank_9M_trainval.h5' ] else: trainval_path = [os.path.join(args.dir, args.h5_name)] train_dset = Test_Dset(trainval_path[0], use_mem_map=args.use_mem_map, use_tables=args.use_tables, reopen_mem_map=args.reopen_mem_map, driver=args.driver, fadvise=args.fadvise) if args.no_torch: for epoch in range(args.epochs): training_idxs = np.arange(len(train_dset)) np.random.shuffle(training_idxs) start_idx = 0 end_idx = 512 for i in range(1, ceil(len(train_dset) / 512)): batch_start = 512 * (i - 1) batch_end = 512 * i if 512 * i < len(train_dset) else len( train_dset) data = train_dset[training_idxs[batch_start:batch_end]] print( "Epoch: {} Batch: {} Object Size: {} Event_data Refs: {} Data: {} File Size: {}" .format(epoch + 1, i, sys.getsizeof(data), len(gc.get_referrers(train_dset.event_data)), sys.getsizeof(data), sys.getsizeof(train_dset.f))) # pprint_ntuple(psutil.swap_memory()) if args.del_data: del data else: train_indices = [i for i in range(len(train_dset))] train_loader = DataLoader(train_dset, batch_size=512, shuffle=False, pin_memory=False, sampler=SubsetRandomSampler(train_indices), num_workers=args.num_workers) start = time.time() for epoch in range(args.epochs): for i, data in enumerate(train_loader): print(time.time() - start) print( "Epoch: {} Batch: {} Object Size: {} Event_data Refs: {} Data: {} File Size: {}" .format(epoch + 1, i, sys.getsizeof(data), len(gc.get_referrers(train_dset.event_data)), sys.getsizeof(data), sys.getsizeof(train_dset.f))) # pprint_ntuple(psutil.swap_memory()) if args.del_data: del data start = time.time()
def updateClass(old, new, debug): ## Track town all instances and subclasses of old refs = gc.get_referrers(old) for ref in refs: try: if isinstance(ref, old) and ref.__class__ is old: ref.__class__ = new if debug: print(" Changed class for %s" % safeStr(ref)) elif inspect.isclass(ref) and issubclass( ref, old) and old in ref.__bases__: ind = ref.__bases__.index(old) ## Does not work: #ref.__bases__ = ref.__bases__[:ind] + (new,) + ref.__bases__[ind+1:] ## reason: Even though we change the code on methods, they remain bound ## to their old classes (changing im_class is not allowed). Instead, ## we have to update the __bases__ such that this class will be allowed ## as an argument to older methods. ## This seems to work. Is there any reason not to? ## Note that every time we reload, the class hierarchy becomes more complex. ## (and I presume this may slow things down?) newBases = ref.__bases__[:ind] + ( new, old) + ref.__bases__[ind + 1:] try: ref.__bases__ = newBases except TypeError: print(" Error setting bases for class %s" % ref) print(" old bases: %s" % repr(ref.__bases__)) print(" new bases: %s" % repr(newBases)) raise if debug: print(" Changed superclass for %s" % safeStr(ref)) #else: #if debug: #print " Ignoring reference", type(ref) except Exception: print("Error updating reference (%s) for class change (%s -> %s)" % (safeStr(ref), safeStr(old), safeStr(new))) raise ## update all class methods to use new code. ## Generally this is not needed since instances already know about the new class, ## but it fixes a few specific cases (pyqt signals, for one) for attr in dir(old): oa = getattr(old, attr) if (py3 and inspect.isfunction(oa)) or inspect.ismethod(oa): # note python2 has unbound methods, whereas python3 just uses plain functions try: na = getattr(new, attr) except AttributeError: if debug: print( " Skipping method update for %s; new class does not have this attribute" % attr) continue ofunc = getattr( oa, '__func__', oa) # in py2 we have to get the __func__ from unbound method, nfunc = getattr(na, '__func__', na) # in py3 the attribute IS the function if ofunc is not nfunc: depth = updateFunction(ofunc, nfunc, debug) if not hasattr(nfunc, '__previous_reload_method__'): nfunc.__previous_reload_method__ = oa # important for managing signal connection #oa.__class__ = new ## bind old method to new class ## not allowed if debug: extra = "" if depth > 0: extra = " (and %d previous versions)" % depth print(" Updating method %s%s" % (attr, extra)) ## And copy in new functions that didn't exist previously for attr in dir(new): if attr == '__previous_reload_version__': continue if not hasattr(old, attr): if debug: print(" Adding missing attribute %s" % attr) setattr(old, attr, getattr(new, attr)) ## finally, update any previous versions still hanging around.. if hasattr(old, '__previous_reload_version__'): updateClass(old.__previous_reload_version__, new, debug)
        label='GC collection count %s' % i,
        description='Current collection counts',
        numerator='collections',
    )

global_registry().gauge_callback(
    name='python.gc.objects.count',
    callback=lambda: len(gc.get_objects()),
    label='GC tracked object count',
    description='Number of objects being tracked by the garbage collector',
    numerator='objects',
)

global_registry().gauge_callback(
    name='python.gc.referrers.count',
    callback=lambda: len(gc.get_referrers()),
    label='GC tracked object referrers',
    description='Number of objects that directly refer to any objects',
    numerator='referrers',
)

global_registry().gauge_callback(
    name='python.gc.referents.count',
    callback=lambda: len(gc.get_referents()),
    label='GC tracked object referents',
    description='Number of objects that directly referred to any objects',
    numerator='referents',
)

# ------------------------------------------------------------------------------
def get_currently_executing_function_call_arguments( include_module_name: bool = False, include_caller_names: bool = False, **kwargs ) -> dict: """ :param include_module_name: bool If True, module name will be determined and included in output dictionary (default is False) :param include_caller_names: bool If True, arguments, such as "self" and "cls", if present, will be included in output dictionary (default is False) :param kwargs: :return: dict Output dictionary, consisting of call arguments as attribute "name: value" pairs. Example usage: # Gather the call arguments of the present function (include the "module_name" and add the "class_name"), filter # out the Falsy values, and set the instance "_config" variable equal to the resulting dictionary. self._config = get_currently_executing_function_call_arguments( include_module_name=True, **{ "class_name": self.__class__.__name__, }, ) filter_properties_dict(properties=self._config, inplace=True) """ cf: FrameType = currentframe() fb: FrameType = cf.f_back argvs: ArgInfo = getargvalues(fb) fc: CodeType = fb.f_code cur_func_obj: Callable = [ referer for referer in get_referrers(fc) if getattr(referer, "__code__", None) is fc and getclosurevars(referer).nonlocals.items() <= fb.f_locals.items() ][0] cur_mod = getmodule(cur_func_obj) sig: Signature = signature(cur_func_obj) params: dict = {} var_positional: dict = {} var_keyword: dict = {} for key, param in sig.parameters.items(): val: Any = argvs.locals[key] params[key] = val if param.kind == Parameter.VAR_POSITIONAL: var_positional[key] = val elif param.kind == Parameter.VAR_KEYWORD: var_keyword[key] = val bound_args: BoundArguments = sig.bind(**params) call_args: OrderedDict = bound_args.arguments call_args_dict: dict = dict(call_args) for key, value in var_positional.items(): call_args_dict[key] = value for key, value in var_keyword.items(): call_args_dict.pop(key) call_args_dict.update(value) if include_module_name: call_args_dict.update({"module_name": cur_mod.__name__}) if not include_caller_names: if call_args.get("cls"): call_args_dict.pop("cls", None) if call_args.get("self"): call_args_dict.pop("self", None) call_args_dict.update(**kwargs) return call_args_dict
def test_engine_reference_cycle(self):
    # GH27585
    index = self.create_index()
    nrefs_pre = len(gc.get_referrers(index))
    index._engine
    assert len(gc.get_referrers(index)) == nrefs_pre
def mock_constructor(target, class_name, allow_private=False): if not isinstance(class_name, str): raise ValueError( "Second argument must be a string with the name of the class.") _bail_if_private(class_name, allow_private) if isinstance(target, str): target = testslide._importer(target) target_class_id = (id(target), class_name) if target_class_id in _mocked_target_classes: original_class, mocked_class = _mocked_target_classes[target_class_id] if not getattr(target, class_name) is mocked_class: raise AssertionError( "The class {} at {} was changed after mock_constructor() mocked " "it!".format(class_name, target)) callable_mock = mocked_class.__new__ else: original_class = getattr(target, class_name) if "__new__" in original_class.__dict__: raise NotImplementedError( "Usage with classes that define __new__() is currently not supported." ) gc.collect() instances = [ obj for obj in gc.get_referrers(original_class) if type(obj) is original_class ] if instances: raise RuntimeError( "mock_constructor() can not be used after instances of {} were created: {}" .format(class_name, instances)) if not inspect.isclass(original_class): raise ValueError("Target must be a class.") elif not issubclass(original_class, object): raise ValueError("Old style classes are not supported.") callable_mock = _CallableMock(original_class, "__new__") mocked_class = _patch_and_return_mocked_class(target, class_name, target_class_id, original_class, callable_mock) def original_callable(cls, *args, **kwargs): global _init_args_from_original_callable, _init_kwargs_from_original_callable assert cls is mocked_class # Python unconditionally calls __init__ with the same arguments as # __new__ once it is invoked. We save the correct arguments here, # so that __init__ can use them when invoked for the first time. _init_args_from_original_callable = args _init_kwargs_from_original_callable = kwargs return object.__new__(cls) return _MockConstructorDSL( target=mocked_class, method="__new__", cls=mocked_class, callable_mock=callable_mock, original_callable=original_callable, )
def showRefs(obj, N=5, backrefs=set(), objs=None, P=''): """Given any Python object print a tree view of the objects which reference it. """ if N <= 0: return if gc is None: warn('GC debugging not enabled') return if objs is None: gc.collect() # list current objects # so we can reject temporaries created # by this function O = gc.get_objects() objs = [id(o) for o in O] if id(obj) in backrefs: return backrefs.add(id(obj)) print P, 'obj:', str(obj)[:60] print P, 'typ:', type(obj) if obj in gc.garbage: print P, ' Uncollectable!' for ref in gc.get_referrers(obj): t = type(ref) if id(ref) in backrefs: print P, ' <', str(ref)[:60] continue elif id(ref) not in objs: continue elif t in _method_types: backrefs.add(id(ref)) print P, ' >', str(ref)[:60] #for b in gc.get_referents(ref): #showRefs(b, N-1, backrefs, objs, P+'|') elif t in _ignore_types: pass elif t is types.FrameType: import inspect fname, line, func, ctxt, idx = inspect.getframeinfo(ref) if func == 'showRefs': continue print P, ' <<<' for f in inspect.getouterframes(ref): _, fname, line, func, ctxt, idx = f print P, ' %s() %s:%d' % (func, fname, line) #fname, line, func, ctxt, idx = inspect.getframeinfo(ref) #print P,' F %s() %s:%d'%(func,fname,line) elif hasattr(ref, '__class__'): if ref.__class__ in [list, dict, set, tuple]: if ref.__class__ is dict: print P, ' /--is:', for k, v in ref.iteritems(): if v is obj: print k, print showRefs(ref, N - 1, backrefs, objs, P + '.') else: print P, ' ', str(ref)[:60] else: print P, ' ?', str(ref)[:60]