def test_get_referents(self):
    """Exercise gc.get_referents() on list, tuple and dict inputs.

    Skipped under IronPython, where gc.get_referents is incompatible
    (see the referenced work item).
    """
    if test_support.due_to_ironpython_incompatibility(
        "http://tkbgitvstfat01:8080/WorkItemTracking/WorkItem.aspx?artifactMoniker=314470"
    ):
        return
    alist = [1, 3, 5]
    got = gc.get_referents(alist)
    got.sort()
    # A list's referents are exactly its elements.
    self.assertEqual(got, alist)
    atuple = tuple(alist)
    got = gc.get_referents(atuple)
    got.sort()
    self.assertEqual(got, alist)
    adict = {1: 3, 5: 7}
    # A dict's referents are its keys and values.
    expected = [1, 3, 5, 7]
    got = gc.get_referents(adict)
    got.sort()
    self.assertEqual(got, expected)
    # With multiple arguments the referents of all of them are combined.
    # NOTE: `[0, 0] + range(5)` implies Python 2 list semantics for range().
    got = gc.get_referents([1, 2], {3: 4}, (0, 0, 0))
    got.sort()
    self.assertEqual(got, [0, 0] + range(5))
    # Atomic objects (int, str, complex) have no referents at all.
    self.assertEqual(gc.get_referents(1, "a", 4j), [])
def test_gc():
    """A flowlet's greenlet is released once the flowlet is collected."""
    fl = flowlet(lambda: None)
    # A live greenlet exposes exactly three referents.
    assert len(get_referents(fl.greenlet)) == 3
    weak = ref(fl.greenlet)
    del fl
    collect()
    # After collection the weak reference no longer exposes any referents.
    assert len(get_referents(weak)) == 0
def test_get_referents():
    # gc.get_referents is not implemented on IronPython / Silverlight:
    # calling it there must raise NotImplementedError.
    if is_cli or is_silverlight:
        AssertError(NotImplementedError, gc.get_referents, 1, "hello", True)
        AssertError(NotImplementedError, gc.get_referents)
    else:
        # On CPython the calls simply succeed (return values unchecked).
        gc.get_referents(1, "hello", True)
        gc.get_referents()

        class TempClass: pass
        # NOTE(review): this counts the *string* 'TempClass' among the
        # class's referents -- confirm that is the intended behaviour.
        AreEqual(gc.get_referents(TempClass).count('TempClass'), 1)
def test_get_referents(self):
    # Under IronPython gc.get_referents raises NotImplementedError.
    if is_cli:
        self.assertRaises(NotImplementedError, gc.get_referents, 1, "hello", True)
        self.assertRaises(NotImplementedError, gc.get_referents)
    else:
        # On CPython the calls simply succeed (return values unchecked).
        gc.get_referents(1, "hello", True)
        gc.get_referents()

        class TempClass: pass
        # NOTE(review): counts the *string* 'TempClass' among the class's
        # referents -- confirm intent.
        self.assertEqual(gc.get_referents(TempClass).count('TempClass'), 1)
def show_edge(source, target):
    """Classify the reference from `source` to `target` and append it to
    the matching edge list (membership, elementary, inheritance, instance,
    ownership, traceback or generic reference edges).

    Edge tuples are (id(source), mapped id(target), label); `mapping`
    redirects collapsed nodes to their representatives.
    """
    # Collapsed dicts: record each value as a membership edge of the
    # dict's owner instead of the dict itself.
    if collapse_dicts and id(source) in dicts:
        for key, value in source.iteritems():
            if value is target:
                membership_edges.append((id(dicts[id(source)]), mapping.get(id(target), id(target)), quote(key)))
        return
    # Skip edges back into a node that target is mapped onto.
    if mapping.get(id(target), None) == id(source):
        return
    if isinstance(target, dict):
        source_dict = getattr(source, "__dict__", None)
        if isinstance(source_dict, types.DictProxyType):
            # Unwrap a dictproxy to the underlying dict (Python 2 trick:
            # the proxy's referent list contains the real dict).
            source_dict = gc.get_referents(gc.get_referents(source_dict))[0]
        if target is source_dict:
            elementary_edges.append((id(source), mapping.get(id(target), id(target)), "__dict__"))
            return
    if isinstance(source, type):
        if target in source.__bases__:
            # Class -> base class.
            inheritance_edges.append((id(source), mapping.get(id(target), id(target)), ""))
            return
    if getattr(source, "__class__", None) is target:
        # Instance -> its class.
        instance_edges.append((id(source), mapping.get(id(target), id(target)), ""))
        return
    elif getattr(source, "__self__", None) is target:
        # Bound method -> its instance.
        ownership_edges.append((id(source), mapping.get(id(target), id(target)), ""))
        return
    elif isinstance(source, dict):
        for key, value in source.iteritems():
            if value is target:
                elementary_edges.append((id(source), mapping.get(id(target), id(target)), quote(repr(key))))
                return
    elif isinstance(source, types.FrameType) and source.f_back is target:
        # Frame -> calling frame.
        traceback_edges.append((id(source), mapping.get(id(target), id(target)), ""))
        return
    # Containment test may raise for arbitrary objects; best effort only.
    try:
        if target in source:
            elementary_edges.append((id(source), mapping.get(id(target), id(target)), ""))
            return
    except:
        pass
    # Last resort: look for an attribute of `source` that is `target`.
    for name in dir(source):
        try:
            if target is getattr(source, name):
                elementary_edges.append((id(source), mapping.get(id(target), id(target)), name))
                return
        except:
            pass
    # Nothing more specific matched: a generic reference edge.
    reference_edges.append((id(source), mapping.get(id(target), id(target)), ""))
def test_gc_traversal(w):
    """Objects handed to `w` must show up in gc.get_referents(w),
    i.e. the wrapper participates in GC traversal (tp_traverse)."""
    sentinel = mock.Mock()
    w.set_serializer(sentinel)
    assert sentinel in gc.get_referents(w)
    sentinel = mock.Mock()
    w.set_log_fn(sentinel, 0)
    assert sentinel in gc.get_referents(w)
    w.add_function("test_method", 60, echo_function)
    # Python 2 print statement -- debug aid only.
    print gc.get_referents(w)
    # The registered-function table itself is one of w's referents.
    assert {'test_method': echo_function} in gc.get_referents(w)
def hasPcapObj():
    """Debug helper: count (and print) live capture-related objects still
    tracked by the garbage collector, to detect leaks."""
    count = 0
    for obj in gc.get_objects():
        #t = type(obj)
        if isinstance(obj, pcap.pcapObject) or \
           isinstance(obj, event.EventCallbackHandle) or \
           isinstance(obj, Capture._CaptureDescriptor) or \
           isinstance(obj, Capture):
            if isinstance(obj, Capture):
                # Referents of the referents list -- effectively the
                # Capture's direct referents, printed for inspection.
                print obj, gc.get_referents(gc.get_referents(obj))
            count += 1
    print "objects found:", count
def check_owners(target):
    """Record in `owners` the single object that "owns" `target`.

    A dict is owned by the object whose __dict__ it is; a tuple by the
    class whose __bases__/__mro__ it is.  Exactly one candidate: stored.
    More than one: owners[id(target)] is set to None (ambiguous).  None:
    nothing is recorded.
    """
    if id(target) in owners:
        return
    # Ignore this frame and the referrer list so they are not mistaken
    # for owners while scanning.
    ignore.add(id(sys._getframe()))
    sources = gc.get_referrers(target)
    ignore.add(id(sources))
    try:
        master = None
        if isinstance(target, dict):
            for source in sources:
                if id(source) in ignore:
                    continue
                # Optionally skip functions whose __globals__ is the dict.
                if skip_functions and \
                   isinstance(source, types.FunctionType) and \
                   source.__globals__ is target:
                    continue
                reference = getattr(source, "__dict__", None)
                if isinstance(reference, types.DictProxyType):
                    # Unwrap a dictproxy to the real dict (Python 2 trick).
                    reference = gc.get_referents(gc.get_referents(reference))[0]
                if target is reference:
                    if master is None:
                        master = source
                    else:
                        # Second candidate: ownership is ambiguous.
                        owners[id(target)] = None
                        return
        elif isinstance(target, tuple):
            for source in sources:
                if id(source) in ignore:
                    continue
                if getattr(source, "__bases__", None) is target or getattr(source, "__mro__", None) is target:
                    if master is None:
                        master = source
                    else:
                        owners[id(target)] = None
                        return
        if master is None:
            return
        else:
            owners[id(target)] = master
            return
    finally:
        ignore.remove(id(sources))
        ignore.remove(id(sys._getframe()))
        del sources
def debug_info(self):
    """Return one (oid, referent-count, type-name, persistence-state)
    tuple per cached persistent class and per cached data object."""
    def _row(oid, obj):
        # Summary tuple for a single cached object.
        return (oid, len(gc.get_referents(obj)), type(obj).__name__, obj._p_state)

    rows = [_row(oid, klass) for oid, klass in self.persistent_classes.items()]
    rows.extend(_row(oid, value) for oid, value in self.data.items())
    return rows
def test_get_referents(self): import gc y = 12345 z = 23456 x = [y, z] lst = gc.get_referents(x) assert y in lst and z in lst
def _inner(tar, gar = None, index = 0):
    # Depth-first search for a reference path that leads to `tar`.
    # `gar` is the node currently being expanded (None on the first call),
    # `index` is the recursion depth; TIMES caps the search depth.
    if gar == tar:
        # Target reached: mark the end of the path.
        return [('end', 'find')]
    if index > TIMES:
        # Depth limit exceeded: abandon this branch.
        return
    if gar:
        res = [gar]
    else:
        res = [tar]
    for obj in res:
        subres = []
        # Candidate edges: named attributes first, then raw gc referents
        # (labelled 'mem').
        if hasattr(obj, '__dict__'):
            subres.extend(obj.__dict__.items())
        for attr in gc.get_referents(obj):
            item = ('mem', attr)
            subres.append(item)
        for key, y in subres:
            if inspect.isbuiltin(y):
                continue
            tmp = _inner(tar, y, index + 1)
            if tmp:
                # Found below: record this edge on the way back up.
                tmp.append((key, y))
                return tmp
    return []
def dump_description_of_object_refs(o, f):
    """Write a compact description of everything `o` references to `f`.

    Named attributes are written as "<len(name)>:<name>,<hex id>,";
    unnamed referents as "0:,<hex id>,".  Each referent id is emitted
    at most once.
    """
    emitted = set()  # ids of referents already written

    # First pass: named references from the instance __dict__ (Python 2
    # iteritems; on failure the pass is skipped entirely).
    try:
        attr_iter = o.__dict__.iteritems()
    except:
        pass
    else:
        for name, value in attr_iter:
            try:
                ref_id = id(value)
                if ref_id not in emitted:
                    emitted.add(ref_id)
                    f.write("%d:" % len(name))
                    f.write(name)
                    f.write(",")
                    f.write("%0x," % ref_id)
            except:
                pass

    # Second pass: whatever else gc.get_referents() reports.
    for referent in gc.get_referents(o):
        ref_id = id(referent)
        if ref_id not in emitted:
            emitted.add(ref_id)
            f.write("0:,%0x," % ref_id)
def mem_dump(path):
    """Write a human-readable dump of every gc-tracked object larger than
    1000 bytes to the file `root`/`path` (one line per object, plus the
    keys of any large dict)."""
    dump = open(os.path.join(root, path), 'w')
    with dump as wfile:
        for obj in gc.get_objects():
            i = id(obj)
            size = sys.getsizeof(obj, 0)
            if size > 1000:
                # referrers = [id(o) for o in gc.get_referrers(obj) if hasattr(o, '__class__')]
                referents = [id(o) for o in gc.get_referents(obj) if hasattr(o, '__class__')]
                if hasattr(obj, '__class__'):
                    cls = str(obj.__class__)
                    # Prefer an explicit .name attribute, fall back to the
                    # class name.
                    if hasattr(obj, 'name'):
                        name = obj.name
                    else:
                        name = obj.__class__.__name__
                    wfile.write(
                        'id: {:<10s} name: {:<10s} class: {:<50s} size: {:<10s} referents:{}\n'.format(
                            str(i),
                            name,
                            cls,
                            str(size),
                            len(referents),
                        )
                    )
                    if isinstance(obj, dict):
                        keys = ','.join(map(str, obj.keys()))
                        wfile.write('keys: {}'.format(keys))
def test_garbage_collect_cycle(Class):
    """A self-referencing structure (cycle) must still be collectable:
    after deletion neither the structure, its atoms nor its scale remain
    tracked or end up in gc.garbage."""
    from nose.tools import assert_not_in, assert_in
    import gc
    structure = get_a_structure(Class)
    atom_ids = [id(u) for u in structure]
    structure_id = id(structure)
    scale_id = id(structure.scale)
    # add a cycle
    structure.parent_structure = structure
    # Everything is currently alive and tracked.
    for this_id in atom_ids + [structure_id, scale_id]:
        assert_in(this_id, [id(u) for u in gc.get_objects()])
    # The cycle is visible: the instance __dict__ refers back to the
    # structure itself.
    assert_in(
        structure_id,
        [id(u) for u in gc.get_referents(structure.__dict__)]
    )
    # Deletes atom and collect garbage
    # structure should then be truly destroyed, e.g. neither tracked nor in
    # unreachables.
    del structure
    gc.collect()
    for this_id in atom_ids + [structure_id, scale_id]:
        assert_not_in(this_id, [id(u) for u in gc.get_objects()])
        assert_not_in(this_id, [id(u) for u in gc.garbage])
def test_dynamic_scope_creation():
    """Test handling bad arguments when creating a dynamic scope."""
    owner = object()
    locs = sortedmap()
    globs = {}
    builtins = {}
    change = {}
    tracer = object()
    dynamicscope = DynamicScope(owner, locs, globs, builtins, change, tracer)
    # The scope's gc referents appear to mirror the constructor arguments
    # in this fixed order; the trailing None entries correspond to unset
    # internal slots -- confirm against the C traversal implementation.
    for referrent, obj in zip(gc.get_referents(dynamicscope),
                              [owner, change, tracer, locs, globs, builtins,
                               None, None]):
        assert referrent is obj

    # Each mapping/dict argument is type-checked on construction.
    with pytest.raises(TypeError) as excinfo:
        DynamicScope(owner, None, globs, builtins)
    assert 'mapping' in excinfo.exconly()
    with pytest.raises(TypeError) as excinfo:
        DynamicScope(owner, locs, None, builtins)
    assert 'dict' in excinfo.exconly()
    with pytest.raises(TypeError) as excinfo:
        DynamicScope(owner, locs, globs, None)
    assert 'dict' in excinfo.exconly()

    del dynamicscope
    gc.collect()
def __init__(self, object_seq=None, find_ancestors_count=0):
    """
    If object_seq is specified, the objects and their children are
    scanned for their relationship.

    :param object_seq: sequence of objects to scan; when None, every
        gc-tracked object is scanned instead.
    :param find_ancestors_count: how many levels of parents (referrers)
        to discover for the scanned objects.
    """
    self._fAllObjects = {}
    """ Dict with all objects found, mapping id(obj) -> obj """
    self._fChildren = collections.defaultdict(set)
    """ Dict of all id's of children, keyed by id of the parent """
    self._fParents = collections.defaultdict(set)
    """ Dict of all id's of parents, keyed by id of the child """
    if object_seq is None:
        scan_all = True
        object_seq = gc.get_objects()
    else:
        scan_all = False
    local_frame = sys._getframe()
    scan_up_queue = []
    next_scan_up_queue = []
    # Our own bookkeeping objects must never be recorded as parents.
    ignore_id_set = set([id(local_frame), id(self._fAllObjects),
                         id(object_seq), id(scan_up_queue),
                         id(next_scan_up_queue)])
    for i in xrange(len(object_seq)):
        parent = object_seq[i]
        parent_id = id(parent)
        self._fAllObjects[parent_id] = parent
        # Record every direct referent as a child of `parent`.
        children = gc.get_referents(parent)
        for child in children:
            child_id = id(child)
            self._fAllObjects[child_id] = child
            self._fChildren[parent_id].add(child_id)
            self._fParents[child_id].add(parent_id)
        # Queue objects whose ancestors should be discovered.
        if not scan_all and (isinstance(parent, dict) or find_ancestors_count > 0):
            scan_up_queue.append(parent)
    found_parents = set()
    i = 0
    max_up_levels = 2
    # Walk up to `max_up_levels` levels of referrers, breadth-first.
    while i < max_up_levels:
        i += 1
        while len(scan_up_queue) > 0:
            obj = scan_up_queue.pop()
            obj_id = id(obj)
            if obj_id not in found_parents:
                found_parents.add(obj_id)
                grand_id_list = self._FindAdditionalParents(obj, id(object_seq))
                for grand_id, grand in grand_id_list:
                    if grand_id not in ignore_id_set:
                        self._fParents[obj_id].add(grand_id)
                        self._fAllObjects[grand_id] = grand
                        # Only continue upwards while within the requested
                        # ancestor depth.
                        if i + 1 < find_ancestors_count:
                            next_scan_up_queue.append(grand)
        scan_up_queue.extend(next_scan_up_queue)
        del next_scan_up_queue[:]
def close_all(contr): logger.log(ENDSECTION, 'COMPONENTS FINALIZATION') # Force the deletion of the sensor objects if hasattr(GameLogic, 'componentDict'): for obj, component_instance in GameLogic.componentDict.items(): del obj # Force the deletion of the robot objects if hasattr(GameLogic, 'robotDict'): for obj, robot_instance in GameLogic.robotDict.items(): del obj logger.log(ENDSECTION, 'CLOSING REQUEST MANAGERS...') del GameLogic.morse_services logger.log(ENDSECTION, 'CLOSING MIDDLEWARES...') # Force the deletion of the middleware objects if hasattr(GameLogic, 'mwDict'): for obj, mw_instance in GameLogic.mwDict.items(): if mw_instance: mw_instance.cleanup() import gc # Garbage Collector logger.debug("At closing time, %s has %s references" % (mw_instance, gc.get_referents(mw_instance))) del obj if MULTINODE_SUPPORT: logger.log(ENDSECTION, 'CLOSING MULTINODE...') GameLogic.node_instance.finish_node()
def checkMemory(): """as the name says""" # pylint: disable=too-many-branches if not Debug.gc: return gc.set_threshold(0) gc.set_debug(gc.DEBUG_LEAK) gc.enable() print('collecting {{{') gc.collect() # we want to eliminate all output print('}}} done') # code like this may help to find specific things if True: # pylint: disable=using-constant-test interesting = ('Client', 'Player', 'Game') for obj in gc.garbage: if hasattr(obj, 'cell_contents'): obj = obj.cell_contents if not any(x in repr(obj) for x in interesting): continue for referrer in gc.get_referrers(obj): if referrer is gc.garbage: continue if hasattr(referrer, 'cell_contents'): referrer = referrer.cell_contents if referrer.__class__.__name__ in interesting: for referent in gc.get_referents(referrer): print('%s refers to %s' % (referrer, referent)) else: print('referrer of %s/%s is: id=%s type=%s %s' % (type(obj), obj, id(referrer), type(referrer), referrer)) print('unreachable:%s' % gc.collect()) gc.set_debug(0)
def _getObjectList(self):
    """Return (approximately) every live Python object.

    Uses sys.getobjects() when available (CPython debug builds);
    otherwise starts from gc's tracked objects plus __builtin__'s dict
    and transitively follows gc.get_referents() to a fixpoint.
    """
    if hasattr(sys, 'getobjects'):
        return sys.getobjects(0)
    else:
        gc.collect()
        gc_objects = gc.get_objects()
        objects = gc_objects
        objects.append(__builtin__.__dict__)
        nextObjList = gc_objects
        found = set()
        # Exclude our own bookkeeping containers from the results.
        found.add(id(objects))
        found.add(id(found))
        found.add(id(gc_objects))
        for obj in objects:
            found.add(id(obj))
        # Breadth-first expansion: follow referents until no new objects
        # are discovered.
        while len(nextObjList):
            curObjList = nextObjList
            nextObjList = []
            for obj in curObjList:
                refs = gc.get_referents(obj)
                for ref in refs:
                    if id(ref) not in found:
                        found.add(id(ref))
                        objects.append(ref)
                        nextObjList.append(ref)
        return objects
def test_make(self):
    """A TEST_CLS built from objects behaves like weakref.WeakSet: it
    contains the objects but holds them only weakly, so they must not
    appear among its strong gc referents."""
    obj1 = Refferee()
    obj2 = Refferee()
    obj3 = Refferee()
    obj4 = Refferee()
    obj5 = Refferee()
    west0 = weakref.WeakSet((obj1, obj2, obj3, obj4, obj5))
    west1 = self.TEST_CLS()
    west2 = self.TEST_CLS((obj1, obj2, obj3, obj4, obj5))
    for obj in west0:
        # Membership works...
        self.assertIn(obj, west2)
        # ...but neither set strongly references the members.
        self.assertNotIn(obj, gc.get_referents(west1))
        self.assertNotIn(obj, gc.get_referents(west2))
def close_all(contr):
    """ Close the open communication channels from middlewares
    Call the destructors of all component instances. This should also call
    the methods to close middlewares

    NOTE(review): `del obj` inside the loops below only unbinds the loop
    variable; it does not destroy the underlying object -- confirm the
    intended effect.
    """
    logger.log(ENDSECTION, 'COMPONENTS FINALIZATION')
    # Force the deletion of the sensor objects
    if hasattr(bge.logic, 'componentDict'):
        for obj, component_instance in bge.logic.componentDict.items():
            del obj

    # Force the deletion of the robot objects
    if hasattr(bge.logic, 'robotDict'):
        for obj, robot_instance in bge.logic.robotDict.items():
            del obj

    logger.log(ENDSECTION, 'CLOSING REQUEST MANAGERS...')
    del bge.logic.morse_services

    logger.log(ENDSECTION, 'CLOSING MIDDLEWARES...')
    # Force the deletion of the middleware objects
    if hasattr(bge.logic, 'mwDict'):
        for obj, mw_instance in bge.logic.mwDict.items():
            if mw_instance:
                mw_instance.cleanup()
                import gc # Garbage Collector
                # Log what still references the middleware after cleanup.
                logger.debug("At closing time, %s has %s references" % (mw_instance, gc.get_referents(mw_instance)))
            del obj

    if MULTINODE_SUPPORT:
        logger.log(ENDSECTION, 'CLOSING MULTINODE...')
        bge.logic.node_instance.finalize()
def recurse(obj, start, all, current_path):
    """Depth-first walk over gc referents of `obj`, printing any cycle
    that leads back to `start`.

    `all` is a dict used as a visited-set of object ids; `current_path`
    is the tuple of objects on the path from `start` to `obj`.
    """
    if show_progress:
        outstream.write('%d\r' % len(all))

    all[id(obj)] = None

    referents = gc.get_referents(obj)
    for referent in referents:
        # If we've found our way back to the start, this is
        # a cycle, so print it out
        if referent is start:
            # repr() can fail for arbitrary objects; degrade gracefully
            # to the id (and type, if printable).
            try:
                outstream.write('Cyclic reference: %r\n' % referent)
            except TypeError:
                try:
                    outstream.write('Cyclic reference: %i (%r)\n' % (id(referent), type(referent)))
                except TypeError:
                    outstream.write('Cyclic reference: %i\n' % id(referent))
            print_path(current_path)

        # Don't go back through the original list of objects, or
        # through temporary references to the object, since those
        # are just an artifact of the cycle detector itself.
        elif referent is objects or isinstance(referent, FrameType):
            continue

        # We haven't seen this object before, so recurse
        elif id(referent) not in all:
            recurse(referent, start, all, current_path + (obj,))
def ProcessClasses(self, _value_old, _value_new):
    """Replace references to class `_value_old` with `_value_new`:
    patch list items and dict keys/values, retarget instances'
    __class__, and rebase single-inheritance subclasses.

    NOTE(review): gc.get_referents(_value_old) yields what the class
    refers TO; for reload-patching one would expect get_referrers --
    confirm this is intentional.
    """
    objs = gc.get_referents(_value_old)
    for cur_object in objs:
        if isinstance(cur_object, list):
            for index, item in enumerate(cur_object):
                if item == _value_old:
                    cur_object[index] = _value_new
        elif isinstance(cur_object, dict):
            # Iterate over a copy so the dict can be mutated safely.
            obj_copy = cur_object.copy()
            for _key, _value in obj_copy.iteritems():
                # BUG (translated from original note): breaks when _key
                # and _value are of the same type.
                if _key == _value_old:
                    if _value != _value_old:
                        cur_object[_value_new] = cur_object[_key]
                    else:
                        cur_object[_value_new] = _value_new
                    del cur_object[_key]
                elif _value == _value_old:
                    cur_object[_key] = _value_new
        elif isinstance(cur_object, _value_old):
            # Live instance: point it at the new class.
            cur_object.__class__ = _value_new
        elif self.IsClass(cur_object):
            if issubclass(cur_object, _value_old):
                # Only single-inheritance subclasses are rebased.
                if len(cur_object.__bases__) == 1:
                    cur_object.__bases__ = (_value_new, )
def test_get_referents(self): import gc y = [12345] z = [23456] x = [y, z] lst = gc.get_referents(x) assert y in lst and z in lst
def newrequesthandler(self, request):
    """Memoizing wrapper around `requesthandler`.

    Results are cached by the request's hash.  A cache entry is
    [result, request copy, last-access time, approximate object count];
    entries are purged when the configured size budget is exceeded.
    """
    log.debug("dbrequest cache: query for request: %r" % request)
    requesthash = hash(request)
    log.debug("dbrequest cache: sucessfully hashed request: %d" % requesthash)
    try:
        # try to get the result from the cache
        result = self.requestcache[requesthash][0]
        # update atime
        self.requestcache[requesthash][2] = time.time()
        self.requestcachehits += 1
        log.debug("dbrequest cache: hit for request: %r" % request)
    except KeyError:
        # make a copy of request for later storage in cache
        requestcopy = copy.copy(request)
        result = requesthandler(self, request)
        # Crude size estimate: number of direct referents plus the result.
        resultnoobjects = len(gc.get_referents(result)) + 1
        self.requestcache[requesthash] = [result, requestcopy, time.time(), resultnoobjects]
        self.requestcachemisses += 1
        self.requestcachesize += resultnoobjects
        # remove least recently used items from cache
        if self.requestcachesize > self.requestcachemaxsize:
            log.debug("dbrequest cache: purging old items")
            cachebytime = [(item[2], key) for key, item in self.requestcache.items()]
            cachebytime.sort()
            # NOTE(review): [-10:] selects the 10 *most recently used*
            # entries (list is sorted ascending by atime); a true LRU
            # purge would use [:10] -- confirm intent.
            for atime, key in cachebytime[-10:]:
                self.requestcachesize -= self.requestcache[key][3]
                del self.requestcache[key]
        log.debug("db request cache miss for request: %r (%d requests and %d objects cached)" % (request, len(self.requestcache), self.requestcachesize))
    return result
def recurse(obj, start, all, current_path):
    """Depth-first walk over gc referents looking for a path back to `start`.

    :param obj: node currently being expanded
    :param start: object whose derivation_step we are hunting
    :param all: dict used as a visited-set of object ids
    :param current_path: list of objects on the path from `start` to `obj`
    """
    if show_progress:
        outstream.write("%d\r" % len(all))
    all[id(obj)] = None
    for referent in gc.get_referents(obj):
        if referent is start:
            # Back at the start: report the completed derivation_step.
            print_path(current_path)
        elif referent is objects or isinstance(referent, FrameType):
            # Skip the detector's own bookkeeping (the original object
            # list and stack frames) -- artifacts, not real references.
            continue
        elif id(referent) not in all:
            # Unvisited object: descend into it.
            recurse(referent, start, all, current_path + [obj])
def _getObjectList(self):
    """Return (approximately) every live Python object.

    Uses sys.getobjects() when available (CPython debug builds);
    otherwise expands gc's tracked objects via get_referents().
    """
    if hasattr(sys, 'getobjects'):
        return sys.getobjects(0)
    else:
        gc.collect()
        # grab gc's object list
        gc_objects = gc.get_objects()
        # use get_referents to find everything else
        objects = gc_objects
        objects.append(__builtin__.__dict__)
        nextObjList = gc_objects
        found = set()
        # Exclude our own bookkeeping containers from the results.
        found.add(id(objects))
        found.add(id(found))
        found.add(id(gc_objects))
        for obj in objects:
            found.add(id(obj))
        # repeatedly call get_referents until we can't find any more objects
        while len(nextObjList):
            curObjList = nextObjList
            nextObjList = []
            for obj in curObjList:
                refs = gc.get_referents(obj)
                for ref in refs:
                    if id(ref) not in found:
                        found.add(id(ref))
                        objects.append(ref)
                        nextObjList.append(ref)
        return objects
"""
def _recurse(o, dct, depth): if max_depth >= 0 and depth > max_depth: return for ref in get_referents(o): idr = id(ref) if not idr in dct: dct[idr] = (ref, getsizeof(ref, default=0)) _recurse(ref, dct, depth+1)
def get_total_size(obj, n=0):
    """Recursively sum the len() of `obj` and of everything it references,
    descending at most 10 levels.

    Objects without a __len__ contribute 0.

    Fixes the original `getattr(ob, '__len__', 0)`, which raised NameError
    (`ob` was never defined) and would have added a bound method rather
    than a length.
    """
    if n > 10:
        # Recursion guard: stop descending past 10 levels.
        return 0
    obj_len = len(obj) if hasattr(obj, '__len__') else 0
    for child in gc.get_referents(obj):
        obj_len += get_total_size(child, n + 1)
    return obj_len
def test_referents_circular(Class):
    """Self-referencing atoms: an atom's referents are its __dict__, the
    atom it references via `type`, and (for subclasses of Atom) the class
    itself."""
    from nose.tools import assert_equal
    from gc import get_referents
    atom0, atom1 = Class(0.1, 0.2, 0.5), Class(0.2, 0.2, 0.5)
    atom0.type = atom1
    # atom1 references itself, forming a cycle.
    atom1.type = atom1
    actual0 = set([id(u) for u in get_referents(atom0)])
    expected0 = set([id(atom0.__dict__), id(atom1)])
    if Class is not Atom:
        # Instances of subclasses also reference their class object.
        expected0.add(id(Class))
    assert_equal(actual0, expected0)
    actual1 = set([id(u) for u in get_referents(atom1)])
    expected1 = set([id(atom1.__dict__), id(atom1)])
    if Class is not Atom:
        expected1.add(id(Class))
    assert_equal(actual1, expected1)
def test_debug_info_w_persistent_class(self):
    """debug_info() reports (oid, referent count, type name, state) for a
    cached persistent *class*."""
    KEY = b'pclass'

    class pclass(object):
        _p_oid = KEY

    cache = self._makeOne()
    pclass._p_state = UPTODATE
    cache[KEY] = pclass
    gc.collect()    # pypy vs. refcounting
    info = cache.debug_info()
    self.assertEqual(len(info), 1)
    oid, refc, typ, state = info[0]
    self.assertEqual(oid, KEY)
    # The reported refcount is the number of gc referents of the class.
    self.assertEqual(refc, len(gc.get_referents(pclass)))
    self.assertEqual(typ, 'type')
    self.assertEqual(state, UPTODATE)
def _gen(self, obj, depth=0):
    """Yield (depth, object id, repr) triples for the referent tree of
    `obj`.

    Already-seen objects are reported once more with a '!' prefix but not
    descended into; ignored ids are skipped entirely.  A sentinel row is
    emitted when self.maxdepth is reached.
    """
    if self.maxdepth and depth >= self.maxdepth:
        yield depth, 0, "---- Max depth reached ----"
        return
    for referent in gc.get_referents(obj):
        key = id(referent)
        if key in self._ignore:
            continue
        if key in self.seen:
            # Seen before: flag it, do not recurse again.
            yield depth, key, "!" + get_repr(referent)
        else:
            self.seen[key] = None
            yield depth, key, get_repr(referent)
            # Descend into the newly discovered object.
            for row in self._gen(referent, depth + 1):
                yield row
def dump_descriptions_of_all_objects_with_refs(f):
    """Dump a description of every reachable object (and each object's
    direct referents) to stream `f`; return the number of distinct
    objects seen.

    The function's own bookkeeping objects (the stream, the id set, the
    locals snapshot) are excluded from the dump.
    """
    ids = set()
    ls = locals()
    for o in gc.get_objects():
        if o is f or o is ids or o is ls:
            continue
        if not id(o) in ids:
            ids.add(id(o))
            dump_description_of_object_with_refs(o, f)
        for so in gc.get_referents(o):
            # BUG FIX: the original re-tested `o` here instead of `so`,
            # so bookkeeping objects were never excluded when they
            # appeared as referents.
            if so is f or so is ids or so is ls:
                continue
            if not id(so) in ids:
                ids.add(id(so))
                dump_description_of_object_with_refs(so, f)
    ls = None # break reference cycle
    return len(ids)
def getsize(obj):
    """Sum the shallow sizes of `obj` and everything transitively
    reachable from it, counting each object once and skipping
    blacklisted types."""
    if isinstance(obj, BLACKLIST):
        raise TypeError('getsize() does not take argument of type: ' + str(type(obj)))
    visited = set()
    total = 0
    frontier = [obj]
    while frontier:
        accepted = []
        for candidate in frontier:
            # Skip blacklisted types and anything already counted.
            if isinstance(candidate, BLACKLIST) or id(candidate) in visited:
                continue
            visited.add(id(candidate))
            total += sys.getsizeof(candidate)
            accepted.append(candidate)
        # Next wave: direct referents of everything accepted this round.
        frontier = get_referents(*accepted)
    return total
def test_debug_info_w_ghost(self):
    """debug_info() reports (oid, referent count, type name, state) for a
    cached ghost object."""
    from persistent.interfaces import GHOST
    KEY = b'ghost'
    cache = self._makeOne()
    ghost = self._makePersist(state=GHOST, oid=KEY)
    cache[KEY] = ghost
    gc.collect()    # pypy vs. refcounting
    info = cache.debug_info()
    self.assertEqual(len(info), 1)
    oid, refc, typ, state = info[0]
    self.assertEqual(oid, KEY)
    # The reported refcount is the number of gc referents of the ghost.
    self.assertEqual(refc, len(gc.get_referents(ghost)))
    self.assertEqual(typ, 'DummyPersistent')
    self.assertEqual(state, GHOST)
def estimate_memory_usage(root, seen_ids=None):
    """Estimate the total memory footprint of `root`, in bytes.

    Sums sys.getsizeof() over `root` and everything transitively
    reachable through gc.get_referents(), counting each object at most
    once.

    :param root: object whose footprint is estimated.
    :param seen_ids: optional set of already-counted object ids; updated
        in place, so sharing one set across calls avoids double counting.
    :return: estimated size in bytes (0 if `root` was already counted).
    """
    from gc import get_referents
    if seen_ids is None:
        seen_ids = set()
    # Iterative DFS instead of recursion: the original recursed once per
    # reachable object and hit RecursionError on large object graphs.
    total = 0
    stack = [root]
    while stack:
        obj = stack.pop()
        obj_id = id(obj)
        if obj_id in seen_ids:
            continue
        seen_ids.add(obj_id)
        total += sys.getsizeof(obj)
        stack.extend(get_referents(obj))
    return total
def trace_one(self, req, typename, objid):
    """Render HTML rows describing the single live object with id `objid`:
    its attributes, its referrers (parents) and its referents (children).
    Returns an error row when the object is gone or its type changed."""
    rows = []
    objid = int(objid)
    all_objs = gc.get_objects()
    for obj in all_objs:
        if id(obj) == objid:
            objtype = type(obj)
            # Guard against id reuse: the object at this id must still
            # have the requested fully-qualified type name.
            if "%s.%s" % (objtype.__module__, objtype.__name__) != typename:
                rows = ["<h3>The object you requested is no longer "
                        "of the correct type.</h3>"]
            else:
                # Attributes
                rows.append('<div class="obj"><h3>Attributes</h3>')
                for k in dir(obj):
                    try:
                        v = getattr(obj, k, AttributeError)
                    except Exception as ex:
                        # Property getters may raise; show the exception.
                        v = ex
                    if type(v) not in method_types:
                        rows.append('<p class="attr"><b>%s:</b> %s</p>' % (k, get_repr(v)))
                    del v
                rows.append('</div>')

                # Referrers
                rows.append('<div class="refs"><h3>Referrers (Parents)</h3>')
                rows.append('<p class="desc"><a href="%s">Show the '
                            'entire tree</a> of reachable objects</p>'
                            % url(req, "/tree/%s/%s" % (typename, objid)))
                tree = ReferrerTree(obj, req)
                # Ignore the snapshot list so it is not reported as a parent.
                tree.ignore(all_objs)
                for depth, parentid, parentrepr in tree.walk(maxdepth=1):
                    if parentid:
                        rows.append("<p class='obj'>%s</p>" % parentrepr)
                rows.append('</div>')

                # Referents
                rows.append('<div class="refs"><h3>Referents (Children)</h3>')
                for child in gc.get_referents(obj):
                    rows.append("<p class='obj'>%s</p>" % tree.get_repr(child))
                rows.append('</div>')
            break
    if not rows:
        rows = ["<h3>The object you requested was not found.</h3>"]
    return rows
def memory_dump():
    """Print a numbered summary line for every gc-tracked object whose
    shallow size exceeds 50 KiB."""
    import gc
    counter = 0
    for tracked in gc.get_objects():
        obj_id = id(tracked)
        shallow_size = sys.getsizeof(tracked, 0)
        # referrers = [id(o) for o in gc.get_referrers(obj)]
        try:
            cls_name = str(tracked.__class__)
        except:
            cls_name = "<no class>"
        if shallow_size > 1024 * 50:
            referent_ids = set([id(o) for o in gc.get_referents(tracked)])
            counter += 1
            print(counter, {'id': obj_id, 'class': cls_name, 'size': shallow_size, "ref": len(referent_ids)})
def memoryDump():
    """Dump one pickled record (id, class, size, referent ids) per
    gc-tracked object to memory_pickle.txt.

    NOTE(review): `dump` is never explicitly closed -- confirm whether
    relying on interpreter cleanup is acceptable here.
    """
    dump = open("memory_pickle.txt", 'w')
    for obj in gc.get_objects():
        i = id(obj)
        size = sys.getsizeof(obj, 0)
        # referrers = [id(o) for o in gc.get_referrers(obj) if hasattr(o, '__class__')]
        referents = [
            id(o) for o in gc.get_referents(obj)
            if hasattr(o, '__class__')
        ]
        if hasattr(obj, '__class__'):
            cls = str(obj.__class__)
            cPickle.dump(
                {
                    'id': i,
                    'class': cls,
                    'size': size,
                    'referents': referents
                },
                dump)
def walk_gc(obj, towards, handler):
    """Breadth-first walk over the gc reference graph starting at `obj`.

    Yields a path (list of objects) for every visited object accepted by
    `handler`.  With `towards` true the walk follows referrers (who
    points at me) and paths end at the match; otherwise it follows
    referents and paths start at the match.
    """
    seen_ids = set()
    pending = collections.deque([(obj, [])])
    while pending:
        current, path = pending.popleft()
        if id(current) in seen_ids:
            continue
        if handler(current):
            yield (path + [current]) if towards else ([current] + path)
        seen_ids.add(id(current))
        if towards:
            pending.extend((nxt, path + [current])
                           for nxt in gc.get_referrers(current))
        else:
            pending.extend((nxt, [current] + path)
                           for nxt in gc.get_referents(current))
def test_traversing_bound_method():
    """Test traversing a bound method.

    The declarative function's referents must be exactly its underlying
    __func__, the bound owner, its key, and (on Python 3.9+) its type.
    """
    source = dedent("""\
    from enaml.widgets.window import Window

    enamldef MyWindow(Window): main:

        func call(arg, kwarg=1):
            return super()
    """)
    tester = compile_source(source, 'MyWindow')()
    # Counter comparison: order of referents is not guaranteed.
    assert (Counter(gc.get_referents(tester.call)) ==
            Counter([tester.call.__func__, tester, tester.call.__key__] +
                    ([type(tester.call)] if PY39 else [])))
def test_debug_info_w_normal_object(self):
    """debug_info() reports (oid, referent count, type name, state) for a
    regular cached persistent object."""
    import gc
    from persistent.interfaces import UPTODATE
    from persistent._compat import _b
    KEY = _b('uptodate')
    cache = self._makeOne()
    uptodate = self._makePersist(state=UPTODATE)
    cache[KEY] = uptodate
    gc.collect()    # pypy vs. refcounting
    info = cache.debug_info()
    self.assertEqual(len(info), 1)
    oid, refc, typ, state = info[0]
    self.assertEqual(oid, KEY)
    # The reported refcount is the number of gc referents of the object.
    self.assertEqual(refc, len(gc.get_referents(uptodate)))
    self.assertEqual(typ, 'DummyPersistent')
    self.assertEqual(state, UPTODATE)
def get_leaking_objects(objects=None):
    """Return objects that are not referenced by any other object in the
    scanned set.

    These could indicate reference-counting bugs in C code.  Or they
    could be legitimate.

    Note that the GC does not track simple objects like int or str.

    .. versionadded:: 1.7
    """
    if objects is None:
        gc.collect()
        objects = gc.get_objects()
    # Pre-bind `i` so the cleanup below cannot raise NameError when
    # `objects` is empty (the original `del objects, i` did exactly that).
    i = None
    try:
        ids = set(id(i) for i in objects)
        for i in objects:
            # Remove every id that something else points at; what remains
            # has no referrers within the scanned set.
            ids.difference_update(id(j) for j in gc.get_referents(i))
        # this then is our set of objects without referrers
        return [i for i in objects if id(i) in ids]
    finally:
        del objects, i  # clear cyclic references to frame
def inner(obj, level=0, name=''):
    """Recursively compute the deep size of `obj` in bytes.

    Skips excluded types, objects already counted (seen_ids) and class
    attributes; walks containers, mappings, instance dicts, __slots__
    and finally any remaining gc referents.  `name` is a human-readable
    path used for verbose tracing.
    """
    obj_id = id(obj)
    if isinstance(obj, EXCLUDE_TYPES) or id(obj) in seen_ids:
        size = 0
    elif is_class_attr(obj, name):
        # Attributes shared via the class are not per-instance memory.
        size = 0
    else:
        if verbose:
            print('%s%s %s: %r' % (' ' * level, obj_id, name, obj))
        seen_ids.add(obj_id)
        size = sys.getsizeof(obj)
        if isinstance(obj, zero_depth_bases):
            # Scalar-like types: no contents to descend into.
            pass
        elif isinstance(obj, (tuple, list, Set, deque)):
            size += sum(
                inner(v, level + 1, '%s[%s]' % (name, k))
                for k, v in enumerate(obj))
        elif isinstance(obj, Mapping) or hasattr(obj, iteritems):
            # Count both keys and values of mapping-like objects.
            size += sum(
                inner(k, level + 1, '%s[%s]k' % (name, k)) +
                inner(v, level + 1, '%s[%s]v' % (name, k))
                for k, v in getattr(obj, iteritems)())
        # Check for custom object instances - may subclass above too
        for _dict in _all_dicts(obj):
            for k, v in _dict.items():
                if not (isinstance(obj, EXCLUDE_TYPES) or
                        id(obj) in seen_ids or
                        # __slots__ are handled below
                        k == '__slots__'):
                    size += inner(k, level + 1, 'key: ' + k)
                    size += inner(v, level + 1, k)
        for k in _all_slots(obj):
            if hasattr(obj, k):
                v = getattr(obj, k)
                if not (isinstance(v, EXCLUDE_TYPES) or id(v) in seen_ids):
                    size += inner(v, level + 1, k)
        # Check referents, in case anything was missed.
        for o in get_referents(obj):
            if not (isinstance(o, EXCLUDE_TYPES) or id(o) in seen_ids):
                k = '|'.join(attr_names(obj, o))
                size += inner(o, level + 1, '(%s)' % (k, ))
    if verbose:
        print('%s%s %d %s: %r' % (' ' * level, obj_id, size, name, obj))
    return size
def test_signalconnector_lifecycle():
    """Test creating and destroying an event binder.

    We create enough event binder to exceed the freelist length and fully
    deallocate some.
    """
    class SignalAtom(Atom):
        s = Signal()

    # Churn enough connectors to exercise (and exceed) the freelist.
    signal_connectors = [SignalAtom.s for i in range(512)]
    for i, e in enumerate(signal_connectors):
        signal_connectors[i] = None
        del e
    gc.collect()

    atom = SignalAtom()
    sc = atom.s
    # A live connector references exactly the descriptor and its atom.
    assert gc.get_referents(sc) == [SignalAtom.s, atom]
def get_object_mem_size(obj):
    """Return the summed shallow size (bytes) of `obj` and everything
    transitively reachable from it, skipping BLACKLIST types and counting
    each object once.  (Credit to Aaron Hall.)"""
    if isinstance(obj, BLACKLIST):
        raise TypeError(
            F'getsize() does not take argument of type: {type(obj)}')
    counted = set()
    total = 0
    frontier = [obj]
    while frontier:
        accepted = []
        for candidate in frontier:
            # Skip blacklisted types and anything already counted.
            if isinstance(candidate, BLACKLIST) or id(candidate) in counted:
                continue
            counted.add(id(candidate))
            total += sys.getsizeof(candidate)
            accepted.append(candidate)
        # Next wave: direct referents of this round's accepted objects.
        frontier = get_referents(*accepted)
    return total
def test_eventbinder_lifecycle():
    """Test creating and destroying an event binder.

    We create enough event binder to exceed the freelist length and fully
    deallocate some.
    """
    class EventAtom(Atom):
        e = Event()

    # Churn enough binders to exercise (and exceed) the freelist.
    event_binders = [EventAtom.e for i in range(512)]
    for i, e in enumerate(event_binders):
        event_binders[i] = None
        del e
    gc.collect()

    atom = EventAtom()
    eb = atom.e
    # A live binder references exactly the descriptor and its atom.
    assert gc.get_referents(eb) == [EventAtom.e, atom]
def get_approx_object_size(obj):
    """Approximate the memory footprint of `obj` plus everything it
    transitively references, in bytes.

    Classes, modules and functions are rejected as arguments (and
    skipped during the walk) because they pull in effectively the whole
    interpreter.
    """
    skip_types = (type, types.ModuleType, types.FunctionType)
    if isinstance(obj, skip_types):
        raise TypeError('getsize() does not take argument of type: ' + str(type(obj)))
    counted_ids = set()
    total = 0
    frontier = [obj]
    while frontier:
        fresh = []
        for candidate in frontier:
            if isinstance(candidate, skip_types) or id(candidate) in counted_ids:
                continue
            counted_ids.add(id(candidate))
            total += sys.getsizeof(candidate)
            fresh.append(candidate)
        # Next wave: direct referents of the objects counted this round.
        frontier = gc.get_referents(*fresh)
    return total
def inner(level):
    # Depth-first search through `level` (a list of candidate objects),
    # recording in `found` every reference cycle inside the gc garbage set.
    for item in level:
        item_id = id(item)
        if item_id not in garbage_ids:
            # Only follow objects that are part of gc.garbage.
            continue
        if item_id in visited_ids:
            continue
        if item_id in stack_ids:
            # Item is already on the current DFS stack: we closed a cycle.
            candidate = stack[stack.index(item):]
            candidate.append(item)
            found.append(candidate)
            continue
        stack.append(item)
        stack_ids.add(item_id)
        # Descend into the item's direct referents.
        inner(gc.get_referents(item))
        stack.pop()
        stack_ids.remove(item_id)
        visited_ids.add(item_id)
def get_referents(object, level=1):
    """Get all referents of an object up to a certain level.

    The referents will not be returned in a specific order and will not
    contain duplicate objects. Duplicate objects will be removed.

    Keyword arguments:
    level -- level of indirection to which referents considered.

    This function is recursive.
    """
    res = gc.get_referents(object)
    level -= 1
    if level > 0:
        # BUG FIX: iterate over a snapshot.  The original looped over
        # `res` while extending it, so freshly appended referents were
        # expanded too and the walk descended far beyond `level`.
        for o in list(res):
            res.extend(get_referents(o, level))
    res = _remove_duplicates(res)
    return res
def get_obj_size(obj):
    """Estimate how many bytes `obj` occupies, including every object
    transitively reachable from it (each counted once; classes skipped).

    Useful as a sanity check before building a large in-memory db.
    """
    visited = {id(obj)}
    total = 0
    frontier = [obj]
    while frontier:
        total += sum(sys.getsizeof(o) for o in frontier)
        # Collect unseen, non-class referents, deduplicated by id.
        discovered = {}
        for child in gc.get_referents(*frontier):
            child_id = id(child)
            if child_id not in visited and not isinstance(child, type):
                discovered[child_id] = child
        frontier = list(discovered.values())
        visited.update(discovered)
    return total
def show_refs(cls):
    """Debug helper: find the first live instance of `cls` and print its
    referrers and referents (listing the keys of any dicts among them).

    NOTE(review): both format strings below have 3 placeholders but are
    given 4 arguments, so `keys` is never actually shown -- confirm.
    """
    obj = next((o for o in gc.get_objects() if type(o) == cls), None)
    if obj:
        print '================= {} referrers ================'.format(cls)
        # print '{} referrers'.format(obj)
        for ri in gc.get_referrers(obj):
            keys = ''
            if isinstance(ri, dict):
                keys = ','.join(ri.keys())
            print '{:<30s} {} {}'.format(str(id(ri)), type(ri), ri, keys)
        print '================== {} referents ================'.format(cls)
        # print '{} referents'.format(obj)
        for ri in gc.get_referents(obj):
            keys = ''
            if isinstance(ri, dict):
                keys = ','.join(ri.keys())
            print '{:<30s} {} {}'.format(str(id(ri)), type(ri), ri, keys)
def test_tp_traverse(self):
    """The Pair extension type's tp_traverse must visit both stored
    members, making them visible to gc.get_referents()."""
    import sys
    if hasattr(sys, 'gettotalrefcount'):
        import pytest
        pytest.skip(
            "Test fails on debug build: https://github.com/hpyproject/hpy/issues/255"
        )
    import gc
    mod = self.make_module("""
        @DEFINE_PairObject
        @DEFINE_Pair_new
        @DEFINE_Pair_traverse
        @EXPORT_PAIR_TYPE(&Pair_new, &Pair_traverse)
        @INIT
    """)
    p = mod.Pair("hello", "world")
    referents = gc.get_referents(p)
    # Sort: traversal order of the two members is not guaranteed.
    referents.sort()
    assert referents == ['hello', 'world']
def get_leaking_objects():
    """Return objects that do not have any referrers.

    These could indicate reference-counting bugs in C code.  Or they
    could be legitimate.  Note that the GC does not track simple
    objects like int or str.

    (The original docstring said "referents"; the algorithm actually
    keeps objects that nothing refers to, i.e. without referrers.)
    """
    gc.collect()  # run a collection first so already-dead cycles don't show up
    # All objects tracked by the collector, excluding the returned list itself.
    objects = gc.get_objects()
    try:
        ids = {id(i) for i in objects}
        for i in objects:
            # Discard every id that some object refers to; whatever
            # survives in ``ids`` has no referrers at all.
            ids.difference_update(id(j) for j in gc.get_referents(i))
        # This then is our set of objects without referrers.
        return [i for i in objects if id(i) in ids]
    finally:
        del objects, i  # clear cyclic references to frame
def visit_all_objects(f):
    """
    Brian and I *think* that this gets all objects.

    This is predicated on the assumption that every object either
    participates in gc, or is at most one hop from an object that
    participates in gc. This was Brian's clever idea.
    """
    seen_ids = set()
    local_vars = locals()
    import inspect
    this_frame = inspect.currentframe()
    for tracked in gc.get_objects():
        # Skip our own bookkeeping objects so we do not visit ourselves.
        if tracked is seen_ids or tracked is local_vars or tracked is this_frame:
            continue
        if id(tracked) in seen_ids:
            continue
        seen_ids.add(id(tracked))
        f(tracked)
        # One hop further: anything directly referred to by a tracked
        # object, which may include objects the GC itself doesn't track.
        for neighbor in gc.get_referents(tracked):
            if id(neighbor) not in seen_ids:
                seen_ids.add(id(neighbor))
                f(neighbor)
def getsize(obj, ignore_types=(type, ModuleType, FunctionType)):
    """Summed size of object and members.

    Source: https://stackoverflow.com/a/30316760/2403000
    """
    # Refuse to measure types/modules/functions at the top level, same
    # as we skip them while traversing.
    if isinstance(obj, ignore_types):
        raise TypeError("getsize() does not take argument of type '{}'".format(type(obj)))
    visited = set()
    total = 0
    pending = [obj]
    while pending:
        expand = []
        for candidate in pending:
            if isinstance(candidate, ignore_types) or id(candidate) in visited:
                continue
            visited.add(id(candidate))
            total += sys.getsizeof(candidate)
            expand.append(candidate)
        # Next wave: everything directly referred to by this wave.
        pending = get_referents(*expand)
    return total
def traverse_bfs(*objs, marked: Optional[set] = None) -> Iterable[Any]:
    """
    Traverse all the arguments' sub-tree.
    This exclude `type` objects, i.e., where `isinstance(o, type)` is True.

    Parameters
    ----------
    objs : object(s)
        One or more object(s).
    marked : set, optional
        An existing set for marked objects.
        Objects that are in this set will not be traversed.
        If a set is given, it will be updated with all the traversed objects.

    Yields
    ------
    object
        The traversed objects, one by one.
    """
    if marked is None:
        marked = set()

    current = objs
    while current:
        # De-duplicate this level by id, dropping anything already marked
        # and any type object (classes/modules are shared by everything).
        level = {}
        for candidate in current:
            key = id(candidate)
            if key not in marked and not isinstance(candidate, type):
                level[key] = candidate
        # Mark the ids so we never traverse these objects again.
        marked.update(level)
        yield from level.values()
        # Next level: everything directly referred to by this level.
        # See: https://docs.python.org/3.7/library/gc.html#gc.get_referents
        current = gc.get_referents(*level.values())
def get_size_of(obj) -> int:
    """
    Return the size of the given object in memory.

    Parameters
    ----------
    obj : object
        The object to measure for space complexity.

    Returns
    -------
    int
        The object size in bytes.
    """
    visited = {id(obj)}
    pending = [obj]
    total = 0
    while pending:
        for item in pending:
            total += sys.getsizeof(item)
        # Lookup everything directly referred to by the current queue.
        # See: https://docs.python.org/3.7/library/gc.html#gc.get_referents
        # Keying by id() filters already-visited objects and collapses
        # duplicates within this level; type objects are skipped since
        # they are shared among all objects.
        fresh = {}
        for ref in gc.get_referents(*pending):
            if id(ref) not in visited and not isinstance(ref, type):
                fresh[id(ref)] = ref
        pending = list(fresh.values())
        visited.update(fresh)
    return total
def teardown():
    """Module teardown: fail on uncollectable garbage, then drop test DBs."""
    # Fail loudly if anything ended up in gc.garbage, reporting what each
    # uncollectable object refers to and what refers to it.
    report = []
    for item in gc.garbage:
        report.append('GARBAGE: %r' % (item, ))
        report.append(' gc.get_referents: %r' % (gc.get_referents(item), ))
        report.append(' gc.get_referrers: %r' % (gc.get_referrers(item), ))
    if report:
        assert False, '\n'.join(report)
    c = client_context.client
    if c:
        for db_name in (
            "pymongo-pooling-tests",
            "pymongo_test",
            "pymongo_test1",
            "pymongo_test2",
            "pymongo_test_mike",
            "pymongo_test_bernie",
        ):
            c.drop_database(db_name)
        c.close()
    # Jython does not support gc.get_objects.
    if not sys.platform.startswith('java'):
        print_unclosed_clients()
def test_debug_info_w_persistent_class(self):
    """debug_info() must report a cached persistent *class* correctly."""
    import gc
    from persistent.interfaces import UPTODATE
    from persistent._compat import _b

    KEY = _b('pclass')

    class pclass(object):
        pass

    cache = self._makeOne()
    pclass._p_state = UPTODATE
    cache[KEY] = pclass

    info = cache.debug_info()
    self.assertEqual(len(info), 1)

    oid, refcount, kind, state = info[0]
    self.assertEqual(oid, KEY)
    # The reported refcount should match the number of direct referents.
    self.assertEqual(refcount, len(gc.get_referents(pclass)))
    self.assertEqual(kind, 'type')
    self.assertEqual(state, UPTODATE)