def __delattr__(self, attr, other_arg=2.3**10):
    # Attribute-deletion hook used to stress the interpreter's type/method
    # cache: it removes itself from class C mid-call and clears the cache.
    # NOTE(review): Python 2 syntax; `C` and `sys` are assumed to be
    # module-level names in the surrounding test file -- confirm there.
    print "__delattr__"
    print other_arg            # default evaluated once at def time (2.3**10)
    del C.__delattr__          # drop this very hook from class C mid-call
    print other_arg
    sys._clear_type_cache()    # discard any cached lookup of the hook
    print other_arg
def __setattr__(self, attr, val, other_arg=2.1**10):
    # Attribute-assignment hook used to stress the type/method cache:
    # it removes itself from class C mid-call and clears the cache.
    # NOTE(review): Python 2 syntax; `C` and `sys` are assumed to be
    # module-level names in the surrounding test file -- confirm there.
    print "__setattr__", attr, val
    print other_arg            # default evaluated once at def time (2.1**10)
    del C.__setattr__          # drop this very hook from class C mid-call
    print other_arg
    sys._clear_type_cache()    # discard any cached lookup of the hook
    print other_arg
def test_tp_version_tag_unique(self): """tp_version_tag should be unique assuming no overflow, even after clearing type cache. """ # Check if global version tag has already overflowed. Y = type('Y', (), {}) Y.x = 1 Y.x # Force a _PyType_Lookup, populating version tag y_ver = type_get_version(Y) # Overflow, or not enough left to conduct the test. if y_ver == 0 or y_ver > 0xFFFFF000: self.skipTest("Out of type version tags") # Note: try to avoid any method lookups within this loop, # It will affect global version tag. all_version_tags = [] append_result = all_version_tags.append assertNotEqual = self.assertNotEqual for _ in range(30): _clear_type_cache() X = type('Y', (), {}) X.x = 1 X.x tp_version_tag_after = type_get_version(X) assertNotEqual(tp_version_tag_after, 0, msg="Version overflowed") append_result(tp_version_tag_after) self.assertEqual( len(set(all_version_tags)), 30, msg=f"{all_version_tags} contains non-unique versions")
def dash_R_cleanup(fs, ps, pic, zdc, abcs): import gc, copy_reg # Restore some original values. warnings.filters[:] = fs copy_reg.dispatch_table.clear() copy_reg.dispatch_table.update(ps) sys.path_importer_cache.clear() sys.path_importer_cache.update(pic) try: import zipimport except ImportError: pass # Run unmodified on platforms without zipimport support else: zipimport._zip_directory_cache.clear() zipimport._zip_directory_cache.update(zdc) # clear type cache sys._clear_type_cache() # Clear ABC registries, restoring previously saved ABC registries. for abc, registry in abcs.items(): abc._abc_registry = registry.copy() abc._abc_cache.clear() abc._abc_negative_cache.clear() clear_caches()
def assert_no_dangling_Cclasses(doassert=None):
    """Verify no wrapped C-class objects leaked; dump and raise if they did.

    doassert: override for the module-level AssertOnDanglingClasses flag.
    Python 2 syntax (print statements).
    """
    global CheckForDanglingClasses
    global WorstDanglingCount
    sys._clear_type_cache()
    if doassert is None:
        doassert = AssertOnDanglingClasses
    CMAinit.uninit()
    gc.collect()    # For good measure...
    count = proj_class_live_object_count()
    #print >>sys.stderr, "CHECKING FOR DANGLING CLASSES (%d)..." % count
    # Avoid cluttering the output up with redundant messages...
    if count > WorstDanglingCount and CheckForDanglingClasses:
        WorstDanglingCount = count
        if doassert:
            print >> sys.stderr, 'STARTING OBJECT DUMP'
            print 'stdout STARTING OBJECT DUMP'
            dump_c_objects()
            print >> sys.stderr, 'OBJECT DUMP COMPLETE'
            print 'stdout OBJECT DUMP COMPLETE'
            raise AssertionError("Dangling C-class objects - %d still around" % count)
        else:
            print >> sys.stderr, (
                "*****ERROR: Dangling C-class objects - %d still around" % count)
def func():
    """Print a marker message, then best-effort clear the interpreter type cache."""
    print("I am evil")
    try:
        import sys
    except ImportError:
        # No sys module available (only possible on exotic builds): give up.
        return
    sys._clear_type_cache()
def enable(self, sandbox):
    """Remove introspection attributes that would let sandboxed code escape.

    Deletes mutating dict methods, function internals, frame/type/builtin
    attributes from the proxy dicts held on self, then flushes the type cache.
    """
    # Blacklist all dict methods able to modify a dict, to protect
    # ReadOnlyBuiltins
    mutators = ('__init__', 'clear', '__delitem__', 'pop',
                'popitem', 'setdefault', '__setitem__', 'update')
    for method_name in mutators:
        del self.dict_dict[method_name]
    if version_info < (3, 0):
        for attr in ('func_closure', 'func_globals',
                     'func_code', 'func_defaults'):
            del self.function_dict[attr]
        if version_info >= (2, 6):
            for attr in ('__closure__', '__globals__',
                         '__code__', '__defaults__'):
                del self.function_dict[attr]
    del self.frame_dict['f_locals']
    if ('code' not in sandbox.config.features) \
            and ('traceback' not in sandbox.config.features):
        del self.frame_dict['f_code']
    del self.type_dict['__subclasses__']
    del self.builtin_func_dict['__self__']
    if version_info >= (2, 6) \
            and ('code' not in sandbox.config.features):
        del self.generator_dict['gi_code']
    _clear_type_cache()
def _refleak_cleanup(): # Collect cyclic trash and read memory statistics immediately after. try: func1 = sys.getallocatedblocks except AttributeError: def func1(): return 42 try: func2 = sys.gettotalrefcount except AttributeError: def func2(): return 42 # Flush standard output, so that buffered data is sent to the OS and # associated Python objects are reclaimed. for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if stream is not None: stream.flush() sys._clear_type_cache() # This also clears the various internal CPython freelists. gc.collect() return func1(), func2()
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state captured before a -R test run.

    fs: saved warnings.filters; ps: saved copyreg dispatch table;
    pic: saved sys.path_importer_cache; zdc: saved zipimport directory
    cache; abcs: mapping of ABC -> set of weakrefs to re-register.
    """
    import copyreg
    import collections.abc

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [getattr(collections.abc, a) for a in collections.abc.__all__]
    abs_classes = filter(isabstract, abs_classes)
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            for ref in abcs.get(obj, set()):
                if ref() is not None:  # weakref still alive: re-register it
                    obj.register(ref())
            obj._abc_caches_clear()

    # Clear caches
    clear_caches()

    # Clear type cache at the end: previous function calls can modify types
    sys._clear_type_cache()
def enable(self, sandbox):
    """Strip escape-prone introspection attributes from the sandbox proxies.

    Removes dict-mutating methods, Python-2 and Python-3 function internals,
    and dangerous frame/type/builtin/generator attributes, then clears the
    interpreter's method cache so the removals take effect immediately.
    """
    # Blacklist all dict methods able to modify a dict, to protect
    # ReadOnlyBuiltins
    for dict_mutator in ('__init__', 'clear', '__delitem__', 'pop',
                         'popitem', 'setdefault', '__setitem__', 'update'):
        del self.dict_dict[dict_mutator]
    if version_info < (3, 0):
        legacy_attrs = ('func_closure', 'func_globals',
                        'func_code', 'func_defaults')
        for legacy in legacy_attrs:
            del self.function_dict[legacy]
        if version_info >= (2, 6):
            dunder_attrs = ('__closure__', '__globals__',
                            '__code__', '__defaults__')
            for dunder in dunder_attrs:
                del self.function_dict[dunder]
    del self.frame_dict['f_locals']
    if ('code' not in sandbox.config.features) \
            and ('traceback' not in sandbox.config.features):
        del self.frame_dict['f_code']
    del self.type_dict['__subclasses__']
    del self.builtin_func_dict['__self__']
    if version_info >= (2, 6) \
            and ('code' not in sandbox.config.features):
        del self.generator_dict['gi_code']
    _clear_type_cache()
def __getattr__(self, attr, other_arg=2.2**10):
    # Attribute-lookup fallback used to stress the type/method cache:
    # it removes itself from class C mid-call, clears the cache, and
    # echoes the requested attribute name back as the value.
    # NOTE(review): Python 2 syntax; `C` and `sys` are assumed to be
    # module-level names in the surrounding test file -- confirm there.
    print "__getattr__"
    print other_arg            # default evaluated once at def time (2.2**10)
    del C.__getattr__          # drop this very hook from class C mid-call
    print other_arg
    sys._clear_type_cache()    # discard any cached lookup of the hook
    print other_arg
    return attr
def __get__(self, obj, type, other_arg=2.4**10):
    # Descriptor-read hook used to stress the type/method cache: it
    # deletes the descriptor attribute C.x mid-call and clears the cache.
    # NOTE(review): `type` shadows the builtin (descriptor-protocol
    # parameter name); `C` and `sys` are assumed module-level -- confirm.
    print "D.__get__"
    print other_arg            # default evaluated once at def time (2.4**10)
    del C.x                    # remove the attribute this descriptor backs
    print other_arg
    sys._clear_type_cache()    # discard any cached lookup of the descriptor
    print other_arg
    return 1
def __set__(self, obj, value, other_arg=2.0**10):
    # Descriptor-write hook used to stress the type/method cache: it
    # deletes itself from descriptor class D mid-call and clears the cache.
    # NOTE(review): Python 2 syntax; `D` and `sys` are assumed to be
    # module-level names in the surrounding test file -- confirm there.
    print "D.__set__", type(obj), value
    print other_arg            # default evaluated once at def time (2.0**10)
    del D.__set__              # drop this very hook from class D mid-call
    print other_arg
    sys._clear_type_cache()    # discard any cached lookup of the hook
    print other_arg
    return 1
def test_type_cache(self): class C: x = 42 a = C() self.assertEqual(a.x, 42) sys._clear_type_cache() C.x = 100 self.assertEqual(a.x, 100)
def disable(self, sandbox):
    """Undo enable(): restore every patched attribute dict, then flush the cache."""
    patched_dicts = (
        self.dict_dict,
        self.function_dict,
        self.frame_dict,
        self.type_dict,
        self.builtin_func_dict,
        self.generator_dict,
    )
    for proxy in patched_dicts:
        proxy.restore()
    # Python 2.6+ uses a method cache: clear it to avoid errors
    _clear_type_cache()
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state and flush module caches after a -R run.

    Python 2 variant (copy_reg/urlparse/urllib2/dircache). Arguments are the
    previously saved values of the caches being restored.
    """
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None

    # Collect cyclic trash.
    gc.collect()
def dash_R_cleanup(fs, ps, pic, abcs):
    """Restore interpreter-global state and flush module caches after a -R run.

    Unladen-Swallow-era Python 2 variant: additionally clears LLVM feedback
    attached to code/function/method objects when _llvm is available.
    """
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    if _llvm:
        # Reset JIT feedback so it does not leak between test repetitions.
        code_types = (types.CodeType, types.FunctionType, types.MethodType)
        for obj in gc.get_objects():
            if isinstance(obj, code_types):
                _llvm.clear_feedback(obj)

    # Collect cyclic trash.
    gc.collect()
def remove_dangerous_attrs():
    """
    Removes all the methods not mentioned in the good_*_methods.py files.

    For every dangerous attribute name, strips it from each type that
    defines it, remembering the removed value in `saved` so
    replace_dangerous_attrs() can restore it later.
    """
    for i in dangerous:
        for j in method_origin[i]:
            # Some attrs are inherited rather than expressed on j itself;
            # record those so restoration can verify them differently.
            if not hasattr(j, i):
                not_expressed[j].append(i)
            saved[(j, i)] = dictionary_of(j)[i]
            del dictionary_of(j)[i]
            # make sure our modifications is mirrored in the types we modify
            # this is a specialised purpose
            sys._clear_type_cache()
            assert not hasattr(j, i), "{} still has {}".format(j, i)
def assert_no_dangling_Cclasses():
    """Verify no wrapped C-class objects leaked; dump and raise if they did.

    Python 2 syntax (raise with tuple form, print >> chevron).
    """
    global CheckForDanglingClasses
    global WorstDanglingCount
    sys._clear_type_cache()
    gc.collect()
    count = proj_class_live_object_count()
    # Avoid cluttering the output up with redundant messages...
    if count > WorstDanglingCount and CheckForDanglingClasses:
        WorstDanglingCount = count
        if AssertOnDanglingClasses:
            dump_c_objects()
            raise AssertionError, "Dangling C-class objects - %d still around" % count
        else:
            print >> sys.stderr, (
                "*****ERROR: Dangling C-class objects - %d still around" % count)
def combine_on_obTime_id_bigData(sta_ob, sta_fo_list, need_match_ob=True):
    """Merge an observation frame with several forecast frames, station by station.

    :param sta_ob: observation DataFrame with an "id" column.
    :param sta_fo_list: list of forecast DataFrames, each with an "id" column.
    :param need_match_ob: if True use an inner join (only matched obs kept),
        otherwise a right join.
    :return: concatenated DataFrame of per-id merges, or None when
        sta_fo_list is not a list.
    """
    import sys
    import gc
    if not isinstance(sta_fo_list, list):
        print("the second args should be a list")
        return
    grouped_ob = dict(list(sta_ob.groupby("id")))
    nfo = len(sta_fo_list)
    # One id -> sub-frame mapping per forecast source.
    grouped_fo_list = [dict(list(fo.groupby("id"))) for fo in sta_fo_list]
    id_ob = list(grouped_ob.keys())
    sys._clear_type_cache()
    gc.collect()
    sta_all = []
    n_id = len(id_ob)
    how = "inner" if need_match_ob else "right"
    for i in range(n_id):
        # Progress report roughly every 5%.
        rate = int((i / n_id) * 100)
        if rate % 5 == 0 and abs(i - rate * 0.01 * n_id) < 1:
            print(str(rate) + "% combined")
        key = id_ob[i]
        all_fos_have = True
        sta_ob_one_id = grouped_ob.pop(key)
        sta_fos_one_id = []
        # BUGFIX: inner loop previously reused index `i`, shadowing the
        # outer station index; use a distinct variable.
        for k in range(nfo):
            if key in grouped_fo_list[k]:
                sta_fos_one_id.append(grouped_fo_list[k].pop(key))
            else:
                all_fos_have = False
        # Only merge stations present in every forecast source.
        if all_fos_have:
            combine_one = combine_on_obTime_one_id(
                sta_ob_one_id, sta_fos_one_id, how=how)
            sta_all.append(combine_one)
    sta_all = pd.concat(sta_all, axis=0)
    return sta_all
def replace_dangerous_attrs():
    """
    Replaces all the methods removed by :func:`remove_dangerous_attrs`.

    Restores each saved attribute onto the type it was removed from and
    verifies the restoration took effect.
    """
    for i in dangerous:
        for j in method_origin[i]:
            dictionary_of(j)[i] = saved[(j, i)]
            # make sure our modifications is mirrored in the types we modify
            # this is a specialised purpose
            sys._clear_type_cache()
            if i in not_expressed[j]:
                # Attr was inherited before removal: check the type's own
                # dict directly rather than hasattr.
                # BUGFIX: assertion messages had a stray apostrophe
                # ("doesn't'").
                assert i in j.__dict__, "{} still doesn't have {}" \
                    .format(j, i)
            else:
                assert hasattr(j, i), "{} still doesn't have {}" \
                    .format(j, i)
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state captured before a -R test run.

    Returns (allocated_blocks, total_refcount, fd_count) measured right
    after the cleanup, for leak comparison between repetitions.
    """
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [getattr(collections.abc, a) for a in collections.abc.__all__]
    abs_classes = filter(isabstract, abs_classes)
    if 'typing' in sys.modules:
        t = sys.modules['typing']
        # These classes require special treatment because they do not appear
        # in direct subclasses of collections.abc classes
        abs_classes = list(abs_classes) + [t.ChainMap, t.Counter, t.DefaultDict]
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), support.fd_count()
def dash_R_cleanup(fs, ps, pic, abcs):
    """Restore interpreter-global state and flush module caches after a -R run.

    Early-Python-3 variant (uses the private _abcoll module for ABCs).
    """
    import gc, copyreg
    import _strptime, linecache
    import urllib.parse, urllib.request, mimetypes, doctest
    import struct, filecmp, _abcoll
    from distutils.dir_util import _path_created
    from weakref import WeakSet

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc in [getattr(_abcoll, a) for a in _abcoll.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urllib.parse.clear_cache()
    urllib.request.urlcleanup()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None

    # Collect cyclic trash.
    gc.collect()
def enable(self, sandbox):
    """Strip escape-prone introspection attributes from the sandbox proxies.

    Security-critical: the exact set of deleted attributes, and the version
    guards around them, define the sandbox boundary.
    """
    if not sandbox.config.cpython_restricted:
        # Deny access to func.func_code to avoid an attacker to modify a
        # trusted function: replace the code of the function
        hide_func_code = True
    else:
        # CPython restricted mode already denies read and write access to
        # function attributes
        hide_func_code = False

    # Blacklist all dict methods able to modify a dict, to protect
    # ReadOnlyBuiltins
    for name in (
    '__init__',
    'clear', '__delitem__', 'pop', 'popitem',
    'setdefault', '__setitem__', 'update'):
        del self.dict_dict[name]

    if version_info < (3, 0):
        # pysandbox stores trusted objects in closures: deny access to
        # closures to not leak these secrets
        del self.function_dict['func_closure']
        del self.function_dict['func_globals']
        if hide_func_code:
            del self.function_dict['func_code']
        del self.function_dict['func_defaults']

    if version_info >= (2, 6):
        del self.function_dict['__closure__']
        del self.function_dict['__globals__']
        if hide_func_code:
            del self.function_dict['__code__']
        del self.function_dict['__defaults__']

    del self.frame_dict['f_locals']

    # Hiding type.__bases__ crashs CPython 2.5 because of a infinite loop
    # in PyErr_ExceptionMatches(): it calls abstract_get_bases() but
    # abstract_get_bases() fails and call PyErr_ExceptionMatches() ...
    if version_info >= (2, 6):
        # Setting __bases__ crash Python < 3.3a2
        # http://bugs.python.org/issue14199
        del self.type_dict['__bases__']
    # object.__subclasses__ leaks the file type in Python 2
    # and (indirectly) the FileIO file in Python 3
    del self.type_dict['__subclasses__']

    del self.builtin_func_dict['__self__']

    _clear_type_cache()
def dash_R_cleanup(fs, ps, pic, abcs):
    """Restore interpreter-global state and flush module caches after a -R run.

    2to3-converted variant (note the duplicated urllib submodule imports
    produced by the mechanical conversion).
    """
    import gc, copyreg
    import _strptime, linecache
    dircache = support.import_module('dircache', deprecated=True)
    import urllib.parse, urllib.request, urllib.parse, urllib.error, urllib.request, urllib.error, urllib.parse, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in list(sys.modules.values()):
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in list(abcs.items()):
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urllib.parse.clear_cache()
    urllib.request.urlcleanup()
    urllib.request.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None

    # Collect cyclic trash.
    gc.collect()
def _refleak_cleanup(): # Collect cyclic trash and read memory statistics immediately after. func1 = sys.getallocatedblocks try: func2 = sys.gettotalrefcount except AttributeError: func2 = lambda: 42 # Flush standard output, so that buffered data is sent to the OS and # associated Python objects are reclaimed. for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__): if stream is not None: stream.flush() sys._clear_type_cache() # This also clears the various internal CPython freelists. gc.collect() return func1(), func2()
def enable(self, sandbox):
    """Strip escape-prone introspection attributes from the sandbox proxies.

    Security-critical: the exact set of deleted attributes, and the version
    guards around them, define the sandbox boundary.
    """
    if not sandbox.config.cpython_restricted:
        # Deny access to func.func_code to avoid an attacker to modify a
        # trusted function: replace the code of the function
        hide_func_code = True
    else:
        # CPython restricted mode already denies read and write access to
        # function attributes
        hide_func_code = False

    # Blacklist all dict methods able to modify a dict, to protect
    # ReadOnlyBuiltins
    for name in ('__init__', 'clear', '__delitem__', 'pop', 'popitem',
                 'setdefault', '__setitem__', 'update'):
        del self.dict_dict[name]

    if version_info < (3, 0):
        # pysandbox stores trusted objects in closures: deny access to
        # closures to not leak these secrets
        del self.function_dict['func_closure']
        del self.function_dict['func_globals']
        if hide_func_code:
            del self.function_dict['func_code']
        del self.function_dict['func_defaults']

    if version_info >= (2, 6):
        del self.function_dict['__closure__']
        del self.function_dict['__globals__']
        if hide_func_code:
            del self.function_dict['__code__']
        del self.function_dict['__defaults__']

    del self.frame_dict['f_locals']

    # Hiding type.__bases__ crashs CPython 2.5 because of a infinite loop
    # in PyErr_ExceptionMatches(): it calls abstract_get_bases() but
    # abstract_get_bases() fails and call PyErr_ExceptionMatches() ...
    if version_info >= (2, 6):
        # Setting __bases__ crash Python < 3.3a2
        # http://bugs.python.org/issue14199
        del self.type_dict['__bases__']
    # object.__subclasses__ leaks the file type in Python 2
    # and (indirectly) the FileIO file in Python 3
    del self.type_dict['__subclasses__']

    del self.builtin_func_dict['__self__']

    _clear_type_cache()
def reconfig():
    """Apply global PVM tuning and return a dict recording old/new settings."""
    cfg = {
        'sys.platform': sys.platform,
        'sys.maxsize': sys.maxsize,
        'sys.path': sys.path,
        'sys.excepthook': sys.excepthook,
    }
    # Thread switch interval: record, retune, record again.
    cfg['old sys.switchinterval'] = sys.getswitchinterval()
    sys.setswitchinterval(LONGER_CHECK_INTERVAL)
    cfg['new sys.switchinterval'] = sys.getswitchinterval()
    # Recursion limit.
    cfg['old sys.recursionlimit'] = sys.getrecursionlimit()
    sys.setrecursionlimit(BIGGER_RECURSION_LIMIT)
    cfg['new sys.recursionlimit'] = sys.getrecursionlimit()
    # Garbage-collector thresholds.
    cfg['old gc.threshold'] = str(gc.get_threshold())
    gc.set_threshold(*LOWER_GC_THRESHOLD)
    cfg['new gc.threshold'] = str(gc.get_threshold())
    sys._clear_type_cache()
    cfg['sys._clear_type_cache'] = True
    return cfg
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state captured before a -R test run.

    Returns (allocated_blocks, total_refcount, fd_count) measured right
    after the cleanup, for leak comparison between repetitions.
    """
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [getattr(collections.abc, a) for a in collections.abc.__all__]
    abs_classes = filter(isabstract, abs_classes)
    if 'typing' in sys.modules:
        t = sys.modules['typing']
        # These classes require special treatment because they do not appear
        # in direct subclasses of collections.abc classes
        abs_classes = list(abs_classes) + [t.ChainMap, t.Counter, t.DefaultDict]
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), support.fd_count()
def assert_no_dangling_Cclasses(doassert=None):
    """Verify no wrapped C-class objects leaked; dump and raise if they did.

    doassert: override for the module-level AssertOnDanglingClasses flag.
    Python 2 syntax (print statements).
    """
    global CheckForDanglingClasses
    global WorstDanglingCount
    sys._clear_type_cache()
    if doassert is None:
        doassert = AssertOnDanglingClasses
    CMAinit.uninit()
    gc.collect()    # For good measure...
    count = proj_class_live_object_count()
    #print >>sys.stderr, "CHECKING FOR DANGLING CLASSES (%d)..." % count
    # Avoid cluttering the output up with redundant messages...
    if count > WorstDanglingCount and CheckForDanglingClasses:
        WorstDanglingCount = count
        if doassert:
            print >> sys.stderr, 'STARTING OBJECT DUMP'
            dump_c_objects()
            print >> sys.stderr, 'OBJECT DUMP COMPLETE'
            print 'stdout OBJECT DUMP COMPLETE'
            raise AssertionError("Dangling C-class objects - %d still around" % count)
        else:
            print >> sys.stderr, (
                "*****ERROR: Dangling C-class objects - %d still around" % count)
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state captured before a -R test run.

    abcs maps each ABC to a set of weakrefs to re-register. Returns
    (allocated_blocks, total_refcount, fd_count) for leak comparison.
    """
    import gc, copyreg
    import collections.abc

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [getattr(collections.abc, a) for a in collections.abc.__all__]
    abs_classes = filter(isabstract, abs_classes)
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            for ref in abcs.get(obj, set()):
                if ref() is not None:  # weakref still alive: re-register it
                    obj.register(ref())
            obj._abc_caches_clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), support.fd_count()
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state captured before a -R test run.

    Returns (allocated_blocks, total_refcount, fd_count) measured right
    after the cleanup, for leak comparison between repetitions.
    """
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), fd_count()
def reconfig():
    """Apply global interpreter tuning (check interval, recursion limit,
    GC thresholds) and return a dict recording old/new settings."""
    cfg = {
        'sys.platform': sys.platform,
        'sys.maxsize': sys.maxsize,
        'sys.path': sys.path,
        'sys.excepthook': sys.excepthook,
    }
    # Bytecode check interval: record, retune, record again.
    cfg['old sys.checkinterval'] = sys.getcheckinterval()
    sys.setcheckinterval(BEST_CHECK_INTERVAL)
    cfg['new sys.checkinterval'] = sys.getcheckinterval()
    # Recursion limit.
    cfg['old sys.recursionlimit'] = sys.getrecursionlimit()
    sys.setrecursionlimit(BEST_RECURSION_LIMIT)
    cfg['new sys.recursionlimit'] = sys.getrecursionlimit()
    # Garbage-collector thresholds.
    cfg['old gc.threshold'] = str(gc.get_threshold())
    gc.set_threshold(512, 8, 6)
    cfg['new gc.threshold'] = str(gc.get_threshold())
    sys._clear_type_cache()
    cfg['sys._clear_type_cache'] = True
    return cfg
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state captured before a -R test run.

    Comment-stripped variant; returns (allocated_blocks, total_refcount,
    fd_count) measured right after the cleanup.
    """
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet
    # Restore saved global caches and filters.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        # Run unmodified on platforms without zipimport support.
        pass
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)
    sys._clear_type_cache()
    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [getattr(collections.abc, a) for a in collections.abc.__all__]
    abs_classes = filter(isabstract, abs_classes)
    if 'typing' in sys.modules:
        t = sys.modules['typing']
        # typing's container aliases are not direct collections.abc subclasses.
        abs_classes = list(abs_classes) + [t.ChainMap, t.Counter, t.DefaultDict]
    for abc in abs_classes:
        for obj in (abc.__subclasses__() + [abc]):
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()
    clear_caches()
    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), fd_count()
def test_clear_type_cache(self):
    """sys._clear_type_cache() should run without error (skipped on IronPython)."""
    ipy_workitem = "http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=17460"
    if test.test_support.due_to_ironpython_bug(ipy_workitem):
        return
    sys._clear_type_cache()
def test_clear_type_cache(self): sys._clear_type_cache()
# Game launcher script (Python 2: uses execfile and sys.exc_clear).
import sys
import pygame
import os

sys._clear_type_cache()
sys.exc_clear()  # Py2-only: drop the last exception state

# Imports the Invader and MyHero classes
# found in the files invader.py and myhero.py
sys.path.append(os.path.join('menu'))
pygame.init()
pygame.mixer.music.load('menu.mp3')
pygame.mixer.music.play(-1)  # -1: loop the menu music forever
execfile("menu/menu.py")
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter-global state and flush module caches after a -R run.

    Returns (allocated_blocks, total_refcount) measured right after the
    cleanup, for leak comparison between repetitions.
    """
    import gc, copyreg
    import _strptime, linecache
    import urllib.parse, urllib.request, mimetypes, doctest
    import struct, filecmp, collections.abc
    from distutils.dir_util import _path_created
    from weakref import WeakSet

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    # Flush standard output, so that buffered data is sent to the OS and
    # associated Python objects are reclaimed.
    for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):
        if stream is not None:
            stream.flush()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urllib.parse.clear_cache()
    urllib.request.urlcleanup()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    try:
        import ctypes
    except ImportError:
        # Don't worry about resetting the cache if ctypes is not supported
        pass
    else:
        ctypes._reset_cache()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2()
# =============================================== # MODULE STUDY: sys import sys sys.argv # The list of command line arguments passed to a Python script. argv[0] is the script name # ............................................ sys.byteorder # An indicator of the native byte order. This will have the value 'big' on big-endian (most-significant byte first) platforms, and 'little' on little-endian (least-significant byte first) platforms. # ............................................ sys.builtin_module_names # A tuple of strings giving the names of all modules that are compiled into this Python interpreter. # ............................................ sys.call_tracing(func, args) # Call func(*args), while tracing is enabled. The tracing state is saved, and restored afterwards. # ............................................ sys.copyright # A string containing the copyright pertaining to the Python interpreter. # ............................................ sys._clear_type_cache() # Clear the internal type cache. The type cache is used to speed up attribute and method lookups. # ............................................ sys._current_frames() # ............................................ sys.dllhandle # Integer specifying the handle of the Python DLL. Availability: Windows. # ............................................ sys.displayhook(value) # If value is not None, this function prints it to sys.stdout, and saves it in __builtin__._. # ............................................ sys.dont_write_bytecode # If this is true, Python won’t try to write .pyc or .pyo files on the import of source modules. # This value is initially set to True or False depending on the -B command line option and the PYTHONDONTWRITEBYTECODE environment variable, # but you can set it yourself to control bytecode file generation. # ............................................ sys.excepthook(type, value, traceback) # This function prints out a given traceback and exception to sys.stderr. 
# ............................................ sys.__displayhook__ sys.__excepthook__ # These objects contain the original values of displayhook and excepthook at the start of the program.
def cleanup(warning_filters, copyreg_dispatch_table, path_importer_cache,
            zip_directory_cache, abcs):
    """Restore interpreter-global state and flush module caches.

    Each parameter is the previously saved value of the corresponding
    global cache. Returns (allocated_blocks, total_refcount) measured
    immediately after a final garbage collection.
    """
    import copyreg
    import re
    import warnings
    import _strptime
    import linecache
    import urllib.parse
    import urllib.request
    import mimetypes
    import doctest
    import struct
    import filecmp
    import collections.abc
    from distutils.dir_util import _path_created
    from weakref import WeakSet

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = warning_filters
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(copyreg_dispatch_table)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(path_importer_cache)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zip_directory_cache)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for a in collections.abc.__all__:
        abc = getattr(collections.abc, a)
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    # Flush standard output, so that buffered data is sent to the OS and
    # associated Python objects are reclaimed.
    for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):
        if stream is not None:
            stream.flush()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urllib.parse.clear_cache()
    urllib.request.urlcleanup()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    try:
        import ctypes
    except ImportError:
        # Don't worry about resetting the cache if ctypes is not supported
        pass
    else:
        ctypes._reset_cache()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2()
def test_clear_type_cache(self):
    """Exercise sys._clear_type_cache(); skipped where IronPython lacks it."""
    if test.test_support.due_to_ironpython_bug(
            "http://ironpython.codeplex.com/WorkItem/View.aspx?WorkItemId=17460"):
        return
    sys._clear_type_cache()
def _cleanup(): sys._clear_type_cache() gc.collect()