Example #1
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import copyreg
    import collections.abc

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass  # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [
        getattr(collections.abc, a) for a in collections.abc.__all__
    ]
    abs_classes = filter(isabstract, abs_classes)
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            for ref in abcs.get(obj, set()):
                if ref() is not None:
                    obj.register(ref())
            obj._abc_caches_clear()

    # Clear caches
    clear_caches()

    # Clear type cache at the end: previous function calls can modify types
    sys._clear_type_cache()
Example #2
def hunt_leaks(func, nwarmup, ntracked):
    """Run a func multiple times, looking for leaks."""
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    # Save current values for cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    try:
        import zipimport
    except ImportError:
        zdc = None  # Run unmodified on platforms without zipimport support
    else:
        zdc = zipimport._zip_directory_cache.copy()
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = obj._abc_registry.copy()
    repcount = nwarmup + ntracked
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount
    # initialize variables to make pyflakes quiet
    rc_before = alloc_before = 0
    for i in range(repcount):
        func()
        alloc_after, rc_after = cleanup(fs, ps, pic, zdc, abcs)
        if i >= nwarmup:
            rc_deltas[i] = rc_after - rc_before
            alloc_deltas[i] = alloc_after - alloc_before
        alloc_before = alloc_after
        rc_before = rc_after

    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        return any(deltas)

    def check_alloc_deltas(deltas):
        # At least 1/3rd of 0s
        if 3 * deltas.count(0) < len(deltas):
            return True
        # Nothing other than 1s, 0s and -1s
        if not set(deltas) <= {1, 0, -1}:
            return True
        return False

    leaks = Leaks()
    for deltas, item_name, checker in [(rc_deltas, 'refs', check_rc_deltas),
                                       (alloc_deltas, 'blocks',
                                        check_alloc_deltas)]:
        if checker(deltas):
            leaks[item_name] = deltas[nwarmup:]
    return leaks
Example #3
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass  # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [
        getattr(collections.abc, a) for a in collections.abc.__all__
    ]
    abs_classes = filter(isabstract, abs_classes)
    if 'typing' in sys.modules:
        t = sys.modules['typing']
        # These classes require special treatment because they do not appear
        # in direct subclasses of collections.abc classes
        abs_classes = list(abs_classes) + [
            t.ChainMap, t.Counter, t.DefaultDict
        ]
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), support.fd_count()
Example #4
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [getattr(collections.abc, a) for a in collections.abc.__all__]
    abs_classes = filter(isabstract, abs_classes)
    if 'typing' in sys.modules:
        t = sys.modules['typing']
        # These classes require special treatment because they do not appear
        # in direct subclasses of collections.abc classes
        abs_classes = list(abs_classes) + [t.ChainMap, t.Counter, t.DefaultDict]
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), support.fd_count()
Example #5
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import gc, copyreg
    import collections.abc

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass  # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [
        getattr(collections.abc, a) for a in collections.abc.__all__
    ]
    abs_classes = filter(isabstract, abs_classes)
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            for ref in abcs.get(obj, set()):
                if ref() is not None:
                    obj.register(ref())
            obj._abc_caches_clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), support.fd_count()
Example #6
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    clear_caches()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), fd_count()
Example #7
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import gc, copyreg
    import collections.abc
    from weakref import WeakSet
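    # Restore some original values saved before the test run.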
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)
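    # Clear the interpreter's internal type cache.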
    sys._clear_type_cache()
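    # Clear ABC registries, restoring previously saved ABC registries.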
    abs_classes = [
        getattr(collections.abc, a) for a in collections.abc.__all__
    ]
    abs_classes = filter(isabstract, abs_classes)
    if 'typing' in sys.modules:
        t = sys.modules['typing']
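        # These typing classes do not appear as direct subclasses of the
        # collections.abc ABCs, so they need to be handled explicitly.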
        abs_classes = list(abs_classes) + [
            t.ChainMap, t.Counter, t.DefaultDict
        ]
    for abc in abs_classes:
        for obj in (abc.__subclasses__() + [abc]):
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()
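    # Clear assorted module caches.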
    clear_caches()
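    # Collect cyclic trash and read memory statistics immediately after.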
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2(), fd_count()
Example #8
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import gc, copyreg
    import _strptime, linecache
    import urllib.parse, urllib.request, mimetypes, doctest
    import struct, filecmp, collections.abc
    from distutils.dir_util import _path_created
    from weakref import WeakSet

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            obj._abc_registry = abcs.get(obj, WeakSet()).copy()
            obj._abc_cache.clear()
            obj._abc_negative_cache.clear()

    # Flush standard output, so that buffered data is sent to the OS and
    # associated Python objects are reclaimed.
    for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):
        if stream is not None:
            stream.flush()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urllib.parse.clear_cache()
    urllib.request.urlcleanup()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    try:
        import ctypes
    except ImportError:
        # Don't worry about resetting the cache if ctypes is not supported
        pass
    else:
        ctypes._reset_cache()

    # Collect cyclic trash and read memory statistics immediately after.
    func1 = sys.getallocatedblocks
    func2 = sys.gettotalrefcount
    gc.collect()
    return func1(), func2()
Example #9
def dash_R(the_module, test, indirect_test, huntrleaks):
    """Run a test multiple times, looking for reference leaks.

    Returns:
        False if the test didn't leak references; True if we detected refleaks.
    """
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")

    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    try:
        import zipimport
    except ImportError:
        zdc = None # Run unmodified on platforms without zipimport support
    else:
        zdc = zipimport._zip_directory_cache.copy()
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = obj._abc_registry.copy()

    nwarmup, ntracked, fname = huntrleaks
    fname = os.path.join(support.SAVEDCWD, fname)
    repcount = nwarmup + ntracked
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount

    print("beginning", repcount, "repetitions", file=sys.stderr)
    print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr)
    sys.stderr.flush()
    for i in range(repcount):
        indirect_test()
        alloc_after, rc_after = dash_R_cleanup(fs, ps, pic, zdc, abcs)
        sys.stderr.write('.')
        sys.stderr.flush()
        if i >= nwarmup:
            rc_deltas[i] = rc_after - rc_before
            alloc_deltas[i] = alloc_after - alloc_before
        alloc_before, rc_before = alloc_after, rc_after
    print(file=sys.stderr)
    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        return any(deltas)
    def check_alloc_deltas(deltas):
        # At least 1/3rd of 0s
        if 3 * deltas.count(0) < len(deltas):
            return True
        # Nothing other than 1s, 0s and -1s
        if not set(deltas) <= {1, 0, -1}:
            return True
        return False
    failed = False
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_alloc_deltas)]:
        if checker(deltas):
            msg = '%s leaked %s %s, sum=%s' % (
                test, deltas[nwarmup:], item_name, sum(deltas))
            print(msg, file=sys.stderr)
            sys.stderr.flush()
            with open(fname, "a") as refrep:
                print(msg, file=refrep)
                refrep.flush()
            failed = True
    return failed
Example #10
def dash_R(the_module, test, indirect_test, huntrleaks):
    """Run a test multiple times, looking for reference leaks.

    Returns:
        False if the test didn't leak references; True if we detected refleaks.
    """
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")

    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    try:
        import zipimport
    except ImportError:
        zdc = None # Run unmodified on platforms without zipimport support
    else:
        zdc = zipimport._zip_directory_cache.copy()
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = _get_dump(obj)[0]

    # bpo-31217: Integer pool to get a single integer object for the same
    # value. The pool is used to prevent false alarms when checking for memory
    # block leaks. Fill the pool with values in -1000..1000 which are the most
    # common (reference, memory block, file descriptor) differences.
    int_pool = {value: value for value in range(-1000, 1000)}
    def get_pooled_int(value):
        return int_pool.setdefault(value, value)

    nwarmup, ntracked, fname = huntrleaks
    fname = os.path.join(support.SAVEDCWD, fname)
    repcount = nwarmup + ntracked
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount
    fd_deltas = [0] * repcount

    print("beginning", repcount, "repetitions", file=sys.stderr)
    print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr,
          flush=True)
    # initialize variables to make pyflakes quiet
    rc_before = alloc_before = fd_before = 0
    for i in range(repcount):
        indirect_test()
        alloc_after, rc_after, fd_after = dash_R_cleanup(fs, ps, pic, zdc,
                                                         abcs)
        print('.', end='', file=sys.stderr, flush=True)
        if i >= nwarmup:
            rc_deltas[i] = get_pooled_int(rc_after - rc_before)
            alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before)
            fd_deltas[i] = get_pooled_int(fd_after - fd_before)
        alloc_before = alloc_after
        rc_before = rc_after
        fd_before = fd_after
    print(file=sys.stderr)

    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        # Checker for reference counters and memory blocks.
        #
        # bpo-30776: Try to ignore false positives:
        #
        #   [3, 0, 0]
        #   [0, 1, 0]
        #   [8, -8, 1]
        #
        # Expected leaks:
        #
        #   [5, 5, 6]
        #   [10, 1, 1]
        return all(delta >= 1 for delta in deltas)

    def check_fd_deltas(deltas):
        return any(deltas)

    failed = False
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_rc_deltas),
        (fd_deltas, 'file descriptors', check_fd_deltas)
    ]:
        # ignore warmup runs
        deltas = deltas[nwarmup:]
        if checker(deltas):
            msg = '%s leaked %s %s, sum=%s' % (
                test, deltas, item_name, sum(deltas))
            print(msg, file=sys.stderr, flush=True)
            with open(fname, "a") as refrep:
                print(msg, file=refrep)
                refrep.flush()
            failed = True
    return failed
Example #11
    def cleanup(warning_filters, copyreg_dispatch_table, path_importer_cache,
                zip_directory_cache, abcs):
        import copyreg
        import re
        import warnings
        import _strptime
        import linecache
        import urllib.parse
        import urllib.request
        import mimetypes
        import doctest
        import struct
        import filecmp
        import collections.abc
        from distutils.dir_util import _path_created
        from weakref import WeakSet

        # Clear the warnings registry, so they can be displayed again
        for mod in sys.modules.values():
            if hasattr(mod, '__warningregistry__'):
                del mod.__warningregistry__

        # Restore some original values.
        warnings.filters[:] = warning_filters
        copyreg.dispatch_table.clear()
        copyreg.dispatch_table.update(copyreg_dispatch_table)
        sys.path_importer_cache.clear()
        sys.path_importer_cache.update(path_importer_cache)
        try:
            import zipimport
        except ImportError:
            pass  # Run unmodified on platforms without zipimport support
        else:
            zipimport._zip_directory_cache.clear()
            zipimport._zip_directory_cache.update(zip_directory_cache)

        # clear type cache
        sys._clear_type_cache()

        # Clear ABC registries, restoring previously saved ABC registries.
        for a in collections.abc.__all__:
            abc = getattr(collections.abc, a)
            if not isabstract(abc):
                continue
            for obj in abc.__subclasses__() + [abc]:
                obj._abc_registry = abcs.get(obj, WeakSet()).copy()
                obj._abc_cache.clear()
                obj._abc_negative_cache.clear()

        # Flush standard output, so that buffered data is sent to the OS and
        # associated Python objects are reclaimed.
        for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):
            if stream is not None:
                stream.flush()

        # Clear assorted module caches.
        _path_created.clear()
        re.purge()
        _strptime._regex_cache.clear()
        urllib.parse.clear_cache()
        urllib.request.urlcleanup()
        linecache.clearcache()
        mimetypes._default_mime_types()
        filecmp._cache.clear()
        struct._clearcache()
        doctest.master = None
        try:
            import ctypes
        except ImportError:
            # Don't worry about resetting the cache if ctypes is not supported
            pass
        else:
            ctypes._reset_cache()

        # Collect cyclic trash and read memory statistics immediately after.
        func1 = sys.getallocatedblocks
        func2 = sys.gettotalrefcount
        gc.collect()
        return func1(), func2()
Example #12
def dash_R(the_module, test, indirect_test, huntrleaks):
    """Run a test multiple times, looking for reference leaks.

    Returns:
        False if the test didn't leak references; True if we detected refleaks.
    """
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")

    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    try:
        import zipimport
    except ImportError:
        zdc = None # Run unmodified on platforms without zipimport support
    else:
        zdc = zipimport._zip_directory_cache.copy()
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = obj._abc_registry.copy()

    nwarmup, ntracked, fname = huntrleaks
    fname = os.path.join(support.SAVEDCWD, fname)
    repcount = nwarmup + ntracked
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount
    fd_deltas = [0] * repcount

    print("beginning", repcount, "repetitions", file=sys.stderr)
    print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr,
          flush=True)
    # initialize variables to make pyflakes quiet
    rc_before = alloc_before = fd_before = 0
    for i in range(repcount):
        indirect_test()
        alloc_after, rc_after, fd_after = dash_R_cleanup(fs, ps, pic, zdc,
                                                         abcs)
        print('.', end='', flush=True)
        if i >= nwarmup:
            rc_deltas[i] = rc_after - rc_before
            alloc_deltas[i] = alloc_after - alloc_before
            fd_deltas[i] = fd_after - fd_before
        alloc_before = alloc_after
        rc_before = rc_after
        fd_before = fd_after
    print(file=sys.stderr)
    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        return any(deltas)
    def check_alloc_deltas(deltas):
        # At least 1/3rd of 0s
        if 3 * deltas.count(0) < len(deltas):
            return True
        # Nothing other than 1s, 0s and -1s
        if not set(deltas) <= {1, 0, -1}:
            return True
        return False
    failed = False
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_alloc_deltas),
        (fd_deltas, 'file descriptors', check_rc_deltas)]:
        if checker(deltas):
            msg = '%s leaked %s %s, sum=%s' % (
                test, deltas[nwarmup:], item_name, sum(deltas))
            print(msg, file=sys.stderr, flush=True)
            with open(fname, "a") as refrep:
                print(msg, file=refrep)
                refrep.flush()
            failed = True
    return failed
Example #13
def dash_R(the_module, test, indirect_test, huntrleaks):
    """Run a test multiple times, looking for reference leaks.

    Returns:
        False if the test didn't leak references; True if we detected refleaks.
    """
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")

    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    try:
        import zipimport
    except ImportError:
        zdc = None # Run unmodified on platforms without zipimport support
    else:
        zdc = zipimport._zip_directory_cache.copy()
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = obj._abc_registry.copy()

    # bpo-31217: Integer pool to get a single integer object for the same
    # value. The pool is used to prevent false alarms when checking for memory
    # block leaks. Fill the pool with values in -1000..1000 which are the most
    # common (reference, memory block, file descriptor) differences.
    int_pool = {value: value for value in range(-1000, 1000)}
    def get_pooled_int(value):
        return int_pool.setdefault(value, value)

    nwarmup, ntracked, fname = huntrleaks
    fname = os.path.join(support.SAVEDCWD, fname)
    repcount = nwarmup + ntracked
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount
    fd_deltas = [0] * repcount

    print("beginning", repcount, "repetitions", file=sys.stderr)
    print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr,
          flush=True)
    # initialize variables to make pyflakes quiet
    rc_before = alloc_before = fd_before = 0
    for i in range(repcount):
        indirect_test()
        alloc_after, rc_after, fd_after = dash_R_cleanup(fs, ps, pic, zdc,
                                                         abcs)
        print('.', end='', file=sys.stderr, flush=True)
        if i >= nwarmup:
            rc_deltas[i] = get_pooled_int(rc_after - rc_before)
            alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before)
            fd_deltas[i] = get_pooled_int(fd_after - fd_before)
        alloc_before = alloc_after
        rc_before = rc_after
        fd_before = fd_after
    print(file=sys.stderr)

    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        # Checker for reference counters and memory blocks.
        #
        # bpo-30776: Try to ignore false positives:
        #
        #   [3, 0, 0]
        #   [0, 1, 0]
        #   [8, -8, 1]
        #
        # Expected leaks:
        #
        #   [5, 5, 6]
        #   [10, 1, 1]
        return all(delta >= 1 for delta in deltas)

    def check_fd_deltas(deltas):
        return any(deltas)

    failed = False
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_rc_deltas),
        (fd_deltas, 'file descriptors', check_fd_deltas)
    ]:
        # ignore warmup runs
        deltas = deltas[nwarmup:]
        if checker(deltas):
            msg = '%s leaked %s %s, sum=%s' % (
                test, deltas, item_name, sum(deltas))
            print(msg, file=sys.stderr, flush=True)
            with open(fname, "a") as refrep:
                print(msg, file=refrep)
                refrep.flush()
            failed = True
    return failed
Example #14
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import copyreg
    import collections.abc
    import gc
    from types import CodeType, FunctionType, ModuleType

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass  # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)
    if _asyncio is not None:
        _asyncio._clear_caches()

    if cinder is not None:
        # disable shadow byte code, we don't want to allocate new caches while
        # cleaning caches
        cinder.setknobs({"shadowcode": False})
        cinder.clear_caches()
        has_caches = [
            x for x in gc.get_objects()
            if isinstance(x, (type, FunctionType, ModuleType))
        ]

        for has_cache in has_caches:
            cinder.clear_shadow_cache(has_cache)
            if type(has_cache) is FunctionType:
                # The function could create lambdas or comprehensions which
                # get optimized
                for const in has_cache.__code__.co_consts:
                    if isinstance(const, CodeType):
                        has_caches.append(const)
            elif isinstance(has_cache, CodeType):
                for const in has_cache.co_consts:
                    if isinstance(const, CodeType):
                        has_caches.append(const)
        # Extension types aren't tracked by the GC, so find their shadow
        # refs, and then use those weak refs to clear the underlying
        # objects
        has_caches = [
            x for x in gc.get_objects() if type(x).__name__ == "shadow_ref"
        ]
        for has_cache in has_caches:
            cinder.clear_shadow_cache(has_cache())
        del has_cache
        del has_caches

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    abs_classes = [
        getattr(collections.abc, a) for a in collections.abc.__all__
    ]
    abs_classes = filter(isabstract, abs_classes)
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            for ref in abcs.get(obj, set()):
                if ref() is not None:
                    obj.register(ref())
            obj._abc_caches_clear()

    clear_caches()
    clear_asyncio_caches()

    if cinder is not None:
        cinder.setknobs(CINDER_KNOBS)
Example #15
def dash_R(the_module, test, indirect_test, huntrleaks):
    """Run a test multiple times, looking for reference leaks.

    Returns:
        False if the test didn't leak references; True if we detected refleaks.
    """
    import copyreg
    import collections.abc
    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception(
            'Tracking reference leaks requires a debug build of Python')
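    # Save current values for dash_R_cleanup() to restore.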
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    try:
        import zipimport
    except ImportError:
        zdc = None
    else:
        zdc = zipimport._zip_directory_cache.copy()
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in (abc.__subclasses__() + [abc]):
            abcs[obj] = obj._abc_registry.copy()
    nwarmup, ntracked, fname = huntrleaks
    fname = os.path.join(support.SAVEDCWD, fname)
    repcount = nwarmup + ntracked
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount
    fd_deltas = [0] * repcount
    print('beginning', repcount, 'repetitions', file=sys.stderr)
    print(('1234567890' * (repcount // 10 + 1))[:repcount],
          file=sys.stderr,
          flush=True)
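    # Initialize baseline counters so the first deltas have values to diff against.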
    rc_before = alloc_before = fd_before = 0
    for i in range(repcount):
        indirect_test()
        alloc_after, rc_after, fd_after = dash_R_cleanup(
            fs, ps, pic, zdc, abcs)
        print('.', end='', file=sys.stderr, flush=True)
        if i >= nwarmup:
            rc_deltas[i] = rc_after - rc_before
            alloc_deltas[i] = alloc_after - alloc_before
            fd_deltas[i] = fd_after - fd_before
        alloc_before = alloc_after
        rc_before = rc_after
        fd_before = fd_after
    print(file=sys.stderr)

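    # These checkers return False on success, True on failure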
    def check_rc_deltas(deltas):
        return any(deltas)

    def check_alloc_deltas(deltas):
        if 3 * deltas.count(0) < len(deltas):
            return True
        if not set(deltas) <= {1, 0, -1}:
            return True
        return False

    failed = False
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_alloc_deltas),
        (fd_deltas, 'file descriptors', check_rc_deltas)
    ]:
        if checker(deltas):
            msg = '%s leaked %s %s, sum=%s' % (test, deltas[nwarmup:],
                                               item_name, sum(deltas))
            print(msg, file=sys.stderr, flush=True)
            with open(fname, 'a') as refrep:
                print(msg, file=refrep)
                refrep.flush()
            failed = True
    return failed
Example #16
def dash_R(the_module, test, indirect_test, huntrleaks):
    """Run a test multiple times, looking for reference leaks.

    Returns:
        False if the test didn't leak references; True if we detected refleaks.
    """
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")

    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    try:
        import zipimport
    except ImportError:
        zdc = None  # Run unmodified on platforms without zipimport support
    else:
        zdc = zipimport._zip_directory_cache.copy()
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = obj._abc_registry.copy()

    nwarmup, ntracked, fname = huntrleaks
    fname = os.path.join(support.SAVEDCWD, fname)
    repcount = nwarmup + ntracked
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount

    #print("beginning", repcount, "repetitions", file=sys.stderr)  # <- pytest-leaks edit
    #print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr)  # <- pytest-leaks edit
    sys.stderr.flush()
    for i in range(repcount):
        indirect_test()
        alloc_after, rc_after = dash_R_cleanup(fs, ps, pic, zdc, abcs)
        #sys.stderr.write('.')  # <- pytest-leaks edit
        #sys.stderr.flush()  # <- pytest-leaks edit
        if i >= nwarmup:
            rc_deltas[i] = rc_after - rc_before
            alloc_deltas[i] = alloc_after - alloc_before
        alloc_before, rc_before = alloc_after, rc_after
    #print(file=sys.stderr)  # <- pytest-leaks edit

    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        # Checker for reference counters and memory blocks.
        #
        # bpo-30776: Try to ignore false positives:
        #
        #   [3, 0, 0]
        #   [0, 1, 0]
        #   [8, -8, 1]
        #
        # Expected leaks:
        #
        #   [5, 5, 6]
        #   [10, 1, 1]
        return all(delta >= 1 for delta in deltas)

    failed = False
    leaks = OrderedDict()  # <- pytest-leaks edit
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_rc_deltas)
    ]:
        # ignore warmup runs
        deltas = deltas[nwarmup:]
        if checker(deltas):
            # <pytest-leaks edit>
            leaks[item_name] = deltas
            continue
            # </pytest-leaks edit>
            msg = '%s leaked %s %s, sum=%s' % (test, deltas, item_name,
                                               sum(deltas))
            print(msg, file=sys.stderr)
            sys.stderr.flush()
            with open(fname, "a") as refrep:
                print(msg, file=refrep)
                refrep.flush()
            failed = True
    return leaks  # <- pytest-leaks edit