Example #1
 def test_get_stats(self):
     stats = gc.get_stats()
     self.assertEqual(len(stats), 3)
     for st in stats:
         self.assertIsInstance(st, dict)
         self.assertEqual(set(st),
                          {"collected", "collections", "uncollectable"})
         self.assertGreaterEqual(st["collected"], 0)
         self.assertGreaterEqual(st["collections"], 0)
         self.assertGreaterEqual(st["uncollectable"], 0)
     # Check that collection counts are incremented correctly
     if gc.isenabled():
         self.addCleanup(gc.enable)
         gc.disable()
     old = gc.get_stats()
     gc.collect(0)
     new = gc.get_stats()
     self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
     self.assertEqual(new[1]["collections"], old[1]["collections"])
     self.assertEqual(new[2]["collections"], old[2]["collections"])
     gc.collect(2)
     new = gc.get_stats()
     self.assertEqual(new[0]["collections"], old[0]["collections"] + 1)
     self.assertEqual(new[1]["collections"], old[1]["collections"])
     self.assertEqual(new[2]["collections"], old[2]["collections"] + 1)
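
For reference, on CPython 3.4+ gc.get_stats() returns a list with one dict per generation, holding cumulative counters since interpreter start; a minimal sketch of reading it directly:

import gc

for gen, stat in enumerate(gc.get_stats()):
    print(f"gen {gen}: collections={stat['collections']} "
          f"collected={stat['collected']} uncollectable={stat['uncollectable']}")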
Example #2
def test_inspect_span_from_stack_does_not_create_reference_cycle():
    # inspect_span_from_stack inspects the stack via stack frames. This can
    # very easily lead to the creation of reference cycles. These are not
    # freed by reference counting and therefore the GC needs to clean them
    # up. If reference cycles are created frequently and therefore the GC runs
    # frequently, this can have a significant impact on CPU usage and overall
    # latency.
    #
    # This test makes sure that this function doesn't create a reference cycle
    # by testing whether the GC is able to collect any objects after calling
    # this function.

    # Run a collection so that any reference cycles created up to this point
    # are collected and don't skew our measurement.
    gc.collect()

    previous_stats = gc.get_stats()
    inspect_span_from_stack()
    gc.collect()
    stats = gc.get_stats()

    for previous_generation, current_generation in zip(previous_stats, stats):
        assert previous_generation['collected'] == current_generation['collected']
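
The test above detects cycles by comparing the cumulative 'collected' counters around a forced collection. As an illustrative counterpart (make_cycle is a hypothetical helper, not part of the project above), a function that does create a cycle bumps those counters in a simple single-threaded script:

import gc

def make_cycle():
    node = {}
    node["self"] = node  # the dict references itself, forming a reference cycle

gc.collect()
before = sum(s["collected"] for s in gc.get_stats())
make_cycle()
gc.collect()
after = sum(s["collected"] for s in gc.get_stats())
assert after > before  # the unreachable cycle was found by the collector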
Example #3
def show_gc_leak(method):
    """
    打印并获取gc信息
    1.不可达对象统计
    2.不可回收对象统计
    :param method: 打印函数引用
    :return:
    """
    _unreachable = gc.collect()
    method(
        '%s, %s, %s, %s, %s' %
        (
            "unreachable object num: %d" % (_unreachable,),
            "garbage object num: %d" % (len(gc.garbage)),
            "gc object num: %d" % (len(gc.get_objects())),
            "gc threshold num: %d" % (len(gc.get_threshold())),
            "gc stats num: %d" % (len(gc.get_stats())),
        )
    )
    return {
        'unreachable object num': _unreachable,
        'garbage object num': len(gc.garbage),
        "gc object num": len(gc.get_objects()),
        # "gc object info": gc.get_objects(),
        "gc threshold num": len(gc.get_threshold()),
        "gc threshold info": str(gc.get_threshold()),
        "gc stats num": len(gc.get_stats()),
        "gc stats info": str(gc.get_stats()),
    }
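
A possible way to call the helper above, assuming it is importable as-is; any callable that accepts one string works as the `method` argument:

import logging

logging.basicConfig(level=logging.INFO)
leak_info = show_gc_leak(logging.getLogger(__name__).info)
print(leak_info["gc stats info"])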
Example #4
 async def dev_collect(self):
     res = {
         "before": gc.get_stats()
     }
     gc.collect()
     res['after'] = gc.get_stats()
     return res
Example #6
def benchmark_simplesearch():
    import gc
    n = 100000 * 50
    test_n = 1000
    data = create_dataset(n,20)
    
    tl0 = time.time()
    a = SimpleStringSearch(data)
    tl1 = time.time()
    print("finished dataset creation")
    testdata = create_dataset(test_n, 3)
    num = 0
    t0 = time.time()

    print(gc.get_stats())
    for i in testdata:
        #print(i)
        temp = a.findStringIdx(i)
        num += len(temp)
    t1 = time.time()
    print(num)
    ctime = t1 - t0
    print("creation time cost", tl1 - tl0,"total time cost", ctime, " avg time ", ctime / test_n)
    print(gc.get_stats())

    sys.stdin.readline()
Example #7
def load_img_fastai(img_bytes):
    img = Image.open(io.BytesIO(img_bytes.read()))
    img = img.convert('RGB')
    img = img.to_thumb(224, 224)  # to_thumb returns a resized copy, so assign the result
    img = np.array(img)

    gc.get_stats()
    gc.collect()
    gc.get_stats()
    return img
Example #8
    def post(self):
        data = request.files
        image = data["img"]

        img = load_img_fastai(image)

        result = learn.predict(img)[0]
        gc.get_stats()
        gc.collect()
        gc.get_stats()
        return jsonify(result)
Example #9
def memusage(do_dump_rpy_heap=True, do_objgraph=True):
    # type: (Optional[bool], Optional[bool]) -> str
    """Returning a str of memory usage stats"""
    def trap_err(func, *args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:  # pragma: nocover
            # include both __str/repr__, sometimes one's useless
            buf.writelines([func.__name__, ': ', repr(e), ': ', str(e)])

    buf = StringIO()
    rusage = trap_err(resource.getrusage, resource.RUSAGE_SELF)
    buf.writelines([repr(rusage), '\n\n'])
    trap_err(pmap_extended, buf)
    trap_err(jemalloc_stats, buf)
    trap_err(glibc_malloc_info, buf)
    if hasattr(gc, 'get_stats'):
        buf.writelines(['\n\n', str(gc.get_stats()), '\n\n'])  # get_stats() is not a str, so stringify it
    if do_dump_rpy_heap:
        # dump rpython's heap before objgraph potentially pollutes the
        # heap with its heavy workload
        trap_err(dump_rpy_heap, buf)
    trap_err(get_stats_asmmemmgr, buf)
    buf.write('\n\n')
    if do_objgraph:
        trap_err(objgraph.show_most_common_types, limit=0, file=buf)
    return buf.getvalue()
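
The hasattr guard likely exists because gc.get_stats() is missing on older interpreters; note also that the return type differs between CPython (a list of dicts) and PyPy (a stats object), so stringifying keeps the write portable. A small sketch of that pattern on its own:

import gc

def gc_stats_text():
    # list of dicts on CPython 3.4+, a pretty-printer stats object on PyPy
    if hasattr(gc, "get_stats"):
        return str(gc.get_stats())
    return "gc.get_stats() not available"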
Example #10
File: __init__.py  Project: btimby/prom
    def gather(self):
        """Perform a GC and build memory graph."""
        LOGGER.debug('Forcing GC collection')
        gc.collect()

        # Attempt to log some GC stats.
        try:
            self.stats = gc.get_stats()

        except AttributeError:
            # get_stats() only exists in Python 3.4+, so we can continue
            # without stats if the function does not exist.
            pass

        LOGGER.debug('Gathering memory graph')
        objs = gc.get_objects()

        self.graph = {}
        for obj in objs:
            node = (
                obj_dump(obj),
                # Approximation:
                sys.getsizeof(obj),
                [id(o) for o in gc.get_referents(obj)],
                [id(o) for o in gc.get_referrers(obj)],
            )
            self.graph[id(obj)] = node
Example #11
def test_models():
    latent_channel_dim = 4

    test_encoder_output_channels(input_data_shape=(1, 3, 28, 28),
                                 latent_channel_dim=latent_channel_dim)
    test_encoder_output_channels(input_data_shape=(8, 3, 200, 200),
                                 latent_channel_dim=latent_channel_dim)

    test_decoder_output_channels(input_data_shape=(1, 3, 28, 28),
                                 latent_channel_dim=latent_channel_dim)
    test_decoder_output_channels(input_data_shape=(8, 3, 200, 200),
                                 latent_channel_dim=latent_channel_dim)

    test_autoencoder(input_data_shape=(1, 3, 28, 28),
                     latent_channel_dim=latent_channel_dim)
    test_autoencoder(input_data_shape=(8, 3, 200, 200),
                     latent_channel_dim=latent_channel_dim)

    print_model_details(AutoEncoder(latent_channel_dim))

    gc.collect()
    torch.cuda.empty_cache()

    print(gc.get_count())
    print(gc.get_stats())
Example #12
def main():
    a = 4
    b = 5

    c_list = []
    c_list.append(123)
    c_list.append(456)
    # reference cycle
    c_list.append(c_list)
    c_list[2].append(789)

    # foo = ['hi']
    # c_list = foo

    print(c_list)

    print("Stats: {}".format(gc.get_stats()))
    print("Count: {}".format(gc.get_count()))
    print("GC enabled: {}".format(gc.isenabled()))
    print("Threshold: {}".format(gc.get_threshold()))
    print("c_list is tracked: {}".format(gc.is_tracked(c_list)))

    """
    The count returned is generally one higher than you might expect,
    because it includes the (temporary) reference as an argument to getrefcount().
    """
    print("Reference count for c_list: {}".format(sys.getrefcount(c_list)))
    del c_list[2]
    print("Reference count for c_list: {}".format(sys.getrefcount(c_list)))

    print("Collecting: {}".format(gc.collect()))

    print("Done.")
Example #13
def get_debug_info(civcoms):
    code = "<html><head><meta http-equiv=\"refresh\" content=\"20\">" \
       + "<link href='/css/bootstrap.min.css' rel='stylesheet'></head>" \
       + "<body><div class='container'>" \
       + "<h2>Freeciv WebSocket Proxy Status</h2>" \
       + "<font color=\"green\">Process status: OK</font><br>"

    code += "<b>Process Uptime: " + \
        str(int(time.time() - startTime)) + " s.</b><br>"

    code += ("Python version: %s %s (%s)<br>" % (
        platform.python_implementation(),
        platform.python_version(),
        platform.python_build()[0],
    ))

    cpu = ' '.join(platform.processor().split())
    code += ("Platform: %s %s on '%s' <br>" % (
        platform.machine(),
        platform.system(),
        cpu))

    code += ("Tornado version %s <br>" % (tornado_version))

    try:
        f = open("/proc/loadavg")
        contents = f.read()
        code += "Load average: " + contents
        f.close()
    except:
        print("Cannot open uptime file: /proc/uptime")

    try:
        code += "<h3>Memory usage:</h3>"
        code += "Memory: " + str(memory() / 1048576) + " MB <br>"
        code += "Resident: " + str(resident() / 1048576) + " MB <br>"
        code += "Stacksize: " + str(stacksize() / 1048576) + " MB <br>"
        try:
          code += "Garabage collection stats: " + str(gc.get_stats()) + " <br>"
          code += "Garabage list: " + str(gc.garbage) + " <br>"
        except AttributeError:
          pass

        code += ("<h3>Logged in users  (count %i) :</h3>" % len(civcoms))
        for key in list(civcoms.keys()):
            code += (
                "username: <b>%s</b> <br>IP:%s <br>Civserver: (%d)<br>Connect time: %d<br><br>" %
                (civcoms[key].username,
                 civcoms[key].civwebserver.ip,
                    civcoms[key].civserverport,
                    time.time() -
                    civcoms[key].connect_time))

    except:
        print(("Unexpected error:" + str(sys.exc_info()[0])))
        raise

    code += "</div></body></html>"

    return code
Example #14
def main():
    a = 4
    b = 5

    c_list = []
    c_list.append(123)
    c_list.append(456)
    # reference cycle
    c_list.append(c_list)
    c_list[2].append(789)

    # foo = ['hi']
    # c_list = foo

    print(c_list)

    print("Stats: {}".format(gc.get_stats()))
    print("Count: {}".format(gc.get_count()))
    print("GC enabled: {}".format(gc.isenabled()))
    print("Threshold: {}".format(gc.get_threshold()))
    print("c_list is tracked: {}".format(gc.is_tracked(c_list)))
    """
    The count returned is generally one higher than you might expect,
    because it includes the (temporary) reference as an argument to getrefcount().
    """
    print("Reference count for c_list: {}".format(sys.getrefcount(c_list)))
    del c_list[2]
    print("Reference count for c_list: {}".format(sys.getrefcount(c_list)))

    print("Collecting: {}".format(gc.collect()))

    print("Done.")
Example #15
def dump_gc_stat(f: 'TextIO') -> None:
    f.write("  GC stat:\n")
    for i, generation in enumerate(gc.get_stats()):
        f.write(f"    Generation {i}:\n")
        f.write(f"      collections   : {generation.get('collections')}\n")
        f.write(f"      collected     : {generation.get('collected')}\n")
        f.write(f"      uncollectable : {generation.get('uncollectable')}\n")
Example #16
    def process(self, task: Task) -> None:  # type: ignore
        sample = task.get_resource("sample")
        headers = task.headers

        if headers["type"] == "sample":
            self.log.info("Analyzing original binary")
            self.analyze_sample(sample)
        elif headers["type"] == "analysis":
            sample_hash = hashlib.sha256(sample.content or b"").hexdigest()
            self.log.info(f"Processing analysis, sample: {sample_hash}")
            dumps = task.get_resource("dumps.zip")
            dumps_metadata = task.get_payload("dumps_metadata")
            with dumps.extract_temporary() as tmpdir:  # type: ignore
                dump_infos = []
                for dump_metadata in dumps_metadata:
                    dump_path = os.path.join(tmpdir, dump_metadata["filename"])
                    if not self._is_safe_path(tmpdir, dump_path):
                        self.log.warning(
                            f"Path traversal attempt: {dump_path}")
                        continue
                    dump_base = int(dump_metadata["base_address"], 16)
                    dump_infos.append(DumpInfo(path=dump_path, base=dump_base))
                self.analyze_dumps(sample, dump_infos)

        self.log.debug("Printing gc stats")
        self.log.debug(gc.get_stats())
Example #17
def get_debug_info(civcoms):
    code = "<html><head><meta http-equiv=\"refresh\" content=\"20\"></head><body><h2>Freeciv WebSocket Proxy Status</h2>"
    code += "<font color=\"green\">Process status: OK</font><br>"

    code += "<h3>Process Uptime: " + \
        str(int(time.time() - startTime)) + " s.</h3>"
    current_time = strftime("%Y-%m-%d %H:%M:%S", gmtime())
    code += "<b>Current time:</b><br> " + current_time

    code += ("<br><br>Python version: %s %s (%s)<br>" % (
        platform.python_implementation(),
        platform.python_version(),
        platform.python_build()[0],
    ))

    cpu = ' '.join(platform.processor().split())
    code += ("Platform: %s %s on '%s' <br>" %
             (platform.machine(), platform.system(), cpu))

    code += ("Tornado version %s <br>" % (tornado_version))

    try:
        f = open("/proc/loadavg")
        contents = f.read()
        code += "Load average: " + contents
        f.close()
    except:
        print("Cannot open uptime file: /proc/uptime")

    try:
        code += "<h3>Memory usage:</h3>"
        code += "Memory: " + str(memory() / 1048576) + " MB <br>"
        code += "Resident: " + str(resident() / 1048576) + " MB <br>"
        code += "Stacksize: " + str(stacksize() / 1048576) + " MB <br>"
        code += "Garabage collection stats: " + str(gc.get_stats()) + " <br>"
        code += "Garabage list: " + str(gc.garbage) + " <br>"

        code += ("<h3>Logged in users  (count %i) :</h3>" % len(civcoms))
        for key in list(civcoms.keys()):
            code += (
                "username: <b>%s</b> <br>IP:%s <br>Civserver: (%d)<br>Connect time: %d<br><br>"
                % (civcoms[key].username,
                   civcoms[key].civwebserver.ip, civcoms[key].civserverport,
                   time.time() - civcoms[key].connect_time))

        code += "<h3>Thread dumps:</h3>"

        for threadId, stack in list(sys._current_frames().items()):
            code += ("<br><br><b><u># ThreadID: %s</u></b><br>" % threadId)
            for filename, lineno, name, line in traceback.extract_stack(stack):
                code += ('File: "%s", line %d, in %s: ' %
                         (filename, lineno, name))
                if line:
                    code += (" <b>%s</b> <br>" % (line.strip()))

    except:
        print(("Unexpected error:" + str(sys.exc_info()[0])))
        raise

    return code
Example #18
def dispose(message, wait=1, printdeleted=False):
    s = time.time()
    print("gc { Waiting", wait, "seconds before starting }")
    time.sleep(wait)
    print("Started...")
    print("Collecting garbage to dispose... please wait")
    gc.set_debug(gc.DEBUG_SAVEALL)
    gc.collect()
    print(gc.get_stats()[2]['collected'], "items should be cleaned up")
    c = time.time()
    x = 0
    print("Collection took", "{0:.4f}".format(c - s - wait),
          "seconds. Starting disposal in", wait, "second.")
    time.sleep(wait)
    for item in gc.garbage:
        if printdeleted:
            print("Garbage found! Deleting", item)
        del item
        x += 1
    d = time.time()
    print("Deleted", x, "items in", "{0:.4f}".format(d - c - wait), "seconds.")
    print("gc took a total of", "{0:.4f}".format(d - s),
          "seconds to complete.")
    print("gc took a total of", "{0:.4f}".format(d - s - (wait * 2)),
          "seconds to calculate.(ignores sleep time)")
    time.sleep(wait)
    exit(text + " gc { " + message + " }")
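
One caveat about the routine above: `del item` only unbinds the loop variable, and with DEBUG_SAVEALL every collected object stays referenced from gc.garbage, so the loop does not actually release memory. A sketch of what does free the saved objects:

import gc

gc.set_debug(0)      # stop saving collected objects
gc.garbage.clear()   # drop the references held by the gc module
gc.collect()         # reclaim them with a normal collection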
Example #19
    def collect(self):
        collected = CounterMetricFamily(
            'python_gc_objects_collected',
            'Objects collected during gc',
            labels=['generation'],
        )
        uncollectable = CounterMetricFamily(
            'python_gc_objects_uncollectable',
            'Uncollectable object found during GC',
            labels=['generation'],
        )

        collections = CounterMetricFamily(
            'python_gc_collections',
            'Number of times this generation was collected',
            labels=['generation'],
        )

        for generation, stat in enumerate(gc.get_stats()):
            generation = str(generation)
            collected.add_metric([generation], value=stat['collected'])
            uncollectable.add_metric([generation], value=stat['uncollectable'])
            collections.add_metric([generation], value=stat['collections'])

        return [collected, uncollectable, collections]
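
Collectors like the one above are meant to be registered with a prometheus_client registry; a self-contained sketch of the wiring (GCStatsCollector is an illustrative name, not from the project above, and a separate registry is used to avoid clashing with the default GC metrics):

import gc
from prometheus_client import CollectorRegistry, generate_latest
from prometheus_client.core import CounterMetricFamily

class GCStatsCollector:
    """Illustrative custom collector mirroring the collect() method above."""

    def collect(self):
        collected = CounterMetricFamily(
            'python_gc_objects_collected',
            'Objects collected during gc',
            labels=['generation'],
        )
        for generation, stat in enumerate(gc.get_stats()):
            collected.add_metric([str(generation)], value=stat['collected'])
        yield collected

registry = CollectorRegistry()
registry.register(GCStatsCollector())
print(generate_latest(registry).decode())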
Example #20
    def collect(self):
        collected = CounterMetricFamily(
            f"{self.namespace}python_gc_objects_collected",
            "Objects collected during gc",
            labels=["generation"],
        )
        uncollectable = CounterMetricFamily(
            f"{self.namespace}python_gc_objects_uncollectable",
            "Uncollectable object found during GC",
            labels=["generation"],
        )

        collections = CounterMetricFamily(
            f"{self.namespace}python_gc_collections",
            "Number of times this generation was collected",
            labels=["generation"],
        )

        for generation, stat in enumerate(gc.get_stats()):
            generation = str(generation)
            collected.add_metric([generation], value=stat["collected"])
            uncollectable.add_metric([generation], value=stat["uncollectable"])
            collections.add_metric([generation], value=stat["collections"])

        return [collected, uncollectable, collections]
Example #21
    def collect(self):

        # @stats is a pretty-printer object with __str__() returning a nice table,
        # plus some fields that contain data from that table.
        # unfortunately, fields are pretty-printed themselves (i. e. '4.5MB').
        stats = gc.get_stats(memory_pressure=False)  # type: ignore
        # @s contains same fields as @stats, but as actual integers.
        s = stats._s  # type: ignore

        # also note that field naming is completely braindead
        # and only vaguely correlates with the pretty-printed table.
        # >>>> gc.get_stats(False)
        # Total memory consumed:
        #     GC used:            8.7MB (peak: 39.0MB)        # s.total_gc_memory, s.peak_memory
        #        in arenas:            3.0MB                  # s.total_arena_memory
        #        rawmalloced:          1.7MB                  # s.total_rawmalloced_memory
        #        nursery:              4.0MB                  # s.nursery_size
        #     raw assembler used: 31.0kB                      # s.jit_backend_used
        #     -----------------------------
        #     Total:              8.8MB                       # stats.memory_used_sum
        #
        #     Total memory allocated:
        #     GC allocated:            38.7MB (peak: 41.1MB)  # s.total_allocated_memory, s.peak_allocated_memory
        #        in arenas:            30.9MB                 # s.peak_arena_memory
        #        rawmalloced:          4.1MB                  # s.peak_rawmalloced_memory
        #        nursery:              4.0MB                  # s.nursery_size
        #     raw assembler allocated: 1.0MB                  # s.jit_backend_allocated
        #     -----------------------------
        #     Total:                   39.7MB                 # stats.memory_allocated_sum
        #
        #     Total time spent in GC:  0.073                  # s.total_gc_time

        pypy_gc_time = CounterMetricFamily(
            "pypy_gc_time_seconds_total", "Total time spent in PyPy GC", labels=[],
        )
        pypy_gc_time.add_metric([], s.total_gc_time / 1000)
        yield pypy_gc_time

        pypy_mem = GaugeMetricFamily(
            "pypy_memory_bytes",
            "Memory tracked by PyPy allocator",
            labels=["state", "class", "kind"],
        )
        # memory used by JIT assembler
        pypy_mem.add_metric(["used", "", "jit"], s.jit_backend_used)
        pypy_mem.add_metric(["allocated", "", "jit"], s.jit_backend_allocated)
        # memory used by GCed objects
        pypy_mem.add_metric(["used", "", "arenas"], s.total_arena_memory)
        pypy_mem.add_metric(["allocated", "", "arenas"], s.peak_arena_memory)
        pypy_mem.add_metric(["used", "", "rawmalloced"], s.total_rawmalloced_memory)
        pypy_mem.add_metric(["allocated", "", "rawmalloced"], s.peak_rawmalloced_memory)
        pypy_mem.add_metric(["used", "", "nursery"], s.nursery_size)
        pypy_mem.add_metric(["allocated", "", "nursery"], s.nursery_size)
        # totals
        pypy_mem.add_metric(["used", "totals", "gc"], s.total_gc_memory)
        pypy_mem.add_metric(["allocated", "totals", "gc"], s.total_allocated_memory)
        pypy_mem.add_metric(["used", "totals", "gc_peak"], s.peak_memory)
        pypy_mem.add_metric(["allocated", "totals", "gc_peak"], s.peak_allocated_memory)
        yield pypy_mem
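
The memory_pressure keyword above is PyPy-specific; on CPython, gc.get_stats() takes no arguments and returns the per-generation dicts used in the other examples. A sketch of guarding code that must run on both implementations:

import gc
import platform

if platform.python_implementation() == "PyPy":
    stats = gc.get_stats(memory_pressure=False)  # PyPy: pretty-printer stats object
else:
    stats = gc.get_stats()                       # CPython: list of per-generation dicts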
Example #22
def gc_stat():
    """print gc generation info"""
    print("  GC stat:")
    for i, generation in enumerate(gc.get_stats()):
        print(f"    Generation {i}:")
        print(f"      collections   : {generation.get('collections')}")
        print(f"      collected     : {generation.get('collected')}")
        print(f"      uncollectable : {generation.get('uncollectable')}")
Example #23
def collect_and_log_garbage(log_handler=None):
    garbageCollection = gc.get_stats()
    garbageCollectionLog = '\n'.join([
        f'Generation {n}: {repr(d)}' for n, d in enumerate(garbageCollection)
    ])
    if log_handler is not None:
        logger.debug('Logging garbage collection:\n' + garbageCollectionLog)
    return garbageCollectionLog
Example #24
def get_stats(graph=None) -> Dict:
    try:
        stats = {
            "active_threads": threading.active_count(),
            "thread_names": [thread.name for thread in threading.enumerate()],
            "graph_size_bytes": asizeof.asizeof(graph),
            "garbage_collector": garbage_collector.get_stats(),
            "process": get_all_process_info(),
        }
        if sys.platform == "linux":
            stats.update(
                {
                    "maxrss_parent_bytes": resource.getrusage(
                        resource.RUSAGE_SELF
                    ).ru_maxrss
                    * 1024,
                    "maxrss_children_bytes": resource.getrusage(
                        resource.RUSAGE_CHILDREN
                    ).ru_maxrss
                    * 1024,
                }
            )
        else:
            stats.update({"maxrss_parent_bytes": 0, "maxrss_children_bytes": 0})
        stats["maxrss_total_bytes"] = (
            stats["maxrss_parent_bytes"] + stats["maxrss_children_bytes"]
        )
        num_fds_parent = (
            stats["process"].get("parent", {}).get("num_file_descriptors", 0)
        )
        num_fds_children = sum(
            [
                v["num_file_descriptors"]
                for v in stats["process"].get("children", {}).values()
            ]
        )
        stats.update(
            {
                "graph_size_human_readable": iec_size_format(stats["graph_size_bytes"]),
                "maxrss_parent_human_readable": iec_size_format(
                    stats["maxrss_parent_bytes"]
                ),
                "maxrss_children_human_readable": iec_size_format(
                    stats["maxrss_children_bytes"]
                ),
                "maxrss_total_human_readable": iec_size_format(
                    stats["maxrss_total_bytes"]
                ),
                "num_fds_parent": num_fds_parent,
                "num_fds_children": num_fds_children,
                "num_fds_total": num_fds_parent + num_fds_children,
            }
        )
    except Exception:
        log.exception("Error while trying to get stats")
        return {}
    else:
        return stats
Example #25
    def __gc_info():
        d = {}

        d['gc.get_stats'] = gc.get_stats()
        d['gc.isenabled'] = gc.isenabled()
        d['gc.get_debug'] = gc.get_debug()
        d['gc.get_threshold'] = gc.get_threshold()

        return d
Example #26
def get_debug_info(civcoms):
    code = "<html><head><meta http-equiv=\"refresh\" content=\"20\">" \
       + "<link href='//play.freeciv.org/css/bootstrap.min.css' rel='stylesheet'></head>" \
       + "<body><div class='container'>" \
       + "<h2>Freeciv WebSocket Proxy Status</h2>" \
       + "<font color=\"green\">Process status: OK</font><br>"

    code += "<b>Process Uptime: " + \
        str(int(time.time() - startTime)) + " s.</b><br>"

    code += ("Python version: %s %s (%s)<br>" % (
        platform.python_implementation(),
        platform.python_version(),
        platform.python_build()[0],
    ))

    cpu = ' '.join(platform.processor().split())
    code += ("Platform: %s %s on '%s' <br>" %
             (platform.machine(), platform.system(), cpu))

    code += ("Tornado version %s <br>" % (tornado_version))

    try:
        f = open("/proc/loadavg")
        contents = f.read()
        code += "Load average: " + contents
        f.close()
    except:
        print("Cannot open uptime file: /proc/uptime")

    try:
        code += "<h3>Memory usage:</h3>"
        code += "Memory: " + str(memory() / 1048576) + " MB <br>"
        code += "Resident: " + str(resident() / 1048576) + " MB <br>"
        code += "Stacksize: " + str(stacksize() / 1048576) + " MB <br>"
        try:
            code += "Garabage collection stats: " + str(
                gc.get_stats()) + " <br>"
            code += "Garabage list: " + str(gc.garbage) + " <br>"
        except AttributeError:
            pass

        code += ("<h3>Logged in users  (count %i) :</h3>" % len(civcoms))
        for key in list(civcoms.keys()):
            code += (
                "username: <b>%s</b> <br>IP:%s <br>Civserver: (%d)<br>Connect time: %d<br><br>"
                % (civcoms[key].username,
                   civcoms[key].civwebserver.ip, civcoms[key].civserverport,
                   time.time() - civcoms[key].connect_time))

    except:
        print(("Unexpected error:" + str(sys.exc_info()[0])))
        raise

    code += "</div></body></html>"

    return code
Example #27
File: reporter.py  Project: rurbin3/yawast
def save_output(spinner=None):
    # add some extra debug data
    register_info("memsize_issues", total_size(_issues))
    register_info("memsize_info", total_size(_info))
    register_info("memsize_data", total_size(_data))
    register_info("memsize_evidence", total_size(_evidence))
    register_info("gc_stats", gc.get_stats())
    register_info("gc_objects", len(gc.get_objects()))

    if spinner:
        spinner.stop()
    print("Saving...")
    if spinner:
        spinner.start()

    vulns = {}
    for vuln in Vulnerabilities:
        vulns[vuln.name] = {
            "severity": vuln.severity,
            "description": vuln.description,
            "id": vuln.id,
        }

    data = {
        "_info": _convert_keys(_info),
        "data": _convert_keys(_data),
        "issues": _convert_keys(_issues),
        "evidence": _convert_keys(_evidence),
        "vulnerabilities": vulns,
    }
    json_data = json.dumps(data, indent=4)

    try:
        zf = zipfile.ZipFile(f"{_output_file}.zip", "x", zipfile.ZIP_BZIP2)

        with ExecutionTimer() as tm:
            zf.writestr(
                f"{os.path.basename(_output_file)}",
                json_data.encode("utf_8", "backslashreplace"),
            )

        zf.close()

        orig = "{0:cM}".format(Size(len(json_data)))
        comp = "{0:cM}".format(Size(os.path.getsize(f"{_output_file}.zip")))

        if spinner:
            spinner.stop()
        print(
            f"Saved {_output_file}.zip (size reduced from {orig} to {comp} in {tm.to_ms()}ms)"
        )
    except Exception as error:
        if spinner:
            spinner.stop()

        print(f"Error writing output file: {error}")
Example #28
    def __del__(self):
        """Destructor to delete class objects and its class members
        """

        if 'pydevd' in sys.modules:
            print("\n[D1] From destructor in ComplexNumber : ")
            print('|Destructor called, object of class ComplexNumber deleted|')
            print("len(gc.get_objects):", len(gc.get_objects()))
            print("gc.get_stats:", gc.get_stats())
            print("gc.get_count:", gc.get_count())
Example #29
 def create_arrays_thread(n1=10, n2=20):
     ctx = ctx_factory()
     queue = cl.CommandQueue(ctx)
     for i1 in range(n2):
         for i in range(n1):
             acl = cl.array.zeros(queue, 10, dtype=np.float32)
             acl.get()
         # Garbage collection triggers the error
         print("collected ", str(gc.collect()))
         print("stats ", gc.get_stats())
Example #30
def run_gc():
    result = {
        'memory_before': get_memory_available_in_mb(),
        'memory_after': '',
        'memory_saved': '',
        'memory_info_before':
        str(psutil.Process(os.getpid()).memory_full_info()),
        'gc_stats_before': gc.get_stats()
    }
    gc.set_debug(gc.DEBUG_UNCOLLECTABLE | gc.DEBUG_SAVEALL)
    gc.collect()

    result['memory_after'] = get_memory_available_in_mb()
    result['memory_saved'] = dom_utils.get_int_number_from_text(
        result['memory_before']) - dom_utils.get_int_number_from_text(
            result['memory_after'])
    result['memory_info_after'] = str(
        psutil.Process(os.getpid()).memory_full_info())
    result['gc_stats_after'] = gc.get_stats()
    return result
Example #31
    def run(self, args):

        print("Gxf %s - %s\n" % (gxf.__version__, gxf.__author__))

        gcstats = gc.get_stats()

        headers = ["generation", "collections", "collected", "uncollectable"]
        tbldata = [["gen %d" % i] + [s[h] for h in headers[1:]]
                   for i, s in enumerate(gcstats)]

        print("Garbage collector statistics:\n")
        print(tabulate.tabulate(tbldata, headers=headers))
Example #32
def f4():
    '''Automatic garbage collection demo'''
    print(gc.get_count())     # pending object counts per generation
    # print(gc.get_objects()) # list of all objects tracked by the collector
    print(gc.get_stats())     # collection statistics
    print(gc.get_threshold()) # collection thresholds


    a=A()
    print(gc.get_count())
    # print(gc.get_objects())
    print(gc.get_stats())
    print(gc.get_threshold())


    del a
    print(gc.get_count())
    # print(gc.get_objects())
    print(gc.get_stats())
    print(gc.get_threshold())
    print(gc.get_freeze_count())
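
The distinction the snippet above is poking at: gc.get_count() reports how many tracked allocations are currently pending per generation and is reset by collections, while gc.get_stats() accumulates totals over the life of the interpreter. A quick comparison:

import gc

print(gc.get_count())     # pending allocations per generation, e.g. (467, 3, 1)
gc.collect()
print(gc.get_count())     # the generation-0 count drops back after a collection
print(gc.get_stats()[2])  # cumulative totals for generation 2 only ever grow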
Example #33
File: meta.py  Project: Ark444/gxf
    def run(self, args):

        print("Gxf %s - %s\n" % (gxf.__version__, gxf.__author__))

        gcstats = gc.get_stats()

        headers = ["generation", "collections", "collected", "uncollectable"]
        tbldata = [["gen %d" % i] + [s[h] for h in headers[1:]]
                   for i, s in enumerate(gcstats)]

        print("Garbage collector statistics:\n")
        print(tabulate.tabulate(tbldata, headers=headers))
Example #34
    def report(self):
        # CPU
        if runtime_info.OS_LINUX or runtime_info.OS_DARWIN:
            cpu_time = read_cpu_time()
            if cpu_time != None:
                cpu_time_metric = self.report_metric(Metric.TYPE_COUNTER, Metric.CATEGORY_CPU, Metric.NAME_CPU_TIME, Metric.UNIT_NANOSECOND, cpu_time)
                if cpu_time_metric.has_measurement():
                    cpu_usage = (cpu_time_metric.measurement.value / (60 * 1e9)) * 100
                    try:
                        cpu_usage = cpu_usage / multiprocessing.cpu_count()
                    except Exception:
                        pass

                    self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_CPU, Metric.NAME_CPU_USAGE, Metric.UNIT_PERCENT, cpu_usage)


        # Memory
        if runtime_info.OS_LINUX or runtime_info.OS_DARWIN:
            max_rss = read_max_rss()
            if max_rss != None:
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_MEMORY, Metric.NAME_MAX_RSS, Metric.UNIT_KILOBYTE, max_rss)

        if runtime_info.OS_LINUX:
            current_rss = read_current_rss()
            if current_rss != None:
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_MEMORY, Metric.NAME_CURRENT_RSS, Metric.UNIT_KILOBYTE, current_rss)

            vm_size = read_vm_size()
            if vm_size != None:
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_MEMORY, Metric.NAME_VM_SIZE, Metric.UNIT_KILOBYTE, vm_size)


        # GC stats
        gc_count0, gc_count1, gc_count2 = gc.get_count()
        total_gc_count = gc_count0 + gc_count1 + gc_count2
        self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_GC, Metric.NAME_GC_COUNT, Metric.UNIT_NONE, total_gc_count)

        if min_version(3, 4):
            gc_stats = gc.get_stats()
            if gc_stats and gc_stats[0] and gc_stats[1] and gc_stats[2]:
                total_collections = gc_stats[0]['collections'] + gc_stats[1]['collections'] + gc_stats[2]['collections']
                self.report_metric(Metric.TYPE_COUNTER, Metric.CATEGORY_GC, Metric.NAME_GC_COLLECTIONS, Metric.UNIT_NONE, total_collections)

                total_collected = gc_stats[0]['collected'] + gc_stats[1]['collected'] + gc_stats[2]['collected']
                self.report_metric(Metric.TYPE_COUNTER, Metric.CATEGORY_GC, Metric.NAME_GC_COLLECTED, Metric.UNIT_NONE, total_collected)

                total_uncollectable = gc_stats[0]['uncollectable'] + gc_stats[1]['uncollectable'] + gc_stats[2]['uncollectable']
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_GC, Metric.NAME_GC_UNCOLLECTABLE, Metric.UNIT_NONE, total_uncollectable)

        # Runtime
        thread_count = threading.active_count()
        self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_RUNTIME, Metric.NAME_THREAD_COUNT, Metric.UNIT_NONE, thread_count)
    def report(self):
        # CPU
        if not runtime_info.OS_WIN:
            cpu_time = read_cpu_time()
            if cpu_time != None:
                cpu_time_metric = self.report_metric(Metric.TYPE_COUNTER, Metric.CATEGORY_CPU, Metric.NAME_CPU_TIME, Metric.UNIT_NANOSECOND, cpu_time)
                if cpu_time_metric.has_measurement():
                    cpu_usage = (cpu_time_metric.measurement.value / (60 * 1e9)) * 100
                    try:
                        cpu_usage = cpu_usage / multiprocessing.cpu_count()
                    except Exception:
                        pass

                    self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_CPU, Metric.NAME_CPU_USAGE, Metric.UNIT_PERCENT, cpu_usage)


        # Memory
        if not runtime_info.OS_WIN:
            max_rss = read_max_rss()
            if max_rss != None:
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_MEMORY, Metric.NAME_MAX_RSS, Metric.UNIT_KILOBYTE, max_rss)

        if runtime_info.OS_LINUX:
            current_rss = read_current_rss()
            if current_rss != None:
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_MEMORY, Metric.NAME_CURRENT_RSS, Metric.UNIT_KILOBYTE, current_rss)

            vm_size = read_vm_size()
            if vm_size != None:
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_MEMORY, Metric.NAME_VM_SIZE, Metric.UNIT_KILOBYTE, vm_size)


        # GC stats
        gc_count0, gc_count1, gc_count2 = gc.get_count()
        total_gc_count = gc_count0 + gc_count1 + gc_count2
        self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_GC, Metric.NAME_GC_COUNT, Metric.UNIT_NONE, total_gc_count)

        if min_version(3, 4):
            gc_stats = gc.get_stats()
            if gc_stats and gc_stats[0] and gc_stats[1] and gc_stats[2]:
                total_collections = gc_stats[0]['collections'] + gc_stats[1]['collections'] + gc_stats[2]['collections']
                self.report_metric(Metric.TYPE_COUNTER, Metric.CATEGORY_GC, Metric.NAME_GC_COLLECTIONS, Metric.UNIT_NONE, total_collections)

                total_collected = gc_stats[0]['collected'] + gc_stats[1]['collected'] + gc_stats[2]['collected']
                self.report_metric(Metric.TYPE_COUNTER, Metric.CATEGORY_GC, Metric.NAME_GC_COLLECTED, Metric.UNIT_NONE, total_collected)

                total_uncollectable = gc_stats[0]['uncollectable'] + gc_stats[1]['uncollectable'] + gc_stats[2]['uncollectable']
                self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_GC, Metric.NAME_GC_UNCOLLECTABLE, Metric.UNIT_NONE, total_uncollectable)

        # Runtime
        thread_count = threading.active_count()
        self.report_metric(Metric.TYPE_STATE, Metric.CATEGORY_RUNTIME, Metric.NAME_THREAD_COUNT, Metric.UNIT_NONE, thread_count)
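
Since both report() variants sum the three generations field by field, a small helper keeps that arithmetic in one place (a sketch, not part of the original agent code); report() could then read gc_totals()['collections'] and friends instead of indexing each generation by hand:

import gc

def gc_totals():
    stats = gc.get_stats()
    return {key: sum(generation[key] for generation in stats)
            for key in ("collections", "collected", "uncollectable")}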
Example #36
File: devTools.py  Project: blitzmann/Pyfa
 def gc_collect(self, evt):
     print(gc.collect())
     print(gc.get_debug())
     print(gc.get_stats())
Example #37
def get_debug_info(civcoms):
    code = "<html><head><meta http-equiv=\"refresh\" content=\"20\"></head><body><h2>Freeciv WebSocket Proxy Status</h2>"
    code += "<font color=\"green\">Process status: OK</font><br>"

    code += "<h3>Process Uptime: " + \
        str(int(time.time() - startTime)) + " s.</h3>"
    current_time = strftime("%Y-%m-%d %H:%M:%S", gmtime())
    code += "<b>Current time:</b><br> " + current_time

    code += ("<br><br>Python version: %s %s (%s)<br>" % (
        platform.python_implementation(),
        platform.python_version(),
        platform.python_build()[0],
    ))

    cpu = ' '.join(platform.processor().split())
    code += ("Platform: %s %s on '%s' <br>" % (
        platform.machine(),
        platform.system(),
        cpu))

    code += ("Tornado version %s <br>" % (tornado_version))

    try:
        f = open("/proc/loadavg")
        contents = f.read()
        code += "Load average: " + contents
        f.close()
    except:
        print("Cannot open uptime file: /proc/uptime")

    try:
        code += "<h3>Memory usage:</h3>"
        code += "Memory: " + str(memory() / 1048576) + " MB <br>"
        code += "Resident: " + str(resident() / 1048576) + " MB <br>"
        code += "Stacksize: " + str(stacksize() / 1048576) + " MB <br>"
        code += "Garabage collection stats: " + str(gc.get_stats()) + " <br>"
        code += "Garabage list: " + str(gc.garbage) + " <br>"

        code += ("<h3>Logged in users  (count %i) :</h3>" % len(civcoms))
        for key in list(civcoms.keys()):
            code += (
                "username: <b>%s</b> <br>IP:%s <br>Civserver: (%d)<br>Connect time: %d<br><br>" %
                (civcoms[key].username,
                 civcoms[key].civwebserver.ip,
                    civcoms[key].civserverport,
                    time.time() -
                    civcoms[key].connect_time))

        code += "<h3>Thread dumps:</h3>"

        for threadId, stack in list(sys._current_frames().items()):
            code += ("<br><br><b><u># ThreadID: %s</u></b><br>" % threadId)
            for filename, lineno, name, line in traceback.extract_stack(stack):
                code += ('File: "%s", line %d, in %s: ' %
                         (filename, lineno, name))
                if line:
                    code += (" <b>%s</b> <br>" % (line.strip()))

    except:
        print(("Unexpected error:" + str(sys.exc_info()[0])))
        raise

    return code
Example #38
music_dir = 'Music/'
me_dir = 'ME/'
sound_dir = 'Sounds/'
map_dir = 'Maps/'
spr_dir = 'Sprites/'
ui_dir = 'UI/'
script_dir = 'EventScripts/'

import random
import pickle
import io
import gc
gc.get_stats()
import pygame
pygame.init()
pygame.mixer.init(frequency=22050, size=-16, channels=4, buffer=40960)
music = pygame.mixer.music
pygame.font.init()
Time = pygame.time
clock = pygame.time.Clock()

import pygame.midi
pygame.midi.init()

import math
import time
from time import time
import engine
import project
from pytmx.util_pygame import load_pygame
import datetime
Example #39
thresholds = prometheus_client.Gauge(
    'python_gc_threshold',
    'GC thresholds by generation',
    ['generation'])
set_function_on_map_gauge(thresholds, (0, 1, 2), gc.get_threshold)

if sys.version_info >= (3, 4):
    # The following 3 metrics are gauges because they come from a
    # callback, but their values behave like counters (the values
    # returned by gc.get_stats() are counters).
    collections = prometheus_client.Gauge(
        'python_gc_collections_total',
        'Number of GC collections that occurred by generation',
        ['generation'])
    set_function_on_map_gauge(collections, (0, 1, 2), lambda: [
        x['collections'] for x in gc.get_stats()])

    collected = prometheus_client.Gauge(
        'python_gc_collected_total',
        'Number of garbage collected objects by generation',
        ['generation'])
    set_function_on_map_gauge(collected, (0, 1, 2), lambda: [
        x['collected'] for x in gc.get_stats()])

    uncollectables = prometheus_client.Gauge(
        'python_gc_uncollectables',
        'Number of uncollectable objects by generation',
        ['generation'])
    set_function_on_map_gauge(uncollectables, (0, 1, 2), lambda: [
        x['uncollectable'] for x in gc.get_stats()])
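
set_function_on_map_gauge is a helper from the surrounding project; with plain prometheus_client, a similar callback-driven gauge can be approximated per label with Gauge.set_function (a standalone sketch, using a different metric name so it does not clash with the gauges above):

import gc
import prometheus_client

gc_collections = prometheus_client.Gauge(
    'python_gc_collections_by_generation',
    'Number of GC collections that occurred by generation',
    ['generation'])

for gen in (0, 1, 2):
    gc_collections.labels(generation=str(gen)).set_function(
        lambda gen=gen: gc.get_stats()[gen]['collections'])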