Example #1
    def test_stracker_create_summary(self):
        """Test that a summary is created correctly.

        This can only be done heuristically, e.g that most recent objects are
        included.
        Also check that summaries managed by the tracker are excluded if
        ignore_self is enabled.

        """
        # at the beginning, there should not be an indicator object listed
        tmp_tracker = tracker.SummaryTracker()
        sn = tmp_tracker.create_summary()
        self.assertEqual(self._contains_indicator(sn), None)
        # now an indicator object should be listed
        o = self._get_indicator()
        sn = tmp_tracker.create_summary()
        self.assertEqual(self._contains_indicator(sn), 1)
        # with ignore_self enabled a second summary should not list the first
        # summary
        sn = tmp_tracker.create_summary()
        sn2 = tmp_tracker.create_summary()
        tmp = summary._sweep(summary.get_diff(sn, sn2))
        self.assertEqual(len(tmp), 0)
        # but with ignore_self turned off, there should be some difference
        tmp_tracker = tracker.SummaryTracker(ignore_self=False)
        sn = tmp_tracker.create_summary()
        tmp_tracker.new_obj = self._get_indicator()
        sn2 = tmp_tracker.create_summary()
        tmp = summary._sweep(summary.get_diff(sn, sn2))
        self.assertNotEqual(len(tmp), 0)
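
The test above relies on two helpers that the snippet does not show. A rough sketch of what they could look like, as a mixin on the test case, assuming a dedicated marker class and pympler's summary row layout of [type_name, count, total_size] (the names mirror how the tests call them; the bodies are assumptions, not the original pympler test code):

class TrackerTestHelpers(object):  # hypothetical mixin for the TestCase
    class _Indicator(object):
        # a type that should not otherwise exist in the interpreter
        pass

    def _get_indicator(self):
        # create one fresh marker object for the summary to pick up
        return self._Indicator()

    def _contains_indicator(self, rows):
        # return the (possibly negative) count of indicator objects, or None if absent
        for type_name, count, total_size in rows:
            if '_Indicator' in type_name:
                return count
        return None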
Example #2
 def test(self):
     testDir = os.path.dirname(os.path.abspath(sys.modules[__name__].__file__))
     testFileSmall = testDir + "/solvency/2.0/random/spv_20_instance.xbrl"
     application = Tk()
     cntlrWinMain = CntlrWinMain(application)
     application.protocol("WM_DELETE_WINDOW", cntlrWinMain.quit)
     
     cntlrWinMain.setTestMode(True)
     for pluginMethod in pluginClassMethods("DevTesting.GetTestContext"):
         testContext = pluginMethod()
         break
     testContext.checkMemoryOnClose = True
     testContext.dumpFilePrefix = testDir + "/tmp/dump_"
     
     tr = tracker.SummaryTracker()
     
     for idx in range(4):
         print("\nIteration " + str(idx))
         cntlrWinMain.fileOpenFile(testFileSmall)
         cntlrWinMain.logClear()
         cntlrWinMain.fileClose()    
         
         tr.print_diff()  
         if idx > 1:
             assert testContext.diffNumObjects < 8000, "Check for new objects leak"  
Example #3
def main():
    tr = tracker.SummaryTracker()
    print(random.random())
    user_input = None
    bidders = []

    # Hardcoding the bidders for the profiling demo.
    bidders.append(Bidder("Jojo", 3000, random.random(), 1.2))
    bidders.append(Bidder("Melissa", 7000, random.random(), 1.5))
    bidders.append(Bidder("Priya", 15000, random.random(), 1.1))
    bidders.append(Bidder("Kewei", 800, random.random(), 1.9))
    bidders.append(Bidder("Scott", 4000, random.random(), 2))

    # Ideally we would do something like this, probably generate the
    # threat and increase values though, I'm lazy.

    # while user_input != 'q':
    #     bidder_name = input("Enter bidder name: ")
    #     bidder_money = float(input("Enter bidder budget: "))
    #     bidder_threat = float(input("Enter bid probability (0-1): "))
    #     bidder_increase = float(input("Enter the percentage increase of the "
    #                                   "new bid: "))
    #     bidders.append(Bidder(bidder_name, bidder_money, bidder_threat,
    #                           bidder_increase))
    #     user_input = input("Continue? q for quit, y for yes")

    print("\n\nStarting Auction!!")
    print("------------------")
    my_auction = Auction(bidders)
    my_auction.simulate_auction("Antique Vase", 100)
    print("-" * 50)
    tr.print_diff()
Example #4
def main(args):
    tr = tracker.SummaryTracker()

    t = pitree()
    t.add(2413, 2414, "zero")
    t.add(2400, 3290, "one")
    t.add(1250, 2913, "two")
    t.add(2999, 4601, "three")
    t.add(1639, 3007, "four")
    t.add(1639, 3007, "four'")

    print("t")
    for i in t.search(0,sys.maxsize): print(i)

    print("update t:")
    for i in t.search(1250, 1251):
        i = t.update_item(i, i.data + "---")
        print(i.begin, " ", i.end, " ", i.data)

    print("r = copy of t")
    r = t.copy()

    print("update r:")
    for i in r.search(123, 2400):
        i = r.update_item(i, i.data + "*")
        print(i.begin, " ", i.end, " ", i.data)

    print("update again t:")
    for i in t.search(4600, 4601):
        i = t.update_item(i, i.data + "###")
        print(i.begin, " ", i.end, " ", i.data)

    print("t")
    for i in t.search(0,sys.maxsize): print(i)

    print("r")
    for i in r.search(0,sys.maxsize): print(i)

    print("s = copy of r")
    s = r.copy()

    print("add to s")
    s.add(113, 1784, "five")
    s.add(114, 1784, "six")
    s.add(114, 1784, "seven")

    print("t")
    for i in t.search(0,sys.maxsize): print(i)

    print("r")
    for i in r.search(0,sys.maxsize): print(i)

    print("s")
    for i in s.search(0,sys.maxsize): print(i)

    pitree.print_stats([t.get_stats(), r.get_stats(), s.get_stats()])

    tr.print_diff()

    return 0
Example #5
def test_kmer_memory_usage():
    with open('memory_profiler.log', 'w') as fp:
        with redirect_stdout(fp):
            tr = tracker.SummaryTracker()
            tr.print_diff()
            with open(CHROM_GRAPH, 'rb') as gp:
                ra = RandomAccess(gp)
                tr.print_diff()
                ra_generator = iter(ra.values())
                tr.print_diff()
                print('10 kmers')
                kmers = list(islice(ra_generator, 10))
                tr.print_diff()
                print('100 kmers')
                kmers2 = list(islice(ra_generator, 100))
                tr.print_diff()
                print('1000 kmers')
                kmers3 = list(islice(ra_generator, 1000))
                tr.print_diff()
                print('kmer.kmer')
                for kmer in kmers3:
                    kmer.kmer
                tr.print_diff()
                print('kmer.coverage')
                for kmer in kmers3:
                    kmer.coverage
                tr.print_diff()
                print('kmer.edges')
                for kmer in kmers3:
                    kmer.edges
                tr.print_diff()
Example #6
def test_env_memory_cleanup(agent_id, seed, primative_scenarios):
    # Run once to initialize globals
    _, action, agent_type = (100, None, AgentType.Buddha)
    _env_memory_buildup(agent_id, seed, primative_scenarios, action,
                        agent_type)
    gc.collect()

    # Memory size check
    size = muppy.get_size(muppy.get_objects())
    gc.collect()
    _env_memory_buildup(agent_id, seed, primative_scenarios, action,
                        agent_type)
    end_size = muppy.get_size(muppy.get_objects())
    gc.collect()

    def success_condition():
        return end_size - size < EPISODE_MEMORY_GROWTH_LIMIT

    if not success_condition():
        # Get a diff for failure case
        tr = tracker.SummaryTracker()
        tr.print_diff()
        _env_memory_buildup(agent_id, seed, primative_scenarios, action,
                            agent_type)
        diff = tr.diff()
        summary.print_(diff)
        diff = None
        gc.collect()
        assert success_condition(), f"Size diff {end_size - size}"
Example #7
            def consumer(inQ, outQ):
                while True:
                    try:
                        # get a new message
                        val = inQ.get()
                        # this is the 'TERM' signal
                        if val is None:
                            break
                        # process the data
                        ret = f(val)

                        if args.debug:
                            from pympler import summary
                            from pympler import muppy
                            all_objects = muppy.get_objects()
                            sum1 = summary.summarize(all_objects)
                            print("summary:")
                            summary.print_(sum1)
                            from pympler import tracker
                            tr = tracker.SummaryTracker()
                            print("diff:")
                            tr.print_diff()

                        outQ.put(ret)
                    except Exception as e:
                        print("error!", e)
                        break
Example #8
    def __init__(self, root=None, *args):
        super(IsomerDebugger, self).__init__("DBG", *args)

        if not root:
            from isomer.logger import root

            self.root = root
        else:
            self.root = root

        if objgraph is None:
            self.log("Cannot use objgraph.", lvl=warn)

        try:
            self.fireEvent(cli_register_event("errors", cli_errors))
            self.fireEvent(cli_register_event("log_level", cli_log_level))
            self.fireEvent(cli_register_event("comp_graph", cli_comp_graph))
            self.fireEvent(cli_register_event("locations", cli_locations))
            self.fireEvent(cli_register_event("test_exception", cli_exception_test))
        except AttributeError:
            pass  # We're running in a test environment and root is not yet running

        try:
            self.tracker = tracker.SummaryTracker()
        except AttributeError:
            self.log("No pympler library for memory analysis installed.", lvl=warn)

        self.log("Started. Notification users: ", self.config.notificationusers)
Example #9
def test_smarts_episode_memory_cleanup(agent_id, seed, primative_scenarios,
                                       agent_params):
    MAX_EPISODE_STEPS = 100
    EPISODE_COUNT = 100
    STEPS_PER_YIELD = 10

    _, action, agent_type = agent_params

    env_and_agent_spec = env_and_spec(action, agent_type, MAX_EPISODE_STEPS,
                                      primative_scenarios, seed, agent_id)

    size = 0
    last_size = 0
    gc.collect()
    tr = tracker.SummaryTracker()
    try:
        for current_episode in _every_nth_episode(
                agent_id,
                EPISODE_COUNT,
                env_and_agent_spec,
                steps_per_yield=STEPS_PER_YIELD):
            gc.collect()
            all_objects = muppy.get_objects()
            size = muppy.get_size(all_objects)
            tr.print_diff(summary.summarize(all_objects))
            print(flush=True)
            all_objects = None
            if current_episode > STEPS_PER_YIELD:
                assert (size - last_size < EPISODE_MEMORY_GROWTH_LIMIT
                        ), f"End size delta {size - last_size}"
            last_size = size
    finally:
        env_and_agent_spec[0].close()
Example #10
def test_smarts_fast_reset_memory_cleanup(agent_id, seed,
                                          social_agent_scenarios):
    agent_type = AgentType.Buddha
    # Run once to initialize globals and test to see if smarts is working
    _memory_buildup(agent_id, seed, social_agent_scenarios, 1, None,
                    agent_type)

    tr = tracker.SummaryTracker()
    gc.collect()
    initial_size = muppy.get_size(muppy.get_objects())

    for _ in range(100):
        _memory_buildup(
            agent_id,
            seed,
            social_agent_scenarios,
            1,
            None,
            agent_type,
            max_episode_steps=2,
        )

    gc.collect()
    end_size = muppy.get_size(muppy.get_objects())
    gc.collect()
    tr.print_diff()

    # Check for a major leak
    assert (end_size - initial_size < SMARTS_MEMORY_GROWTH_LIMIT
            ), f"End size delta {end_size - initial_size}"
Example #11
    def on_epoch_end(self, epoch, log={}):
        x = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        web_browser_debug = True
        print(x)

        if x > 40000:
            if web_browser_debug:
                if epoch == 0:
                    start_in_background()
                    tr = tracker.SummaryTracker()
                    tr.print_diff()
            else:
                global memlist
                all_objects = muppy.get_objects(include_frames=True)
                # print(len(all_objects))
                sum1 = summary.summarize(all_objects)
                memlist.append(sum1)
                summary.print_(sum1)
                if len(memlist) > 1:
                    # compare with last - prints the difference per epoch
                    diff = summary.get_diff(memlist[-2], memlist[-1])
                    summary.print_(diff)
                my_types = muppy.filter(all_objects, Type=type)

                for t in my_types:
                    print(t)
Example #12
    def test_stracker_diff(self):
        """Test that the diff is computed correctly.

        This includes that
        - newly created objects are listed
        - removed objects are not listed anymore
        - if objects disappear, they should be listed as negatives
        """
        stracker = tracker.SummaryTracker()
        # for now, no object should be listed
        diff = stracker.diff()
        self.assertEqual(self._contains_indicator(diff), None)
        # now an indicator object should be included in the diff
        o = self._get_indicator()
        diff = stracker.diff()
        self.assertEqual(self._contains_indicator(diff), 1)
        # now it should be gone again, compared to the
        # previously stored summary
        o = self._get_indicator()
        sn1 = stracker.create_summary()
        o = None
        diff = stracker.diff(summary1=sn1)
        self.assertEqual(self._contains_indicator(diff), -1)
        # comparing two homemade summaries should work, too
        o = None
        sn1 = stracker.create_summary()
        o = self._get_indicator()
        sn2 = stracker.create_summary()
        diff = stracker.diff(summary1=sn1, summary2=sn2)
        self.assertEqual(self._contains_indicator(diff), 1)
        # providing summary2 without summary1 should raise an exception
        self.assertRaises(ValueError, stracker.diff, summary2=sn2)
Example #13
def main():
    tr = tracker.SummaryTracker()

    points = [get_point(i) for i in range(1000000)]
    lines = [get_line(i) for i in range(100000)]

    tr.print_diff()
Example #14
 def __init__(self):
     self.initTime = 0
     self.endTime = 0
     self.obj_tr = tracker.SummaryTracker()
     self.cls_tr = classtracker.ClassTracker()
     self.output_snap = len(
         [name for name in os.listdir('.') if os.path.isfile(name)])
Example #15
    def summary_tracker(self):
        """Get a summary tracker.

        Example:

            JSX> st = j.tools.memprof.summary_tracker()
            JSX> #....
            JSX> st.print_diff()
                                     types |   # objects |   total size
            ============================== | =========== | ============
                                      list |       19224 |      1.90 MB
                                       str |       22126 |      1.51 MB
                parso.python.tree.Operator |        5731 |    537.28 KB
                                       int |       13404 |    366.43 KB
              parso.python.tree.PythonNode |        5840 |    365.00 KB
                    parso.python.tree.Name |        4594 |    358.91 KB
                 parso.python.tree.Keyword |        1312 |    123.00 KB
                 parso.python.tree.Newline |        1508 |    117.81 KB
                  parso.python.tree.String |         428 |     33.44 KB
                   parso.python.tree.Param |         422 |     29.67 KB
                parso.python.tree.ExprStmt |         525 |     28.71 KB
                  parso.python.tree.IfStmt |         190 |     10.39 KB
                  parso.python.tree.Number |         132 |     10.31 KB
                parso.python.tree.Function |         144 |     10.12 KB
                       function (<lambda>) |          53 |      7.04 KB

        Returns:
            [SummaryTracker]: [summary tracker]

        """
        return tracker.SummaryTracker()
Example #16
def monitor_memory(cycle=1):
    from pympler import tracker, muppy, summary
    memory_tracker: tracker.SummaryTracker = tracker.SummaryTracker()
    while not terminate.is_set():
        # print(2)
        sleep(cycle)
        # print(3)
        memory_tracker.print_diff()
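
monitor_memory above assumes a module-level terminate event and sleep imported from time. A minimal sketch of how it could be wired up (everything except the pympler call is an assumption):

import threading
from time import sleep

terminate = threading.Event()

monitor = threading.Thread(target=monitor_memory, kwargs={"cycle": 5}, daemon=True)
monitor.start()
# ... run the workload under observation ...
terminate.set()   # stop the monitoring loop
monitor.join()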
Example #17
def check_memory_usage():

    mem = tracker.SummaryTracker()
    memory = pd.DataFrame(mem.create_summary(), columns=['object', 'number_of_objects', 'memory'])
    memory['mem_per_object'] = memory['memory'] / memory['number_of_objects']
    print(memory.sort_values('memory', ascending=False).head(10))
    print("============================================================")
    print(memory.sort_values('mem_per_object', ascending=False).head(10))
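
create_summary() returns one row per object type in the form [type_name, count, total_size], which is what makes the three DataFrame columns above line up. For a quick look without pandas, pympler's own printer can be used; a minimal sketch:

from pympler import summary, tracker

mem = tracker.SummaryTracker()
rows = mem.create_summary()       # [type_name, count, total_size] per object type
summary.print_(rows, limit=10)    # ten largest types by total size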
Example #18
def check_memory_growth(function, *args, **kwargs):  #2
    """Measure the memory usage of `function`.
    """
    measurer = tracker.SummaryTracker()  #3
    for _ in range(2):  #4
        measurer.diff()  #5
    function(*args, **kwargs)  #6
    return measurer.diff()  #7
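
Calling measurer.diff() twice before the measured call lets the tracker absorb its own bookkeeping objects, so the returned diff mostly reflects what function left behind. A hypothetical usage sketch (the leaky function and its cache are illustrations only):

_cache = []

def leaky(n):
    # deliberately keeps objects alive so the diff has something to report
    _cache.extend(str(i) for i in range(n))

for type_name, count, total_size in check_memory_growth(leaky, 100000):
    print(type_name, count, total_size)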
Example #19
 def __init__(self, logger=None):
     self.Logger = logger
     memoryFileStr = "memory.txt"
     if (os.path.exists(memoryFileStr)):
         os.remove(memoryFileStr)
     self.memoryFile = open(memoryFileStr, "w+")
     self.sumPrev = None
     self.tr = tracker.SummaryTracker()
Example #20
 def __init__(self):
     super(MemoryProfilerDaemonProcess, self).__init__()
     config = get_config()
     self.enabled = config.getboolean('daemon', 'memory-profiler', False)
     self.interval = config.getint('debug', 'memory_profiler_interval', 60)
     self.track = config.getint('debug', 'memory_profiler_track_changes', 0)
     self.verbose = config.getint('debug', 'memory_profiler_verbose', 0)
     self.summary_tracker = tracker.SummaryTracker()
Example #21
 def process_request(self, request):
     if not self.is_active:
         return
     if self.track_usage:
         self.tracker = tracker.SummaryTracker()
         # drop previous diff, we want to see what was
         # created during request processing
         self.tracker.diff()
Example #22
    def run(self):
        # A dict of protocol: {"application_instance":, "connected":, "disconnected":} dicts
        self.connections = {}
        # Make the factory
        self.http_factory = HTTPFactory(self)
        self.ws_factory = WebSocketFactory(self, server="Daphne")
        self.ws_factory.setProtocolOptions(
            autoPingTimeout=self.ping_timeout,
            allowNullOrigin=True,
            openHandshakeTimeout=self.websocket_handshake_timeout)
        if self.verbosity <= 1:
            # Redirect the Twisted log to nowhere
            globalLogBeginner.beginLoggingTo([lambda _: None],
                                             redirectStandardIO=False,
                                             discardBuffer=True)
        else:
            globalLogBeginner.beginLoggingTo([STDLibLogObserver(__name__)])

        # Detect what Twisted features are enabled
        if http.H2_ENABLED:
            logger.info("HTTP/2 support enabled")
        else:
            logger.info(
                "HTTP/2 support not enabled (install the http2 and tls Twisted extras)"
            )

        # Kick off the timeout loop
        reactor.callLater(1, self.application_checker)
        reactor.callLater(2, self.timeout_checker)
        reactor.callLater(10, self.monitoring)

        from pympler import tracker
        self.tr = tracker.SummaryTracker()

        for socket_description in self.endpoints:
            logger.info("Configuring endpoint %s", socket_description)
            ep = serverFromString(reactor, str(socket_description))
            listener = ep.listen(self.http_factory)
            listener.addCallback(self.listen_success)
            listener.addErrback(self.listen_error)
            self.listeners.append(listener)

        # Set the asyncio reactor's event loop as global
        # TODO: Should we instead pass the global one into the reactor?
        asyncio.set_event_loop(reactor._asyncioEventloop)

        # Verbosity 3 turns on asyncio debug to find those blocking yields
        if self.verbosity >= 3:
            asyncio.get_event_loop().set_debug(True)

        reactor.addSystemEventTrigger("before", "shutdown",
                                      self.kill_all_applications)
        if not self.abort_start:
            # Trigger the ready flag if we had one
            if self.ready_callable:
                self.ready_callable()
            # Run the reactor
            reactor.run(installSignalHandlers=self.signal_handlers)
Example #23
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    # memory tracker
    settings['memory_tracker'] = tracker.SummaryTracker()
    config = Configurator(settings=settings)
    config.include('.models')

    renderer = JSON()
    renderer.add_adapter(datetime.date, lambda obj, request: obj.isoformat())
    config.add_renderer('json', renderer)

    here = os.path.dirname(__file__)
    stats_filename = os.path.join(here, 'dist', 'entrypoints.json')
    if os.path.isfile(stats_filename):
        with open(stats_filename) as infile:
            stats = json.load(infile)
    else:
        stats = dict(
            entrypoints=dict(index=dict(assets=list()),
                             admin=dict(assets=list())))
    config.add_request_method(
        lambda r: stats,
        'webpack_entrypoints',
        reify=True
        )

    favicon_stats = os.path.join(here, 'dist', 'favicon-stats.json')
    if os.path.isfile(favicon_stats):
        with open(favicon_stats) as infile:
            iconstats = json.load(infile)
    else:
        iconstats = dict(html=list())
    config.add_request_method(
        lambda r: iconstats,
        'favicon_stats',
        reify=True
        )

    # FIXME make tests
    JWT_SECRET = os.environ.get('JWT_SECRET', 'secret')
    config.set_jwt_authentication_policy(JWT_SECRET,
                                         callback=groupfinder)

    authz_policy = ACLAuthorizationPolicy()
    config.set_authorization_policy(authz_policy)

    config.include('.routes')

    # config.set_request_property is removed in pyramid > 1.9
    # config.set_request_property('.util.get_user', 'user', reify=True)

    config.add_request_method('.util.get_user', 'user', reify=True)
    application = config.make_wsgi_app()
    # add wsgi middleware here

    return application
Example #24
 def __init__(self, get_response):
     """Construct and configure the middleware, one time."""
     if PYMPLER_ENABLED and not settings.DEBUG:
         self.memory_tracker = tracker.SummaryTracker()
         self.class_tracker = classtracker.ClassTracker()
         self.class_tracker.track_class(Template)
         self.object_count = len(muppy.get_objects())
         self.get_response = get_response
     else:
         raise MiddlewareNotUsed('PymplerMiddleware will not be used.')
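
Only the constructor is shown above; under Django's middleware protocol the instance is also called once per request. A hypothetical __call__ consistent with the attributes set in __init__ (the diff-printing body is an assumption, not the original project's code):

 def __call__(self, request):
     response = self.get_response(request)
     # noisy, so only useful for local debugging of per-request object growth
     self.memory_tracker.print_diff()
     return response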
Example #25
def build_faiss(tfidf_model, bert_model):
    tr = tracker.SummaryTracker()
    print(f"Building indices ...")
    c = collection.find().count()
    # c = 5000
    batch_size = 500
    encoder = None
    bert_index = None
    tfidf_index = None
    # if hasattr(model, 'encode'):
    #     encoder =  lambda x: model.encode(x).astype("float32")
    # else:
    #     encoder = lambda x:model.transform(x).toarray().astype("float32")
    i = 0
    ids = []
    while i < c:
        print(i)
        docs = []
        for text, ind in pull_and_preprocess_from_mongo(i, batch_size):
            # docs.append(x.get("title","") + " " + x.get('description',"")+ " " + " ".join(filter(None,x.get('content',{}).get('text',[]))))
            docs.append(text)
            ids.append(ind)
        print("Downloaded batch", i)
        tfidf_embeddings = tfidf_model.transform(docs).toarray().astype(
            "float32")
        print("Computed tfidf embeddings")
        bert_embeddings = bert_model.encode([doc[:100] for doc in docs
                                             ]).astype("float32")
        print("Computed bert embeddings")
        if i == 0:
            bert_index = faiss.IndexFlatIP(bert_embeddings.shape[1])
            tfidf_index = faiss.IndexFlatIP(tfidf_embeddings.shape[1])

        # print(bert_embeddings.shape[1])
        # print(tfidf_embeddings.shape[1])
        faiss.normalize_L2(bert_embeddings)
        faiss.normalize_L2(tfidf_embeddings)
        tr.print_diff()

        # Step 3: Pass the index to IndexIDMap
        # index = faiss.IndexIDMap(index)
        # Step 4: Add vectors and their IDs
        # print("range",len(np.arange(i,i+len(embeddings))))
        # print("embeds",len(embeddings))
        # idmap.add_with_ids(embeddings,np.arange(i,i+len(embeddings)))

        bert_index.add(bert_embeddings)
        tfidf_index.add(tfidf_embeddings)
        i += len(tfidf_embeddings)
    faiss.write_index(bert_index, f"models/bert.index")
    faiss.write_index(tfidf_index, f"models/tfidf.index")
    dump(ids, 'models/ids.joblib')
    print(f"Completed indices.")
    # upload_indices_and_vectors()
    return [tfidf_index, bert_index]
Example #26
    def __init__(self):
        super().__init__()
        self.initUI()
        self.tracker = tracker.SummaryTracker()

        self.client = qtIndiBase.Client(host='astrocomp.fritz.box')
        self.client.signals.serverConnected.connect(self.serverConnected)
        self.client.signals.newProperty.connect(self.connectDevice)
        self.client.signals.newNumber.connect(self.showStat)
        self.client.signals.defNumber.connect(self.showStat)
        self.startCommunication()
Example #27
def memory_usage_thread_func():
    tr = tracker.SummaryTracker()
    sleep(20)
    first_summary = tr.create_summary()
    while True:
        latest_summary = tr.create_summary()
        tr.print_diff(summary1=first_summary, summary2=latest_summary)
        self_use = resource.getrusage(
            resource.RUSAGE_SELF).ru_maxrss / (1000 * 1000)
        print('Memory usage: {} Mb'.format(self_use))
        sleep(10)
Example #28
    def test_(self):
        ip_store.load(db)
        tr = tracker.SummaryTracker()
        print(sys.getrefcount(None))

        for i in range(10):
            for probe in probe_list:
                ip_store.search(probe[0])

        tr.print_diff()
        print(sys.getrefcount(None))
Example #29
 def _measure_memory(*args, **kwargs):                       #6
     """This replaces the function that is to be measured.
     """
     measurer = tracker.SummaryTracker()                     #7
     for _ in range(2):                                      #8
         measurer.diff()                                     #9
     try:
         res = function(*args, **kwargs)                     #10
         return res
     finally:                                                #11
         memory[function.__name__] = (measurer.diff())
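
_measure_memory closes over function and a memory mapping from an enclosing decorator that the snippet omits. A minimal sketch of that wrapper and its use (the decorator name and the memory dict are assumptions based on how the inner function references them):

from pympler import tracker

memory = {}                                   # diff rows keyed by function name

def measure_memory(function):                 # hypothetical enclosing decorator
    def _measure_memory(*args, **kwargs):
        measurer = tracker.SummaryTracker()
        for _ in range(2):
            measurer.diff()                   # let the tracker absorb its own allocations
        try:
            return function(*args, **kwargs)
        finally:
            memory[function.__name__] = measurer.diff()
    return _measure_memory

@measure_memory
def build_rows(n):
    return [str(i) for i in range(n)]

build_rows(50000)
print(memory["build_rows"])                   # summary rows created by the call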
Example #30
    def __init__(self, capture_interval=10, *args, **kwargs):
        # type: (int, Any, Any) -> None
        """
        :param capture_interval: How often to capture memory usage snapshot.
        :type capture_interval: ``int``
        """
        super(PeriodicMemorySummaryCaptureThread,
              self).__init__(name="PeriodicMemorySummaryCaptureThread")

        self._capture_interval = capture_interval

        self._profiling_data = []  # type: List[Dict[str, Any]]
        self._tracker = tracker.SummaryTracker()
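
The constructor above only sets up the thread; the loop that fills _profiling_data is not part of the snippet. A hypothetical run method consistent with those attributes, assuming time is imported (the record layout is an assumption):

    def run(self):
        # type: () -> None
        # a real implementation would also check a stop flag instead of looping forever
        while True:
            time.sleep(self._capture_interval)
            self._profiling_data.append({
                "timestamp": time.time(),
                "diff": self._tracker.diff(),  # rows of [type_name, count, total_size]
            })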