Example #1
def log_mem_usage(signum, frame, fname=None):
    global _count
    _count += 1
    gc.collect()
    if not fname:
        fname = filename + '_memory_%02d.log' % _count
    with open(fname, 'wb') as f:
        f.write('gc.garbage: %d\n\n' % len(gc.garbage))
        objgraph.show_most_common_types(limit=50, file=f)
        f.write('\n\n')
        buf = StringIO()
        objgraph.show_growth(limit=50, file=buf)
        buf = buf.getvalue()
        f.write(buf)
    if _count < 2:
        return
    for tn, l in enumerate(buf.splitlines()[:10]):
        l = l.strip()
        if not l:
            continue
        type_ = l.split()[0]
        objects = objgraph.by_type(type_)
        objects = random.sample(objects, min(50, len(objects)))
        objgraph.show_chain(
            objgraph.find_backref_chain(objects[0], objgraph.is_proper_module),
            filename=fname[:-4] + '_type_%02d_backref.png' % tn)
        objgraph.show_backrefs(
            objects,
            max_depth=5,
            extra_info=lambda x: hex(id(x)),
            filename=fname[:-4] + '_type_%02d_backrefs.png' % tn,
        )
Example #2
 def debug_leaks(*args):
     import gc
     gc.collect()
     print 'collect()', len(gc.get_objects())
     import objgraph
     #print len(objgraph.by_type('PropertyStorage'))
     objgraph.show_growth()
Example #3
    def test_memory_usage(self):
        '''
        Links about debugging memory usage
        http://stackoverflow.com/questions/110259/which-python-memory-profiler-is-recommended?answertab=votes#tab-top
        
        http://mg.pov.lt/objgraph/
        '''
        return
        # setup the pins and activity chunk
        import objgraph
        x = []
        y = [x, [x], dict(x=x)]
        objgraph.show_refs([y], filename='sample-graph.png')

        admin_user_id = 1
        pins = list(Pin.objects.filter(user=admin_user_id)[:3])
        activities = [p.create_activity() for p in pins]
        # try a batch import
        for x in range(500):
            feedly.batch_import(admin_user_id, activities, 10)
            if x % 10 == 0:
                print 'growth', x
                print objgraph.show_growth(limit=5)
        print 'growth'
        print objgraph.show_growth(limit=5)
        objgraph.show_refs([Mutation], filename='sample-graph.png')
Example #4
File: gc.py  Project: yuanchenhuan/essay
def main2():
    objgraph.show_growth(
        limit=3
    )  # We take a snapshot of all the objects counts that are alive before we call our function
    for idx in range(10):
        computate_something(idx)
        objgraph.show_growth()
Example #5
File: job.py  Project: nfredrik/mrq
  def trace_memory_start(self):
    """ Starts measuring memory consumption """

    objgraph.show_growth(limit=10)

    gc.collect()
    self._memory_start = self.worker.get_memory()
Example #6
File: test.py  Project: sjmf/reportgen
def report_test():
    os.makedirs(TEST_PATH, exist_ok=True)
    print_memory('pre')

    for i, line in enumerate(DATAFILES):

        objgraph.show_growth(limit=3)

        input_files, location = line[0], line[1]
        report_name = TEST_PATH + '/test_' + str(i) + '.pdf'

        log.info(input_files)
        log.info(location)
        log.info(report_name)

        try:
            report.report(input_files, report_name, location=location)

            log.info("Report saved to {}".format(report_name))

        except Exception as e:
            log.exception("Exception occurred when reporting (test failed):")

        print_memory(i)

    plot_object_counts()
Example #7
def target(export_dir, config, pid):
    print("===== before =====")
    # memory_usage(pid)
    print("=====common types=====")
    objgraph.show_most_common_types()
    print("=====common types=====")
    print("=====growth=====")
    objgraph.show_growth()
    print("=====growth=====")

    print("===== before =====")
    estimator = MyEstimator(model_dir=config.model_dir, config=config)
    result_dir = estimator.export_savedmodel(export_dir, serving_input_fn)
    print("Result dir: ", result_dir)
    time.sleep(1)
    print("Show stats:")
    clean(estimator)
    print("===== after =====")
    # memory_usage(pid)
    print("=====common types=====")
    objgraph.show_most_common_types()
    print("=====common types=====")
    print("=====growth=====")
    objgraph.show_growth()
    print("=====growth=====")
    print("===== after =====")
Example #8
File: tests.py  Project: WoLpH/Feedly
    def test_memory_usage(self):
        '''
        Links about debugging memory usage
        http://stackoverflow.com/questions/110259/which-python-memory-profiler-is-recommended?answertab=votes#tab-top
        
        http://mg.pov.lt/objgraph/
        '''
        return
        # setup the pins and activity chunk
        import objgraph
        x = []
        y = [x, [x], dict(x=x)]
        objgraph.show_refs([y], filename='sample-graph.png')

        admin_user_id = 1
        pins = list(Pin.objects.filter(user=admin_user_id)[:3])
        activities = [p.create_activity() for p in pins]
        # try a batch import
        for x in range(500):
            feedly.batch_import(admin_user_id, activities, 10)
            if x % 10 == 0:
                print 'growth', x
                print objgraph.show_growth(limit=5)
        print 'growth'
        print objgraph.show_growth(limit=5)
        objgraph.show_refs([Mutation], filename='sample-graph.png')
Example #9
File: job.py  Project: frankrousseau/mrq
    def trace_memory_start(self):
        """ Starts measuring memory consumption """

        objgraph.show_growth(limit=10)

        gc.collect()
        self._memory_start = self.worker.get_memory()
Example #10
File: misc.py  Project: Dansiman/DueUtil
async def meminfo(ctx, **_):
    mem_info = StringIO()
    objgraph.show_most_common_types(file=mem_info)
    await util.say(ctx.channel, "```%s```" % mem_info.getvalue())
    mem_info = StringIO()
    objgraph.show_growth(file=mem_info)
    await util.say(ctx.channel, "```%s```" % mem_info.getvalue())
Example #11
File: job.py  Project: nfredrik/mrq
  def trace_memory_stop(self):
    """ Stops measuring memory consumption """

    objgraph.show_growth(limit=10)

    trace_type = get_current_config()["trace_memory_type"]
    if trace_type:

      objgraph.show_chain(
        objgraph.find_backref_chain(
          random.choice(objgraph.by_type(trace_type)),
          objgraph.is_proper_module
        ),
        filename='%s/%s-%s.png' % (get_current_config()["trace_memory_output_dir"], trace_type, self.id)
      )

    gc.collect()
    self._memory_stop = self.worker.get_memory()

    diff = self._memory_stop - self._memory_start

    log.debug("Memory diff for job %s : %s" % (self.id, diff))

    # We need to update it later than the results, we need them off memory already.
    self.collection.update({
      "_id": self.id
    }, {"$set": {
      "memory_diff": diff
    }}, w=1)
Example #12
 def sigusr1_handler(signal, frame):
     logger.info( "######SIGUSR1 Received######")
     # Unfortunately, the objgraph functions use 'print', so I haven't figured out
     # how to get them into the logger...
     objgraph.show_most_common_types()
     objgraph.show_growth( limit=3)
     logger.info( "###########################")
Example #13
def log_mem_usage(signum, frame, fname=None):
    global _count
    _count += 1
    gc.collect()
    if not fname:
        fname = filename + '_memory_%02d.log' % _count
    with open(fname, 'wb') as f:
        f.write('gc.garbage: %d\n\n' % len(gc.garbage))
        objgraph.show_most_common_types(limit=50, file=f)
        f.write('\n\n')
        buf = StringIO()
        objgraph.show_growth(limit=50, file=buf)
        buf = buf.getvalue()
        f.write(buf)
    if _count < 2:
        return
    for tn, l in enumerate(buf.splitlines()[:10]):
        l = l.strip()
        if not l:
            continue
        type_ = l.split()[0]
        objects = objgraph.by_type(type_)
        objects = random.sample(objects, min(50, len(objects)))
        objgraph.show_chain(
            objgraph.find_backref_chain(
                objects[0],
                objgraph.is_proper_module),
            filename=fname[:-4] + '_type_%02d_backref.png' % tn
        )
        objgraph.show_backrefs(
            objects,
            max_depth=5,
            extra_info=lambda x: hex(id(x)),
            filename=fname[:-4] + '_type_%02d_backrefs.png' % tn,
        )
Example #14
 def dump_objgraph(self):
     import gc
     gc.collect()
     import objgraph
     print "Dumping object growth ****"
     objgraph.show_growth(limit=100)
     print "****"
Example #15
File: main.py  Project: damui/powerpool
 def dump_objgraph(self):
     import gc
     gc.collect()
     import objgraph
     print "Dumping object growth ****"
     objgraph.show_growth(limit=100)
     print "****"
Example #16
    def fileNew(self):
        try:
            print(
                u'openSystemConfig内存使用:',
                round(
                    psutil.Process(getpid()).memory_info().rss / 1024 / 1024,
                    4))
            #print(u'memory_info:', psutil.Process(getpid()).memory_info())
            #print(u'memory_info:', psutil.Process(getpid()).memory_full_info())
            objgraph.show_growth(shortnames=False)
            #templist = objgraph.by_type('QIcon')
            #print('objgraph.by_type:', templist)
            #if len(templist) >= 1:
            #  objgraph.show_backrefs(templist[-1], filename='obj.png')

            # textEdit = textedit.TextEdit()
            #self.systemConfigWindow = Ui_QSystemConfigWindow()
            #a = sys.getrefcount(self.systemConfigWindow)
            #self.mdi.addSubWindow(self.systemConfigWindow).show()
            #self.mdi.closeActiveSubWindow()

            #self.systemConfigWindow.systemConfigInfoSignal.connect(self.setSystemConfigInfo)
            #self.systemConfigWindow.pushbuttonOKOnActivated()
            self.taskConfigWindow = Ui_QTaskConfigWindow()
            self.mdi.addSubWindow(self.taskConfigWindow).show()
            #self.mdi.closeActiveSubWindow()
            #self.systemConfigWindow = None
            #self.taskConfigWindow = None
            #templist = None
            '''
            sleep(5)


            self.taskConfigWindow = Ui_QTaskConfigWindow()
            self.mdi.addSubWindow(self.taskConfigWindow).show()
            self.taskConfigWindow.systemConfigInfo = self.systemConfigInfo
            # textEdit.close()
            sleep(5)
            for win_temp in self.mdi.subWindowList():
                win_temp_widget = win_temp.widget()
                if (win_temp_widget is not None) and (isinstance(win_temp_widget, Ui_QTaskConfigWindow)):
                    self.mdi.setActiveSubWindow(win_temp)
                    self.mdi.closeActiveSubWindow()
                    
                elif (win_temp_widget is not None) and (isinstance(win_temp_widget, Ui_QSystemConfigWindow)):
                    self.mdi.setActiveSubWindow(win_temp)
                    #self.mdi.removeSubWindow(self.systemConfigWindow)
                    self.mdi.closeActiveSubWindow()
            #self.mdi.removeSubWindow(self.systemConfigWindow)
            #b = sys.getrefcount(self.systemConfigWindow)
            win_temp = None
            win_temp_widget = None
            self.systemConfigInfo = None
            self.systemConfigWindow = None
            self.taskConfigWindow = None
'''

        except Exception as e:
            print(print_exc())
Example #17
    def onShowGrowth(self):
        import gc
        import objgraph
        gc.collect()
        logging.getLogger().info("Added objects since last run")
        objgraph.show_growth(limit=100)

        
Example #18
def gc_debug_diff(since, peak_stats={}, hide_growth=False):
    sio = StringIO.StringIO()
    objgraph.show_growth(limit=0, shortnames=False, peak_stats=peak_stats, file=sio)
    result = sio.getvalue()
    if not hide_growth:
        logging.info("Show Growth since running %s", since)
        for x in result.strip().split('\n')[:200]:
            logging.info(":: %s" % x)
    return result
Example #19
 def print_leaks(self, prefix=''):
     if not USE_MELIAE:
         return
     objgraph.show_growth()
     tmp = tempfile.mkstemp(prefix='pcp-test')[1]
     scanner.dump_all_objects(tmp)
     leakreporter = loader.load(tmp)
     summary = leakreporter.summarize()
     print('{0}: {1}'.format(prefix, summary))
Example #20
 def show_leaks():
     gc.collect()
     gc.collect()
     sio = SIO()
     objgraph.show_growth(file=sio)
     if sio.getvalue():
         print("    Memory Growth")
         for line in sio.getvalue().split('\n'):
             print("    ", line)
Example #21
 def print_leaks(self, prefix=''):
     if not USE_MELIAE:
         return
     objgraph.show_growth()
     tmp = tempfile.mkstemp(prefix='pcp-test')[1]
     scanner.dump_all_objects(tmp)
     leakreporter = loader.load(tmp)
     summary = leakreporter.summarize()
     print('{0}: {1}'.format(prefix, summary))
Example #22
    def trace_memory_start(self):
        """ Starts measuring memory consumption """

        self.trace_memory_clean_caches()

        objgraph.show_growth(limit=30)

        gc.collect()
        self._memory_start = self.worker.get_memory()["total"]
Example #23
 def dump_objgraph(self):
     """ This is a debugging method designed to be called from the datagram
     port. It helps us debug a memory 'leak' """
     import gc
     gc.collect()
     import objgraph
     print "Dumping object growth ****"
     objgraph.show_growth(limit=100)
     print "****"
Example #24
    def trace_memory_start(self):
        """ Starts measuring memory consumption """

        self.trace_memory_clean_caches()

        objgraph.show_growth(limit=30)

        gc.collect()
        self._memory_start = self.worker.get_memory()["total"]
Example #25
    async def _print_memory_usage(self):
        import objgraph
        import gc

        while True:
            print("Stream backlog : {}".format(self.stream.qsize()))
            gc.collect()
            objgraph.show_growth()
            await asyncio.sleep(60)
Example #26
def show_growth(*args, **kwargs):
    """
    Wrapper around objgraph show_growth that just returns the stdout.
    """
    output = None
    with capture() as out:
        objgraph.show_growth(*args, **kwargs)
        output = out
    return output[0]
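
The capture() context manager used above is defined elsewhere in that project; a minimal sketch of one way such a stdout-capturing helper could be written (assuming it yields a list whose first element ends up holding the captured stdout text):

import sys
from contextlib import contextmanager
from io import StringIO

@contextmanager
def capture():
    # Hypothetical helper: temporarily replace sys.stdout/sys.stderr with
    # StringIO buffers, then store the captured text back into the yielded
    # list on exit, so out[0] is stdout and out[1] is stderr.
    new_out, new_err = StringIO(), StringIO()
    old_out, old_err = sys.stdout, sys.stderr
    out = [new_out, new_err]
    try:
        sys.stdout, sys.stderr = new_out, new_err
        yield out
    finally:
        sys.stdout, sys.stderr = old_out, old_err
        out[0] = out[0].getvalue()
        out[1] = out[1].getvalue()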
Example #27
 def extract(self, x):
     features = self.sess2.run(self.features, feed_dict={self.x1: x})
     objgraph.show_growth()
     #self.sess2._default_graph_context_manager()
     #self.sess2._default_session_context_manager()
     #self.sess2.reset(self.sess2.graph)
     #self.sess2.reset(self.x)
     #self.sess2.as_default()
     return features
Example #28
 def dump_objgraph(self):
     """ This is a debugging method designed to be called from the datagram
     port. It helps us debug a memory 'leak' """
     import gc
     gc.collect()
     import objgraph
     print "Dumping object growth ****"
     objgraph.show_growth(limit=100)
     print "****"
Example #29
def main():
    orignal_str = '  41 414- 34(#$&*@#&$  f)'
    objgraph.show_growth()
    for _ in range(10001):
        slowest_replace(orignal_str=orignal_str)
        slow_replace(orignal_str=orignal_str)
        fast_replace(orignal_str=orignal_str)
        fastest_replace(orignal_str=orignal_str)

    objgraph.show_growth()
Example #30
    def deco(self, *args, **kwargs):
        if self.debug_memory:
            objgraph.show_growth(limit=1)

        try:
            return f(self, *args, **kwargs)
        finally:
            if self.debug_memory:
                log.info('New objects:')
                objgraph.show_growth(file=LoggerFile('moya.srv'))
Example #31
 async def growth(self, limit=None):
     buf = io.StringIO()
     objgraph.show_growth(shortnames=False, limit=limit, file=buf)
     result = []
     for line in buf.getvalue().splitlines():
         name, count, change = line.rsplit(maxsplit=2)
         count = int(count)
         change = int(change.strip('+'))
         result.append((name, count, change))
     return result
Example #32
def main():
    orignal_str = '  41 414- 34(#$&*@#&$  f)'
    objgraph.show_growth()
    for _ in range(10001):
        slowest_replace(orignal_str=orignal_str)
        slow_replace(orignal_str=orignal_str)
        fast_replace(orignal_str=orignal_str)
        fastest_replace(orignal_str=orignal_str)

    objgraph.show_growth()
Example #33
 def on_key_press(self, symbol, modifiers):
     keycode = self.convert_keycode(symbol)
     modifiers = self.convert_keycode(modifiers)
     print "key press: %s" % keycode
     self.controller.process_key_press(keycode, modifiers)
     self.flip()
     if USE_OBJGRAPH:
         objgraph.show_growth()
     if USE_HEAPY:
         print hp.heap()
Example #34
    def deco(self, *args, **kwargs):
        if self.debug_memory:
            objgraph.show_growth(limit=1)

        try:
            return f(self, *args, **kwargs)
        finally:
            if self.debug_memory:
                runtime_log.info("New objects:")
                objgraph.show_growth(file=LoggerFile("moya.runtime"))
Example #35
    def test_sar(self):
        """Parses all the sar files and creates the pdf outputs"""
        for example in self.sar_files:
            print("Parsing: {0}".format(example))
            grapher = SarGrapher([example])
            stats = SarStats(grapher)
            usage = resource.getrusage(resource.RUSAGE_SELF)
            if USE_MELIAE:
                objgraph.show_growth()
                tmp = tempfile.mkstemp(prefix='sar-test')[1]
                scanner.dump_all_objects(tmp)
                leakreporter = loader.load(tmp)
                summary = leakreporter.summarize()

            print(
                "SAR parsing: {0} usertime={1} systime={2} mem={3} MB".format(
                    end_of_path(example), usage[0], usage[1],
                    (usage[2] / 1024.0)))

            if USE_PROFILER:
                self.profile.disable()
                str_io = StringIO.StringIO()
                sortby = 'cumulative'
                pstat = pstats.Stats(self.profile,
                                     stream=str_io).sort_stats(sortby)
                pstat.print_stats(TOP_PROFILED_FUNCTIONS)
                print("\nProfiling of sar.parse()")
                print(str_io.getvalue())

                # Set up profiling for pdf generation
                self.profile.enable()

            out = "{0}.pdf".format(example)
            stats.graph(example, [], out)
            if USE_PROFILER:
                self.profile.disable()
                str_io = StringIO.StringIO()
                sortby = 'cumulative'
                pstat = pstats.Stats(self.profile,
                                     stream=str_io).sort_stats(sortby)
                pstat.print_stats(TOP_PROFILED_FUNCTIONS)
                print("\nProfiling of sarstats.graph()")
                print(str_io.getvalue())

            print("Wrote: {0}".format(out))
            os.remove(out)
            grapher.close()
            del grapher
            del stats
            usage = resource.getrusage(resource.RUSAGE_SELF)
            print(
                "SAR graphing: {0} usertime={1} systime={2} mem={3} MB".format(
                    end_of_path(example), usage[0], usage[1],
                    (usage[2] / 1024.0)))
Example #36
File: debug.py  Project: oaubert/advene
def debug_slow_update_hook(controller):
    """Debug method.

    This method will be regularly called (from the
    slow_update_display) when -d (debug) option is given.
    """
    log_global_memory_usage()
    log_imagecache_size(controller)
    if objgraph is not None:
        debug_log("------------ Object growth ---------------")
        objgraph.show_growth(shortnames=False, limit=30, file=DEBUGFILE)
Example #37
def mongodb(request):
    stream = StringIO()
    docs = memory_profiler.profile(_get_documents, stream=stream)(request)
    extra = '%s\nEXTRA:\n%s' % (stream.getvalue(), '')
    resp = render(request, "mongodb.html", {"documents": docs, "extra": extra})
    del docs
    print('objgraph.show_growth(limit=100):')
    objgraph.show_growth(limit=100)
    # print('objgraph.show_backrefs(get_objs()):')
    # objgraph.show_backrefs(get_objs())
    return resp
Example #38
File: mem.py  Project: Alligator/homero
def profile():
  global since
  output = '{:-^79}'.format(' memory usage ')
  output += mem('')
  objgraph.show_growth()
  output += '{:-^79}'.format(' last called ')
  for name, c in since.iteritems():
    output += '{}: {}'.format(c, name)
  output += '{:-^79}'.format('')
  since = defaultdict(int)
  return output
Example #39
def dump_memory_usage():
    """
    This is a function that prints the memory usage of w3af in real time.
    :author: Andres Riancho ([email protected])
    """
    if not DEBUG_MEMORY:
        return
    else:
        if DEBUG_REFERENCES:
            print 'Object References:'
            print '=================='
            interesting = ['tuple', 'dict', 'list']
            for interesting_klass in interesting:
                interesting_instances = objgraph.by_type(interesting_klass)

                sample = random.sample(interesting_instances, min(
                    SAMPLE_LEN, len(interesting_instances)))

                for s in sample:
                    fmt = 'memory-refs/%s-backref-graph-%s.png'
                    fname = fmt % (interesting_klass, id(s))

                    ignores = [id(interesting_instances), id(s), id(sample)]
                    ignores.extend([id(v) for v in locals().values()])
                    ignores.extend([id(v) for v in globals().values()])
                    ignores.append(id(locals()))
                    ignores.append(id(globals()))
                    try:
                        objgraph.show_backrefs(s, highlight=inspect.isclass,
                                               extra_ignore=ignores, filename=fname,
                                               extra_info=_extra_info)
                    except:
                        pass

            print

        print 'Most common:'
        print '============'
        objgraph.show_most_common_types()

        print

        print 'Memory delta:'
        print '============='
        objgraph.show_growth(limit=25)

        sorted_cmds, shareds, _, _ = get_memory_usage(None, True, True, True)
        cmd = sorted_cmds[0]
        msg = "%8sB Private + %8sB Shared = %8sB" % (human(cmd[1] - shareds[cmd[0]]),
                                                     human(shareds[cmd[0]
                                                                   ]), human(cmd[1])
                                                     )
        print 'Total memory usage:', msg
Example #40
File: mem.py  Project: Mayheim/homero
def profile(inp, say=None, bot=None):
  global since
  print
  print '{:-^79}'.format(' memory usage ')
  print mem('')
  objgraph.show_growth()
  print
  print '{:-^79}'.format(' last called ')
  for name, c in since.iteritems():
    print '{}: {}'.format(c, name)
  print '{:-^79}'.format('')
  since = defaultdict(int)
Example #41
def gc_debug_diff(since, peak_stats={}, hide_growth=False):
    sio = StringIO.StringIO()
    objgraph.show_growth(limit=0,
                         shortnames=False,
                         peak_stats=peak_stats,
                         file=sio)
    result = sio.getvalue()
    if not hide_growth:
        logging.info("Show Growth since running %s", since)
        for x in result.strip().split('\n')[:200]:
            logging.info(":: %s" % x)
    return result
Example #42
 def on_selection_change(self, *args):
     #pass
     if self.selection:
         #from listscreen import ListScreenItem
         objgraph.show_growth()
         print '...'
         roots = objgraph.get_leaking_objects()
         objgraph.show_most_common_types(objects=roots)
         print '...'
         objgraph.show_refs(roots[:3], refcounts=True, filename='sad.png')
         #objgraph.show_chain(objgraph.find_backref_chain(self.selection[0].__self__, objgraph.is_proper_module),filename='chain.png')
         #objgraph.show_backrefs(self.selection[0].__self__, filename='sample-backref-graph.png')
         print '...'
Example #43
File: dev.py  Project: jrosindell/OneZoom
def PAUSE_WEB2PY_FOR_PDB():
    #dangerous. Should only run when on localhost - as an additional check we shouldn't create a view
    if request.is_local:
        try:
            import pdb
            import objgraph
            objgraph.show_growth(limit=None, shortnames=False)
            pdb.set_trace()
            return({})
        except Exception as e:
            return {'errors':["{}".format(e), "perhaps you haven't installed pdb or objgraph"]}
    else:
        return {'errors':'not a local request'}
Example #44
def search():
    
    print("---objects at start---")
    objgraph.show_growth(limit=3)
    
    rows = arcpy.SearchCursor("C:/temp/address_point.shp", "", "", "STREET_NAM", "STREET_NAM A")
    
    objgraph.show_refs([rows], filename='objgraph.png')
    
    objgraph.show_backrefs([rows], filename='objgraph-backrefs.png')
    
    current_street = ""

    print("---objects before---")
    objgraph.show_growth()
    
    for row in rows:
        if current_street != row.STREET_NAM:
            current_street = row.STREET_NAM        
            print(current_street)
            

    print("--objects after loop---")
    objgraph.show_growth()
    
    del rows
    
    print("---objects after cursor delete---")
    objgraph.show_growth()
Example #45
    def memory_profile(self):
        global LAST_HEAP
        print "{:=^78}".format(' profile begin')
        import gc
        gc.collect()
        print "{:-^78}".format(' growth')
        print objgraph.show_growth(limit=5)
        print "{:-^78}".format(' common types')
        print objgraph.show_most_common_types(limit=5)
        if LAST_HEAP:
            leftover = HEAP.heap() - LAST_HEAP
            print leftover.byrcs[0].byid
        LAST_HEAP = HEAP.heap()

        print "{:=^78}".format(' profile end')
Example #46
    def on_key_press(self, symbol, modifiers):
        if symbol == pyglet.window.key.M:
            import objgraph

            objgraph.show_growth()
            m = objgraph.by_type("Molecule")
            print("nr of Molecule:", len(m))
            # for n in m:
            #    objgraph.show_backrefs(n)
        elif symbol == pyglet.window.key.ESCAPE:
            self.window.close()
        elif symbol == pyglet.window.key.S:
            Gui.create_popup(self.window, self.batch, "Skipping level, Cheater!", on_escape=self.window.switch_level)
        elif symbol == pyglet.window.key.R:
            self.window.reset_level()
        elif symbol == pyglet.window.key.D:
            self.window.DEBUG_GRAPHICS = not self.window.DEBUG_GRAPHICS
Example #47
 def __call__(self):
     if self._debug_memory_use:
         print("DEPARTURE: before get")
         import objgraph
         objgraph.show_growth()
     try:
         try:
             data = self.get_departure_data()
         except (socket.timeout, urllib.error.URLError, urllib.error.HTTPError) as err:
             warnings.warn(str(err))
             return None
         data.sort(key=lambda x: x[2])
         return data
     finally:
         if self._debug_memory_use:
             print("DEPARTURE: after get")
             import objgraph
             objgraph.show_growth()
Example #48
File: test.py  Project: sjmf/reportgen
def print_memory(i):
    global object_counts

    print("\n\n--------------------- MEMORY -------------------------\n")

    print("TOTAL OBJECTS\n")
    o = len(gc.get_objects())
    print(o)
    object_counts.append(o)
    del o
    print("\n")

    print("GROWTH\n")
    objgraph.show_growth()
    print("\n")

    print("COMMON TYPES\n")
    objgraph.show_most_common_types()
    print("\n")

    print("LEAKING OBJECTS\n")
    roots = objgraph.get_leaking_objects()
    print("\n")

    log.info("ROOTS pre-collect : {}\n".format(len(roots)))

    print("COMMON TYPES IN ROOTS\n")
    objgraph.show_most_common_types(objects=roots)
    print("\n")

    objgraph.show_refs(roots[:3], refcounts=True, filename=TEST_PATH + '/roots_' + str(i) + '.png')
    print("\n")

    log.info("Garbage pre collect:  " + str(len(gc.garbage)))
    gc.collect()
    log.info("Garbage post collect: " + str(len(gc.garbage)))
    print("\n")

    roots = objgraph.get_leaking_objects()
    log.info("ROOTS post-collect : {}".format(len(roots)))

    print("\n\n---------------------------------------------------\n")
Example #49
 def __init__(self, config, factory, fullscreen=True, width=800, height=600, margins=None,
              thumbnails=None):
     if fullscreen:
         pyglet.window.Window.__init__(self, fullscreen=fullscreen)
     else:
         pyglet.window.Window.__init__(self, width, height)
     MainWindow.__init__(self, config, factory, fullscreen, width, height, margins,
                         thumbnails)
     
     self.create_keycode_maps()
     
     if USE_OBJGRAPH:
         objgraph.show_growth()
     if USE_HEAPY:
         print hp.heap()
     
     glEnable(GL_BLEND)
     glShadeModel(GL_SMOOTH)
     glBlendFunc(GL_SRC_ALPHA, GL_ONE)
     glDisable(GL_DEPTH_TEST)
Example #50
    def debugrequest(self, event):
        try:
            self.log("Event: ", event.__dict__, lvl=critical)

            if event.action == "storejson":
                self.log("Storing received object to /tmp", lvl=critical)
                fp = open('/tmp/hfosdebugger_' + str(
                    event.user.useruuid) + "_" + str(uuid4()), "w")
                json.dump(event.data, fp, indent=True)
                fp.close()
            if event.action == "memdebug":
                self.log("Memory hogs:", lvl=critical)
                objgraph.show_most_common_types(limit=20)
            if event.action == "growth":
                self.log("Memory growth since last call:", lvl=critical)
                objgraph.show_growth()
            if event.action == "graph":
                objgraph.show_backrefs([self.root], max_depth=42,
                                       filename='backref-graph.png')
                self.log("Backref graph written.", lvl=critical)
            if event.action == "exception":
                class TestException(BaseException):
                    pass

                raise TestException
            if event.action == "heap":
                self.log("Heap log:", self.heapy.heap(), lvl=critical)
            if event.action == "buildfrontend":
                self.log("Sending frontend build command")

                self.fireEvent(frontendbuildrequest(force=True), "setup")
            if event.action == "logtail":
                self.fireEvent(logtailrequest(event.user, None, None,
                                              event.client), "logger")


        except Exception as e:
            self.log("Exception during debug handling:", e, type(e),
                     lvl=critical)
Example #51
 def dispatch(self, msg):
     mtype = msg["type"]
     if self.ignoreSelf and msg["from"] == self.ourJid:
         return
     if self.debug_memory_use:
         print("MEMDEBUG: before dispatch")
         import objgraph
         objgraph.show_growth()
     try:
         for handler in self.handlers:
             abort = handler(msg, errorSink=self.errorSink)
             if abort:
                 break
     except Exception as err:
         if self.errorSink is not None:
             self.errorSink.submit(self.xmpp, err, msg)
         else:
             raise
     finally:
         if self.debug_memory_use:
             print("MEMDEBUG: after dispatch")
             import objgraph
             objgraph.show_growth()
Example #52
def show_growth():
    if objgraph is None:
        return "objgraph not installed"
    objgraph.show_growth(limit=10)
    return "Printed to console"