def log_mem_usage(signum, frame, fname=None):
    global _count
    _count += 1
    gc.collect()
    if not fname:
        fname = filename + '_memory_%02d.log' % _count
    with open(fname, 'wb') as f:
        f.write('gc.garbage: %d\n\n' % len(gc.garbage))
        objgraph.show_most_common_types(limit=50, file=f)
        f.write('\n\n')
        buf = StringIO()
        objgraph.show_growth(limit=50, file=buf)
        buf = buf.getvalue()
        f.write(buf)
    if _count < 2:
        return
    for tn, l in enumerate(buf.splitlines()[:10]):
        l = l.strip()
        if not l:
            continue
        type_ = l.split()[0]
        objects = objgraph.by_type(type_)
        objects = random.sample(objects, min(50, len(objects)))
        objgraph.show_chain(
            objgraph.find_backref_chain(
                objects[0],
                objgraph.is_proper_module),
            filename=fname[:-4] + '_type_%02d_backref.png' % tn
        )
        objgraph.show_backrefs(
            objects,
            max_depth=5,
            extra_info=lambda x: hex(id(x)),
            filename=fname[:-4] + '_type_%02d_backrefs.png' % tn,
        )
Example #2
def cycle_ref():
    x = []
    y = [x, [x], dict(x=x)]

    objgraph.show_refs([y], filename='sample-graph.png')
    objgraph.show_backrefs([x], filename='sample-backref-graph.png')
    objgraph.show_most_common_types()
Example #3
 def sigusr1_handler(signal, frame):
     logger.info( "######SIGUSR1 Received######")
     # Unfortunately, the objgraph functions use 'print', so I haven't figured out
     # how to get them into the logger...
     objgraph.show_most_common_types()
     objgraph.show_growth( limit=3)
     logger.info( "###########################")
Example #4
 def run(self, key, value=None):
     if value is not None:
         obj = list(filter(lambda o: str(o) == value, get_objects()))[0]
     if key == "graph":
         from objgraph import show_refs
         if value is None:
             p = self.console.parent
             show_refs(self.console if p is None else p,
                       refcounts=True,
                       max_depth=3)
         else:
             show_refs(obj, refcounts=True, max_depth=3)
     elif key == "growth":
         from objgraph import get_leaking_objects, show_most_common_types
         show_most_common_types(objects=get_leaking_objects())
     elif key == "info":
         from psutil import Process
         p = Process(os.getpid())
         print_formatted_text(p.memory_info())
     elif key == "leaking":
         from objgraph import get_leaking_objects
         print_formatted_text(get_leaking_objects())
     elif key == "objects":
         data = [["Object", "#References"]]
         for o in get_objects():
             if isinstance(o, (Console, Module)):
                 data.append([str(o), str(getrefcount(o))])
         t = BorderlessTable(data, "Consoles/Modules")
         print_formatted_text(t.table)
     elif key == "refs":
         if value is not None:
             print_formatted_text(getrefcount(obj), ":")
             pprint(get_referrers(obj))
Example #5
    def print_top_types(self, limit: int) -> None:
        """Print top x object in memory

        Arguments:
            limit {int} -- max number of results
        """
        show_most_common_types(limit=int(limit))
Example #6
def log_mem_usage(signum, frame, fname=None):
    global _count
    _count += 1
    gc.collect()
    if not fname:
        fname = filename + '_memory_%02d.log' % _count
    with open(fname, 'wb') as f:
        f.write('gc.garbage: %d\n\n' % len(gc.garbage))
        objgraph.show_most_common_types(limit=50, file=f)
        f.write('\n\n')
        buf = StringIO()
        objgraph.show_growth(limit=50, file=buf)
        buf = buf.getvalue()
        f.write(buf)
    if _count < 2:
        return
    for tn, l in enumerate(buf.splitlines()[:10]):
        l = l.strip()
        if not l:
            continue
        type_ = l.split()[0]
        objects = objgraph.by_type(type_)
        objects = random.sample(objects, min(50, len(objects)))
        objgraph.show_chain(
            objgraph.find_backref_chain(objects[0], objgraph.is_proper_module),
            filename=fname[:-4] + '_type_%02d_backref.png' % tn)
        objgraph.show_backrefs(
            objects,
            max_depth=5,
            extra_info=lambda x: hex(id(x)),
            filename=fname[:-4] + '_type_%02d_backrefs.png' % tn,
        )
Example #7
def target(export_dir, config, pid):
    print("===== before =====")
    # memory_usage(pid)
    print("=====common types=====")
    objgraph.show_most_common_types()
    print("=====common types=====")
    print("=====growth=====")
    objgraph.show_growth()
    print("=====growth=====")

    print("===== before =====")
    estimator = MyEstimator(model_dir=config.model_dir, config=config)
    result_dir = estimator.export_savedmodel(export_dir, serving_input_fn)
    print("Result dir: ", result_dir)
    time.sleep(1)
    print("Show stats:")
    clean(estimator)
    print("===== after =====")
    # memory_usage(pid)
    print("=====common types=====")
    objgraph.show_most_common_types()
    print("=====common types=====")
    print("=====growth=====")
    objgraph.show_growth()
    print("=====growth=====")
    print("===== after =====")
Example #8
    def _run_worker(self):
        '''Run a single consumer.

        Coroutine.
        '''
        _logger.debug('Worker start.')

        while True:
            priority, item = yield From(self._item_queue.get())

            if item == self.POISON_PILL:
                _logger.debug('Worker quitting.')
                return

            else:
                _logger.debug(__('Processing item {0}.', item))
                self._item_get_semaphore.release()
                self._token_queue.get_nowait()
                yield From(self._process_item(item))
                self._token_queue.task_done()

                if os.environ.get('OBJGRAPH_DEBUG'):
                    import gc
                    import objgraph
                    gc.collect()
                    objgraph.show_most_common_types(25)
                if os.environ.get('FILE_LEAK_DEBUG'):
                    import subprocess
                    output = subprocess.check_output(
                        ['lsof', '-p', str(os.getpid()), '-n'])
                    for line in output.decode('ascii', 'replace').split('\n'):
                        if 'REG' in line and \
                                (os.getcwd() in line or '/tmp/' in line):
                            print('FILELEAK', line)
Example #9
    def _run_worker(self):
        '''Run a single consumer.

        Coroutine.
        '''
        _logger.debug('Worker start.')

        while True:
            priority, item = yield From(self._item_queue.get())

            if item == self.POISON_PILL:
                _logger.debug('Worker quitting.')
                return

            else:
                _logger.debug(__('Processing item {0}.', item))
                self._item_get_semaphore.release()
                self._token_queue.get_nowait()
                yield From(self._process_item(item))
                self._token_queue.task_done()

                if os.environ.get('OBJGRAPH_DEBUG'):
                    import gc
                    import objgraph
                    gc.collect()
                    objgraph.show_most_common_types(25)
                if os.environ.get('FILE_LEAK_DEBUG'):
                    import subprocess
                    output = subprocess.check_output(
                        ['lsof', '-p', str(os.getpid()), '-n'])
                    for line in output.decode('ascii', 'replace').split('\n'):
                        if 'REG' in line and \
                                (os.getcwd() in line or '/tmp/' in line):
                            print('FILELEAK', line)
Example #10
def print_summary(f):
    '''
    Log a summary of current memory usage. This is very expensive
    when there is a lot of memory used.
    '''
    if os.path.isfile(f):
        print_objects(f)

    if not config.read('Crawl', 'DebugMemory'):
        return

    mem = {}
    for d in debugs:
        mem.update(d())

    LOGGER.info('Memory summary:')

    for k in sorted(mem.keys()):
        v = mem[k]
        LOGGER.info('  %s len %d bytes %s', k, v['len'], _in_millions(v['bytes']))

    LOGGER.info('Top objects:')

    gc.collect()
    lines = io.StringIO()
    objgraph.show_most_common_types(limit=20, file=lines)
    lines.seek(0)
    for l in lines.read().splitlines():
        LOGGER.info('  %s', l)
Example #11
async def meminfo(ctx, **_):
    mem_info = StringIO()
    objgraph.show_most_common_types(file=mem_info)
    await util.say(ctx.channel, "```%s```" % mem_info.getvalue())
    mem_info = StringIO()
    objgraph.show_growth(file=mem_info)
    await util.say(ctx.channel, "```%s```" % mem_info.getvalue())
Example #12
def print_summary(f):
    '''
    Log a summary of current memory usage. This is very expensive
    when there is a lot of memory used.
    '''
    if os.path.isfile(f):
        print_objects(f)

    if not config.read('Crawl', 'DebugMemory'):
        return

    mem = {}
    for d in debugs:
        mem.update(d())

    LOGGER.info('Memory summary:')

    for k in sorted(mem.keys()):
        v = mem[k]
        LOGGER.info('  %s len %d bytes %s', k, v['len'], _in_millions(v['bytes']))

    LOGGER.info('Top objects:')

    gc.collect()
    lines = io.StringIO()
    objgraph.show_most_common_types(limit=20, file=lines)
    lines.seek(0)
    for l in lines.read().splitlines():
        LOGGER.info('  %s', l)
Example #13
    def test_lots_of_queries(self):
        import resource
        import objgraph

        class LoadTest(Model):
            k = columns.Integer(primary_key=True)
            v = columns.Integer()

        sync_table(LoadTest)
        gc.collect()
        objgraph.show_most_common_types()

        print("Starting...")

        for i in range(1000000):
            if i % 25000 == 0:
                # print memory statistic
                print("Memory usage: %s" %
                      (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss))

            LoadTest.create(k=i, v=i)

        objgraph.show_most_common_types()

        raise Exception("you shouldn't be here")
Example #14
def main():
    d = {}
    l = f()
    d['k'] = l

    print("list l has {0} objects of type Foo()".format(len(l)))

    objgraph.show_most_common_types()
Example #15
def show_leaked_objects():
    """ not very useful """
    func_that_will_leak()
    gc.collect()
    roots = objgraph.get_leaking_objects()
    print('roots len = %s' % len(roots))
    objgraph.show_most_common_types(objects=roots)
    objgraph.show_refs(roots[:3], refcounts=True, filename='roots.png')
Example #16
def mem():
    collections = cache.get("mtg_coll")
    cards_name = []
    for collection in collections:
        for card in collection["cards"]:
            cards_name.append(card["name"])
    if request.args.get("measure"):
        objgraph.show_most_common_types()
    return json.dumps(cards_name)
Example #17
def main():
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.optimizers import Adam
    from keras.losses import categorical_crossentropy

    model = Sequential()
    model.add(Dense(128, input_dim=6 * 54, activation='relu'))
    model.add(Dense(128, activation='relu'))
    model.add(Dense(12, activation='softmax'))
    model.compile(loss=categorical_crossentropy, optimizer=Adam(lr=0.001))

    #model.load_weights("./save/mcts_nn_cube.h5")
    max_random = 5
    while True:
        max_random += 1

        def f(model, max_random):
            for i in range(100):
                r = 1 + np.random.choice(max_random)
                print()
                print("random dist: {}/{}".format(r, max_random), "step:", i)
                state = State()
                state.reset_and_randomize(r)
                mcts = MCTSAgent(model, state, max_depth=100)
                #print(mcts.initial_node.state)
                if mcts.is_terminal():
                    print("Done!")
                else:
                    mcts.search(steps=10000)
                    prior, _ = mcts.initial_node.state.calculate_priors_and_value(
                        model)
                    prior2 = mcts.initial_node.prior_probabilities
                    probs = mcts.action_probabilities(inv_temp=1)
                    q = mcts.initial_node.mean_action_values
                    model.fit(state.input_array(),
                              probs.reshape((1, 12)),
                              epochs=1,
                              verbose=0)
                    print("Prior:",
                          "[" + "".join(prob_box(p) for p in prior) + "]")
                    print("PrDir:",
                          "[" + "".join(prob_box(p) for p in prior2) + "]")
                    print("Prob: ",
                          "[" + "".join(prob_box(p) for p in probs) + "]")
                    print("Q:    ",
                          "[" + "".join(prob_box(max(0, p)) for p in q) + "]")

        gc.collect()
        objgraph.show_most_common_types(limit=20)

        p = Process(target=f, args=(model, max_random))
        p.start()
        p.join()

        model.save_weights("./save/mcts_nn_cube.h5")
Example #18
def calculate_rebot_model(args):
    if args[0] == '--reference':
        xml = OutputParser().parse(args[1])
    else:
        xml = ResultFromXML(args[0])
    p = psutil.Process(os.getpid())
    print 'Process memory usage after xml parsing %f M' % (float(p.get_memory_info().rss) / (1024**2))
    print 'Most common types'
    objgraph.show_most_common_types()
    return xml
Example #19
def calculate_rebot_model(args):
    if args[0] == '--reference':
        xml = OutputParser().parse(args[1])
    else:
        xml = ResultFromXML(args[0])
    p = psutil.Process(os.getpid())
    print 'Process memory usage after xml parsing %f M' % (
        float(p.get_memory_info().rss) / (1024**2))
    print 'Most common types'
    objgraph.show_most_common_types()
    return xml
Example #20
def run_objgraph(types):
    import objgraph
    import os
    import random
    objgraph.show_most_common_types(limit=50, shortnames=False)
    for type_ in types:
        count = objgraph.count(type_)
        print '%s objects: %d' % (type_, count)
        if count:
            objgraph.show_backrefs(
                random.choice(objgraph.by_type(type_)), max_depth=20,
                filename='/tmp/backrefs_%s_%d.dot' % (type_, os.getpid()))
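Because the filename above ends in .dot, show_backrefs() writes Graphviz source rather than a rendered image; a minimal sketch for rendering it afterwards (assumes the Graphviz dot binary is installed):

import subprocess

def render_dot(dot_path, png_path):
    # Convert the Graphviz source emitted by show_backrefs() into a PNG.
    subprocess.check_call(['dot', '-Tpng', dot_path, '-o', png_path])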
Example #21
def dump_memory_usage():
    """
    This is a function that prints the memory usage of w3af in real time.
    :author: Andres Riancho ([email protected])
    """
    if not DEBUG_MEMORY:
        return
    else:
        if DEBUG_REFERENCES:
            print 'Object References:'
            print '=================='
            interesting = ['tuple', 'dict', 'list']
            for interesting_klass in interesting:
                interesting_instances = objgraph.by_type(interesting_klass)

                sample = random.sample(interesting_instances, min(
                    SAMPLE_LEN, len(interesting_instances)))

                for s in sample:
                    fmt = 'memory-refs/%s-backref-graph-%s.png'
                    fname = fmt % (interesting_klass, id(s))

                    ignores = [id(interesting_instances), id(s), id(sample)]
                    ignores.extend([id(v) for v in locals().values()])
                    ignores.extend([id(v) for v in globals().values()])
                    ignores.append(id(locals()))
                    ignores.append(id(globals()))
                    try:
                        objgraph.show_backrefs(s, highlight=inspect.isclass,
                                               extra_ignore=ignores, filename=fname,
                                               extra_info=_extra_info)
                    except:
                        pass

            print

        print 'Most common:'
        print '============'
        objgraph.show_most_common_types()

        print

        print 'Memory delta:'
        print '============='
        objgraph.show_growth(limit=25)

        sorted_cmds, shareds, _, _ = get_memory_usage(None, True, True, True)
        cmd = sorted_cmds[0]
        msg = "%8sB Private + %8sB Shared = %8sB" % (human(cmd[1] - shareds[cmd[0]]),
                                                     human(shareds[cmd[0]
                                                                   ]), human(cmd[1])
                                                     )
        print 'Total memory usage:', msg
Example #22
 def on_selection_change(self, *args):
     #pass
     if self.selection:
         #from listscreen import ListScreenItem
         objgraph.show_growth()
         print '...'
         roots = objgraph.get_leaking_objects()
         objgraph.show_most_common_types(objects=roots)
         print '...'
         objgraph.show_refs(roots[:3], refcounts=True, filename='sad.png')
         #objgraph.show_chain(objgraph.find_backref_chain(self.selection[0].__self__, objgraph.is_proper_module),filename='chain.png')
         #objgraph.show_backrefs(self.selection[0].__self__, filename='sample-backref-graph.png')
         print '...'
Example #23
@contextmanager  # assumes: from contextlib import contextmanager
def memory_tracing(key_type: str = "lineno", limit: int = 15):
    """
    Traces memory consumption and prints memory-usage statistics when leaving the context
    :param key_type:
    :param limit:
    :return:
    """
    tracemalloc.start()
    print("--- Tracing memory... ---")
    try:
        # Do computation ...
        yield None
    finally:
        snapshot = tracemalloc.take_snapshot()
        # snapshot = snapshot.filter_traces((
        #     tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
        #     tracemalloc.Filter(False, "<unknown>"),
        # ))
        top_stats = snapshot.statistics(key_type)
        print("--- Memory usage statistics: ---")
        print("Top %s lines:" % limit)
        for index, stat in enumerate(top_stats[:limit], 1):
            frame = stat.traceback[0]
            # replace "/path/to/module/file.py" with "module/file.py"
            filename = os.sep.join(frame.filename.split(os.sep)[-2:])
            print("#%s: %s:%s: %.1f KiB"
                  % (index, filename, frame.lineno, stat.size / 1024))
            line = linecache.getline(frame.filename, frame.lineno).strip()
            if line:
                print('    %s' % line)

        other = top_stats[limit:]
        if other:
            size = sum(stat.size for stat in other)
            print("%s other: %.1f KiB" % (len(other), size / 1024))
        total = sum(stat.size for stat in top_stats)
        print("\nTotal allocated size: %.1f KiB" % (total / 1024))

        # May also be useful:
        import objgraph
        print("\nTypes of most common instances:")
        objgraph.show_most_common_types(limit=limit)
        print("\nObjects that do not have any referents:")
        objgraph.get_leaking_objects()
        print("\nIncrease in peak object counts since last call:")
        objgraph.show_growth(limit=limit)
        print("\ntuple objects tracked by the garbage collector:")
        objgraph.by_type('tuple')
        print("\ndict objects tracked by the garbage collector:")
        objgraph.by_type('dict')
        print("--- End of memory tracing ---")
Example #24
def _memprofile():
	""" mem profile app """
	from wxgtd.main import run
	run()
	import gc
	gc.collect()
	while gc.collect() > 0:
		print 'collect'

	import objgraph
	objgraph.show_most_common_types(20)

	import pdb
	pdb.set_trace()
Example #25
def objectoriented():
    import matplotlib.figure as figure
    #from matplotlib.figure import Figure
    from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas

    for i in range(total_loops):
        if i % sample_rate == 0:
            print(i)
            objgraph.show_most_common_types()
        fig = figure.Figure()
        canvas = FigureCanvas(fig)
        ax = fig.add_subplot(1, 1, 1)
        test(ax)
        save_figure_to_ram(fig)
Example #26
def main():

    d = {}
    l = f()

    d['k'] = l

    print("list l has {0} objects of type Foo()".format(len(l)))
    objgraph.show_most_common_types()

    objgraph.show_backrefs(random.choice(objgraph.by_type('Foo')),
                           filename="foo_refs.png")

    objgraph.show_refs(d, filename='sample-graph.png')
Example #27
 def process_event(self, pyg_event_tuple):
     pyg_event = pyg_event_tuple[0]
     if pyg_event.type == pygame.QUIT or (
             pyg_event.type == pygame.KEYDOWN
             and pyg_event.key == pygame.K_ESCAPE):  # If user clicked close
         for listener in self.quit_listeners:
             listener()
     else:
         screen_pos = pyg_event_tuple[1]
         event = Event()
         event.screen_mouse_position = screen_pos
         canvas_point_list = self.find_canvases(screen_pos, self.screen)
         for active_canvas in canvas_point_list:
             if pyg_event.type == pygame.MOUSEBUTTONDOWN:
                 if pyg_event.button == 1:
                     active_canvas.mouse_pressed(event)
                     if event.consumed:
                         self.mouse_released_canvas = active_canvas
                 elif pyg_event.button == 4:
                     active_canvas.mouse_wheel_scrolled_up(event)
                 elif pyg_event.button == 5:
                     active_canvas.mouse_wheel_scrolled_down(event)
             elif pyg_event.type == pygame.MOUSEBUTTONUP:
                 if pyg_event.button == 1:
                     active_canvas.mouse_released(event)
                     if event.consumed and active_canvas is self.mouse_released_canvas:
                         self.mouse_released_canvas = None
             elif pyg_event.type == pygame.KEYDOWN:
                 if pyg_event.key == pygame.K_LEFT:
                     active_canvas.left_key_pressed(event)
                 elif pyg_event.key == pygame.K_RIGHT:
                     active_canvas.right_key_pressed(event)
                 elif pyg_event.key == pygame.K_UP:
                     active_canvas.up_key_pressed(event)
                 elif pyg_event.key == pygame.K_DOWN:
                     active_canvas.down_key_pressed(event)
                 elif pyg_event.key == pygame.K_0:
                     gc.collect()
                     print("\nprinting most common types..")
                     objgraph.show_most_common_types(limit=10)
                 #     x = []
                 #     y = [x, [x], dict(x=x)]
                 #     gc.collect()
                 #     objgraph.show_refs([y])
             if event.consumed:
                 break
         if pyg_event.type == pygame.MOUSEBUTTONUP and \
                 self.mouse_released_canvas is not None and \
                 pyg_event.button == 1:
             self.mouse_released_canvas.mouse_canceled(event)
Example #28
def _memprofile():
    """ mem profile app """
    from tbviewer import main
    main.run()
    import gc
    gc.collect()
    while gc.collect() > 0:
        print('collect')

    import objgraph
    objgraph.show_most_common_types(20)

    import pdb
    pdb.set_trace()
Example #29
def _memprofile():
	''' mem profile app '''
	from photomagick.main import main
	main()
	import gc
	gc.collect()
	while gc.collect() > 0:
		print 'collect'

	import objgraph
	objgraph.show_most_common_types(20)

	import pdb
	pdb.set_trace()
Example #30
 def record_log(self, idle=1):
     """
         Print a progress line every 5 minutes.
         Records crawl logs, used to tune the parameter settings of each thread.
     """
     while True:
         print(
             'time_now:%s; sending_queue:%d; sended_queue:%d; response_queue:%d; store_queue:%d'
             % (str(datetime.today()), self.sending_queue.qsize(),
                self.sended_queue.qsize(), self.response_queue.qsize(),
                self.store_queue.qsize()))
         objgraph.show_most_common_types()
         time.sleep(300)
         if self.is_finish():
             break
Example #31
    def memory_profile(self):
        global LAST_HEAP
        print "{:=^78}".format(' profile begin')
        import gc
        gc.collect()
        print "{:-^78}".format(' growth')
        print objgraph.show_growth(limit=5)
        print "{:-^78}".format(' common types')
        print objgraph.show_most_common_types(limit=5)
        if LAST_HEAP:
            leftover = HEAP.heap() - LAST_HEAP
            print leftover.byrcs[0].byid
        LAST_HEAP = HEAP.heap()

        print "{:=^78}".format(' profile end')
Example #32
def pyplot(close=False, total_close=False):
    import matplotlib.pyplot as plt

    for i in range(total_loops):
        if i % sample_rate == 0:
            print(i)
            objgraph.show_most_common_types()
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1)
        test(ax)
        save_figure_to_ram(fig)
        if close:
            fig.clf()
        if total_close:
            plt.close()
Example #33
    def memory_profile(self):
        global LAST_HEAP
        print "{:=^78}".format(' profile begin')
        import gc
        gc.collect()
        print "{:-^78}".format(' growth')
        print objgraph.show_growth(limit=5)
        print "{:-^78}".format(' common types')
        print objgraph.show_most_common_types(limit=5)
        if LAST_HEAP:
            leftover = HEAP.heap() - LAST_HEAP
            print leftover.byrcs[0].byid
        LAST_HEAP = HEAP.heap()

        print "{:=^78}".format(' profile end')
Example #34
def main():
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.optimizers import Adam
    from keras.losses import categorical_crossentropy

    model = Sequential()
    model.add(Dense(128, input_dim=6*54, activation='relu'))
    model.add(Dense(128, activation='relu'))
    model.add(Dense(12, activation='softmax'))
    model.compile(loss=categorical_crossentropy,
                  optimizer=Adam(lr=0.001))

    #model.load_weights("./save/mcts_nn_cube.h5")
    max_random = 5
    while True:
        max_random += 1
        def f(model, max_random):
            for i in range(100):
                r = 1 + np.random.choice(max_random)
                print()
                print("random dist: {}/{}".format(r, max_random), "step:", i)
                state = State()
                state.reset_and_randomize(r)
                mcts = MCTSAgent(model, state, max_depth=100)
                #print(mcts.initial_node.state)
                if mcts.is_terminal():
                    print("Done!")
                else:
                    mcts.search(steps = 10000)
                    prior, _ = mcts.initial_node.state.calculate_priors_and_value(model)
                    prior2 = mcts.initial_node.prior_probabilities
                    probs = mcts.action_probabilities(inv_temp = 1)
                    q = mcts.initial_node.mean_action_values
                    model.fit(state.input_array(), probs.reshape((1,12)), epochs=1, verbose=0)
                    print("Prior:", "[" + "".join(prob_box(p) for p in prior) + "]")
                    print("PrDir:", "[" + "".join(prob_box(p) for p in prior2) + "]")
                    print("Prob: ", "[" + "".join(prob_box(p) for p in probs) + "]")
                    print("Q:    ", "[" + "".join(prob_box(max(0,p)) for p in q) + "]")

        gc.collect()
        objgraph.show_most_common_types(limit=20)
            
        p = Process(target=f, args=(model, max_random))
        p.start()
        p.join()

        model.save_weights("./save/mcts_nn_cube.h5")
Example #35
    def get(self, name=None):
        import gc
        import objgraph
        # Force a garbage collection pass
        gc.collect()
        # Print the object types with the most instances
        objgraph.show_most_common_types(limit=5)

        # if self.request.uri in FUCK_THE_SPIDER:
        for i in FUCK_THE_SPIDER:
            if self.request.uri.startswith(i):
                self.redirect('http://zt.bdinfo.net/speedtest/wo3G.rar')
                print('!!!!f**k: ' + self.request.uri)
                return
        if name and name.endswith('.md'):
            name = removeSuffix(name)
            self.redirect('/' + name, permanent=True)
            return
        if name and name.endswith('.html'):
            name = removeSuffix(name)
            self.redirect('/' + name, permanent=True)
            return
        if name is None or name == '':
            mds = search(MD_PATH, '*.md', NOT_IN)
            name = removeSuffix(mds[0][0])
            del mds

            url_name = urllib.parse.quote(name)
            self.redirect('/' + url_name)
        else:
            content = getContent(name)

            content = md.convert(content)
            # print(md.toc)
            modify_time = getModifyTime(name)
            pre, old = preAndOld(name)

            self.render('./blog.html',
                        title=name,
                        content=content,
                        time=time,
                        modify_time=modify_time,
                        pre=pre,
                        old=old,
                        author=AUTHOR,
                        author_link=AUTHOR_LINK,
                        toc=md.toc,
                        urllib=urllib)
Example #36
def print_stats(process, dispatches, uptime):
    memory_usage = process.memory_full_info().uss
    memory_usage = memory_usage / 1024**2
    cpu_usage = process.cpu_percent() / psutil.cpu_count()
    print(f"CPU usage: {cpu_usage:.2f}%")
    print(f"Memory Usage: {memory_usage:.2f} MiB")
    for key, value in dispatches.items():
        print(f"Worker ID {key} Dispatches: {value}")

    total = sum(dispatches.values())
    delta = datetime.datetime.utcnow() - uptime
    seconds = delta.total_seconds() or 1.0
    events_per_second = total / seconds
    print(f"Total: {total} ({events_per_second:.2f}/s)")
    print(f"Common Types:")
    objgraph.show_most_common_types()
Example #37
def temp_save(q, kb_res):
    global spider_count

    mutex.acquire()

    temp_data[q] = kb_res

    if spider_count % 100 == 0:
        print('save...')
        with open('datasets/all_cqa_question_to_kb_data.json', 'w') as f:
            json.dump(temp_data, f)
            gc.collect()
            objgraph.show_most_common_types(limit=50)

    spider_count = spider_count + 1

    mutex.release()
Example #38
    def parse_category(self, category):
        self.lock.acquire()
        try:
            page, start  = self.read_page(category.id)
            start = int(start) if start else 0
            page = int(page) if page else 1
            print 'starting category %s from %s page %s item' % (category.id, page, start)
            baselink = category.link
            while True:
                if page>1:
                    link = baselink.replace('.html', '/%s.html' % page)
                else:
                    link = baselink
                items = self.getItems(link)
                if not items:
                    print 'no items on the page %s at %s' % (page, link)
                    break
                items = items[start:]

                threads = []
                lock = threading.Semaphore(self.concurrent_products)
                for item in items:
                    th = threading.Thread(target=self.parse_items_thread, args=(lock, category, [item],))
                    th.start()
                    threads.append(th)

                for thread in threads:
                    thread.join()
                    del thread
                    start += 1
                    self.write_page(category.id, page, start)
                del threads
                del lock
                page += 1
                start = 0
                self.write_page(category.id, page, start)
                gc.collect()
                objgraph.show_most_common_types(limit=30)

        except:
            print 'Error while parsing category'
            traceback.print_exc()
            pass
        self.lock.release()
        connection.close()
        print 'Category %s finished' % category.id
Example #39
def get_info():
    gc.collect()
    snapshot2 = tracemalloc.take_snapshot()
    with open("flaskMemoy.log", 'a+') as f:
        f.write("====================================")
        f.write(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))

        f.write("\n")
        top_stats = snapshot2.compare_to(b_snapshot, 'lineno')
        for stat in top_stats[:10]:
            f.write(str(stat))
            f.write("\n")
        f.write("====================================")
        f.write("\n")
        objgraph.show_most_common_types(limit=5, file=f)
        f.write("====================================")
        f.write("\n")
    return jsonify(text="ok")
Example #40
def test_lots_of_queries():
    sync_table(LoadTest)
    import objgraph
    gc.collect()
    objgraph.show_most_common_types()

    print("Starting...")

    for i in range(1000000):
        if i % 25000 == 0:
            # print memory statistic
            print("Memory usage: %s" % (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss))

        LoadTest.create(k=i, v=i)

    objgraph.show_most_common_types()

    raise Exception("you shouldn't be here")
Example #41
def test_lots_of_queries():
    sync_table(LoadTest)
    import objgraph
    gc.collect()
    objgraph.show_most_common_types()

    print "Starting..."

    for i in range(1000000):
        if i % 25000 == 0:
            # print memory statistic
            print "Memory usage: %s" % (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)

        LoadTest.create(k=i, v=i)

    objgraph.show_most_common_types()

    raise Exception("you shouldn't be here")
Example #42
    def debugrequest(self, event):
        try:
            self.log("Event: ", event.__dict__, lvl=critical)

            if event.action == "storejson":
                self.log("Storing received object to /tmp", lvl=critical)
                fp = open(
                    '/tmp/hfosdebugger_' + str(event.user.useruuid) + "_" +
                    str(uuid4()), "w")
                json.dump(event.data, fp, indent=True)
                fp.close()
            if event.action == "memdebug":
                self.log("Memory hogs:", lvl=critical)
                objgraph.show_most_common_types(limit=20)
            if event.action == "growth":
                self.log("Memory growth since last call:", lvl=critical)
                objgraph.show_growth()
            if event.action == "graph":
                objgraph.show_backrefs([self.root],
                                       max_depth=42,
                                       filename='backref-graph.png')
                self.log("Backref graph written.", lvl=critical)
            if event.action == "exception":

                class TestException(BaseException):
                    pass

                raise TestException
            if event.action == "heap":
                self.log("Heap log:", self.heapy.heap(), lvl=critical)
            if event.action == "buildfrontend":
                self.log("Sending frontend build command")

                self.fireEvent(frontendbuildrequest(force=True), "setup")
            if event.action == "logtail":
                self.fireEvent(
                    logtailrequest(event.user, None, None, event.client),
                    "logger")

        except Exception as e:
            self.log("Exception during debug handling:",
                     e,
                     type(e),
                     lvl=critical)
Example #43
def print_memory(i):
    global object_counts

    print("\n\n--------------------- MEMORY -------------------------\n")

    print("TOTAL OBJECTS\n")
    o = len(gc.get_objects())
    print(o)
    object_counts.append(o)
    del o
    print("\n")

    print("GROWTH\n")
    objgraph.show_growth()
    print("\n")

    print("COMMON TYPES\n")
    objgraph.show_most_common_types()
    print("\n")

    print("LEAKING OBJECTS\n")
    roots = objgraph.get_leaking_objects()
    print("\n")

    log.info("ROOTS pre-collect : {}\n".format(len(roots)))

    print("COMMON TYPES IN ROOTS\n")
    objgraph.show_most_common_types(objects=roots)
    print("\n")

    objgraph.show_refs(roots[:3], refcounts=True, filename=TEST_PATH + '/roots_' + str(i) + '.png')
    print("\n")

    log.info("Garbage pre collect:  " + str(len(gc.garbage)))
    gc.collect()
    log.info("Garbage post collect: " + str(len(gc.garbage)))
    print("\n")

    roots = objgraph.get_leaking_objects()
    log.info("ROOTS post-collect : {}".format(len(roots)))

    print("\n\n---------------------------------------------------\n")
Example #44
def __dbg_mem(strn):
	if __DBGMEM:
		import gc, objgraph
		print
		print '#' * 80
		print '#' * 80
		print '##', strn
		print 'Collect', gc.collect()
		print 'Collect', gc.collect()
	
		roots = objgraph.get_leaking_objects()
		if roots:
			print len(roots)
			objgraph.show_most_common_types(objects=roots)
			objgraph.show_refs(roots[:3], refcounts=True, filename='tmp/%s.png' % strn.lower())
		else:
			print 'Nothing'
		print '#' * 80	
		print '#' * 80
		print
Example #45
def sortpro(eachapp, eachmem):
	all_pro = psutil.get_process_list()
	for each_pro in all_pro:
		r_each_pro = str(each_pro)
		pro_n = re.search(r'\'.+\..+\'',r_each_pro)	
		if pro_n:
			p_ln = pro_n.group()
			p_ln = p_ln[1:-1]
			if p_ln.lower() == eachapp:
				memdect(each_pro,eachmem)
			else:
				pass
		else:
			pass
	gc.collect()
	f1 = open("D:\\pylog.txt",'a')
	sys.stdout = f1
	objgraph.show_most_common_types(limit=30)
	print "================================="
	return
Example #46
def test_reid_writing():
    gallery_dataset = dataset_builder.build(gallery_cfg)
    query_dataset = dataset_builder.build(gallery_cfg)
    gallery_dataloader = torch.utils.data.DataLoader(gallery_dataset,
                                                     batch_size=10,
                                                     num_workers=0)
    model_cfg = model_cfg_builder.build(cfg)
    model = model_builder.build(model_cfg[0])
    model = DataParallel(model)
    output_file = tempfile.mktemp()
    objgraph.show_growth()
    write_to_h5(gallery_dataloader, model, output_file)
    objgraph.show_most_common_types()
    objgraph.show_growth()
    time.sleep(1)
    query_dataloader = torch.utils.data.DataLoader(query_dataset,
                                                   batch_size=10,
                                                   num_workers=0)
    output_file = tempfile.mktemp()
    write_to_h5(query_dataloader, model, output_file)
Example #47
    def debugrequest(self, event):
        try:
            self.log("Event: ", event.__dict__, lvl=critical)

            if event.action == "storejson":
                self.log("Storing received object to /tmp", lvl=critical)
                fp = open('/tmp/hfosdebugger_' + str(
                    event.user.useruuid) + "_" + str(uuid4()), "w")
                json.dump(event.data, fp, indent=True)
                fp.close()
            if event.action == "memdebug":
                self.log("Memory hogs:", lvl=critical)
                objgraph.show_most_common_types(limit=20)
            if event.action == "growth":
                self.log("Memory growth since last call:", lvl=critical)
                objgraph.show_growth()
            if event.action == "graph":
                objgraph.show_backrefs([self.root], max_depth=42,
                                       filename='backref-graph.png')
                self.log("Backref graph written.", lvl=critical)
            if event.action == "exception":
                class TestException(BaseException):
                    pass

                raise TestException
            if event.action == "heap":
                self.log("Heap log:", self.heapy.heap(), lvl=critical)
            if event.action == "buildfrontend":
                self.log("Sending frontend build command")

                self.fireEvent(frontendbuildrequest(force=True), "setup")
            if event.action == "logtail":
                self.fireEvent(logtailrequest(event.user, None, None,
                                              event.client), "logger")


        except Exception as e:
            self.log("Exception during debug handling:", e, type(e),
                     lvl=critical)
Example #48
    def test_lots_of_queries(self):
        import resource
        import objgraph

        class LoadTest(Model):
            k = columns.Integer(primary_key=True)
            v = columns.Integer()

        sync_table(LoadTest)
        gc.collect()
        objgraph.show_most_common_types()

        print("Starting...")

        for i in range(1000000):
            if i % 25000 == 0:
                # print memory statistic
                print("Memory usage: %s" % (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss))

            LoadTest.create(k=i, v=i)

        objgraph.show_most_common_types()

        raise Exception("you shouldn't be here")
Example #49
    def __call__(self, environ, start_response):
        with self.lock:
            profiler = cProfile.Profile()

            def run_app(*a, **kw):
                self.response = self.app(environ, start_response)

            profiler.runcall(run_app, environ, start_response)

            profiler.snapshot_stats()

            stats = pstats.Stats(profiler)
            stats.sort_stats('calls') #cumulative

            # Redirect output
            out = StringIO()
            stats.stream = out

            stats.print_stats()

            resp = ''.join(self.response)

            # Lets at least only put this on html-like responses.
            if resp.strip().startswith('<'):
                ## The profiling info is just appended to the response.
                ##  Browsers don't mind this.
                resp += ('<pre style="text-align:left; '
                         'border-top: 4px dashed red; padding: 1em;">')
                resp += cgi.escape(out.getvalue(), True)

                ct = objgraph.show_most_common_types()
                print ct

                resp += ct if ct else '---'

                output = StringIO()
                pprint.pprint(environ, output, depth=3)

                resp += cgi.escape(output.getvalue(), True)
                resp += '</pre>'

            return resp
Example #50
try:
    import objgraph
    def checkmem(): objgraph.show_most_common_types(limit=5)
except ImportError:
    def checkmem(): pass  # objgraph unavailable: make the debug hook a no-op
Example #51
    def modelCheck(self):
        """ This method modifies the current state """
        backtrack = False
        path_end_cached_state = False
#       c = 0
        if MEMORY_PROFILING:
            objgraph.show_growth()
        while len(self.state_stack) > 0:
            log.warning("------> starting model checker transition (stack len: %d) <-------" % len(self.state_stack))
            start_state = self.state_stack.pop()
            self.state = start_state
            if not config.get("runtime.replay"):
                start_state.restoreState(self.strategy)
#           self._debugDumpStateStack()
            log.debug("Current:\n%s" % start_state)

            if MEMORY_PROFILING and self.good_transitions_count > 500:
                uo = gc.collect()
                log.debug("Garbage collector found %d unreachable objects" % uo)
                objgraph.show_growth()
                objgraph.show_most_common_types()
                # Use the code below to get a graph of the references to a
                # random object of a particular type. Install xdot for best results.
#               objgraph.show_chain(objgraph.find_backref_chain(
#                   random.choice(objgraph.by_type('EateController')),
#                   inspect.ismodule))
                utils.crash("memory testing")
            
#           c += 1
#           pprint(state_list, file("state_list%d.txt" % c, "w"), width=100)
#           print c

            # replay the list of actions if necessary
            if backtrack:
                log.info("Backtracking, previous path early termination: %s" % str(path_end_cached_state))
                path_end_cached_state = False

                pop = None
                if self.useDpor:
                    stats.pushProfile("dpor")
                    start_state.model = self.initial_state.model
                    start_state.invariant_dispatcher = self.initial_state.invariant_dispatcher
                    pop, start_state = self.dpor.startBacktracking(start_state)
                    stats.popProfile()              

                self.state = start_state

                if config.get("runtime.replay"):
                    replayed_state = self.replayActions(start_state)

                    # Sanity checks
                    if self.state_debug and replayed_state != start_state:
                        pprint(replayed_state, file("state_now.txt", "w"), width=100)
                        pprint(start_state, file("state_before.txt", "w"), width=100)
                        utils.crash("State after replay is different than original (debug enabled)")
                    start_state = replayed_state

                self.state = start_state

                if pop:
                    path_end_cached_state = True
                    if self.packetTranslator is not None:
                        self.packetTranslator.stopSniffing()
                    start_state.testPoint("path_end", cached_state=path_end_cached_state)
                    start_state.checkNewViolations() # clean the new_violation state
                    continue


            next_action = self.chooseAction(start_state)
            log.debug("Next action: " + str(next_action))

            # when dpor decided that the available enabled actions do not need to be executed
            if self.useDpor and next_action is None:
                backtrack = True
                path_end_cached_state = True
                if self.packetTranslator is not None:
                    self.packetTranslator.stopSniffing()
                start_state.testPoint("path_end", cached_state=path_end_cached_state)
                start_state.checkNewViolations() # clean the new_violation state
                continue

            if self.state_debug and backtrack:
                for a in self.getEnabledActions(start_state):
                    assert a in self.getEnabledActions(start_state) or a.node_name == "model_checker"

            if self.graph != None:
                self.graph.startTransition(start_state)

            # Performing the transition would make us to go over the cutoff limit, backtrack
            if config.get("model.cutoff") > 0 and start_state.path_length+1 > config.get("model.cutoff"):
                log.warning("Path cutoff limit reached")
                path_end_cached_state = True
                backtrack = True
                if self.packetTranslator is not None:
                    self.packetTranslator.stopSniffing()
                start_state.testPoint("path_end", cached_state=path_end_cached_state)
                start_state.checkNewViolations() # clean the new_violation state
                continue

            # make a copy of the model before modifying it
            new_state = None
            if not config.get("runtime.replay") or self.state_debug:
                new_state = start_state.copy()
                new_state.restoreState()
            else:
                new_state = start_state
                start_state = new_state.minimal_copy()

            if self.state_debug:
                new_state.state_replay_list += [start_state]

            self.state = new_state

            # add the current state back to the stack, if there is still something to explore
            if start_state.hasAvailableActions():
                if not config.get("runtime.replay") or self.state_debug:
                    start_state.storeState(self.strategy)
                self.state_stack.append(start_state)

            if next_action.target == "port_stats_special":
                self.portStatsSpecial(new_state, next_action.args[0])

            self.checkManageFaults(next_action, new_state)

            new_state.testPoint("transition_start")

            ### Execute the transition ###
            new_state.executeAction(next_action, start_state.path_length+1)

            if self.graph != None:
                self.graph.endTransition(new_state, str(next_action))

            new_state.testPoint("transition_end")
            if new_state.checkNewViolations(): # Check if we need to go on or backtrack
                log.info("Invariant violations reported, path exploration stopped")
                path_end_cached_state = True
                backtrack = True
                if self.packetTranslator is not None:
                    self.packetTranslator.stopSniffing()
                new_state.testPoint("path_end", cached_state=path_end_cached_state)
                new_state.checkNewViolations() # clean the new_violation state
                continue

            if self.max_path_length < new_state.path_length:
                self.max_path_length = new_state.path_length

            self.good_transitions_count += 1

            # If we reached a new state, hash it and put it on top of the stack
            new_state.setAvailableActions(self.getEnabledActions(new_state))
            state_hash = hash(new_state)
            if not state_hash in self.unique_states:
                self.unique_states.add(state_hash)
                if new_state.hasAvailableActions():
                    if not config.get("runtime.replay") or self.state_debug:
                        new_state.storeState(self.strategy)
                    self.state_stack.append(new_state)
                    backtrack = False
                    log.debug("New state with ID: %d" % new_state.state_id)
                else:
                    log.info("No more actions available, end state reached.")
                    backtrack = True
                    if self.packetTranslator is not None:
                        self.packetTranslator.stopSniffing()
                    new_state.testPoint("path_end", cached_state=path_end_cached_state)
                    new_state.checkNewViolations() # clean the new_violation state
            else:
                log.info("Reached a known state, stopping path exploration")
                path_end_cached_state = True
                self.old_states_count += 1
                backtrack = True
                if self.packetTranslator is not None:
                    self.packetTranslator.stopSniffing()
                new_state.testPoint("path_end", cached_state=path_end_cached_state)
                new_state.checkNewViolations() # clean the new_violation state

        new_state.testPoint("path_end", cached_state=path_end_cached_state)
        new_state.checkNewViolations() # clean the new_violation state

        if self.packetTranslator is not None:
            self.packetTranslator.destroyTopo()
Example #52
def get_info_from_stream(section_name):
    tries = 0
    while True:
        if PRINT_MEMORY_INFO:
            if section_name == "iem":
                gc.collect()
                logger.debug("NEW GRAPH - " + time.strftime("%c"))
                graphOutput = StringIO()
                objgraph.show_most_common_types(file=graphOutput)
                logger.debug(graphOutput.getvalue())
        try:

            stream_url = stream_confs.get(section_name, 'stream_url')
            stream_obj = Stream.objects.filter(url=stream_url)
            if not stream_obj:
                stream_obj = Stream(url=stream_url, name=stream_confs.get(
                    section_name, 'stream_name'))
                stream_obj.save()
            else:
                stream_obj = stream_obj[0]

            logger.debug("Got stream object for: " + section_name)

            stream_data = get_stream(stream_url)

            logger.debug("Done getting stream for: " + section_name)

            set_up_bracket(section_name, stream_obj)

            if not stream_data:
                if stream_obj.up and tries <= 2:
                    logger.debug("Failed getting up stream, stream_url: " + stream_url
                                 + " tries: " + str(tries))
                    tries += 1
                    time.sleep(5)
                    continue
                logger.info(
                    "There doesn't seem to be any stream available for: " + stream_url)
                tries = 0
                stream_obj.up = False
                stream_obj.save()
                time.sleep(60)
                continue
            else:
                tries = 0

            headers = {'Client-ID': 'jy4zwuphqfdvh2nfygxkzb66z23wjz',
                       'Accept': 'application/vnd.twitchtv.v3+json'}
            res = requests.get(
                "https://api.twitch.tv/kraken/channels/" + stream_url, headers=headers)
            json_res = res.json()
            if json_res["game"] != "StarCraft II":
                logger.info(
                    "Current game is not SCII for stream: " + stream_url)
                stream_obj.up = False
                stream_obj.save()
                time.sleep(60)
                continue
            if any(x in json_res["status"].lower() for x in ["rerun", "rebroadcast"]):
                logger.info("Showing rerun in stream: " + stream_url)
                stream_obj.up = False
                stream_obj.save()
                time.sleep(60)
                continue

            # At this point we know we have a stream that is up and we need to
            # parse it

            stream_obj.up = True
            stream_obj.save()

            # Get game objects
            game_objects = Game.objects.filter(stream=stream_obj)
            if not game_objects:
                game = Game(stream=stream_obj)
                game.save()
            else:
                game = game_objects[0]

            im = get_screenshot(stream_data, section_name)

            display_type = stream_confs.get(section_name, "display_types")

            my_data = get_data_from_image(im, display_type)

            for key in my_data.keys():
                logger.debug(str(key) + ": " + str(my_data[key]))

            if 'l_name' not in my_data:
                logger.info("No correct conf for: " + stream_url)
                stream_obj.up = False
                stream_obj.save()
                time.sleep(60)
                continue

            if not game_live(my_data):
                if game.game_on:
                    game.game_off_time = timezone.now()
                game.game_on = False
                game.save()
                time.sleep(10)
                logger.info("No live game for: " + stream_url)
                continue
            else:
                game.game_on = True

            if get_score(my_data) != 100:
                save_main_image(display_type, im)

            players = get_players(stream_obj)
            p_l = players[0]
            p_r = players[1]

            categories = ['name', 'score', 'supply',
                          'minerals', 'gas', 'workers', 'army']
            for category in categories:
                if 'l_' + category in my_data:
                    setattr(p_l, category, my_data['l_' + category])
                    setattr(p_r, category, my_data['r_' + category])
            p_l.save()
            p_r.save()

            # Game stuff
            if 'time' in my_data:
                game.current_time = my_data['time']
            if 'map' in my_data:
                game.on_map = my_data['map']

            game.save()

            logger.info("Done with loop for: " + stream_url)
            time.sleep(2)
        except Exception as _:
            logger.error("Something went wrong for stream: " +
                         stream_url, exc_info=True)
Example #53
def show_types():
    if objgraph is None:
        return "objgraph not installed"
    objgraph.show_most_common_types(limit=20)
    return "Printed to console"
Example #54
def printMemoryCount():
    result = {} 
    for o in gc.get_objects(): 
        t = type(o)
        #print(t)
        count = result.get(t, 0) 
        result[t] = count + 1 
    
    for k,v in result.items():
        print(k,v)

    return result

if __name__ == "__main__":
    
    objgraph.show_most_common_types()
    
    _entries = [Dummy()] * 9999999
    
    print('test')
    objgraph.show_most_common_types()
    #memory_tracker = tracker.SummaryTracker()
    objgraph.show_growth(limit=10)

    #table = testCase()
    #memory_tracker.print_diff()


    printMemoryCount()
    ##objgraph.show_growth(limit=10)
    #import inspect, random
Example #55
log = logging.getLogger(__name__)

class Worker(multiprocessing.Process):

    queue = queue

    def run(self):
        #log.warn("run")
        print thread.get_ident()
        count = queue.get()
        print 'In %s, count: %s' % (self.name, count)
        self.queue.put(count+1)
        time.sleep(random.randint(2, 3))
        count = queue.get()
        self.queue.put(count-1)
        print 'In %s, count: %s' % (self.name, count-1)
        return

if __name__ == '__main__':
    jobs = []
    for i in range(5):
        p = Worker()
        jobs.append(p)
        count = queue.get()
        print '@@', i, count
        queue.put(count)
        p.start()
    for j in jobs:
        j.join()
    objgraph.show_most_common_types(limit=20)