Example #1
    def test_profile_single_context(self):
        
        def id_callback():
            return self.callback_count
        def a():
            pass

        self.callback_count = 1
        yappi.set_context_id_callback(id_callback)
        yappi.start(profile_threads=False)
        a() # context-id:1
        self.callback_count = 2
        a() # context-id:2
        stats = yappi.get_func_stats()
        fsa = utils.find_stat_by_name(stats, "a")
        self.assertEqual(fsa.ncall, 1)
        yappi.stop()
        yappi.clear_stats()
        
        self.callback_count = 1
        yappi.start() # profile_threads=True
        a() # context-id:1
        self.callback_count = 2
        a() # context-id:2
        stats = yappi.get_func_stats()
        fsa = utils.find_stat_by_name(stats, "a")
        self.assertEqual(fsa.ncall, 2)
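The same mechanism outside a test harness: a minimal sketch that tags profiled calls with an application-defined context id (the dict-based counter is illustrative, not from the original):

import yappi

current = {"ctx": 0}
yappi.set_context_id_callback(lambda: current["ctx"])
yappi.start(profile_threads=False)

def work():
    pass

work()               # recorded under context id 0
current["ctx"] = 1
work()               # not counted: profile_threads=False profiles only the context active at start()
yappi.stop()
yappi.get_func_stats().print_all()
yappi.clear_stats()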
Example #2
    def test_filter(self):
        def a(): pass
        def b(): a()
        def c(): b()

        _TCOUNT = 5

        ts = []
        yappi.start()
        for i in range(_TCOUNT):
            t = threading.Thread(target=c)
            t.start()
            ts.append(t)

        for t in ts:
            t.join()

        yappi.stop()

        fstats = yappi.get_func_stats(filter={"ctx_id":9})
        self.assertTrue(fstats.empty())
        fstats = yappi.get_func_stats(filter={"ctx_id":0, "name":"c"}) # main thread
        self.assertTrue(fstats.empty())

        for i in range(1, _TCOUNT):
            fstats = yappi.get_func_stats(filter={"ctx_id":i, "name":"a", 
                "ncall":1})
            self.assertEqual(fstats.pop().ncall, 1)
            fstats = yappi.get_func_stats(filter={"ctx_id":i, "name":"b"})
            self.assertEqual(fstats.pop().ncall, 1)
            fstats = yappi.get_func_stats(filter={"ctx_id":i, "name":"c"})
            self.assertEqual(fstats.pop().ncall, 1)
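Here ctx_id 0 is the main thread and the spawned threads get ids 1.._TCOUNT. Outside the test, the same dict-based filtering looks like the sketch below (newer yappi releases also offer a filter_callback argument):

stats = yappi.get_func_stats(filter={"ctx_id": 1, "name": "a"})
if not stats.empty():
    print(stats.pop().ncall)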
Example #3
    def test_merge_multithreaded_stats(self):
        import threading
        import _yappi
        timings = {"a_1":2, "b_1":1}
        _yappi._set_test_timings(timings)
        def a(): pass
        def b(): pass
        yappi.start()
        t = threading.Thread(target=a)
        t.start()
        t.join()
        t = threading.Thread(target=b)
        t.start()
        t.join()
        yappi.get_func_stats().save("ystats1.ys")
        yappi.clear_stats()
        _yappi._set_test_timings(timings)
        self.assertEqual(len(yappi.get_func_stats()), 0)
        self.assertEqual(len(yappi.get_thread_stats()), 1)
        t = threading.Thread(target=a)
        t.start()
        t.join()

        self.assertEqual(_yappi._get_start_flags()["profile_builtins"], 0)
        self.assertEqual(_yappi._get_start_flags()["profile_multithread"], 1)
        yappi.get_func_stats().save("ystats2.ys")

        stats = yappi.YFuncStats(["ystats1.ys", "ystats2.ys"])
        fsa = utils.find_stat_by_name(stats, "a")
        fsb = utils.find_stat_by_name(stats, "b")
        self.assertEqual(fsa.ncall, 2)
        self.assertEqual(fsb.ncall, 1)
        # merged: a ran twice at 2 time units per call, b once at 1 unit
        self.assertEqual(fsa.tsub, 4)
        self.assertEqual(fsa.ttot, 4)
        self.assertEqual(fsb.tsub, 1)
        self.assertEqual(fsb.ttot, 1)
Example #4
    def stop(self):
        self._running = False
        self._tick_timer.cancel()
        self._stop_time = time.time()

        if self._app.args.yappi and USE_YAPPI:
            yappi.get_func_stats().print_all()
Example #5
    def stats_string(self, id):
        output = StringIO.StringIO()
        yappi.get_func_stats().print_all(out=output)
        m = ProfilerResponse()
        m.id = id
        m.ystats_string = output.getvalue()
        self.writer.addCommand(m)
Example #6
def wrapped(*args, **kwargs):
    yappi.start()
    result = func(*args, **kwargs)
    yappi.stop()
    prof_file = "%s.%s" % (func.__name__, time.time())
    #prof_file = "callgrind.a.1"
    yappi.get_func_stats().save(prof_file, "ystat")
    return result
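The function above is the inner wrapper of a profiling decorator. A self-contained sketch of the full decorator (the name profiled is illustrative, not from the original):

import functools
import time

import yappi

def profiled(func):
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        # Profile exactly one call, then persist the stats in yappi's own format.
        yappi.start()
        try:
            result = func(*args, **kwargs)
        finally:
            yappi.stop()
        prof_file = "%s.%s" % (func.__name__, time.time())
        yappi.get_func_stats().save(prof_file, "ystat")
        return result
    return wrapped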
Example #7
    def on_close(self, e):
        self.netcontroller.running = False
        #self.net_thread.quit()
        if self.args.profile:
            try:
                import yappi
                yappi.get_func_stats().print_all()
            except ImportError:
                pass
Example #8
def update():

	computer.run( clock.value )

	if io.hasExited:
		clock.stop()
		print( 'See you later!' )

		yappi.get_func_stats().print_all()
Example #9
    def cmd_stop(self, args):
        """stop profiling"""
        print "Profile results:"
        import yappi  # We do the import here so that we won't barf if run normally and yappi not available

        yappi.get_func_stats().print_all(
            columns={0: ("name", 50), 1: ("ncall", 5), 2: ("tsub", 8), 3: ("ttot", 8), 4: ("tavg", 8)}
        )
        yappi.get_thread_stats().print_all()
        yappi.stop()
Example #10
def yappi_prof_call(func, *args):
    '''
        https://code.google.com/p/yappi/wiki/usageyappi_v092
    '''
    import yappi
    yappi.start()
    result = func(*args)
    yappi.get_func_stats().print_all()
    yappi.get_thread_stats().print_all()
    return result
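Usage is just wrapping an ordinary call, for example:

result = yappi_prof_call(sorted, [3, 1, 2])

Note that the helper never calls yappi.stop() or yappi.clear_stats(), so stats accumulate across repeated calls.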
Example #11
def clusterizacao(request, debate_id, qtdGrupos=3):
	
## MAKE IT AN OPTION FOR THE USER TO CHOOSE WHETHER TO GROUP
## BY THE INITIAL OR THE FINAL POSITIONING
	print "view-clusterizacao up and running!!!"
	inicio = datetime.now()
	print inicio, "view clusterizacao"
	
	yappi.set_clock_type('cpu')
	yappi.start(builtins=True)
	start = time.time()
	
	auxResult = clusterArgInicial(debate_id)
	
	duration = time.time() - start
	stats = yappi.get_func_stats()
	stats.save('clusterArgInicial.out', type = 'callgrind')

	tese = auxResult[5]

	yappi.set_clock_type('cpu')
	yappi.start(builtins=True)
	start = time.time()
	
#	resultado = gruposArgumentacao(auxResult, qtdeGrupos=3, LSA=True, Normalizacao=True)
# 	resultado = gruposArgumentacao(auxResult, qtdeGrupos=4, LSA=True, Normalizacao=True)
# 	resultado = gruposArgumentacao(auxResult, qtdeGrupos=5, LSA=True, Normalizacao=True)
# 	resultado = gruposArgumentacao(auxResult, qtdeGrupos=6, LSA=True, Normalizacao=True)
	
	resultado = gruposArgumentacao(auxResult, qtdeGrupos=int(qtdGrupos), LSA=None, Normalizacao=True)
	
	duration = time.time() - start
	stats = yappi.get_func_stats()
	stats.save('gruposArgumentacao.out', type = 'callgrind')
	
	grupo1 = resultado[0]
	grupo2 = resultado[1]
	grupo3 = resultado[2]
	grupo4 = resultado[3]
	grupo5 = resultado[4]
	grupo6 = resultado[5]

	context = RequestContext(request,{'results' : [grupo1,grupo2,grupo3,grupo4,\
										len(grupo1),len(grupo2),len(grupo3),len(grupo4), tese, \
										grupo5, len(grupo5), grupo6, len(grupo6)],
										'grupo' : Grupo.objects.filter(idgrupo=1064)[0]})
	
	return render(request, 'clusterizacao.html',context)
Example #12
def main():
    print('Main TID: {}'.format(gettid()))
    args = Bunch(channel=None,
                 devices=[],
                 generate=False,
                 ui='main.ui')
    yappi.start()
    exit_value = epyqlib.__main__.main(args=args)
    yappi.stop()
    yappi.get_func_stats().save('yappi.stats', type='pstat')
    yappi.get_thread_stats().print_all()
    return exit_value
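Because the stats were saved with type='pstat', they can be read back with the standard library's pstats module; a short sketch (the filename matches the save above):

import pstats

p = pstats.Stats('yappi.stats')
p.sort_stats('cumulative').print_stats(20)  # top 20 entries by cumulative time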
Example #13
def run():
    host = "http://localhost:8080"
    urls = ["/client.py", "/echo_server.py"]
    global all_task
    all_task = len(urls)

    yappi.start()
    for url in urls:
        fetch(host + url)
        print "fetch url:", url

    yappi.get_func_stats().print_all()
    IOLoop.instance().start()
Example #14
    def test_no_stats_different_clock_type_load(self):
        def a(): pass
        yappi.start()
        a()
        yappi.stop()
        yappi.get_func_stats().save("ystats1.ys")
        yappi.clear_stats()
        yappi.set_clock_type("WALL")
        yappi.start()
        yappi.stop()
        stats = yappi.get_func_stats().add("ystats1.ys")
        fsa = utils.find_stat_by_name(stats, 'a')
        self.assertTrue(fsa is not None)
Example #15
    def tearDown(self):
        fstats = yappi.get_func_stats()
        if not fstats._debug_check_sanity():
            sys.stdout.write("ERR: Duplicates found in Func stats\r\n")
            fstats.debug_print()
        for fstat in fstats:
            if not fstat.children._debug_check_sanity():
                sys.stdout.write("ERR: Duplicates found in ChildFunc stats\r\n")
                fstat.children.print_all()
        tstats = yappi.get_thread_stats()
        if not tstats._debug_check_sanity():
            sys.stdout.write("ERR: Duplicates found in Thread stats\r\n")
            tstats.print_all()
Example #16
def main(argv):
    parser = argparse.ArgumentParser(description='Convert a FASTA and QUAL pair of files into a FASTQ file')
    parser.add_argument('fasta_filename',
                        help='the FASTA format input file path')
    parser.add_argument('qual_filename',
                        help='the QUAL format input file path')
    parser.add_argument('-q', action='store_true',
                        help="run quietly")
    parser.add_argument('-d', action='store_true',
                        help="print diagnostics to stderr")
    parser.add_argument('-z','--gzip', action='store_true',
                       help='input & output are gzipped')
    parser.add_argument('-p', '--profile', action='store_true',
                        help="profile this execution")
    parser.add_argument('-o', '--output',
                        default=sys.stdout, type=argparse.FileType('w'),
                        help='the output FASTQ file path')
    args = parser.parse_args()

    global debug_flag
    debug_flag = args.d

    debug_flag and sys.stderr.write("%r\n" % args)

    if args.profile:
        import yappi # https://code.google.com/p/yappi/wiki/apiyappi
        yappi.start(builtins=True)

    if args.gzip:
        import gzip
        fasta = gzip.open(args.fasta_filename)
        qual = gzip.open(args.qual_filename)
        outf = gzip.GzipFile(fileobj=args.output, mode='wb')
    else:
        fasta = open(args.fasta_filename)
        qual = open(args.qual_filename)
        outf = args.output

    # alphabet irrelevant (and makes it slow) records = PairedFastaQualIterator(fasta, qual, alphabet=IUPAC.ambiguous_dna)
    records = PairedFastaQualIterator(fasta, qual)
    count = SeqIO.write(records, outf, "fastq")
    outf.close()

    if args.profile:
        yappi.get_thread_stats().print_all(sys.stderr)
        yappi.get_func_stats().sort("subtime").print_all(sys.stderr)

    print("Converted %i records" % count)
Example #17
    def test_basic(self):
        yappi.set_clock_type('wall')
        def dummy():
            pass
        def a():
            time.sleep(0.2)
        class Worker1(threading.Thread):
            def a(self):
                time.sleep(0.3)
            def run(self):
                self.a()
        yappi.start(builtins=False, profile_threads=True)

        c = Worker1()
        c.start()
        c.join()
        a()
        stats = yappi.get_func_stats()
        fsa1 = utils.find_stat_by_name(stats, 'Worker1.a')
        fsa2 = utils.find_stat_by_name(stats, 'a')
        self.assertTrue(fsa1 is not None)
        self.assertTrue(fsa2 is not None)
        self.assertTrue(fsa1.ttot > 0.2)
        self.assertTrue(fsa2.ttot > 0.1)
        tstats = yappi.get_thread_stats()
        self.assertEqual(len(tstats), 2)
        tsa = utils.find_stat_by_name(tstats, 'Worker1')
        tsm = utils.find_stat_by_name(tstats, '_MainThread')
        dummy() # call dummy to force ctx name to be retrieved again.
        self.assertTrue(tsa is not None)
        # TODO: I put dummy() to fix below, remove the comments after a while.
        self.assertTrue( # FIX: I see this fails sometimes?
            tsm is not None, 
            'Could not find "_MainThread". Found: %s' % (', '.join(utils.get_stat_names(tstats)))) 
Example #18
    def stop_profiler(self):
        """
        Stop yappi and write the stats to the output directory.
        Return the path of the yappi statistics file.
        """
        if not self.profiler_running:
            raise RuntimeError("Profiler is not running")

        if not HAS_YAPPI:
            raise RuntimeError("Yappi cannot be found. Plase install the yappi library using your preferred package "
                               "manager and restart Tribler afterwards.")

        yappi.stop()

        yappi_stats = yappi.get_func_stats()
        yappi_stats.sort("tsub")

        log_dir = os.path.join(self.session.config.get_state_dir(), 'logs')
        file_path = os.path.join(log_dir, 'yappi_%s.stats' % self.profiler_start_time)
        # Make the log directory if it does not exist
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        yappi_stats.save(file_path, type='callgrind')
        yappi.clear_stats()
        self.profiler_running = False
        return file_path
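The matching start method is not shown; a minimal sketch under the same attribute names (an assumption based on this snippet, not the project's actual code; assumes time is imported):

    def start_profiler(self):
        if self.profiler_running:
            raise RuntimeError("Profiler is already running")
        if not HAS_YAPPI:
            raise RuntimeError("Yappi cannot be found.")
        self.profiler_start_time = int(time.time())  # used in the stats filename
        yappi.start(builtins=True)
        self.profiler_running = True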
Example #19
    def test_callback(self):
        self.context_id = 0
        yappi.set_context_id_callback(lambda: self.context_id)
        yappi.start()
        a()
        self.context_id = 1
        a()
        self.context_id = 2
        a()

        # Re-schedule context 1.
        self.context_id = 1
        a()
        yappi.stop()

        threadstats = yappi.get_thread_stats().sort('id', 'ascending')
        self.assertEqual(3, len(threadstats))
        self.assertEqual(0, threadstats[0].id)
        self.assertEqual(1, threadstats[1].id)
        self.assertEqual(2, threadstats[2].id)

        self.assertEqual(1, threadstats[0].sched_count)
        self.assertEqual(2, threadstats[1].sched_count)  # Context 1 ran twice.
        self.assertEqual(1, threadstats[2].sched_count)

        funcstats = yappi.get_func_stats()
        self.assertEqual(4, utils.find_stat_by_name(funcstats, 'a').ncall)
Example #20
    def test_basic(self):
        import threading
        import time
        yappi.set_clock_type('wall')
        def a():
            time.sleep(0.2)
        class Worker1(threading.Thread):
            def a(self):
                time.sleep(0.3)                
            def run(self):
                self.a()
        yappi.start(builtins=False, profile_threads=True)

        c = Worker1()
        c.start()
        c.join()        
        a()
        stats = yappi.get_func_stats()
        fsa1 = utils.find_stat_by_name(stats, 'Worker1.a')
        fsa2 = utils.find_stat_by_name(stats, 'a')
        self.assertTrue(fsa1 is not None)
        self.assertTrue(fsa2 is not None)
        self.assertTrue(fsa1.ttot > 0.2)
        self.assertTrue(fsa2.ttot > 0.1)
        tstats = yappi.get_thread_stats()
        self.assertEqual(len(tstats), 2)
        tsa = utils.find_stat_by_name(tstats, 'Worker1')
        tsm = utils.find_stat_by_name(tstats, '_MainThread')
        self.assertTrue(tsa is not None)
        self.assertTrue(tsm is not None) # FIX: I see this fails sometimes?
Example #21
    def test_singlethread_profiling(self):
        yappi.set_clock_type("wall")

        def a():
            time.sleep(0.2)

        class Worker1(threading.Thread):
            def a(self):
                time.sleep(0.3)

            def run(self):
                self.a()

        yappi.start(profile_threads=False)

        c = Worker1()
        c.start()
        c.join()
        a()
        stats = yappi.get_func_stats()
        fsa1 = utils.find_stat_by_name(stats, "Worker1.a")
        fsa2 = utils.find_stat_by_name(stats, "a")
        self.assertTrue(fsa1 is None)
        self.assertTrue(fsa2 is not None)
        self.assertTrue(fsa2.ttot > 0.1)
Example #22
    def test_print_formatting(self):
        def a():
            pass

        def b():
            a()

        func_cols = {1: ("name", 48), 0: ("ncall", 5), 2: ("tsub", 8)}
        thread_cols = {1: ("name", 48), 0: ("ttot", 8)}

        yappi.start()
        a()
        b()
        yappi.stop()
        fs = yappi.get_func_stats()
        cs = fs[1].children
        ts = yappi.get_thread_stats()
        # fs.print_all(out=sys.stderr, columns={1:("name", 70), })
        # cs.print_all(out=sys.stderr, columns=func_cols)
        # ts.print_all(out=sys.stderr, columns=thread_cols)
        # cs.print_all(out=sys.stderr, columns={})

        self.assertRaises(yappi.YappiError, fs.print_all, columns={1: ("namee", 9)})
        self.assertRaises(yappi.YappiError, cs.print_all, columns={1: ("dd", 0)})
        self.assertRaises(yappi.YappiError, ts.print_all, columns={1: ("tidd", 0)})
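print_all takes a columns dict mapping position to a (name, width) pair; unknown column names raise yappi.YappiError, which is exactly what the assertions above exercise. A valid call for func stats, for reference:

yappi.get_func_stats().print_all(
    columns={0: ("name", 60), 1: ("ncall", 8), 2: ("ttot", 8)})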
Example #23
def yappi_logger(path):
    while True:
        time.sleep(60)
        yf = yappi.get_func_stats()
        they = sorted(yf, key=lambda x: x.ttot, reverse=True)
        #they = sorted(filter(lambda x: not x.builtin, yf), key=lambda x: x.ttot, reverse=True)
        opath = path + time.strftime('%Y%m%d_%H%M%S') + '.txt'
        with open(opath, 'w') as fout:
            if True:
                sum_func_time = sum(map(lambda x: x.tsub, they))
                fout.write('total {} seconds function time\n'.format(sum_func_time))
                fout.write('func+subs\tfunc\ttot/call\tncalls\n')
                for x in they:
                    fout.write(_yfstr(x))
            else:
                yf.print_all(
                    out=fout,
                    columns={
                        0:("name",80),
                        1:("ncall", 9),
                        2:("tsub", 8),
                        3:("ttot", 8),
                        4:("tavg",8),
                    }
                )
        sys.stderr.write('wrote profiling\n{}\n'.format(opath))
        if os.path.exists('bobreak'):
            pdb.set_trace()
Example #24
def main4():
    import yappi
    yappi.start()
    main2()
    stats = yappi.get_func_stats()
    stats.sort('ttot')
    stats.print_all()
Example #25
    def test_children_stat_functions(self):
        _timings = {"a_1":5, "b_1":3, "c_1":1}
        _yappi._set_test_timings(_timings)
        def b():
            pass
        def c():
            pass
        def a():
            b()
            c()
        yappi.start()
        a()
        b() # non-child call
        c() # non-child call
        stats = yappi.get_func_stats()
        fsa = utils.find_stat_by_name(stats, 'a')
        childs_of_a = fsa.children.get().sort("tavg", "desc")
        prev_item = None
        for item in childs_of_a:
            if prev_item:
                self.assertTrue(prev_item.tavg > item.tavg)
            prev_item = item
        childs_of_a.sort("name", "desc")
        prev_item = None
        for item in childs_of_a:
            if prev_item:
                self.assertTrue(prev_item.name > item.name)
            prev_item = item
        childs_of_a.clear()
        self.assertTrue(childs_of_a.empty())
Example #26
    def run_backend_and_or_ui(self):
        if self.args.start_frame is not None:
            print "fast-forwarding to frame %d..." % self.args.start_frame
            self.time_increment = 1. / self.args.frame_rate
            for n in range(self.args.start_frame):
                self._proceed_and_update()
            print "ok"
        else:
            self.entity.update(self.input)
            self.update()
                
        run_backend = not self.args.ui_only
        run_ui = not self.args.backend_only

        if run_ui:
            self._scene_class = self._entity_scene_module.Scene

        if self.args.with_profiler:
            import yappi
            yappi.start()

        if run_backend and run_ui:
            if self.args.websockets:
                websocket_server = self._create_websocket_server()
                self._start_in_new_thread(websocket_server)

            self._server = self._create_single_process_server()
            self._set_up_timed_refresh()
            self._start_in_new_thread(self._server)
            client = SingleProcessClient(self._server)
            self.run_ui(client)
        elif run_backend:
            self._server = self._create_websocket_server()
            self._set_up_timed_refresh()
            try:
                self._server.start()
            except KeyboardInterrupt:
                pass
        elif run_ui:
            if self.args.no_websockets:
                client = None
            else:
                client = WebsocketClient(self.args.backend_host)
            self.run_ui(client)

        if self.args.with_profiler:
            yappi.get_func_stats().print_all()
Example #27
    def test_lambda(self):
        f = lambda: time.sleep(0.3)
        yappi.set_clock_type("wall")
        yappi.start()
        f()
        stats = yappi.get_func_stats()
        fsa = utils.find_stat_by_name(stats, "<lambda>")
        self.assertTrue(fsa.ttot > 0.1)
Example #28
def _stop_profiling(filename, format):
    logging.debug("Stopping CPU profiling")
    with _lock:
        if yappi.is_running():
            yappi.stop()
            stats = yappi.get_func_stats()
            stats.save(filename, format)
            yappi.clear_stats()
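The matching start helper in the same locking style (a sketch: the clock_type argument, the module-level _lock, and the logging import are carried over from the snippet above):

def _start_profiling(clock_type):
    logging.debug("Starting CPU profiling")
    with _lock:
        if yappi.is_running():
            raise RuntimeError("Profiler is already running")
        yappi.set_clock_type(clock_type)
        yappi.start(builtins=True, profile_threads=True)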
Example #29
File: cpu.py Project: EdDev/vdsm
    def stop(self):
        if not yappi.is_running():
            raise UsageError("CPU profiler is not running")

        logging.info("Stopping CPU profiling")
        yappi.stop()
        stats = yappi.get_func_stats()
        stats.save(self.filename, self.format)
        yappi.clear_stats()
Example #30
    def test_generator(self):
        def _gen(n):
            while n > 0:
                yield n
                n -= 1
        yappi.start()
        for x in _gen(5):
            pass
        self.assertTrue(yappi.convert2pstats(yappi.get_func_stats()) is not None)
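yappi.convert2pstats returns a standard pstats.Stats object, so converted results work with the usual profiler tooling; a short self-contained sketch:

import yappi

def work():
    return sum(range(1000))

yappi.start()
work()
yappi.stop()
yappi.convert2pstats(yappi.get_func_stats()).sort_stats('cumulative').print_stats(5)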
Example #31
async def on_message(message):
    Bot_Prefix = "+"
    if getenv("PRODUCTION") is None:
        Bot_Prefix = "-"

    if not message.content.startswith(Bot_Prefix):
        return

    # Split input
    args = message.content[len(Bot_Prefix):].split()

    if len(args) == 0:
        return

    cmd = args[0].lower()

    # the arg array ex. ["hello", "world"]
    args = args[1:]

    s = message.channel.send

    if (cmd == "8" or cmd == "report" or cmd == "define" or cmd == "stars"
            or cmd == "homepage" or cmd == "clapify" or cmd == "cookie"
            or cmd == "say") and Preconditions.args_are_valid(args):
        return await s(embed=EmbedUtil.prep(
            "That command expected an argument (or arguments), but you didn't give it any!",
            "[Read the docs?](https://cakebot.club/docs/commands/)",
        ))

    tcu_result = TextCommandsUtil.handle_common_commands(args, cmd)
    if tcu_result != "":
        return await s(tcu_result)

    if cmd == "help":
        return await s(embed=EmbedUtil.prep(
            title="Help",
            description=
            "You can check out [this page of our website](https://cakebot.club/docs/commands/) for a full command list!",
        ))

    elif cmd == "ping":
        return await s(f"­ЪЈЊ - websocket responded in {client.latency}")

    elif cmd == "invite":
        return await s(embed=EmbedUtil.prep(
            "Invite Cakebot",
            f"[Click here to invite me!]({oauth_url(580573141898887199, permissions=discord.Permissions.all())})",
        ))

    elif cmd == "info":
        return await s(
            TextCommandsUtil.data_template.format(
                message.guild.name,
                str(message.guild.owner),
                len(message.guild.members),
                message.guild.region,
                message.guild.id,
                message.guild.premium_subscription_count,
                str(message.guild.is_icon_animated()),
                str(message.guild.created_at),
                str(message.guild.large),
                str(message.guild.mfa_level == 1),
            ))

    elif cmd == "report":
        return await GitHubUtil.report(s, g, args, message)

    elif cmd == "iss":
        m = await s("Calculating...")
        imp = IssApi.IssLocater()
        lat = imp.lat
        lon = imp.lon
        from reverse_geocoder import search

        geodata = search((lat, lon))
        location = "{0}, {1}".format(geodata[0]["admin1"], geodata[0]["cc"])

        await m.delete()
        return await s(embed=EmbedUtil.prep(
            "International Space Station", "Where it is right now!").add_field(
                name="Location above Earth", value=str(location),
                inline=False).add_field(
                    name="Latitude", value=str(lat), inline=False).add_field(
                        name="Longitude", value=str(lon), inline=False))

    elif cmd == "fact":
        return await s(embed=EmbedUtil.prep("Random Fact", FactImp().fact()))

    elif cmd == "slots":
        slotz = result()
        top = row()
        btm = row()
        form = "win" if slotz[0] == 1 else "lose"
        return await s(
            f"Рађ{top[0]}{top[1]}{top[2]}\n"
            # the line above contains unicode, DO NOT REMOVE
            + f"**>** {slotz[1][0]}{slotz[1][1]}{slotz[1][2]} **<**\n" +
            f"   {btm[0]}{btm[1]}{btm[2]}" + f"\n**You {form}!**")

    elif cmd == "reboot":
        if message.author.id in UserUtil.admins():
            await s("Restarting. This may take up to 5 minutes.")
            # make the bot crash, forcing our server to turn it back on
            _exit(1)
        else:
            return await s(":x: **You are not authorized to run this!**")

    elif cmd == "stars":
        try:
            return await s(
                f"`{args[0]}` has *{g.get_repo(args[0]).stargazers_count}* stars."
            )
        except:
            return await s(
                "Failed to get count. Is the repository valid and public?")

    elif cmd == "homepage":
        try:
            url = g.get_repo(args[0]).homepage
            if url is None:
                url = "(error: homepage not specified by owner)"
            return await s(f"{args[0]}'s homepage is located at {url}")
        except:
            return await s(
                "Failed to fetch homepage. Is the repository valid and public?"
            )

    elif cmd == "boomer":
        return await s(file=discord.File("content/boomer.jpeg"))

    elif cmd == "cookie" or cmd == "cookies":
        subcommand = args[0]
        args = args[1:]
        userId = TextCommandsUtil.get_mentioned_id(args)

        if subcommand in ["balance", "bal"]:
            count = 0
            if userId == 0:
                # assume user wants themself
                count = Database.get_count(message.author.id, config)
            else:
                count = Database.get_count(userId, config)

            return await s(embed=EmbedUtil.prep(
                title="Cookies",
                description=f"User has {count} cookies.",
            ))

        elif subcommand in ["give", "to"]:
            if userId == 0:
                return await s(
                    "I don't see who I should give the cookie to. Try mentioning them."
                )

            new_count = Database.add_cookie(userId, config)

            return await s(
                f"Gave <@!{userId}> a cookie. They now have {new_count} cookies."
            )

    elif cmd == "define":
        if wordsapi_token is None:
            return await s(
                "This command is disabled due to a configuration error on my host's end - didn't find a WordsAPI token in the config!"
            )
        return await s(embed=TextCommandsUtil.define(args, wordsapi_token))

    elif cmd == "start-profiler":
        if message.author.id in UserUtil.admins():
            await s(
                "Started the profiler. Once you are done, run stop-profiler.")
            yappi.set_clock_type("wall")
            yappi.start()
        else:
            return await s(":x: **You are not authorized to run this!**")

    elif cmd == "stop-profiler":
        if message.author.id in UserUtil.admins():
            await s("Saved profiler results to `profile.txt`.")
            yappi.stop()
            yappi.get_func_stats().print_all(open("profile.txt", "w"))
        else:
            return await s(":x: **You are not authorized to run this!**")
Example #32
def _dump_profile():
    import yappi
    yappi.get_func_stats().save('master_prof.out', type='pstat')
Example #33
    def lineReceived(self, line):
        anon_tunnel = self.anon_tunnel
        profile = self.profile

        if line == 'threads':
            for thread in threading.enumerate():
                print "%s \t %d" % (thread.name, thread.ident)
        elif line == 'p':
            if profile:
                for func_stats in yappi.get_func_stats().sort("subtime")[:50]:
                    print "YAPPI: %10dx  %10.3fs" % (
                        func_stats.ncall, func_stats.tsub), func_stats.name
            else:
                logger.error("Profiling disabled!")

        elif line == 'P':
            if profile:
                filename = 'callgrindc_%d.yappi' % anon_tunnel.dispersy.lan_address[
                    1]
                yappi.get_func_stats().save(filename, type='callgrind')
            else:
                logger.error("Profiling disabled!")

        elif line == 't':
            if profile:
                yappi.get_thread_stats().sort("totaltime").print_all()

            else:
                logger.error("Profiling disabled!")

        elif line == 'c':
            print "========\nCircuits\n========\nid\taddress\t\t\t\t\tgoal\thops\tIN (MB)\tOUT (MB)\tinfohash\ttype"
            for circuit_id, circuit in anon_tunnel.community.circuits.items():
                info_hash = circuit.info_hash.encode(
                    'hex')[:10] if circuit.info_hash else '?'
                print "%d\t%s:%d\t%d\t%d\t\t%.2f\t\t%.2f\t\t%s\t%s" % (
                    circuit_id, circuit.first_hop[0], circuit.first_hop[1],
                    circuit.goal_hops, len(circuit.hops), circuit.bytes_down /
                    1024.0 / 1024.0, circuit.bytes_up / 1024.0 / 1024.0,
                    info_hash, circuit.ctype)

        elif line.startswith('s'):
            cur_path = os.getcwd()
            line_split = line.split(' ')
            filename = 'test_file' if len(line_split) == 1 else line_split[1]

            if not os.path.exists(filename):
                logger.info("Creating torrent..")
                with open(filename, 'wb') as fp:
                    fp.write(os.urandom(50 * 1024 * 1024))
                tdef = TorrentDef()
                tdef.add_content(os.path.join(cur_path, filename))
                tdef.set_tracker("udp://fake.net/announce")
                tdef.set_private()
                tdef.finalize()
                tdef.save(os.path.join(cur_path, filename + '.torrent'))
            else:
                logger.info("Loading existing torrent..")
                tdef = TorrentDef.load(filename + '.torrent')
            logger.info("loading torrent done, infohash of torrent: %s" %
                        (tdef.get_infohash().encode('hex')[:10]))

            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()
            dscfg.set_hops(1)
            dscfg.set_dest_dir(cur_path)

            anon_tunnel.session.lm.threadpool.call(
                0, anon_tunnel.session.start_download, tdef, dscfg)
        elif line.startswith('i'):
            # Introduce dispersy port from other main peer to this peer
            line_split = line.split(' ')
            to_introduce_ip = line_split[1]
            to_introduce_port = int(line_split[2])
            self.anon_tunnel.community.add_discovered_candidate(
                Candidate((to_introduce_ip, to_introduce_port), tunnel=False))
        elif line.startswith('d'):
            line_split = line.split(' ')
            filename = 'test_file' if len(line_split) == 1 else line_split[1]

            logger.info("Loading torrent..")
            tdef = TorrentDef.load(filename + '.torrent')
            logger.info("Loading torrent done")

            defaultDLConfig = DefaultDownloadStartupConfig.getInstance()
            dscfg = defaultDLConfig.copy()
            dscfg.set_hops(1)
            dscfg.set_dest_dir(
                os.path.join(
                    os.getcwd(),
                    'downloader%s' % anon_tunnel.session.get_dispersy_port()))

            def start_download():
                def cb(ds):
                    logger.info(
                        'Download infohash=%s, down=%s, progress=%s, status=%s, seedpeers=%s, candidates=%d'
                        % (tdef.get_infohash().encode('hex')[:10],
                           ds.get_current_speed('down'), ds.get_progress(),
                           dlstatus_strings[ds.get_status()],
                           sum(ds.get_num_seeds_peers()),
                           sum(1 for _ in anon_tunnel.community.
                               dispersy_yield_verified_candidates())))
                    return 1.0, False

                download = anon_tunnel.session.start_download(tdef, dscfg)
                download.set_state_callback(cb, delay=1)

            anon_tunnel.session.lm.threadpool.call(0, start_download)

        elif line == 'q':
            anon_tunnel.stop()
            return

        elif line == 'r':
            print "circuit\t\t\tdirection\tcircuit\t\t\tTraffic (MB)"
            from_to = anon_tunnel.community.relay_from_to
            for key in from_to.keys():
                relay = from_to[key]
                logger.info("%s-->\t%s\t\t%.2f" % (
                    (key[0], key[1]),
                    (relay.sock_addr, relay.circuit_id),
                    relay.bytes[1] / 1024.0 / 1024.0,
                ))
Example #34
    ICPol.ICPol.PROP_CONF = c * 0.25

    yappi.start()

    graphs = []
    for i in range(len(GRAPH_LIST)):
        G = GetGraph(GRAPH_LIST[i])
        icPol = ICPol.ICPol(G)
        FILENAME = "Result/" + datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")

        DoExperiment(N_EXP, icPol, i)
        with open(FILENAME + "/Information.txt", 'w') as outfile:
            outfile.write(ExperimentInfo(icPol, i))
            outfile.write('\nYAPPI LOG\n---------')
            stat = yappi.get_func_stats().print_all(out=outfile)

    yappi.stop()

ICPol.ICPol.C_INC_MODE = 'logistic'

for c in range(5):
    ICPol.ICPol.ACC_CONF = c * 0.25
    ICPol.ICPol.PROP_CONF = 0.5

    yappi.start()

    graphs = []
    for i in range(len(GRAPH_LIST)):
        G = GetGraph(GRAPH_LIST[i])
        icPol = ICPol.ICPol(G)
Example #35
    if args.verb:
        print('Full logging...')
        logging.basicConfig(level=logging.DEBUG)
    else:
        print('Error only logging...')
        logging.basicConfig(level=logging.ERROR)

    if args.xprof:
        import yappi
        import time
        yappi.set_clock_type("cpu")
        yappi.start()

    asyncio.run(
        local_benchmark.main_perf(messages_num=args.paym,
                                  wait_num=args.wait,
                                  verbose=args.verb))

    if args.xprof:

        columns = {
            0: ("name", 100),
            1: ("ncall", 20),
            2: ("tsub", 8),
            3: ("ttot", 8),
            4: ("tavg", 8)
        }

        yappi.get_func_stats().strip_dirs().print_all(columns=columns)
        yappi.get_thread_stats().print_all()
Example #36
def run_and_get_func_stats(func, *args, **kwargs):
    run_with_yappi(func, *args, **kwargs)
    return yappi.get_func_stats()
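run_with_yappi itself is not part of the snippet; a plausible sketch matching the call shape above (hypothetical, not the original project's code):

def run_with_yappi(func, *args, **kwargs):
    # Run one call under the profiler; collected stats stay in yappi's buffers.
    yappi.start()
    try:
        return func(*args, **kwargs)
    finally:
        yappi.stop()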
Example #37
# Requires yappi to be installed, use easy_install yappi

import yappi
import sys
from time import time
from tribler import run

if __name__ == '__main__':
    t1 = time()
    yappi.start()
    run()
    yappi.stop()
    print >> sys.stderr, "YAPPI: %s tribler has run for %s seconds" % \
        (yappi.get_clock_type(), time() - t1)
    yappi_stats = yappi.get_func_stats()
    yappi_stats.sort("tsub")
    count = 0
    for func_stat in yappi_stats:
        print >> sys.stderr, "YAPPI: %10dx  %10.3fs %s" % \
            (func_stat.ncall, func_stat.tsub, func_stat.name)
        count += 1
        if count >= 50:
            break
Example #38
    def run_from_argv(self, argv):
        """
        Execute command. Usually from script

        if __name__ == "__main__":
            import sys
            sys.exit(Command.run_from_argv())
        """
        parser = self.create_parser()
        self.add_default_arguments(parser)
        self.add_arguments(parser)
        options = parser.parse_args(argv)
        cmd_options = vars(options)
        args = cmd_options.pop("args", ())
        loglevel = cmd_options.pop("loglevel")
        if loglevel:
            self.setup_logging(loglevel)
        enable_profiling = cmd_options.pop("enable_profiling", False)
        show_metrics = cmd_options.pop("show_metrics", False)
        show_usage = cmd_options.pop("show_usage", False)
        self.no_progressbar = cmd_options.pop("no_progressbar", False)
        if enable_profiling:
            # Start profiler
            import yappi

            yappi.start()
        try:
            if show_usage:
                import resource

                start_usage = resource.getrusage(resource.RUSAGE_SELF)
            return self.handle(*args, **cmd_options) or 0
        except CommandError as e:
            self.print(str(e))
            return 1
        except KeyboardInterrupt:
            self.print("Ctrl+C")
            return 3
        except AssertionError as e:
            if e.args and e.args[0]:
                self.print("ERROR: %s" % e.args[0])
            else:
                self.print("Assertion error: %s" % e)
            return 4
        except Exception:
            from noc.core.debug import error_report

            error_report()
            return 2
        finally:
            if show_usage:
                stop_usage = resource.getrusage(resource.RUSAGE_SELF)
                self.show_usage(start_usage, stop_usage)
            if enable_profiling:
                i = yappi.get_func_stats()
                i.print_all(
                    out=self.stdout,
                    columns={
                        0: ("name", 80),
                        1: ("ncall", 10),
                        2: ("tsub", 8),
                        3: ("ttot", 8),
                        4: ("tavg", 8),
                    },
                )
            if show_metrics:
                from noc.core.perf import apply_metrics

                d = apply_metrics({})
                self.print("Internal metrics:")
                for k in d:
                    self.print("%40s : %s" % (k, d[k]))
Example #39
        func()


def func3():
    result = []
    a = 0
    for i in range(LARGE_NUMBER):
        a += i
    result.append(a)


def main():
    t = threading.Thread(target=func)
    t2 = threading.Thread(target=func2)
    t3 = threading.Thread(target=func3)

    t.start()
    t2.start()
    t3.start()

    t.join()
    t2.join()
    t3.join()


if __name__ == '__main__':
    yappi.start()
    main()
    yappi.get_func_stats().strip_dirs().print_all()
    yappi.get_thread_stats().print_all()
Example #40
def main(args, datasets):

    do_prof = args.do_profile
    do_tensorflow = not args.disable_tensorflow

    #use the environment variable for cupy/cuda choice
    args.use_cuda = USE_CUPY

    analysis_corrections = None
    if "analyze" in args.action:
        analysis_corrections = AnalysisCorrections(args, do_tensorflow)

    # Optionally disable pinned memory (will be somewhat slower)
    if args.use_cuda:
        import cupy
        if not args.pinned:
            cupy.cuda.set_allocator(None)
            cupy.cuda.set_pinned_memory_allocator(None)

    #Use sync-only datasets
    if args.do_sync:
        datasets = datasets_sync

    #Filter datasets by era
    datasets_to_process = []
    for ds in datasets:
        if args.datasets is None or ds[0] in args.datasets:
            if args.eras is None or ds[1] in args.eras:
                datasets_to_process += [ds]
                print("Will consider dataset", ds)
    if len(datasets) == 0:
        raise Exception("No datasets considered, please check the --datasets and --eras options")
    datasets = datasets_to_process

    hmumu_utils.NUMPY_LIB, hmumu_utils.ha = choose_backend(args.use_cuda)
    Dataset.numpy_lib = hmumu_utils.NUMPY_LIB
    NUMPY_LIB = hmumu_utils.NUMPY_LIB 

    # All analysis definitions (cut values etc) should go here
    analysis_parameters = {
        "baseline": {

            "nPV": 0,
            "NdfPV": 4,
            "zPV": 24,

            # Will be applied with OR
            "hlt_bits": {
                "2016": ["HLT_IsoMu24", "HLT_IsoTkMu24"],
                "2017": ["HLT_IsoMu27"],
                "2018": ["HLT_IsoMu24"],
                },

            "muon_pt": 20,
            "muon_pt_leading": {"2016": 26.0, "2017": 29.0, "2018": 26.0},
            "muon_eta": 2.4,
            "muon_iso": 0.25,
            "muon_id": {"2016": "medium", "2017": "medium", "2018": "medium"},
            "muon_trigger_match_dr": 0.1,
            "muon_iso_trigger_matched": 0.15,
            "muon_id_trigger_matched": {"2016": "tight", "2017": "tight", "2018": "tight"},
 
            "do_rochester_corrections": True, 
            "do_lepton_sf": True,
            
            "do_jec": True,
            "jec_tag": {"2016": "Summer16_07Aug2017_V11", "2017": "Fall17_17Nov2017_V32", "2018": "Autumn18_V16"}, 
            "jet_mu_dr": 0.4,
            "jet_pt_leading": {"2016": 35.0, "2017": 35.0, "2018": 35.0},
            "jet_pt_subleading": {"2016": 25.0, "2017": 25.0, "2018": 25.0},
            "jet_eta": 4.7,
            "jet_id": "tight",
            "jet_puid": "loose",
            "jet_veto_eta": [2.65, 3.139],
            "jet_veto_raw_pt": 50.0,  
            "jet_btag": {"2016": 0.6321, "2017": 0.4941, "2018": 0.4184},
            "do_factorized_jec": args.do_factorized_jec,

            "softjet_pt": 5.0,
            "softjet_evt_dr2": 0.04, 

            "cat5_dijet_inv_mass": 400.0,
            "cat5_abs_jj_deta_cut": 2.5,

            "masswindow_z_peak": [76, 106],
            "masswindow_h_sideband": [110, 150],
            "masswindow_h_peak": [115, 135],

            "inv_mass_bins": 41,

            "extra_electrons_pt": 20,
            "extra_electrons_eta": 2.5,
            "extra_electrons_iso": 0.4, #Check if we want to apply this
            "extra_electrons_id": "mvaFall17V1Iso_WP90",

            "save_dnn_vars": True,
            "dnn_vars_path": "{0}/dnn_vars".format(args.out),

            #If true, apply mjj > cut, otherwise inverse
            "vbf_filter_mjj_cut": 350,
            "vbf_filter": {
                "dy_m105_160_mg": True,
                "dy_m105_160_amc": True,
                "dy_m105_160_vbf_mg": False,
                "dy_m105_160_vbf_amc": False, 
            },
            "ggh_nnlops_reweight": {
                "ggh_amc": 1,
                "ggh_amcPS": 1,
                "ggh_amcPS_TuneCP5down": 1,
                "ggh_amcPS_TuneCP5up": 1,
                "ggh_powheg": 2,
                "ggh_powhegPS": 2,
            },
            "ZpT_reweight": {
                "2016": {
                    "dy_0j": 2, 
                    "dy_1j": 2, 
                    "dy_2j": 2, 
                    "dy_m105_160_amc": 2, 
                    "dy_m105_160_vbf_amc": 2,
                },
                "2017": {
                    "dy_0j": 1,
                    "dy_1j": 1,
                    "dy_2j": 1,
                    "dy_m105_160_amc": 1,
                    "dy_m105_160_vbf_amc": 1,
                },
                "2018": {
                    "dy_0j": 1,
                    "dy_1j": 1,
                    "dy_2j": 1,
                    "dy_m105_160_amc": 1,
                    "dy_m105_160_vbf_amc": 1,
                },
            },
           
            #Pisa Group's DNN input variable order for keras
            "dnnPisa_varlist1_order": ['Mqq_log','Rpt','qqDeltaEta','ll_zstar','NSoft5','minEtaHQ','Higgs_pt','log(Higgs_pt)','Higgs_eta','Mqq','QJet0_pt_touse','QJet1_pt_touse','QJet0_eta','QJet1_eta','QJet0_phi','QJet1_phi','QJet0_qgl','QJet1_qgl'],
            "dnnPisa_varlist2_order": ['Higgs_m','Higgs_mRelReso','Higgs_mReso'],
            #Irene's DNN input variable order for keras
            "dnn_varlist_order": ['softJet5', 'dRmm','dEtamm','M_jj','pt_jj','eta_jj','phi_jj','M_mmjj','eta_mmjj','phi_mmjj','dEta_jj','Zep','dRmin_mj', 'dRmax_mj', 'dRmin_mmj','dRmax_mmj','dPhimm','leadingJet_pt','subleadingJet_pt', 'leadingJet_eta','subleadingJet_eta','leadingJet_qgl','subleadingJet_qgl','cthetaCS','Higgs_pt','Higgs_eta','Higgs_mass'],
            "dnn_input_histogram_bins": {
                "softJet5": (0,10,10),
                "dRmm": (0,5,11),
                "dEtamm": (-2,2,11),
                "dPhimm": (-2,2,11),
                "M_jj": (0,2000,11),
                "pt_jj": (0,400,11),
                "eta_jj": (-5,5,11),
                "phi_jj": (-5,5,11),
                "M_mmjj": (0,2000,11),
                "eta_mmjj": (-3,3,11),
                "phi_mmjj": (-3,3,11),
                "dEta_jj": (-3,3,11),
                "Zep": (-2,2,11),
                "dRmin_mj": (0,5,11),
                "dRmax_mj": (0,5,11),
                "dRmin_mmj": (0,5,11),
                "dRmax_mmj": (0,5,11),
                "leadingJet_pt": (0, 200, 11),
                "subleadingJet_pt": (0, 200, 11),
                "leadingJet_eta": (-5, 5, 11),
                "subleadingJet_eta": (-5, 5, 11),
                "leadingJet_qgl": (0, 1, 11),
                "subleadingJet_qgl": (0, 1, 11),
                "cthetaCS": (-1, 1, 11),
                "Higgs_pt": (0, 200, 11),
                "Higgs_eta": (-3, 3, 11),
                "Higgs_mass": (110, 150, 11),
                "dnn_pred": (0, 1, 1001),
                "dnn_pred2": (0, 1, 11),
                "bdt_ucsd": (-1, 1, 11),
                "bdt2j_ucsd": (-1, 1, 11),
                "bdt01j_ucsd": (-1, 1, 11),
                "MET_pt": (0, 200, 11),
                "hmmthetacs": (-1, 1, 11),
                "hmmphics": (-4, 4, 11),
            },

            "categorization_trees": {},
            "do_bdt_ucsd": True,
            "do_dnn_pisa": True,
        },
    }
    histo_bins = {
        "muon_pt": np.linspace(0, 200, 101, dtype=np.float32),
        "muon_eta": np.linspace(-2.5, 2.5, 21, dtype=np.float32),
        "npvs": np.linspace(0, 100, 101, dtype=np.float32),
        "dijet_inv_mass": np.linspace(0, 2000, 11, dtype=np.float32),
        "inv_mass": np.linspace(70, 150, 11, dtype=np.float32),
        "numjet": np.linspace(0, 10, 11, dtype=np.float32),
        "jet_pt": np.linspace(0, 300, 101, dtype=np.float32),
        "jet_eta": np.linspace(-4.7, 4.7, 11, dtype=np.float32),
        "pt_balance": np.linspace(0, 5, 11, dtype=np.float32),
        "numjets": np.linspace(0, 10, 11, dtype=np.float32),
        "jet_qgl": np.linspace(0, 1, 11, dtype=np.float32),
        "higgs_inv_mass_uncertainty": np.linspace(0, 10, 101, dtype=np.float32),
        "higgs_rel_inv_mass_uncertainty": np.linspace(0, 0.05, 101, dtype=np.float32)
    }
    for hname, bins in analysis_parameters["baseline"]["dnn_input_histogram_bins"].items():
        histo_bins[hname] = np.linspace(bins[0], bins[1], bins[2], dtype=np.float32)

    for masswindow in ["z_peak", "h_peak", "h_sideband"]:
        mw = analysis_parameters["baseline"]["masswindow_" + masswindow]
        histo_bins["inv_mass_{0}".format(masswindow)] = np.linspace(mw[0], mw[1], 41, dtype=np.float32)

    histo_bins["dnn_pred2"] = {
        "h_peak": np.array([0., 0.905, 0.915, 0.925, 0.935, 0.94, 0.945, 0.95, 0.955, 0.96, 0.965,0.97, 0.975,0.98, 0.985,1.0], dtype=np.float32),
        "z_peak": np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 1.0], dtype=np.float32),
        "h_sideband": np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 1.0], dtype=np.float32),
    }

    analysis_parameters["baseline"]["histo_bins"] = histo_bins

    #analysis_parameters["oldjec"] = copy.deepcopy(analysis_parameters["baseline"])
    #analysis_parameters["oldjec"]["jec_tag"]["2018"] = "Autumn18_V8"

    #Run baseline analysis
    outpath = "{0}/partial_results".format(args.out)
    try:
        os.makedirs(outpath)
    except FileExistsError as e:
        pass

    with open('{0}/parameters.pkl'.format(outpath), 'wb') as handle:
        pickle.dump(analysis_parameters, handle, protocol=pickle.HIGHEST_PROTOCOL)

    #Recreate dump of all filenames
    cache_filename = args.cache_location + "/datasets.json"
    if ("cache" in args.action) and (args.jobfiles is None):
        print("--action cache and no jobfiles specified, creating datasets.json dump of all filenames")
        if not os.path.isdir(args.cache_location):
            os.makedirs(args.cache_location)
        filenames_cache = {}
        for dataset in datasets:
            dataset_name, dataset_era, dataset_globpattern, is_mc = dataset
            filenames_all = glob.glob(args.datapath + dataset_globpattern, recursive=True)
            filenames_all = [fn for fn in filenames_all if not "Friend" in fn]
            filenames_cache[dataset_name + "_" + dataset_era] = [
                fn.replace(args.datapath, "") for fn in filenames_all]

            if len(filenames_all) == 0:
                raise Exception("Dataset {0} matched 0 files from glob pattern {1}, verify that the data files are located in {2}".format(
                    dataset_name, dataset_globpattern, args.datapath
                ))
    
        #save all dataset filenames to a json file 
        print("Creating a json dump of all the dataset filenames based on data found in {0}".format(args.datapath))
        if os.path.isfile(cache_filename):
            print("Cache file {0} already exists, we will not overwrite it to be safe.".format(cache_filename), file=sys.stderr)
            print("Delete it or change --cache-location and try again.", file=sys.stderr)
            sys.exit(1)
        with open(cache_filename, "w") as fi:
            fi.write(json.dumps(filenames_cache, indent=2))

    if ("cache" in args.action or "analyze" in args.action) and (args.jobfiles is None):
        #Create a list of job files for processing
        jobfile_data = []
        print("Loading list of filenames from {0}".format(cache_filename))
        if not os.path.isfile(cache_filename):
            raise Exception("Cached dataset list of filenames not found in {0}, please run this code with --action cache".format(
                cache_filename))
        filenames_cache = json.load(open(cache_filename, "r"))

        for dataset in datasets:
            dataset_name, dataset_era, dataset_globpattern, is_mc = dataset
            try:
                filenames_all = filenames_cache[dataset_name + "_" + dataset_era]
            except KeyError as e:
                print("Could not load {0} from {1}, please make sure this dataset has been added to cache".format(
                    dataset_name + "_" + dataset_era, cache_filename), file=sys.stderr)
                raise e

            filenames_all_full = [args.datapath + "/" + fn for fn in filenames_all]
            chunksize = args.chunksize * chunksize_multiplier.get(dataset_name, 1)
            print("Saving dataset {0}_{1} with {2} files in {3} files per chunk to jobfiles".format(
                dataset_name, dataset_era, len(filenames_all_full), chunksize))
            jobfile_dataset = create_dataset_jobfiles(dataset_name, dataset_era,
                filenames_all_full, is_mc, chunksize, args.out)
            jobfile_data += jobfile_dataset
            print("Dataset {0}_{1} consists of {2} chunks".format(
                dataset_name, dataset_era, len(jobfile_dataset)))

        assert(len(jobfile_data) > 0)
        assert(len(jobfile_data[0]["filenames"]) > 0)

    #For each dataset, find out which chunks we want to process
    if "cache" in args.action or "analyze" in args.action:
        jobfile_data = []
        if not (args.jobfiles_load is None):
            args.jobfiles = [l.strip() for l in open(args.jobfiles_load).readlines()]
        if args.jobfiles is None:
            print("You did not specify to process specific dataset chunks, assuming you want to process all chunks")
            print("If this is not true, please specify e.g. --jobfiles data_2018_0.json data_2018_1.json ...")
            args.jobfiles = []
            for dataset in datasets:
                dataset_name, dataset_era, dataset_globpattern, is_mc = dataset
                jobfiles_dataset = glob.glob(args.out + "/jobfiles/{0}_{1}_*.json".format(dataset_name, dataset_era))
                assert(len(jobfiles_dataset) > 0)
                if args.maxchunks > 0:
                    jobfiles_dataset = jobfiles_dataset[:args.maxchunks]
                args.jobfiles += jobfiles_dataset
       
        #Now load the jobfiles 
        assert(len(args.jobfiles) > 0)
        print("You specified --jobfiles {0}, processing only these dataset chunks".format(" ".join(args.jobfiles))) 
        jobfile_data = []
        for f in args.jobfiles:
            jobfile_data += [json.load(open(f))]

        chunkstr = " ".join(["{0}_{1}_{2}".format(
            ch["dataset_name"], ch["dataset_era"], ch["dataset_num_chunk"])
            for ch in jobfile_data])
        print("Will process {0} dataset chunks: {1}".format(len(jobfile_data), chunkstr))
        assert(len(jobfile_data) > 0)

    #Start the profiler only in the actual data processing
    if do_prof:
        import yappi
        filename = 'analysis.prof'
        yappi.set_clock_type('cpu')
        yappi.start(builtins=True)

    if "cache" in args.action:
        print("Running the 'cache' step of the analysis, ROOT files will be opened and branches will be uncompressed")
        print("Will retrieve dataset filenames based on existing ROOT files on filesystem in datapath={0}".format(args.datapath)) 
       
        try:
            os.makedirs(args.cache_location)
        except Exception as e:
            pass

        run_cache(args, outpath, jobfile_data, analysis_parameters)
    
    if "analyze" in args.action:
        run_analysis(args, outpath, jobfile_data, analysis_parameters, analysis_corrections)

    if "merge" in args.action:
        with ProcessPoolExecutor(max_workers=args.nthreads) as executor:
            for dataset in datasets:
                dataset_name, dataset_era, dataset_globpattern, is_mc = dataset
                fut = executor.submit(merge_partial_results, dataset_name, dataset_era, outpath)
        print("done merging")
    if do_prof:
        stats = yappi.get_func_stats()
        stats.save(filename, type='callgrind')

    import resource
    total_memory = resource.getrusage(resource.RUSAGE_CHILDREN).ru_maxrss
    total_memory += resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    print("maxrss={0} MB".format(total_memory/1024))
Example #41
    def _on_keyboard_down(self, _keyboard, keycode, _text, modifiers):
        self.last_key_down = keycode
        ctrl_pressed = "ctrl" in modifiers
        if self.controls.note.focus:
            return  # when making notes, don't allow keyboard shortcuts
        popup = self.popup_open
        if popup:
            if keycode[1] in ["f5", "f6", "f7", "f8"]:  # switch between popups
                popup.dismiss()
                return
            elif keycode[1] in ["enter", "numpadenter"]:
                fn = getattr(popup.content, "on_submit", None)
                if fn:
                    fn()
                return
            else:
                return
        shift_pressed = "shift" in modifiers
        shortcuts = self.shortcuts
        if keycode[1] == "spacebar":
            self.toggle_continuous_analysis()
        elif keycode[1] == "k":
            self.board_gui.toggle_coordinates()
        elif keycode[1] in ["pause", "break", "f15"] and not ctrl_pressed:
            self.controls.timer.paused = not self.controls.timer.paused
        elif keycode[1] in ["`", "~", "f12"]:
            self.zen = (self.zen + 1) % 3
        elif keycode[1] in ["left", "z"]:
            self("undo", 1 + shift_pressed * 9 + ctrl_pressed * 9999)
        elif keycode[1] in ["right", "x"]:
            self("redo", 1 + shift_pressed * 9 + ctrl_pressed * 9999)
        elif keycode[1] == "home":
            self("undo", 9999)
        elif keycode[1] == "end":
            self("redo", 9999)
        elif keycode[1] == "pageup":
            self.controls.move_tree.make_selected_node_main_branch()
        elif keycode[1] == "n" and not ctrl_pressed:
            self("find-mistake", "undo" if shift_pressed else "redo")
        elif keycode[1] == "delete" and ctrl_pressed:
            self.controls.move_tree.delete_selected_node()
        elif keycode[1] == "c" and not ctrl_pressed:
            self.controls.move_tree.toggle_selected_node_collapse()
        elif keycode[1] == "n" and ctrl_pressed:
            self("new-game-popup")
        elif keycode[1] == "l" and ctrl_pressed:
            self("analyze-sgf-popup")
        elif keycode[1] == "s" and ctrl_pressed:
            self("save-game")
        elif keycode[1] == "d" and ctrl_pressed:
            self("save-game-as-popup")
        elif keycode[1] == "c" and ctrl_pressed:
            Clipboard.copy(self.game.root.sgf())
            self.controls.set_status(i18n._("Copied SGF to clipboard."),
                                     STATUS_INFO)
        elif keycode[1] == "v" and ctrl_pressed:
            self.load_sgf_from_clipboard()
        elif keycode[1] in shortcuts.keys() and not ctrl_pressed:
            shortcut = shortcuts[keycode[1]]
            if isinstance(shortcut, Widget):
                shortcut.trigger_action(duration=0)
            else:
                self(*shortcut)
        elif keycode[1] == "f9" and self.debug_level >= OUTPUT_EXTRA_DEBUG:
            import yappi

            yappi.set_clock_type("cpu")
            yappi.start()
            self.log("starting profiler", OUTPUT_ERROR)
        elif keycode[1] == "f10" and self.debug_level >= OUTPUT_EXTRA_DEBUG:
            import time
            import yappi

            stats = yappi.get_func_stats()
            filename = f"callgrind.{int(time.time())}.prof"
            stats.save(filename, type="callgrind")
            self.log(f"wrote profiling results to {filename}", OUTPUT_ERROR)
Example #42
def main(argv):
    # setup the argument parser
    arg_parser = argparse.ArgumentParser(description='backtest')
    arg_parser.add_argument('-m',
                            '--mode',
                            required=False,
                            default='B',
                            help="set backtest mode(B or T)")
    arg_parser.add_argument('-d',
                            '--days',
                            required=False,
                            default=1,
                            type=int,
                            help="set backtest days")

    arg_parser.add_argument('-sd',
                            '--startDate',
                            required=False,
                            default='',
                            help="set backtest start date")

    arg_parser.add_argument('-s',
                            '--vt_symbol',
                            required=False,
                            default='rb1801',
                            help="set backtest vt_symbol")

    arg_parser.add_argument('-s2',
                            '--vt_symbol2',
                            required=False,
                            default='',
                            help="set spread vt_symbol2")

    arg_parser.add_argument('-hd',
                            '--historyDays',
                            required=False,
                            default=0,
                            type=int,
                            help="set history days")

    arg_parser.add_argument('-sf',
                            '--settingFile',
                            required=False,
                            default='CTA_setting_multi.json',
                            help="setting file name")

    arg_parser.add_argument('-o',
                            '--optimization',
                            required=False,
                            action='store_true',
                            help="run parameter optimization")

    arg_parser.add_argument('-yappi',
                            '--yappi',
                            required=False,
                            action='store_true',
                            help="enable yappi profiling")

    # parse arguments
    cmd = arg_parser.parse_args(argv)

    if cmd.yappi:
        import yappi
        yappi.set_clock_type("cpu")
        yappi.start()

    backtesting(settingFile=cmd.settingFile,
                startDate=cmd.startDate,
                days=cmd.days,
                mode=cmd.mode,
                vt_symbol=cmd.vt_symbol,
                vt_symbol2=cmd.vt_symbol2,
                historyDays=cmd.historyDays,
                optimization=cmd.optimization)

    if cmd.yappi:
        yappi.get_func_stats().print_all()
        yappi.get_thread_stats().print_all()
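Both flags use action='store_true' rather than type=bool because argparse applies type to the raw command-line string, and bool('False') is True. A quick demonstration of the pitfall (parser name illustrative):

import argparse

p = argparse.ArgumentParser()
p.add_argument('--broken', type=bool, default=False)
p.add_argument('--fixed', action='store_true')

print(p.parse_args(['--broken', 'False']).broken)  # True: any non-empty string is truthy
print(p.parse_args([]).fixed)                      # False: flag absent
print(p.parse_args(['--fixed']).fixed)             # True: flag present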
Example #43
    def test_merge_aabab_aabbc(self):
        _timings = {
            "a_1": 15,
            "a_2": 14,
            "b_1": 12,
            "a_3": 10,
            "b_2": 9,
            "c_1": 4
        }
        _yappi._set_test_timings(_timings)

        def a():
            if self._ncall == 1:
                self._ncall += 1
                a()
            elif self._ncall == 5:
                self._ncall += 1
                a()
            else:
                b()

        def b():
            if self._ncall == 2:
                self._ncall += 1
                a()
            elif self._ncall == 6:
                self._ncall += 1
                b()
            elif self._ncall == 7:
                c()
            else:
                return

        def c():
            pass

        self._ncall = 1
        stats = utils.run_and_get_func_stats(a)
        stats.save("ystats1.ys")
        yappi.clear_stats()
        _yappi._set_test_timings(_timings)
        #stats.print_all()

        self._ncall = 5
        stats = utils.run_and_get_func_stats(a)
        stats.save("ystats2.ys")

        #stats.print_all()

        def a():  # same name but another function(code object)
            pass

        yappi.start()
        a()
        stats = yappi.get_func_stats().add(["ystats1.ys", "ystats2.ys"])
        #stats.print_all()
        self.assertEqual(len(stats), 4)

        fsa = None
        for stat in stats:
            if stat.name == "a" and stat.ttot == 45:
                fsa = stat
                break
        self.assertTrue(fsa is not None)

        self.assertEqual(fsa.ncall, 7)
        self.assertEqual(fsa.nactualcall, 3)
        self.assertEqual(fsa.ttot, 45)
        self.assertEqual(fsa.tsub, 10)
        fsb = utils.find_stat_by_name(stats, "b")
        fsc = utils.find_stat_by_name(stats, "c")
        self.assertEqual(fsb.ncall, 6)
        self.assertEqual(fsb.nactualcall, 3)
        self.assertEqual(fsb.ttot, 36)
        self.assertEqual(fsb.tsub, 27)
        self.assertEqual(fsb.tavg, 6)
        self.assertEqual(fsc.ttot, 8)
        self.assertEqual(fsc.tsub, 8)
        self.assertEqual(fsc.tavg, 4)
        self.assertEqual(fsc.nactualcall, 2)
        self.assertEqual(fsc.ncall, 2)
Example #44
    def test_subsequent_profile(self):
        WORKER_COUNT = 5

        def a():
            pass

        def b():
            pass

        def c():
            pass

        _timings = {
            "a_1": 3,
            "b_1": 2,
            "c_1": 1,
        }

        yappi.start()

        def g():
            pass

        g()
        yappi.stop()
        yappi.clear_stats()
        _yappi._set_test_timings(_timings)
        yappi.start()

        _dummy = []
        for i in range(WORKER_COUNT):
            t = threading.Thread(target=a)
            t.start()
            t.join()
        for i in range(WORKER_COUNT):
            t = threading.Thread(target=b)
            t.start()
            _dummy.append(t)
            t.join()
        for i in range(WORKER_COUNT):
            t = threading.Thread(target=a)
            t.start()
            t.join()
        for i in range(WORKER_COUNT):
            t = threading.Thread(target=c)
            t.start()
            t.join()
        yappi.stop()
        yappi.start()

        def f():
            pass

        f()
        stats = yappi.get_func_stats()
        fsa = utils.find_stat_by_name(stats, 'a')
        fsb = utils.find_stat_by_name(stats, 'b')
        fsc = utils.find_stat_by_name(stats, 'c')
        self.assertEqual(fsa.ncall, 10)
        self.assertEqual(fsb.ncall, 5)
        self.assertEqual(fsc.ncall, 5)
        self.assertEqual(fsa.ttot, 30)
        self.assertEqual(fsa.tsub, 30)
        self.assertEqual(fsb.ttot, 10)
        self.assertEqual(fsb.tsub, 10)
        self.assertEqual(fsc.ttot, 5)
        self.assertEqual(fsc.tsub, 5)

        # macOS may optimize by creating only one worker thread
        self.assertTrue(len(yappi.get_thread_stats()) >= 2)
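The key behavior exercised here is that yappi keeps accumulating into the same stats across stop()/start() cycles until clear_stats() is called; a minimal sketch of that:

import yappi

def work():
    pass

yappi.start()
work()
yappi.stop()
yappi.start()  # second session: stats from the first are retained
work()
yappi.stop()

for stat in yappi.get_func_stats():
    if stat.name == 'work':
        print(stat.ncall)  # 2: both sessions merged into one entry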
Example #45
def main(args):
    do_prof = args.do_profile
    do_tensorflow = not args.disable_tensorflow

    #use the environment variable for cupy/cuda choice
    args.use_cuda = USE_CUPY

    # Optionally disable pinned memory (will be somewhat slower)
    if args.use_cuda:
        import cupy
        if not args.pinned:
            cupy.cuda.set_allocator(None)
            cupy.cuda.set_pinned_memory_allocator(None)

    with open(args.datasets_yaml) as fi:
        datasets = yaml.load(fi, Loader=yaml.FullLoader)["datasets"]
    #Filter datasets by era
    datasets_to_process = []
    for ds in datasets:
        if args.datasets is None or ds["name"] in args.datasets:
            if args.eras is None or ds["era"] in args.eras:
                datasets_to_process += [ds]
    if len(datasets_to_process) == 0:
        raise Exception("No datasets considered, please check the --datasets and --eras options")
    datasets = datasets_to_process

    hmumu_utils.NUMPY_LIB, hmumu_utils.ha = choose_backend(args.use_cuda)
    Dataset.numpy_lib = hmumu_utils.NUMPY_LIB
    NUMPY_LIB = hmumu_utils.NUMPY_LIB 

    outpath_partial = "{0}/partial_results".format(args.out)
    try:
        os.makedirs(outpath_partial)
    except FileExistsError as e:
        print("Output path {0} already exists, not recreating".format(outpath_partial))

    #save the parameters as a pkl file
    from pars import analysis_parameters
    for analysis_name in analysis_parameters.keys():
        analysis_parameters[analysis_name]["do_factorized_jec"] = args.do_factorized_jec
        analysis_parameters[analysis_name]["dnn_vars_path"] = "{0}/dnn_vars".format(args.out)
 
    with open('{0}/parameters.pkl'.format(outpath_partial), 'wb') as handle:
        pickle.dump(analysis_parameters, handle, protocol=pickle.HIGHEST_PROTOCOL)

    #Recreate dump of all filenames
    cache_filename = "{0}/datasets.json".format(args.out)

    use_skim = False
    if args.cachepath is None:
        print("--cachepath not specified, will process unskimmed NanoAOD, which is somewhat slower!")
        print("Please see the README.md on how to skim the NanoAOD")
        datapath = args.datapath
    else:
        print("Processing skimmed NanoAOD")
        datapath = args.cachepath
        use_skim = True
    check_and_recreate_filename_cache(cache_filename, datapath, datasets, use_skim)

    #Create the jobfiles
    if args.jobfiles is None:
        create_all_jobfiles(datasets, cache_filename, datapath, args.chunksize, args.out)

    #For each dataset, find out which chunks we want to process
    if "analyze" in args.action:
        jobfile_data = load_jobfiles(datasets, args.jobfiles_load, args.jobfiles, args.maxchunks, args.out)

    #Start the profiler only in the actual data processing
    if do_prof:
        import yappi
        yappi.set_clock_type('cpu')
        yappi.start(builtins=True)

    #Run the physics analysis on all specified jobfiles  
    if "analyze" in args.action:
        print("Running the 'analyze' step of the analysis, processing the events into histograms with all systematics")
        analysis_corrections = AnalysisCorrections(args, do_tensorflow)
        run_analysis(args, outpath_partial, jobfile_data, analysis_parameters, analysis_corrections)
    
    if do_prof:
        stats = yappi.get_func_stats()
        stats.save("analysis.prof", type='callgrind')

    #Merge the partial results (pieces of each dataset)
    if "merge" in args.action:
        with ProcessPoolExecutor(max_workers=args.nthreads) as executor:
            for dataset in datasets:
                dataset_name = dataset["name"]
                dataset_era = dataset["era"]
                is_mc = dataset["is_mc"]
                fut = executor.submit(merge_partial_results, dataset_name, dataset_era, args.out, outpath_partial)
        print("done merging")

    #print memory usage
    total_memory = resource.getrusage(resource.RUSAGE_CHILDREN).ru_maxrss
    total_memory += resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    print("maxrss={0} MB".format(total_memory/1024))
Example #46

import yappi
#yappi.set_clock_type("waLL")
#yappi.start(builtins=True)
yappi.start()
class A:
    def bar(self):
        pass
def foo():
    def inner_foo():
        pass
    import time
    time.sleep(2.0)
    for i in range(20000000):
        pass
    a = A()
    a.bar()
    inner_foo()
    
foo()
#yappi.write_callgrind_stats()
yappi.get_func_stats().sort("totaltime").print_all()
yappi.get_thread_stats().print_all()
#import cProfile
#cProfile.run('foo()', 'fooprof')
#import pstats
#p = pstats.Stats('fooprof')
#p.strip_dirs().sort_stats(-1).print_stats()
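Under the default cpu clock, the time.sleep(2.0) in foo contributes almost nothing to its ttot; the busy loop dominates. The commented-out wall-clock line is what would make the sleep visible. A minimal sketch of the difference:

import time
import yappi

def waiter():
    time.sleep(0.5)

yappi.set_clock_type('wall')  # must be set before start()
yappi.start()
waiter()
yappi.stop()
yappi.get_func_stats().print_all()  # waiter's ttot now includes the sleep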
Example #47
def profile(config_location="/etc/redeem"):
  import yappi
  yappi.start()
  main(config_location)
  yappi.get_func_stats().print_all()
Example #48
def profile():
    import yappi
    yappi.start()
    main()
    yappi.get_func_stats().print_all()
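Both wrappers above leave the profiler running and print nothing if main() raises; a hedged variant that always stops and reports (the name profiled is illustrative):

import yappi

def profiled(func, *args, **kwargs):
    yappi.start()
    try:
        return func(*args, **kwargs)
    finally:
        # stop and report even if func raised
        yappi.stop()
        yappi.get_func_stats().print_all()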
Example #49
    def test_merge_stats(self):
        _timings = {
            "a_1": 15,
            "b_1": 14,
            "c_1": 12,
            "d_1": 10,
            "e_1": 9,
            "f_1": 7,
            "g_1": 6,
            "h_1": 5,
            "i_1": 1
        }
        _yappi._set_test_timings(_timings)

        def a():
            b()

        def b():
            c()

        def c():
            d()

        def d():
            e()

        def e():
            f()

        def f():
            g()

        def g():
            h()

        def h():
            i()

        def i():
            pass

        yappi.start()
        a()
        a()
        yappi.stop()
        stats = yappi.get_func_stats()
        self.assertRaises(NotImplementedError, stats.save, "",
                          "INVALID_SAVE_TYPE")
        stats.save("ystats2.ys")
        yappi.clear_stats()
        _yappi._set_test_timings(_timings)
        yappi.start()
        a()
        stats = yappi.get_func_stats().add("ystats2.ys")
        fsa = utils.find_stat_by_name(stats, "a")
        fsb = utils.find_stat_by_name(stats, "b")
        fsc = utils.find_stat_by_name(stats, "c")
        fsd = utils.find_stat_by_name(stats, "d")
        fse = utils.find_stat_by_name(stats, "e")
        fsf = utils.find_stat_by_name(stats, "f")
        fsg = utils.find_stat_by_name(stats, "g")
        fsh = utils.find_stat_by_name(stats, "h")
        fsi = utils.find_stat_by_name(stats, "i")
        self.assertEqual(fsa.ttot, 45)
        self.assertEqual(fsa.ncall, 3)
        self.assertEqual(fsa.nactualcall, 3)
        self.assertEqual(fsa.tsub, 3)
        self.assertEqual(fsa.children[fsb].ttot, fsb.ttot)
        self.assertEqual(fsa.children[fsb].tsub, fsb.tsub)
        self.assertEqual(fsb.children[fsc].ttot, fsc.ttot)
        self.assertEqual(fsb.children[fsc].tsub, fsc.tsub)
        self.assertEqual(fsc.tsub, 6)
        self.assertEqual(fsc.children[fsd].ttot, fsd.ttot)
        self.assertEqual(fsc.children[fsd].tsub, fsd.tsub)
        self.assertEqual(fsd.children[fse].ttot, fse.ttot)
        self.assertEqual(fsd.children[fse].tsub, fse.tsub)
        self.assertEqual(fse.children[fsf].ttot, fsf.ttot)
        self.assertEqual(fse.children[fsf].tsub, fsf.tsub)
        self.assertEqual(fsf.children[fsg].ttot, fsg.ttot)
        self.assertEqual(fsf.children[fsg].tsub, fsg.tsub)
        self.assertEqual(fsg.ttot, 18)
        self.assertEqual(fsg.tsub, 3)
        self.assertEqual(fsg.children[fsh].ttot, fsh.ttot)
        self.assertEqual(fsg.children[fsh].tsub, fsh.tsub)
        self.assertEqual(fsh.ttot, 15)
        self.assertEqual(fsh.tsub, 12)
        self.assertEqual(fsh.tavg, 5)
        self.assertEqual(fsh.children[fsi].ttot, fsi.ttot)
        self.assertEqual(fsh.children[fsi].tsub, fsi.tsub)
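The same merge can also be done without a live profiling session, by loading the dump files directly into a YFuncStats; a minimal sketch (assumes .ys files saved by runs like the ones above):

import yappi

merged = yappi.YFuncStats(["ystats1.ys", "ystats2.ys"])
merged.sort("ttot").print_all()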
Example #50
    def test_profile_decorator(self):
        def aggregate(func, stats):
            fname = "%s.profile" % (func.__name__)
            try:
                stats.add(fname)
            except IOError:
                pass
            stats.save(fname)
            raise Exception("messing around")

        @yappi.profile(return_callback=aggregate)
        def a(x, y):
            if x + y == 25:
                raise Exception("")
            return x + y

        def b():
            pass

        try:
            os.remove(
                "a.profile")  # remove the one from prev test, if available
        except OSError:
            pass

        # global profile is on to mess things up
        yappi.start()
        b()

        # assert functionality and call function at same time
        try:
            self.assertEqual(a(1, 2), 3)
        except Exception:
            pass
        try:
            self.assertEqual(a(2, 5), 7)
        except Exception:
            pass
        try:
            a(4, 21)
        except Exception:
            pass
        stats = yappi.get_func_stats().add("a.profile")
        fsa = utils.find_stat_by_name(stats, 'a')
        self.assertEqual(fsa.ncall, 3)
        self.assertEqual(len(stats), 1)  # b() should be cleared out.

        @yappi.profile(return_callback=aggregate)
        def count_down_rec(n):
            if n == 0:
                return
            count_down_rec(n - 1)

        try:
            os.remove("count_down_rec.profile"
                      )  # remove the one from prev test, if available
        except OSError:
            pass

        try:
            count_down_rec(4)
        except Exception:
            pass
        try:
            count_down_rec(3)
        except Exception:
            pass

        stats = yappi.YFuncStats("count_down_rec.profile")
        fsrec = utils.find_stat_by_name(stats, 'count_down_rec')
        self.assertEqual(fsrec.ncall, 9)
        self.assertEqual(fsrec.nactualcall, 2)
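Outside of tests, the decorator is more commonly used without a callback, in which case yappi prints the collected stats once the outermost call returns; a minimal sketch:

import yappi

@yappi.profile()
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(10)  # stats are printed (and cleared) when this top-level call completes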
Example #51
    serve(app, host='0.0.0.0', port=6543)


if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == '--profile':
        try:
            import yappi

        except ImportError:
            print('You need to install the yappi python module to profile Smewt!')
            sys.exit(1)

        try:
            yappi.start(builtins=True)
            main()

        finally:
            yappi.stop()

            #yappi.get_func_stats().print_all()

            print('\n\nTHREAD STATS')
            yappi.get_thread_stats().print_all()

            filename = 'callgrind.out.%d' % os.getpid()
            yappi.get_func_stats().save(filename, type='callgrind')
            print('\nWrote callgrind output to %s' % filename)

    else:
        main()
Example #52
                    and y.split('.')[-1] in data else "" for y in fields
                ]
    elif args.show_types:
        # do nothing
        pass
    elif args.verbose and istlog:
        mavutil.dump_message_verbose(sys.stdout, m)
        print("")
    else:
        # Otherwise we output in a standard Python dict-style format
        s = "%s.%02u: %s" % (time.strftime("%Y-%m-%d %H:%M:%S",
                                           time.localtime(timestamp)),
                             int(timestamp * 100.0) % 100, m)
        if args.show_source:
            s += " srcSystem=%u srcComponent=%u" % (m.get_srcSystem(),
                                                    m.get_srcComponent())
        if args.show_seq:
            s += " seq=%u" % m.get_seq()
        print(s)

    # Update our last timestamp value.
    last_timestamp = timestamp

if args.show_types:
    for msgType in available_types:
        print(msgType)

if args.profile:
    yappi.get_func_stats().print_all()
    yappi.get_thread_stats().print_all()
Example #53
    return x + y


import yappi
if __name__ == "__main__":
    # yappi.set_clock_type("wall")
    yappi.set_clock_type("cpu")
    yappi.start()
    for i in range(10):
        t = threading.Thread(target=sum,
                             args=(
                                 100,
                                 i,
                             ),
                             name="hello" + str(i))
        t.start()
    main_thread = threading.current_thread()

    for t in threading.enumerate():
        if t is not main_thread:
            t.join()
    yappi.get_func_stats().print_all(
        columns={
            0: ("name", 50),
            1: ("ncall", 15),
            2: ("tsub", 18),
            3: ("ttot", 18),
            4: ("tavg", 18)
        })
    yappi.get_thread_stats().print_all()