Пример #1
1
def admin_menu():
    """Show a fullscreen Debug/Release admin menu and dispatch the choice.

    "Debug" profiles a run of the debug menu, writing raw profile data to
    test/profiling.prof and a formatted report to test/profiling.txt.
    "Release" shuts pygame down and returns to the start menu.
    """
    from pygame.display import set_mode, list_modes, set_caption
    from pygame import init, quit

    init()
    # Use the largest available display mode.
    screen = set_mode(list_modes()[0])
    set_caption("Hero Misadventures")
    menu = Menu(("Debug", "Release"), screen, text_color=color("White"), surface=color("Black"),
                selection_color=color("Slate Gray"))
    while True:
        choose = menu.update()
        if choose == -1:  # nothing selected yet — keep polling
            continue
        if choose == 0:
            from cProfile import runctx
            from pstats import Stats

            runctx("from bin.Interaction import debug_menu; debug_menu(screen)", {"screen": screen}, {}, "test/profiling.prof")
            # Fix: close the report file even if printing raises (it was
            # previously opened and never closed).
            with open("test/profiling.txt", "w") as file:
                info = Stats("test/profiling.prof", stream=file)
                info.strip_dirs().sort_stats("cumulative").print_stats()
        elif choose == 1:
            quit()
            start_menu()
        return
 def stopTest(self, test):
     """Finish *test*; when benchmarking, stop the profiler and print its
     stats using the configured sort key and line limit."""
     super(BenchTestResult, self).stopTest(test)
     if self._benchmark:
         self._profiler.disable()
         stats = Stats(self._profiler)
         stats.sort_stats(self._sort)
         stats.print_stats(self._limit)
Пример #3
0
 def expose(self, widget, event):
     """GTK expose handler: redraw the exposed area via self.draw().

     The `if False:` branch is a disabled profiling hook.  The live branch
     times the draw while holding self.animationLock and accumulates
     drawcount/drawtime counters.  Returns False so the event propagates.
     """
     context = widget.window.cairo_create()
     #r = (event.area.x, event.area.y, event.area.width, event.area.height)
     #context.rectangle(r[0]-.5, r[1]-.5, r[2]+1, r[3]+1)
     #context.clip()
     
     if False:
         import profile
         profile.runctx("self.draw(context, event.area)", locals(), globals(), "/tmp/pychessprofile")
         from pstats import Stats
         s = Stats("/tmp/pychessprofile")
         s.sort_stats('cumulative')
         s.print_stats()
     else:
         self.drawcount += 1
         start = time()
         self.animationLock.acquire()
         self.draw(context, event.area)
         self.animationLock.release()
         self.drawtime += time() - start
         #if self.drawcount % 100 == 0:
         #    print "Average FPS: %0.3f - %d / %d" % \
         #      (self.drawcount/self.drawtime, self.drawcount, self.drawtime)
         
     return False
    def __init__(self, *args, **kwds):
        """Initialize the base Stats, then create an .xlsx workbook named
        after the first positional argument (presumably the stats file
        path — confirm against callers)."""
        Stats.__init__(self, *args, **kwds)

        # Columns A-F get a modest width; G-H are wide (long text fields).
        self.workbook = xlsxwriter.Workbook(args[0] + ".xlsx")
        self.worksheet = self.workbook.add_worksheet()
        self.worksheet.set_column('A:F', 15)
        self.worksheet.set_column('G:H', 90)
Пример #5
0
def profile(to=None, sort_by='cumtime'):
	'''Profiles a chunk of code, use with the ``with`` statement::
	
	    from halonctl.debug import profile
	    
	    with profile('~/Desktop/stats'):
	    	pass # Do something performance-critical here...
	
	Results for individual runs are collected into ``to``. The specifics of how
	reports are done varies depending on what type ``to`` is.
	
	* **File-like objects**: Stats are dumped, according to ``sort_by``, into the stream, separated by newlines - watch out, the file/buffer may grow very big when used in loops.
	* **List-like objects**: A number of pstats.Stats objects are appended.
	* **str and unicode**: Treated as a path and opened for appending. Tildes (~) will be expanded, and intermediary directories created if possible.
	* **None or omitted**: Results are printed to sys.stderr.
	'''
	
	if isinstance(to, six.string_types):
		to = open_fuzzy(to, 'a')
	
	to_is_stream = hasattr(to, 'write')
	to_is_list = hasattr(to, 'append')
	
	p = Profile()
	p.enable()
	yield
	p.disable()
	
	ps = Stats(p, stream=to if to_is_stream else sys.stderr)
	# Fix: honor the caller's sort_by argument (was hardcoded to 'cumtime',
	# silently ignoring the parameter).
	ps.sort_stats(sort_by)
	
	if to_is_stream or to is None:
		ps.print_stats()
	elif to_is_list:
		to.append(ps)
def stats_for_fib(type_, fib):
    """Profile fib(30), dump raw stats to '<type_>.stats' (lowercased,
    spaces replaced by underscores) and print a sorted report."""
    profiler = Profile()
    profiler.runcall(fib, 30)
    stats_path = type_.lower().replace(' ', '_') + '.stats'
    profiler.dump_stats(stats_path)
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('time', 'cumulative')
    print_stats(type_, stats)
Пример #7
0
def run(number=100000):
    """Benchmark every framework in the module-level `frameworks` list.

    For each framework: import its app, time `number` WSGI calls, profile
    one call for call counts, and print msec / req-per-sec / total calls /
    distinct functions on one line.
    """
    sys.path[0] = '.'
    path = os.getcwd()
    print("              msec    rps  tcalls  funcs")
    for framework in frameworks:
        os.chdir(os.path.join(path, framework))
        try:
            main = __import__('app', None, None, ['main']).main

            f = lambda: list(main(environ.copy(), start_response))
            time = timeit(f, number=number)
            st = Stats(profile.Profile().runctx(
                'f()', globals(), locals()))
            print("%-11s %6.0f %6.0f %7d %6d" % (framework, 1000 * time,
                  number / time, st.total_calls, len(st.stats)))
            if 0:
                # Dead branch, kept for manually enabling a detailed report.
                st = Stats(profile.Profile().runctx(
                    'timeit(f, number=number)', globals(), locals()))
                st.strip_dirs().sort_stats('time').print_stats(10)
            del sys.modules['app']
        except ImportError:
            print("%-15s not installed" % framework)
        # Drop cached helloworld modules so the next framework reimports.
        modules = [m for m in sys.modules.keys() if m.endswith('helloworld')]
        for m in modules:
            del sys.modules[m]
Пример #8
0
 def __analyze2 ():
     """Profile a call to self.__analyze2() and print cumulative stats.

     NOTE(review): as written this takes no parameters yet references
     `self`, so it only works if `self` is in the enclosing scope —
     confirm against the original context this was scraped from.
     """
     import profile
     profile.runctx("self.__analyze2()", locals(), globals(), "/tmp/pychessprofile")
     from pstats import Stats
     s = Stats("/tmp/pychessprofile")
     s.sort_stats('cumulative')
     s.print_stats()
Пример #9
0
def print_stats(limit=limit, sort=sort, strip_dirs=strip_dirs):
    """Print stats collected by the module-level profiler, if any.

    Defaults are captured from the module globals `limit`, `sort` and
    `strip_dirs` at definition time; does nothing unless `_have_stats`.
    """
    if _have_stats:
        stats = Stats(_profile)
        if strip_dirs:
            stats.strip_dirs()
        # Fix: `apply()` was removed in Python 3; argument unpacking is the
        # exact equivalent and also works on Python 2.
        stats.sort_stats(*sort)
        stats.print_stats(*limit)
Пример #10
0
    def __call__(self, environ, start_response):
        """WSGI middleware entry point: run the wrapped app under cProfile
        and write a delimited stats report to self._stream; returns the
        buffered response body."""
        response_body = []

        def catching_start_response(status, headers, exc_info=None):
            # Forward to the real start_response, but capture write() output.
            start_response(status, headers, exc_info)
            return response_body.append

        def runapp():
            # Fully consume the app iterable so the profile covers it.
            appiter = self._app(environ, catching_start_response)
            response_body.extend(appiter)
            if hasattr(appiter, 'close'):
                appiter.close()

        p = Profile()
        p.runcall(runapp)
        body = ''.join(response_body)
        stats = Stats(p)
        stats.sort_stats(*self._sort_by)

        self._stream.write('-' * 80)
        self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO'))
        stats.print_stats(*self._restrictions)
        self._stream.write('-' * 80 + '\n\n')

        return [body]
Пример #11
0
def write_profile(pfile='./logs/profile.out'):
    """Stop the global BUBBLE_PROFILE profiler and persist its results.

    Dumps raw stats to ./logs/profiling.pstats and appends a cumulative-
    time text report to ./logs/profile.txt.  No-op when profiling is off.
    NOTE(review): the `pfile` parameter is unused — confirm intent.
    """
    global BUBBLE_PROFILE
    if not BUBBLE_PROFILE:
        return
    BUBBLE_PROFILE.disable()
    #s = io.StringIO()
    s = StringIO()
    sortby = 'cumulative'
    #ps = Stats(BUBBLE_PROFILE).sort_stats(sortby)
    ps = Stats(BUBBLE_PROFILE,stream=s).sort_stats(sortby)
    ps.print_stats()
    # print(s.getvalue())
    # now=arrow.now()
    #pstats_file='./logs/profiling'+str(now)+'.pstats'
    #profile_text='./logs/profile'+str(now)+'.txt'
    pstats_file='./logs/profiling.pstats'
    profile_text='./logs/profile.txt'

    BUBBLE_PROFILE.dump_stats(pstats_file)

    with open(profile_text,'a+') as pf:
        pf.write(s.getvalue())
    print("end_profile")
    print('BUBBLE_PROFILE:pstats_file:'+pstats_file)
    print('BUBBLE_PROFILE:profile_text:'+profile_text)
Пример #12
0
    def get_stats(self, session):
        """Aggregate pstats data from all profile dumps of *session*.

        Local dumps are read in place; remote dumps are copied into
        temporary files first and cleaned up afterwards.

        Returns:
            (stats, output): a pstats.Stats (or None when the session has
            no profiles) and the StringIO stream it prints to.
        """
        output = StringIO()
        stats = None
        temp_files = []
        try:
            for profile in session.profiles.all():
                if profile.dump.path:
                    log.debug('Adding local profile dump')
                    path = profile.dump.path
                else:
                    log.debug('Creating a temporary file for remote profile dump')
                    temp, path = mkstemp(dir=self.tempdir)
                    # Fix: open for binary writing — profile dumps are binary
                    # and the previous default fdopen mode ('r') made the
                    # temp.write() below fail.
                    temp = fdopen(temp, 'wb')
                    temp_files.append((temp, path))
                    log.debug('Copying content from remote dump to tempfile')
                    temp.write(profile.dump.read())
                    # Flush so Stats()/add() below see the complete dump.
                    temp.flush()
                    log.debug('Adding tempfile profile dump')
                if stats is None:
                    log.debug('Creating a Stats object')
                    stats = Stats(path, stream=output)
                else:
                    log.debug('Appending to existing Stats object')
                    stats.add(path)
        finally:
            for temp, path in temp_files:
                log.debug('Removing temporary file at %s' % (path,))
                temp.close()
                unlink(path)

        return stats, output
 def tearDownClass(cls):
     """Shut down the local test server and prepare profiler results.

     NOTE(review): the stats are stripped and sorted but never printed or
     dumped here — confirm whether a print_stats call was dropped.
     """
     if cls.is_running:
         return
     urlopen('http://localhost:8000/quit')
     cls.cli.close()
     p = Stats(cls.profiler)
     p.strip_dirs()
     p.sort_stats('cumtime')
Пример #14
0
def profile():
    """Profile main(50) and print the ten most time-consuming entries."""
    import cProfile
    from pstats import Stats

    cProfile.run('main(50)', 'pstats')
    stats = Stats('pstats')
    stats.strip_dirs().sort_stats('time').print_stats(10)
def stats_for_main():
    """Profile main(), dump raw stats to 'main.stats' and print an overall
    report plus caller/callee views for 'sleep' and 'heavy'."""
    profiler = Profile()
    profiler.runcall(main)
    profiler.dump_stats('main.stats')
    stats = Stats(profiler)
    stats.strip_dirs().sort_stats('time', 'cumulative')
    print_stats('MAIN - ALL STATS', stats)
    print_stats('MAIN - CALLERS', stats, 'sleep')
    print_stats('MAIN - CALLEES', stats, 'heavy')
Пример #16
0
def home_p(request):
    """Profiled version of home: run home(request) under cProfile, log a
    time-sorted report (top 80 entries), and return a plain "OK"."""
    prof = Profile()
    prof = prof.runctx("home(request)", globals(), locals())
    stream = StringIO()
    stats = Stats(prof, stream=stream)
    stats.sort_stats("time").print_stats(80)
    log.info("Profile data:\n%s", stream.getvalue())
    return HttpResponse(u"OK")
Пример #17
0
def print_profile_data():
    """Print the data collected by the module-level profiler, sorted by
    cumulative time."""
    buffer = StringIO()
    stats = Stats(profiler, stream=buffer)
    stats.sort_stats('cumulative')
    stats.print_stats()
    print(buffer.getvalue())
Пример #18
0
def profile(func, file_path):
    """Profile a call of *func* and write a cumulative-time text report.

    Args:
        func: zero-argument callable to profile.
        file_path: destination path for the report.
    """
    pr = Profile()
    pr.enable()
    func()
    pr.disable()
    sortby = "cumulative"
    # Fix: close the report file deterministically (it was previously
    # opened with open() and never closed).
    with open(file_path, "w") as s:
        ps = Stats(pr, stream=s).sort_stats(sortby)
        ps.print_stats()
Пример #19
0
 def handle(self, *args, **options):
     """Run the command via self._handle; when a 'profile' option is
     given, run under cProfile instead and dump raw stats to that file."""
     profile_file = options.pop('profile', None)
     if profile_file:
         profiler = Profile()
         profiler.runcall(self._handle, *args, **options)
         stats = Stats(profiler)
         stats.dump_stats(profile_file)
     else:
         self._handle(*args, **options)
Пример #20
0
def profile_call(_func, *args, **kwargs):
    """Run _func(*args, **kwargs) under cProfile.

    Dumps raw stats to /tmp/sentry-<timestamp>-<name>.prof, prints a
    time/calls-sorted report to stderr, and returns _func's result.
    """
    profiler = Profile()
    captured = []
    profiler.runcall(lambda: captured.append(_func(*args, **kwargs)))
    profiler.dump_stats('/tmp/sentry-%s-%s.prof' % (time.time(), _func.__name__))

    report = Stats(profiler, stream=sys.stderr)
    report.sort_stats('time', 'calls')
    report.print_stats()
    return captured[0]
Пример #21
0
def profile(func, args=None, kwargs=None, sort="time"):
    """Run func(*args, **kwargs) under a profiler, print a report sorted
    by *sort*, and return func's result."""
    prof = profile_.Profile()
    call_args = args if args is not None else ()
    call_kwargs = kwargs if kwargs is not None else {}
    result = prof.runcall(func, *call_args, **call_kwargs)
    stats = Stats(prof)
    stats.sort_stats(sort)
    stats.print_stats()
    return result
Пример #22
0
 def wrapper(*args, **kwg):
     """Call the wrapped function under cProfile and return its result.

     If a filename was configured, raw stats are dumped there and a sorted
     report is printed; otherwise a default report goes to stdout.

     Fixes a bug: the original exec'd "res = f(*args, **kwg)" into a
     locals() snapshot, which does not update the function's real local,
     so the wrapper always returned None.
     """
     profiler = cProfile.Profile()
     try:
         return profiler.runcall(func, *args, **kwg)
     finally:
         if filename:
             profiler.dump_stats(filename)
             pstats = Stats(filename)
             pstats.sort_stats(*sort_fields)
             pstats.print_stats(*p_amount)
         else:
             profiler.print_stats()
Пример #23
0
def concat(pattern, outfile, mpi=None):
    """Merge every pstats file matching *pattern* into *outfile*.

    When *mpi* is truthy, pattern/outfile are treated as %-templates
    filled with the MPI rank.  Matched source files are removed after the
    merge; with no matches this is a no-op.
    """
    if mpi:
        from mpi4py import MPI
        pattern = pattern % MPI.COMM_WORLD.rank
        outfile = outfile % MPI.COMM_WORLD.rank
    matches = glob(pattern)
    if not matches:
        return
    merged = Stats(matches[0])
    for extra in matches[1:]:
        merged.add(extra)
    merged.dump_stats(outfile)
    for name in matches:
        os.remove(name)
Пример #24
0
    def save_data(self):
        """Stop profiling and write three artifacts for this request:
        a pstats text report (.txt), a gprof2dot callgraph (.dot) and a
        calltree file (.prof) for kcachegrind-style viewers.

        NOTE(review): uses the Python-2-only `file()` builtin — this block
        cannot run unmodified on Python 3; confirm target runtime.
        """
        try:
            import gprof2dot
            import pyprof2calltree
        except ImportError:
            msg = ('Unable to start profiling.\n Please either '
                   'disable performance profiling in settings.yaml or '
                   'install all modules listed in test-requirements.txt.')
            raise error.ProfilingError(msg)

        self.profiler.disable()
        elapsed = time.time() - self.start
        # File name encodes HTTP method, handler, elapsed ms and timestamp.
        pref_filename = os.path.join(
            self.paths['last_performance_test'],
            '{method:s}.{handler_name:s}.{elapsed_time:.0f}ms.{t_time}.'.
            format(
                method=self.method,
                handler_name=self.handler_name or 'root',
                elapsed_time=elapsed * 1000.0,
                t_time=time.time()))
        tree_file = pref_filename + 'prof'
        stats_file = pref_filename + 'txt'
        callgraph_file = pref_filename + 'dot'

        # write pstats
        with file(stats_file, 'w') as file_o:
            stats = Stats(self.profiler, stream=file_o)
            stats.sort_stats('time', 'cumulative').print_stats()

        # write callgraph in dot format
        parser = gprof2dot.PstatsParser(self.profiler)

        def get_function_name(args):
            # Shorten module paths to their last four components for
            # readable callgraph node labels.
            filename, line, name = args
            module = os.path.splitext(filename)[0]
            module_pieces = module.split(os.path.sep)
            return "{module:s}:{line:d}:{name:s}".format(
                module="/".join(module_pieces[-4:]),
                line=line,
                name=name)

        parser.get_function_name = get_function_name
        gprof = parser.parse()

        with open(callgraph_file, 'w') as file_o:
            dot = gprof2dot.DotWriter(file_o)
            theme = gprof2dot.TEMPERATURE_COLORMAP
            dot.graph(gprof, theme)

        # write calltree
        call_tree = pyprof2calltree.CalltreeConverter(stats)
        with file(tree_file, 'wb') as file_o:
            call_tree.output(file_o)
Пример #25
0
def profiler(enable, outfile):
    """Optionally profile the enclosed block.

    On exit (even via exception) the tottime-sorted stats are dumped to
    *outfile*.  When *enable* is falsy this is a pure pass-through.
    """
    try:
        if enable:
            prof = Profile()
            prof.enable()

        yield
    finally:
        if enable:
            prof.disable()
            result = Stats(prof)
            result.sort_stats('tottime')
            result.dump_stats(outfile)
Пример #26
0
 def _call(self, *args, **kw):
     """Run the wrapped call self._iterations times under cProfile with GC
     disabled, report the (truncated) cumulative stats via self._reporter,
     and return the last iteration's result."""
     profile = RawProfile()
     def _run():
         with DisableGc():
             for _ in range(self._iterations):
                 # Stash the result on the function object so it survives
                 # the runctx namespace.
                 _run.result = super(Profile, self)._call(*args, **kw)
     profile.runctx('_run()', {}, {'_run': _run})
     profile.create_stats()
     stats = Stats(profile)
     stats.sort_stats('cumulative')
     # Truncate the report to the configured number of lines.
     stats.fcn_list = stats.fcn_list[:self._max_lines]
     self._reporter(stats)
     return _run.result
Пример #27
0
def get_profile_report(profiler):
    """Return a two-section text report for an already-populated profiler:
    first sorted by cumulative time, then by internal time.

    Args:
        profiler: a cProfile.Profile (or pstats-compatible) object.

    Returns:
        str: the combined report text.
    """
    from pstats import Stats
    # Fix: cStringIO was removed in Python 3; io.StringIO is the drop-in
    # replacement for text streams.
    from io import StringIO

    io = StringIO()
    stats = Stats(profiler, stream = io)

    io.write('\nby cumulative time:\n\n')
    stats.sort_stats('cumulative').print_stats(25)

    # NOTE(review): header says "number of calls" but the sort key is
    # internal time ('time') — confirm which was intended.
    io.write('\nby number of calls:\n\n')
    stats.sort_stats('time').print_stats(25)

    return io.getvalue()
Пример #28
0
 def run(self):
     """method calling cProfile and printing the output"""
     # NOTE(review): Python 2 code (print statement, StringIO.StringIO) —
     # cannot run unmodified on Python 3.
     tests=self.tests()
    
     for test in tests:
         tmpBuffer=StringIO.StringIO()
         profile=cProfile.Profile()
         profile.runctx('self.'+str(test[0])+"()",globals(),locals())
         stats=Stats(profile,stream=tmpBuffer)
         stats.sort_stats('time','calls')
         stats.print_stats(1)
         # Pull the "... in N seconds CPU" figure out of the report text.
         match=re.findall(r'\bin\b(.*?)\bCPU\b',tmpBuffer.getvalue())
        
         print str(test[1].__doc__ )+":"+str(match[0])+" CPU Time"
Пример #29
0
def profile_func(func):
    """Call func() under cProfile; dump raw stats to /tmp and print a
    time/calls-sorted report to stderr.  Returns func's result."""
    from cProfile import Profile
    from pstats import Stats

    profiler = Profile()
    holder = []
    profiler.runcall(lambda: holder.append(func()))
    profiler.dump_stats('/tmp/lektor-%s.prof' % func.__name__)

    report = Stats(profiler, stream=sys.stderr)
    report.sort_stats('time', 'calls')
    report.print_stats()

    return holder[0]
Пример #30
0
def print_stats(profiler, printCallers=False):
    """Print a cumulative-time report for *profiler*; when printCallers is
    True, print the callers view instead of the flat profile."""
    from pstats import Stats

    report = Stats(profiler)
    report.strip_dirs().sort_stats('cumulative')
    if printCallers is True:
        report.print_callers()
    else:
        report.print_stats()
Пример #31
0
    def process(self, profiler, start, end, environment, suggestive_file_name):
        """
        Process the results
        :param profiler:                The profiler
        :type  profiler:                cProfile.Profile
        :param start:                   Start of profiling
        :type start:                    int
        :param end:                     End of profiling
        :type end:                      int
        :param environment:             The environment
        :type  environment:             Environment
        :param suggestive_file_name:    A suggestive file name
        :type  suggestive_file_name:    str
        """

        # Writes a delimited, sorted stats report to the configured stream.
        # start/end/suggestive_file_name are unused by this processor.
        stats = Stats(profiler, stream=self._stream)
        stats.sort_stats(*self._sort_by)

        self._stream.write('-' * 80)
        self._stream.write('\nPATH: %s\n' % environment.get('PATH_INFO'))
        stats.print_stats(*self._restrictions)
        self._stream.write('-' * 80 + '\n\n')
Пример #32
0
def main(argv=None):
    """Launch the cadnano GUI.  With --profile, collect cProfile data into
    'cadnano.profile'; with --print_stats, print a previously saved
    profile instead of (or after) running."""
    # print(argv)
    from cadnano import initAppWithGui
    # Things are a lot easier if we can pass None instead of sys.argv and only fall back to sys.argv when we need to.
    app = initAppWithGui(argv, do_exec=False)
    if app.argns.profile:
        print("Collecting profile data into cadnano.profile")
        import cProfile
        cProfile.runctx('app.exec_()',
                        None,
                        locals(),
                        filename='cadnano.profile')
        print("Done collecting profile data. Use -P to print it out.")
    if not app.argns.profile and not app.argns.print_stats:
        sys.exit(app.exec_())
    if app.argns.print_stats:
        from pstats import Stats
        s = Stats('cadnano.profile')
        # NOTE(review): labels and sort keys look swapped ("Internal Time"
        # prints the cumulative sort and vice versa) — confirm.
        print("Internal Time Top 10:")
        s.sort_stats('cumulative').print_stats(10)
        print("\nTotal Time Top 10:")
        s.sort_stats('time').print_stats(10)
Пример #33
0
def profile_request(path, cookie, f):
    """Profile callable *f* inside a Flask test request context for *path*
    (authenticated via *cookie*).  Dumps sorted stats to 'profile.pstat'
    and prints entries matching '.*new_web.*'."""
    a = app.configured_app()
    pr = cProfile.Profile()
    headers = {'Cookie': cookie}

    with a.test_request_context(path, headers=headers):
        pr.enable()

        # r = f()
        # assert type(r) == str, r
        f()

        pr.disable()

    # pr.dump_stats('gua_profile.out')
    # pr.create_stats()
    # s = Stats(pr)
    pr.create_stats()
    s = Stats(pr).sort_stats('cumulative')
    s.dump_stats('profile.pstat')

    s.print_stats('.*new_web.*')
Пример #34
0
        def wrapper(event, context, *args, **kwargs):
            """Lambda-handler wrapper: profile a random sample of
            invocations (probability PROFILE_SAMPLE) and ship the report
            via process_profiling_data; otherwise call straight through."""
            if random.random() <= PROFILE_SAMPLE:  # nosec
                print_stats_filter = stats_filter or DEFAULT_FILTER
                # NOTE(review): append mutates the shared stats_filter /
                # DEFAULT_FILTER list across invocations — confirm intent.
                print_stats_filter.append(stats_limit)

                profile = Profile()
                profile.enable()
                try:
                    return_value = func(event, context, *args, **kwargs)
                finally:
                    # Report even when the handler raised.
                    profile.disable()

                    stream = StringIO()
                    stats = Stats(profile, stream=stream)
                    stats.sort_stats("cumulative")
                    stats.print_stats(*print_stats_filter)
                    process_profiling_data(stream, logger, event)
            else:
                logger.info("Skipping profiling")
                return_value = func(event, context, *args, **kwargs)

            return return_value
Пример #35
0
    def __call__(self, environ, start_response):
        """WSGI middleware entry point: run the wrapped app under cProfile,
        write a delimited stats report to self._stream, and return the
        buffered response body."""
        response_body = []

        def catching_start_response(status, headers, exc_info=None):
            # Forward to the real start_response, but capture write() output.
            start_response(status, headers, exc_info)
            return response_body.append

        def runapp():
            # Fully consume the app iterable so the profile covers it.
            appiter = self._app(environ, catching_start_response)
            response_body.extend(appiter)
            if hasattr(appiter, 'close'):
                appiter.close()

        p = Profile()
        p.runcall(runapp)
        body = ''.join(response_body)
        stats = Stats(p, stream=self._stream)
        stats.sort_stats(*self._sort_by)
        self._stream.write('-' * 80)
        self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO'))
        stats.print_stats(*self._restrictions)
        self._stream.write('-' * 80 + '\n\n')
        return [body]
Пример #36
0
def run(frameworks, number, do_profile):
    """Benchmark each named framework's WSGI `main`: time `number` calls,
    profile one call for call counts, and (when do_profile) print a
    detailed per-framework report."""
    print("Benchmarking frameworks:", ', '.join(frameworks))
    sys.path[0] = '.'
    path = os.getcwd()
    print("                ms     rps  tcalls  funcs")
    for framework in frameworks:
        os.chdir(os.path.join(path, framework))
        try:
            main = __import__('app', None, None, ['main']).main

            f = lambda: list(main(environ.copy(), start_response))
            time = timeit(f, number=number)
            st = Stats(profile.Profile().runctx('f()', globals(), locals()))
            print("%-11s %6.0f %7.0f %7d %6d" %
                  (framework, 1000 * time, number / time, st.total_calls,
                   len(st.stats)))
            if do_profile:
                st = Stats(profile.Profile().runctx('timeit(f, number=number)',
                                                    globals(), locals()))
                st.strip_dirs().sort_stats('time').print_stats(10)
            # Drop the cached module so the next framework reimports 'app'.
            del sys.modules['app']
        except ImportError:
            print("%-15s not installed" % framework)
Пример #37
0
    def __call__(self, environ, start_response):
        """WSGI middleware entry point: profile the wrapped app call.

        When self._profile_dir is set, dump a per-request .prof file named
        after method/path/elapsed-ms/timestamp; otherwise write a text
        report to self._stream.  Returns the buffered (bytes) body.
        """
        response_body = []

        def catching_start_response(status, headers, exc_info=None):
            # Forward to the real start_response, but capture write() output.
            start_response(status, headers, exc_info)
            return response_body.append

        def runapp():
            # Fully consume the app iterable so the profile covers it.
            appiter = self._app(environ, catching_start_response)
            response_body.extend(appiter)
            if hasattr(appiter, 'close'):
                appiter.close()

        p = Profile()
        start = time.time()
        p.runcall(runapp)
        body = b''.join(response_body)
        elapsed = time.time() - start

        if self._profile_dir is not None:
            prof_filename = os.path.join(
                self._profile_dir, '%s.%s.%06dms.%d.prof' %
                (environ['REQUEST_METHOD'],
                 environ.get('PATH_INFO').strip('/').replace('/', '.')
                 or 'root', elapsed * 1000.0, time.time()))
            p.dump_stats(prof_filename)

        else:
            stats = Stats(p, stream=self._stream)
            stats.sort_stats(*self._sort_by)

            self._stream.write('-' * 80)
            self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO'))
            stats.print_stats(*self._restrictions)
            self._stream.write('-' * 80 + '\n\n')

        return [body]
Пример #38
0
def main(args=None):
    """cadnano entry point: '-p' profiles the GUI run into
    'cadnano.profile', '-P' prints a previously saved profile, '-t' runs
    the test suite; otherwise start the Qt event loop."""
    print(welcome_message.format(version))
    app = cadnano.app()
    if "-p" in sys.argv:
        print("Collecting profile data into cadnano.profile")
        import cProfile
        cProfile.run('app.exec_()', 'cadnano.profile')
        print("Done collecting profile data. Use -P to print it out.")
        exit()
    elif "-P" in sys.argv:
        from pstats import Stats
        s = Stats('cadnano.profile')
        # NOTE(review): labels and sort keys look swapped ("Internal Time"
        # prints the cumulative sort and vice versa) — confirm.
        print("Internal Time Top 10:")
        s.sort_stats('cumulative').print_stats(10)
        print("")
        print("Total Time Top 10:")
        s.sort_stats('time').print_stats(10)
        exit()
    elif "-t" in sys.argv:
        print("running tests")
        from tests.runall import main as runTests
        runTests(useXMLRunner=False)
        exit()
    app.exec_()
Пример #39
0
def main(args):
    """cadnano entry point with explicit args: '-p' profiles the GUI run
    into 'cadnano.profile', '-P' prints a previously saved profile; then
    enter the Qt event loop."""
    print(args)
    from cadnano import initAppWithGui
    app = initAppWithGui(args)
    if "-p" in args:
        print("Collecting profile data into cadnano.profile")
        import cProfile
        cProfile.runctx('app.exec_()',
                        None,
                        locals(),
                        filename='cadnano.profile')
        print("Done collecting profile data. Use -P to print it out.")
    elif "-P" in args:
        from pstats import Stats
        s = Stats('cadnano.profile')
        # NOTE(review): labels and sort keys look swapped ("Internal Time"
        # prints the cumulative sort and vice versa) — confirm.
        print("Internal Time Top 10:")
        s.sort_stats('cumulative').print_stats(10)
        print("\nTotal Time Top 10:")
        s.sort_stats('time').print_stats(10)
    # elif "-t" in sys.argv:
    #     print("running tests")
    #     from tests.runall import main as runTests
    #     runTests(useXMLRunner=False)
    sys.exit(app.exec_())
Пример #40
0
# Worst-case-ish random data for benchmarking insertion_sort (defined
# elsewhere in the original file, as is randint).
max_size = 10**4
data = [randint(0, max_size) for _ in range(max_size)]
test = lambda: insertion_sort(data)

# cProfile: lower overhead than the pure-Python profiler, so it perturbs
# the measured program less.
from cProfile import Profile

profiler = Profile()
profiler.runcall(test)

# Report the results.
import sys
from pstats import Stats

# Fix: the original assigned `stats` twice, and the second assignment
# referenced an undefined STDOUT name (NameError at runtime).  Keep the
# single, explicit stream.
stats = Stats(profiler, stream=sys.stdout)
stats.strip_dirs()
stats.sort_stats('cumulative')
print('First Test')
stats.print_stats()
"""
Column meanings:
ncalls: number of calls made during profiling
tottime: seconds spent in the function itself, excluding sub-calls
tottime percall: average seconds per call, excluding sub-calls
cumtime: cumulative seconds including sub-calls
cumtime percall: average cumulative seconds per call
"""

# See the built-in algorithms section (item 46) for reference.
Пример #41
0
    # Fragment of a CLI driver; the enclosing function and args parsing
    # are not visible in this chunk.
    if args.fieldset is not None:
        filename = 'peninsula'
        fieldset = peninsula_fieldset(args.fieldset[0],
                                      args.fieldset[1],
                                      mesh='flat')
        fieldset.write(filename)

    # Open fieldset file set
    fieldset = FieldSet.from_parcels('peninsula',
                                     extra_fields={'P': 'P'},
                                     allow_time_extrapolation=True)

    if args.profiling:
        from cProfile import runctx
        from pstats import Stats
        # Run the example under cProfile and print the ten most expensive
        # functions by internal time.
        runctx(
            "pensinsula_example(fieldset, args.particles, mode=args.mode,\
                                   degree=args.degree, verbose=args.verbose,\
                                   output=not args.nooutput, method=method[args.method])",
            globals(), locals(), "Profile.prof")
        Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
    else:
        pensinsula_example(fieldset,
                           args.particles,
                           mode=args.mode,
                           degree=args.degree,
                           verbose=args.verbose,
                           output=not args.nooutput,
                           method=method[args.method])
Пример #42
0
import sys
from pstats import Stats

# CLI: print a saved pstats dump (argv[1]) sorted by cumulative time,
# restricted by argv[2] (a line count, fraction, or pattern per pstats).
s = Stats(sys.argv[1])
s.sort_stats("cum").print_stats(sys.argv[2])
Пример #43
0
 def tearDownClass(cls):
     """Print the class-level profiler's top 20 entries by cumulative time."""
     stats = Stats(cls.profiler)
     stats.strip_dirs()
     stats.sort_stats("cumtime")
     stats.print_stats(20)
Пример #44
0
import os

import sys
from pstats import Stats

# Path to a previously captured profile snapshot (machine-specific).
profile_dat = os.path.expanduser(
    '/home/benkoziol/.PyCharm50/system/snapshots/ocgis9.pstat')


def profile_target():
    # NOTE(review): stub target for re-capturing a profile; currently
    # builds an argv list and discards it — confirm intent.
    argv = [
        sys.argv[0],
    ]


# cProfile.run('profile_target()', filename=os.path.expanduser(profile_dat))

# Load the snapshot and print the top 1% of entries by internal time.
stats = Stats(profile_dat)
stats.strip_dirs()
stats.sort_stats('time', 'name')
stats.print_stats(0.01)
# stats.print_callers(0.01)
Пример #45
0
from curlylint.tests.utils import BlackRunner

from curlylint.cli import main

from memory_profiler import profile

runner = BlackRunner()

# Profile one CLI invocation of curlylint over the sample templates and
# print the ten most expensive functions by cumulative time.
# NOTE(review): relies on cProfile and pstats.Stats being imported earlier
# in the original file — not visible in this fragment.
pr = cProfile.Profile()
pr.enable()

result = runner.invoke(main, ["--verbose", "tests/django/wagtailadmin/"])

pr.disable()
p = Stats(pr)

p.strip_dirs().sort_stats("cumulative").print_stats(10)

print(result.exit_code)
print(runner.stdout_bytes.decode())
print(runner.stderr_bytes.decode())

print("Measuring memory consumption")

@profile(precision=6)
def memory_consumption_run():
    runner.invoke(
        main,
        [
Пример #46
0
def compserver(payload, serial):
    """Compute a blaze expression from a request *payload* and serialize
    the result with *serial*.

    Optionally profiles the computation (when the server allows it and the
    payload requests it); profile data is either marshalled into the
    response (':response') or written to a server-side file.  Errors are
    logged and returned as (message, status-code) tuples.
    """
    app = flask.current_app
    (allow_profiler,
     default_profiler_output,
     profile_by_default) = _get_profiler_info()
    requested_profiler_output = payload.get(u'profiler_output',
                                            default_profiler_output)
    profile = payload.get(u'profile')
    profiling = (allow_profiler and
                 (profile or (profile_by_default and requested_profiler_output)))
    if profile and not allow_profiler:
        return ('profiling is disabled on this server', RC.FORBIDDEN)

    with ExitStack() as response_construction_context_stack:
        if profiling:
            from cProfile import Profile

            if (default_profiler_output == ':response' and
                    requested_profiler_output != ':response'):
                # writing to the local filesystem is disabled
                return ("local filepaths are disabled on this server, only"
                        " ':response' is allowed for the 'profiler_output' field",
                        RC.FORBIDDEN)

            profiler_output = requested_profiler_output
            profiler = Profile()
            profiler.enable()
            # ensure that we stop profiling in the case of an exception
            response_construction_context_stack.callback(profiler.disable)

        expr = '<failed to parse expr>'

        @response_construction_context_stack.callback
        def log_time(start=time()):
            # Runs on stack unwind: logs the expression and elapsed time.
            app.logger.info('compute expr: %s\ntotal time (s): %.3f',
                            expr,
                            time() - start)

        ns = payload.get(u'namespace', {})
        compute_kwargs = payload.get(u'compute_kwargs') or {}
        odo_kwargs = payload.get(u'odo_kwargs') or {}
        dataset = _get_data()
        ns[':leaf'] = symbol('leaf', discover(dataset))

        expr = from_tree(payload[u'expr'], namespace=ns)
        assert len(expr._leaves()) == 1
        leaf = expr._leaves()[0]

        try:
            formatter = getattr(flask.current_app, 'log_exception_formatter',
                                _default_log_exception_formatter)
            result = serial.materialize(compute(expr,
                                                {leaf: dataset},
                                                **compute_kwargs),
                                        expr.dshape,
                                        odo_kwargs)
        except NotImplementedError as e:
            # Note: `sys.exc_info()[2]` holds the current traceback, for
            # Python 2 / 3 compatibility. It's important not to store a local
            # reference to it.
            formatted_tb = formatter(sys.exc_info()[2])
            error_msg = "Computation not supported:\n%s\n%s" % (e, formatted_tb)
            app.logger.error(error_msg)
            return (error_msg, RC.NOT_IMPLEMENTED)
        except Exception as e:
            formatted_tb = formatter(sys.exc_info()[2])
            error_msg = "Computation failed with message:\n%s: %s\n%s" % (type(e).__name__, e, formatted_tb)
            app.logger.error(error_msg)
            return (error_msg, RC.INTERNAL_SERVER_ERROR)

        response = {u'datashape': pprint(expr.dshape, width=0),
                    u'data': serial.data_dumps(result),
                    u'names': expr.fields}

    if profiling:
        import marshal
        from pstats import Stats

        if profiler_output == ':response':
            from pandas.compat import BytesIO
            file = BytesIO()
        else:
            file = open(_prof_path(profiler_output, expr), 'wb')

        with file:
            # Use marshal to dump the stats data to the given file.
            # This is taken from cProfile which unfortunately does not have
            # an api that allows us to pass the file object directly, only
            # a file path.
            marshal.dump(Stats(profiler).stats, file)
            if profiler_output == ':response':
                response[u'profiler_output'] = {'__!bytes': file.getvalue()}

    return serial.dumps(response)
Пример #47
0
if __name__ == '__main__':
    # Configure the pygame window, then either profile a timed run
    # ('profile' CLI argument) or run the game loop normally.
    if conf.WINDOW_ICON is not None:
        pg.display.set_icon(pg.image.load(conf.WINDOW_ICON))
    if conf.WINDOW_TITLE is not None:
        pg.display.set_caption(conf.WINDOW_TITLE)
    if len(argv) >= 2 and argv[1] == 'profile':
        # profile
        from cProfile import run
        from pstats import Stats
        if len(argv) >= 3:
            t = int(argv[2])
        else:
            t = conf.DEFAULT_PROFILE_TIME
        # Scale by the frame rate — presumably converts seconds to frames.
        t *= conf.FPS[None]
        fn = conf.PROFILE_STATS_FILE
        # NOTE(review): cProfile.run's third argument is `sort`, which is
        # ignored when a filename is given — passing locals() here has no
        # effect; confirm intent.
        run('Game(Level).run(t)', fn, locals())
        Stats(fn).strip_dirs().sort_stats('cumulative').print_stats(20)
        os.unlink(fn)
    else:
        level = 0
        if len(argv) >= 2:
            level = int(argv[1])
        # run normally
        restarting = True
        while restarting:
            restarting = False
            Game(level_backends[level], level).run()

pg.quit()
Пример #48
0
from pstats import Stats
from random import randint

# Workload for the insertion-sort benchmark: `max_size` random integers,
# each drawn from [0, max_size].
max_size = 10000 * 4
data = [randint(0, max_size) for _ in range(max_size)]


def insert_value(array, value):
    """Insert `value` into the sorted list `array`, keeping it sorted.

    The value goes before the first strictly greater element, i.e. after
    any elements equal to it; if none is greater, it is appended.
    """
    position = next(
        (index for index, item in enumerate(array) if item > value), None)
    if position is None:
        array.append(value)
    else:
        array.insert(position, value)


def insertion_sort(data):
    """Return a new sorted list built by inserting each element of `data`
    into its ordered position (deliberately O(n^2), for profiling)."""
    ordered = []
    for item in data:
        insert_value(ordered, item)
    return ordered


# cProfile was never imported in this snippet, so Profile() raised a
# NameError; import it here where it is first needed.
import cProfile


def test():
    """Benchmark target: one full insertion sort of the random data."""
    return insertion_sort(data)


profiler = cProfile.Profile()
profiler.runcall(test)

# Report raw (unsorted) stats; chain .strip_dirs().sort_stats("cumulative")
# before print_stats() for a cleaner, ordered report.
stats = Stats(profiler)
stats.print_stats()
def main():
    """
    3D cantilever beam peridynamics simulation.

    Command line:
        mesh_file_name        one of the known 1650beam*.msh meshes
        --optimised           use an optimised Euler-Cromer integrator
        --profile             profile the whole run with cProfile
        --lumped / --lumped2  pick a lumped-mass optimised integrator
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("mesh_file_name",
                        help="run example on a given mesh file name")
    parser.add_argument('--optimised', action='store_const', const=True)
    parser.add_argument('--profile', action='store_const', const=True)
    parser.add_argument('--lumped', action='store_const', const=True)
    parser.add_argument('--lumped2', action='store_const', const=True)
    args = parser.parse_args()

    # Start profiling as early as possible so setup cost is captured too.
    if args.profile:
        profile = cProfile.Profile()
        profile.enable()

    # Meshes this example knows how to run (coarse to fine).
    beams = [
        '1650beam792t.msh', '1650beam2652t.msh', '1650beam3570t.msh',
        '1650beam4095t.msh', '1650beam6256t.msh', '1650beam15840t.msh',
        '1650beam32370t.msh', '1650beam74800t.msh', '1650beam144900t.msh',
        '1650beam247500t.msh'
    ]
    assert args.mesh_file_name in beams, 'mesh_file_name = {} was not recognised, please check the mesh file is in the directory'.format(
        args.mesh_file_name)

    if args.optimised:
        print(args.mesh_file_name, 'EulerCromerOptimised')
    else:
        print(args.mesh_file_name, 'EulerCromer')
    mesh_file = pathlib.Path(__file__).parent.absolute() / args.mesh_file_name
    st = time.time()

    # Set simulation parameters
    volume_total = 1.65 * 0.6 * 0.25
    density_concrete = 2400
    youngs_modulus_concrete = 1. * 22e9
    youngs_modulus_steel = 1. * 210e9
    poisson_ratio = 0.25
    strain_energy_release_rate_concrete = 100
    strain_energy_release_rate_steel = 13000
    # Map each mesh to its precomputed bond-network file.
    networks = {
        '1650beam792t.msh': 'Network1650beam792t.vtk',
        '1650beam2652t.msh': 'Network1650beam2652t.vtk',
        '1650beam3570t.msh': 'Network1650beam3570t.vtk',
        '1650beam4095t.msh': 'Network1650beam4095t.vtk',
        '1650beam6256t.msh': 'Network1650beam6256t.vtk',
        '1650beam15840t.msh': 'Network1650beam15840t.vtk',
        '1650beam32370t.msh': 'Network1650beam32370t.vtk',
        '1650beam74800t.msh': 'Network1650beam74800t.vtk',
        '1650beam144900t.msh': 'Network1650beam144900t.vtk',
        '1650beam247500t.msh': 'Network1650beam247500t.vtk'
    }
    network_file_name = networks[args.mesh_file_name]
    # Characteristic grid spacing for each mesh (finer mesh -> smaller dx).
    dxs = {
        '1650beam792t.msh': 0.075,
        '1650beam2652t.msh': 0.0485,
        '1650beam3570t.msh': 0.0485,
        '1650beam4095t.msh': 0.0423,
        '1650beam6256t.msh': 0.0359,
        '1650beam15840t.msh': 0.025,
        '1650beam32370t.msh': 0.020,
        '1650beam74800t.msh': 0.015,
        '1650beam144900t.msh': 0.012,
        '1650beam247500t.msh': 0.010
    }
    dx = dxs[args.mesh_file_name]
    horizon = dx * np.pi
    # Two materials in this example, that is 'concrete' and 'steel'
    # Critical strain, s0
    # NOTE(review): both formulas divide by youngs_modulus_steel; the
    # concrete one presumably should use youngs_modulus_concrete — confirm.
    critical_strain_concrete = np.double(
        np.power(
            np.divide(5 * strain_energy_release_rate_concrete,
                      6 * youngs_modulus_steel * horizon), (1. / 2)))
    critical_strain_steel = np.double(
        np.power(
            np.divide(5 * strain_energy_release_rate_steel,
                      6 * youngs_modulus_steel * horizon), (1. / 2)))
    damping = 2.0e6  # damping term
    # Peridynamic bond stiffness, c
    bulk_modulus_concrete = youngs_modulus_concrete / (3 *
                                                       (1 - 2 * poisson_ratio))
    bulk_modulus_steel = youngs_modulus_steel / (3 * (1 - 2 * poisson_ratio))
    bond_stiffness_concrete = (np.double(
        (18.00 * bulk_modulus_concrete) / (np.pi * np.power(horizon, 4))))
    bond_stiffness_steel = (np.double(
        (18.00 * bulk_modulus_steel) / (np.pi * np.power(horizon, 4))))
    crack_length = np.double(0.0)
    model = OpenCL(mesh_file_name=args.mesh_file_name,
                   density=density_concrete,
                   horizon=horizon,
                   damping=damping,
                   bond_stiffness_concrete=bond_stiffness_concrete,
                   bond_stiffness_steel=bond_stiffness_steel,
                   critical_strain_concrete=critical_strain_concrete,
                   critical_strain_steel=critical_strain_steel,
                   crack_length=crack_length,
                   volume_total=volume_total,
                   bond_type=bond_type,
                   network_file_name=network_file_name,
                   initial_crack=[],
                   dimensions=3,
                   transfinite=0,
                   precise_stiffness_correction=1)
    saf_fac = 0.3  # Typical values 0.70 to 0.95 (Sandia PeridynamicSoftwareRoadmap) 0.5
    # Stable explicit time step, scaled down by the safety factor.
    model.dt = (0.8 * np.power(
        2.0 * density_concrete * dx /
        (np.pi * np.power(model.horizon, 2.0) * dx *
         model.bond_stiffness_concrete), 0.5) * saf_fac)
    model.max_reaction = 500000  # in newtons, about 85 times self weight
    model.load_scale_rate = 1 / 500000

    # Set force and displacement boundary conditions
    boundary_function(model)
    boundary_forces_function(model)

    # Choose the integrator implementation requested on the command line.
    if args.optimised:
        if args.lumped:
            integrator = EulerCromerOptimisedLumped(model)
            method = 'EulerCromerOptimisedLumped'
        elif args.lumped2:
            integrator = EulerCromerOptimisedLumped2(model)
            method = 'EulerCromerOptimisedLumped2'
        else:
            integrator = EulerCromerOptimised(model)
            method = 'EulerCromerOptimised'
    else:
        integrator = EulerCromer(model)
        method = 'EulerCromer'

    # delete output directory contents, this is probably unsafe?
    shutil.rmtree('./output', ignore_errors=False)
    os.mkdir('./output')
    print(args.mesh_file_name, method)
    # NOTE(review): model.simulate is called with model passed explicitly as
    # well as being the bound instance — confirm the intended signature.
    damage_sum_data, tip_displacement_data, tip_shear_force_data = model.simulate(
        model,
        sample=1,
        steps=200000,
        integrator=integrator,
        write=500,
        toolbar=0)
    plt.figure(1)
    plt.title('damage over time')
    plt.plot(damage_sum_data)
    plt.figure(2)
    plt.title('tip displacement over time')
    plt.plot(tip_displacement_data)
    # NOTE(review): this show() blocks before figure 3 is created below —
    # confirm the double show() is intentional.
    plt.show()
    plt.figure(3)
    plt.title('shear force over time')
    plt.plot(tip_shear_force_data)
    plt.show()
    print('damage_sum_data', damage_sum_data)
    print('tip_displacement_data', tip_displacement_data)
    print('tip_shear_force_data', tip_shear_force_data)
    print('TOTAL TIME REQUIRED {}'.format(time.time() - st))
    # Emit the profiling report, sorted by cumulative time.
    if args.profile:
        profile.disable()
        s = StringIO()
        stats = Stats(profile, stream=s).sort_stats(SortKey.CUMULATIVE)
        stats.print_stats()
        print(s.getvalue())
    print('\n')
Пример #50
0
"""script for digesting profiling output
to profile functions, wrap them into decorator @profile('file_name.prof')

source: http://code.djangoproject.com/wiki/ProfilingDjango
"""

import sys

try:
    from pstats import Stats
    stats = Stats()
except ImportError:
    from hotshot import stats

_stats = stats.load(sys.argv[1])
# _stats.strip_dirs()
_stats.sort_stats('time', 'calls')
_stats.print_stats(20)
Пример #51
0
 def print_profile():
     """Print the top PROFILER_LINES entries of the shared `profiler`,
     ranked by 'tottime' (time inside each function, excluding callees)."""
     stats = Stats(profiler)
     stats.sort_stats('tottime').print_stats(PROFILER_LINES)
Пример #52
0
        # package the results
        measurement = (xmeas, ymeasure, noise_level)
        # OptBayesExpt does Bayesian inference
        my_obe.pdf_update(measurement)

        # OptBayesExpt provides statistics to track progress
        sigma = my_obe.std()
        sig[i] = sigma

        # entertainment
        if i % 100 == 0:
            print("{:3d}, sigma = {}".format(i, sigma[0]))

# print out timing profile data
stats = Stats(profiler)
# strip_dirs() shortens file paths in the report.
# 'tottime' sorts the output by time spent in a function, not counting
# called functions.
# 'cumtime' includes time spent on called functions.
# '30' limits the number of lines of output
stats.strip_dirs().sort_stats('tottime').print_stats(30)

########################################################################
#          PLOTTING
########################################################################
# plotting uses matplotlib.pyplot

# One figure holds both the true curve and the simulated measurements.
plt.figure(figsize=(8, 5))

# (1) plot the true curve and the "measured" data
#
Пример #53
0
    for mol in [mol1, mol2]:
        print(mol.atom)
        mf = scf.RHF(mol).run()
        nconf = 5000
        wf = PySCFSlaterRHF(nconf, mol, mf)
        for i, func in enumerate([initial_guess]):
            for j in range(5):
                start = time.time()
                configs = func(mol, nconf)
                assert np.isnan(configs).sum() == 0
                assert np.isinf(configs).sum() == 0
                logval = wf.recompute(configs)[1]
                print(i, 'min', np.amin(logval), 'max', np.amax(logval),
                      'median', np.median(logval), 'mean', np.mean(logval))
                hist = np.histogram(logval, range=(-65, -10), bins=14)
                print('hist', hist[0])
                #print('bins', np.round(hist[1],1))
            print(time.time() - start, configs.shape)


if __name__ == "__main__":
    #test_compare_init_guess();
    import cProfile, pstats, io
    from pstats import Stats
    pr = cProfile.Profile()
    pr.enable()
    test()
    pr.disable()
    p = Stats(pr)
    print(p.sort_stats('cumulative').print_stats())
Пример #54
0
# coding: utf-8
from pstats import SortKey, Stats

# Load the dumped profile data and report it, largest cumulative time
# first (sort_stats mutates and returns the same Stats object).
sp = Stats("profiling.txt").sort_stats(SortKey.CUMULATIVE)
sp.print_stats()
Пример #55
0
 def __init__(self, filename, strip_dirs=True):
     """Load a pstats dump file and pre-compute frame timing tables.

     filename: path to a profile dump readable by pstats.Stats.
     strip_dirs: when True, drop leading path info from file names
         (strip_dirs() mutates and returns the same Stats object, so
         `stats` below reflects the stripped form either way).
     """
     self.name = os.path.basename(filename)
     stats = Stats(filename)
     self.stats = stats.strip_dirs() if strip_dirs else stats
     self.timings, self.callers = _calc_frames(stats)
from matplotlib import pyplot as P
from cProfile import run
from pstats import Stats

from cyupdate_fast import cy_updatefast

# Grid spacings and their squares (the squares are what the update
# kernel consumes).
dx = 0.1
dy = 0.1
dx2 = dx * dx
dy2 = dy * dy
n = 300      # grid size (n x n)
niter = 800  # number of update iterations
profile = '04_cython_profile.dat'  # cProfile dump file name


def calc(N, Niter=100, func=cy_updatefast, args=(dx2, dy2)):
    """Build an N-by-N zero grid with its first row set to 1, apply
    `func(grid, *args)` in place Niter times, and return the grid."""
    grid = zeros([N, N])
    grid[0] = 1
    for _ in range(Niter):
        func(grid, *args)
    return grid


# cProfile.run executes the statement in the __main__ namespace, so `u`
# lands in this module's globals only when the script is run directly.
run('u = calc(n, Niter=niter)', profile)

cp = P.contourf(u)
cbar = P.colorbar()
P.show()

# Load the dump written by run() above and show the top 10 entries.
p = Stats(profile)
p.sort_stats('cumulative').print_stats(10)
Пример #57
0
from pathlib import Path

from wdom.parser import parse_html
from wdom.server import _tornado

# fake connection
# NOTE(review): appending a dummy entry presumably makes the server code
# treat a client as attached — confirm against wdom.server._tornado.
_tornado.connections.append(1)  # type: ignore

# Real-world input: the built Sphinx page for wdom's node docs.
root = Path(__file__).absolute().parent.parent
html_file = root / 'docs/_build/html/node.html'
with open(html_file) as f:
    real_html = f.read()

# Synthetic input: 1000 copies of a small attribute-heavy fragment.
src = '<div>' + '''
  <div a="1">
    <span b="2">text</span>
    <span b="2">text</span>
    <span b="2">text</span>
    <span b="2">text</span>
  </div>
''' * 1000 + '</div>'

if __name__ == '__main__':
    # Profile and Stats are not imported anywhere above in this snippet,
    # so the original raised NameError on Profile(); import them here.
    from cProfile import Profile
    from pstats import Stats

    profiler = Profile()
    # profiler.runcall(parse_html, real_html)
    profiler.runcall(parse_html, src)  # ~1.7 sec
    stats = Stats(profiler)
    stats.strip_dirs()
    stats.sort_stats('cumulative')
    stats.print_stats()
Пример #58
0
# Import main function to run
from dem_utils import dem_valid_data
# Set output directories for profile stats
# NOTE(review): `prof` and `os` are not defined in this snippet; `prof`
# is presumably a cProfile.Profile created earlier — confirm upstream.
profile_dir = r'E:\disbr007\umn\ms\scratch'
stats = os.path.join(profile_dir, '{}.stats'.format(__name__))
profile_txt = os.path.join(profile_dir, '{}_profile.txt'.format(__name__))
# Turn timing back on
prof.enable()

# Input/output paths for the DEM valid-data run being profiled.
DEMS_PATH = r'E:\disbr007\umn\ms\scratch\banks_dems_multispec_test_5.shp'
OUT_SHP = r'E:\disbr007\umn\ms\scratch\banks_dems_multispec_test_5_vp.shp'
PRJ_DIR = r'E:\disbr007\umn\ms\scratch'
SCRATCH_DIR = r'E:\disbr007\umn\ms\scratch'
LOG_FILE = r'E:\disbr007\umn\ms\scratch\vp_profile_5.log'
PROCESSED = r'E:\disbr007\umn\ms\scratch\vp_processed_5.txt'

dem_valid_data.main(DEMS_PATH=DEMS_PATH,
                    OUT_SHP=OUT_SHP,
                    PRJ_DIR=PRJ_DIR,
                    SCRATCH_DIR=SCRATCH_DIR,
                    LOG_FILE=LOG_FILE,
                    PROCESSED=PROCESSED)

prof.disable()

# Persist raw stats, then write a human-readable report next to them.
prof.dump_stats(stats)

with open(profile_txt, 'wt') as output:
    # `stats` is rebound here from the dump-file path to a Stats object.
    stats = Stats(stats, stream=output)
    stats.sort_stats('cumulative', 'time')
    stats.print_stats()
Пример #59
0
 def write(self):
     """Write the captured profiler's statistics to self.__filename via a
     ProfileCsvStream (the stream presumably handles CSV formatting of the
     print_stats() text — confirm against ProfileCsvStream)."""
     with ProfileCsvStream(self.__filename, 'w') as output:
         Stats(self.__profiler, stream=output).print_stats()
Пример #60
0
    def filter(self, execution_func, prof_arg = None):
        """Run execution_func under cProfile and render the stats as a page.

        Python 2-era code: relies on ``unicode`` and ``list.sort(cmpfunc)``.

        execution_func: zero-argument callable to profile.
        prof_arg: optional 'file.py:line(func)' spec. When absent or
            malformed, the flat sorted stats table is rendered; otherwise
            caller/callee tables for that file:line are rendered instead.
        """
        import cProfile as profile
        from pstats import Stats

        # Dump stats to a temp file because pstats loads from a path.
        tmpfile = tempfile.NamedTemporaryFile()
        try:
            file, line = prof_arg.split(':')
            line, func = line.split('(')
            func = func.strip(')')
        except:
            # prof_arg missing/malformed (e.g. None) -> flat-table mode.
            file = line = func = None

        try:
            profile.runctx('execution_func()',
                           globals(), locals(), tmpfile.name)
            out = StringIO()
            stats = Stats(tmpfile.name, stream=out)
            stats.sort_stats('time', 'calls')

            # Split pstats' whitespace-formatted text rows into lists of
            # ncol columns; the last column absorbs the remaining parts.
            def parse_table(t, ncol):
                table = []
                for s in t:
                    t = [x for x in s.split(' ') if x]
                    if len(t) > 1:
                        table += [t[:ncol-1] + [' '.join(t[ncol-1:])]]
                return table

            # Descending comparator on column n (Python 2 cmp-style sort).
            def cmp(n):
                def _cmp(x, y):
                    return 0 if x[n] == y[n] else 1 if x[n] < y[n] else -1
                return _cmp

            if not file:
                # No specific target: render the whole sorted stats table.
                # The first 6 lines of pstats output are header text.
                stats.print_stats()
                stats_str = out.getvalue()
                statdata = stats_str.split('\n')
                headers = '\n'.join(statdata[:6])
                table = parse_table(statdata[6:], 6)
                from r2.lib.pages import Profiling
                res = Profiling(header = headers, table = table,
                                path = request.path).render()
                return [unicode(res)]
            else:
                # Target given: extract callee and caller tables by slicing
                # the text around occurrences of "file:line".
                query = "%s:%s" % (file, line)
                stats.print_callees(query)
                stats.print_callers(query)
                statdata = out.getvalue()

                data =  statdata.split(query)
                callee = data[2].split('->')[1].split('Ordered by')[0]
                callee = parse_table(callee.split('\n'), 4)
                callee.sort(cmp(1))
                callee = [['ncalls', 'tottime', 'cputime']] + callee
                i = 4
                # Scan forward to the chunk containing the callers ('<-').
                while '<-' not in data[i] and i < len(data): i+= 1
                caller = data[i].split('<-')[1]
                caller = parse_table(caller.split('\n'), 4)
                caller.sort(cmp(1))
                caller = [['ncalls', 'tottime', 'cputime']] + caller
                from r2.lib.pages import Profiling
                res = Profiling(header = prof_arg,
                                caller = caller, callee = callee,
                                path = request.path).render()
                return [unicode(res)]
        finally:
            # Deleting the NamedTemporaryFile removes the dump from disk.
            tmpfile.close()