def request_stats():
    """Return the aggregated request statistics report as a JSON string.

    The generated report is memoized in the module-level
    ``_request_stats_context_cache``; the cache TTL is grown adaptively when
    report generation itself starts taking a long time.
    """
    global _request_stats_context_cache
    if not _request_stats_context_cache or _request_stats_context_cache[
        "last_time"
    ] < time() - _request_stats_context_cache.get("cache_time", DEFAULT_CACHE_TIME):
        cache_time = _request_stats_context_cache.get("cache_time", DEFAULT_CACHE_TIME)
        now = time()
        stats = []
        for s in chain(_sort_stats(runners.locust_runner.request_stats),
                       [RequestStats.sum_stats("Total")]):
            stats.append({
                "method": s.method,
                "name": s.name,
                "num_reqs": s.num_reqs,
                "num_failures": s.num_failures,
                "avg_response_time": s.avg_response_time,
                "min_response_time": s.min_response_time,
                "max_response_time": s.max_response_time,
                "current_rps": s.current_rps,
                "median_response_time": s.median_response_time,
                "avg_content_length": s.avg_content_length,
            })
        report = {"stats": stats, "errors": list(runners.locust_runner.errors.iteritems())}
        if stats:
            # The last entry is always the "Total" aggregate appended above.
            report["total_rps"] = stats[-1]["current_rps"]
            report["fail_ratio"] = RequestStats.sum_stats("Total").fail_ratio
            # since generating a total response times dict with all response times from all
            # urls is slow, we make a new total response time dict which will consist of one
            # entry per url with the median response time as key and the number of requests as
            # value
            response_times = defaultdict(int)  # used for calculating total median
            for entry in stats[:-1]:
                response_times[entry["median_response_time"]] += entry["num_reqs"]
            # calculate total median
            stats[-1]["median_response_time"] = median_from_dict(
                stats[-1]["num_reqs"], response_times)
        is_distributed = isinstance(runners.locust_runner, MasterLocustRunner)
        if is_distributed:
            report["slave_count"] = runners.locust_runner.slave_count
        report["state"] = runners.locust_runner.state
        report["user_count"] = runners.locust_runner.user_count
        elapsed = time() - now
        # Increase cache_time when report generation starts to take longer.
        cache_time = max(cache_time, elapsed * 2.0)
        # BUG FIX: was "last_time": elapsed - now, a large negative number that
        # made the staleness check above always true, so the cache never served.
        # Store the timestamp at which generation finished instead.
        _request_stats_context_cache = {
            "last_time": now + elapsed,
            "report": report,
            "cache_time": cache_time,
        }
    else:
        report = _request_stats_context_cache["report"]
    return json.dumps(report)
def request_stats():
    """Return the aggregated request statistics report as a JSON string.

    Results are cached in ``_request_stats_context_cache`` and regenerated
    only once the cache entry is older than its (adaptive) ``cache_time``.
    """
    global _request_stats_context_cache
    cache = _request_stats_context_cache
    stale = (not cache or
             cache["last_time"] < time() - cache.get("cache_time", DEFAULT_CACHE_TIME))
    if stale:
        cache_time = cache.get("cache_time", DEFAULT_CACHE_TIME)
        now = time()
        stats = []
        for s in chain(_sort_stats(runners.locust_runner.request_stats),
                       [RequestStats.sum_stats("Total")]):
            stats.append({
                "method": s.method,
                "name": s.name,
                "num_reqs": s.num_reqs,
                "num_failures": s.num_failures,
                "avg_response_time": s.avg_response_time,
                "min_response_time": s.min_response_time,
                "max_response_time": s.max_response_time,
                "current_rps": s.current_rps,
                "median_response_time": s.median_response_time,
                "avg_content_length": s.avg_content_length,
            })
        report = {"stats": stats, "errors": list(runners.locust_runner.errors.iteritems())}
        if stats:
            # Last entry is the "Total" aggregate appended above.
            report["total_rps"] = stats[-1]["current_rps"]
            report["fail_ratio"] = RequestStats.sum_stats("Total").fail_ratio
            # since generating a total response times dict with all response times from all
            # urls is slow, we make a new total response time dict which will consist of one
            # entry per url with the median response time as key and the number of requests as
            # value
            response_times = defaultdict(int)  # used for calculating total median
            for entry in stats[:-1]:
                response_times[entry["median_response_time"]] += entry["num_reqs"]
            # calculate total median
            stats[-1]["median_response_time"] = median_from_dict(
                stats[-1]["num_reqs"], response_times)
        if isinstance(runners.locust_runner, MasterLocustRunner):
            report["slave_count"] = runners.locust_runner.slave_count
        report["state"] = runners.locust_runner.state
        report["user_count"] = runners.locust_runner.user_count
        elapsed = time() - now
        # Increase cache_time when report generation starts to take longer.
        cache_time = max(cache_time, elapsed * 2.0)
        # BUG FIX: previously stored "last_time": elapsed - now (a large
        # negative value), which kept the cache permanently stale. Record the
        # time generation completed instead.
        _request_stats_context_cache = {
            "last_time": now + elapsed,
            "report": report,
            "cache_time": cache_time,
        }
    else:
        report = _request_stats_context_cache["report"]
    return json.dumps(report)
def request_stats_csv():
    """Return per-URL request statistics as a ``text/csv`` response."""
    from core import locust_runner
    rows = [
        ",".join([
            '"Name"',
            '"# requests"',
            '"# failures"',
            '"Median response time"',
            '"Average response time"',
            '"Min response time"',
            '"Max response time"',
            '"Average Content-Length"',
            '"Requests/s"',  # BUG FIX: header was misspelled "Reqests/s"
        ])
    ]
    for s in chain(_sort_stats(locust_runner.request_stats),
                   [RequestStats.sum_stats("Total", full_request_history=True)]):
        # BUG FIX: max_response_time and avg_content_length were emitted in
        # swapped order relative to the header columns above.
        rows.append('"%s",%i,%i,%i,%i,%i,%i,%i,%.2f' % (
            s.name,
            s.num_reqs,
            s.num_failures,
            s.median_response_time,
            s.avg_response_time,
            s.min_response_time or 0,
            s.max_response_time,
            s.avg_content_length,
            s.total_rps,
        ))
    response = make_response("\n".join(rows))
    response.headers["Content-type"] = "text/csv"
    return response
def distribution_stats_csv():
    """Return the response-time percentile distribution as a CSV download."""
    header_cols = (
        '"Name"', '"# requests"', '"50%"', '"66%"', '"75%"', '"80%"',
        '"90%"', '"95%"', '"98%"', '"99%"', '"100%"',
    )
    rows = [",".join(header_cols)]
    all_stats = chain(
        _sort_stats(runners.locust_runner.request_stats),
        [RequestStats.sum_stats("Total", full_request_history=True)],
    )
    for entry in all_stats:
        if not entry.num_reqs:
            # No samples recorded for this URL: there are no percentiles to show.
            rows.append('"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"' % entry.name)
        else:
            rows.append(entry.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
    response = make_response("\n".join(rows))
    file_name = "distribution_{0}.csv".format(time())
    disposition = "attachment;filename={0}".format(file_name)
    response.headers["Content-type"] = "text/csv"
    response.headers["Content-disposition"] = disposition
    return response
def distribution_stats_csv():
    """Return the response-time percentile distribution as a ``text/csv`` response."""
    from core import locust_runner
    rows = [
        ",".join((
            '"Name"',
            '"# requests"',
            '"50%"',
            '"66%"',
            '"75%"',
            '"80%"',
            '"90%"',
            '"95%"',
            '"98%"',
            '"99%"',
            '"100%"',
        ))
    ]
    for s in chain(
            _sort_stats(locust_runner.request_stats),
            [RequestStats.sum_stats("Total", full_request_history=True)]):
        # BUG FIX: percentile() was called unconditionally; with zero recorded
        # requests there is nothing to compute percentiles from, so emit an
        # N/A row instead (matches the sibling implementation of this view).
        if s.num_reqs:
            rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
        else:
            rows.append('"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"' % s.name)
    response = make_response("\n".join(rows))
    response.headers["Content-type"] = "text/csv"
    return response
def request_stats_csv():
    """Return per-URL request statistics as a ``text/csv`` response."""
    from core import locust_runner
    rows = [
        ",".join([
            '"Name"',
            '"# requests"',
            '"# failures"',
            '"Median response time"',
            '"Average response time"',
            '"Min response time"',
            '"Max response time"',
            '"Average Content-Length"',
            '"Requests/s"',  # BUG FIX: header was misspelled "Reqests/s"
        ])
    ]
    for s in chain(
            _sort_stats(locust_runner.request_stats),
            [RequestStats.sum_stats("Total", full_request_history=True)]):
        # BUG FIX: avg_content_length and max_response_time were emitted in
        # swapped order relative to the header columns above.
        rows.append('"%s",%i,%i,%i,%i,%i,%i,%i,%.2f' % (
            s.name,
            s.num_reqs,
            s.num_failures,
            s.median_response_time,
            s.avg_response_time,
            s.min_response_time or 0,
            s.max_response_time,
            s.avg_content_length,
            s.total_rps,
        ))
    response = make_response("\n".join(rows))
    response.headers["Content-type"] = "text/csv"
    return response
def distribution_stats_csv():
    """Return the response-time percentile distribution as a CSV download."""
    percentile_tpl = '"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'
    na_tpl = '"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"'
    rows = [
        ",".join((
            '"Name"', '"# requests"', '"50%"', '"66%"', '"75%"', '"80%"',
            '"90%"', '"95%"', '"98%"', '"99%"', '"100%"',
        ))
    ]
    stat_entries = chain(
        _sort_stats(runners.locust_runner.request_stats),
        [RequestStats.sum_stats("Total", full_request_history=True)],
    )
    for entry in stat_entries:
        # Percentiles only exist when at least one request was recorded.
        if entry.num_reqs:
            rows.append(entry.percentile(tpl=percentile_tpl))
        else:
            rows.append(na_tpl % entry.name)
    response = make_response("\n".join(rows))
    file_name = "distribution_{0}.csv".format(time())
    disposition = "attachment;filename={0}".format(file_name)
    response.headers["Content-type"] = "text/csv"
    response.headers["Content-disposition"] = disposition
    return response
def distribution_stats_csv():
    """Return the response-time percentile distribution as a ``text/csv`` response.

    Percentile calculation failures for individual URLs are logged and
    skipped rather than aborting the whole report.
    """
    rows = [
        ",".join((
            '"Name"',
            '"# requests"',
            '"50%"',
            '"66%"',
            '"75%"',
            '"80%"',
            '"90%"',
            '"95%"',
            '"98%"',
            '"99%"',
            '"100%"',
        ))
    ]
    for s in chain(
            _sort_stats(runners.locust_runner.request_stats),
            [RequestStats.sum_stats("Total", full_request_history=True)]):
        try:
            rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
        # BUG FIX: was the Python-2-only "except Exception, e" form; "as" is
        # valid in both Python 2.6+ and Python 3.
        except Exception as e:
            logger.error(
                "Failed to calculate percentile for url stats {0}".format(
                    s.name))
            logger.exception(e)
    # BUG FIX: the function previously fell off the end and returned None;
    # build and return the CSV response like the sibling views do.
    response = make_response("\n".join(rows))
    response.headers["Content-type"] = "text/csv"
    return response
def request_stats_csv():
    """Return per-URL request statistics (including HTTP method) as a CSV download."""
    header_cols = (
        '"Method"',
        '"Name"',
        '"# requests"',
        '"# failures"',
        '"Median response time"',
        '"Average response time"',
        '"Min response time"',
        '"Max response time"',
        '"Average Content-Length"',
        '"Reqests/s"',
    )
    rows = [",".join(header_cols)]
    row_tpl = '"%s","%s",%i,%i,%i,%i,%i,%i,%i,%.2f'
    stat_entries = chain(
        _sort_stats(runners.locust_runner.request_stats),
        [RequestStats.sum_stats("Total", full_request_history=True)],
    )
    for entry in stat_entries:
        rows.append(row_tpl % (
            entry.method,
            entry.name,
            entry.num_reqs,
            entry.num_failures,
            entry.median_response_time,
            entry.avg_response_time,
            entry.min_response_time or 0,  # min may be None before any request
            entry.max_response_time,
            entry.avg_content_length,
            entry.total_rps,
        ))
    response = make_response("\n".join(rows))
    file_name = "requests_{0}.csv".format(time())
    response.headers["Content-type"] = "text/csv"
    response.headers["Content-disposition"] = "attachment;filename={0}".format(file_name)
    return response
def distribution_stats_csv():
    """Return the response-time percentile distribution as a ``text/csv`` response."""
    rows = [
        ",".join((
            '"Name"',
            '"# requests"',
            '"50%"',
            '"66%"',
            '"75%"',
            '"80%"',
            '"90%"',
            '"95%"',
            '"98%"',
            '"99%"',
            '"100%"',
        ))
    ]
    for s in chain(
            _sort_stats(runners.locust_runner.request_stats),
            [RequestStats.sum_stats("Total", full_request_history=True)]):
        # BUG FIX: percentile() was called even for URLs with zero recorded
        # requests; emit an N/A row in that case (matches the sibling
        # implementation of this view).
        if s.num_reqs:
            rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
        else:
            rows.append('"%s",0,"N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A","N/A"' % s.name)
    response = make_response("\n".join(rows))
    response.headers["Content-type"] = "text/csv"
    return response
def distribution_stats_csv():
    """Return the response-time percentile distribution as a ``text/csv`` response.

    Percentile calculation failures for individual URLs are logged and
    skipped rather than aborting the whole report.
    """
    rows = [
        ",".join((
            '"Name"',
            '"# requests"',
            '"50%"',
            '"66%"',
            '"75%"',
            '"80%"',
            '"90%"',
            '"95%"',
            '"98%"',
            '"99%"',
            '"100%"',
        ))
    ]
    for s in chain(
            _sort_stats(runners.locust_runner.request_stats),
            [RequestStats.sum_stats("Total", full_request_history=True)]):
        try:
            rows.append(s.percentile(tpl='"%s",%i,%i,%i,%i,%i,%i,%i,%i,%i,%i'))
        # BUG FIX: was the Python-2-only "except Exception, e" form; "as" is
        # valid in both Python 2.6+ and Python 3.
        except Exception as e:
            logger.error("Failed to calculate percentile for url stats {0}".format(s.name))
            logger.exception(e)
    # BUG FIX: the function previously ended here and returned None; build and
    # return the CSV response like the sibling views do.
    response = make_response("\n".join(rows))
    response.headers["Content-type"] = "text/csv"
    return response
def request_stats_csv():
    """Return per-URL request statistics (including HTTP method) as a CSV download."""
    rows = [
        ",".join([
            '"Method"',
            '"Name"',
            '"# requests"',
            '"# failures"',
            '"Median response time"',
            '"Average response time"',
            '"Min response time"',
            '"Max response time"',
            '"Average Content Size"',
            '"Reqests/s"',
        ])
    ]
    stat_entries = chain(
        _sort_stats(runners.locust_runner.request_stats),
        [RequestStats.sum_stats("Total", full_request_history=True)],
    )
    for entry in stat_entries:
        values = (
            entry.method,
            entry.name,
            entry.num_reqs,
            entry.num_failures,
            entry.median_response_time,
            entry.avg_response_time,
            entry.min_response_time or 0,  # min may be None before any request
            entry.max_response_time,
            entry.avg_content_length,
            entry.total_rps,
        )
        rows.append('"%s","%s",%i,%i,%i,%i,%i,%i,%i,%.2f' % values)
    response = make_response("\n".join(rows))
    file_name = "requests_{0}.csv".format(time())
    disposition = "attachment;filename={0}".format(file_name)
    response.headers["Content-type"] = "text/csv"
    response.headers["Content-disposition"] = disposition
    return response