def get(self):
    """Render the appstats summary page at the canonical trailing-slash URL."""
    # Keep the stats viewer itself out of the recorded appstats data.
    recording.dont_record()
    path = self.request.path
    if not path.endswith('/'):
        # Canonicalize the URL so relative links on the page resolve.
        self.redirect(path + '/')
        return
    template_data = self._get_summary_data(recording.load_summary_protos())
    self.response.out.write(render('main.html', template_data))
def test_appstats_middleware(self):
    """The middleware should record a request/response cycle into memcache.

    Drives AppStatsMiddleware by hand: process_request must not
    short-circuit, process_response must hand back a BaseResponse, and a
    summary proto must be retrievable afterwards.
    """
    request = Request({})
    middleware = AppStatsMiddleware()
    r = middleware.process_request(request)
    self.assertTrue(r is None)
    r = middleware.process_response(request, BaseResponse("", 200))
    self.assertTrue(isinstance(r, BaseResponse))
    summary = recording.load_summary_protos()
    # Fix: assert_() is a deprecated alias of assertTrue().
    self.assertTrue(summary)
def getCounters(self, from_time, until_time):
    """Aggregate AppStats summaries into named counter time series.

    Builds request-count, latency, and RPC-call counters for every summary
    whose end time falls in (from_time, until_time], then collapses each
    per-slot sample list into a sum, adding percentile series for the
    latency/request counters.

    Args:
      from_time: summaries ending at or before this are skipped.
      until_time: summaries ending after this are skipped.

    Returns:
      Dict mapping counter name -> {slot: aggregated value}.
    """
    result = {}
    summaries = recording.load_summary_protos()
    # Optional regex selecting which paths get their own counters;
    # non-matching paths are lumped together under "Other".
    per_request_re = False
    if config.APPSTATS_PER_REQUESTS:
        per_request_re = re.compile(config.APPSTATS_PER_REQUESTS)
    for index, summary in enumerate(summaries):
        # Request end = start + duration, both in milliseconds.
        end_time = datetime.datetime.fromtimestamp(
            (summary.start_timestamp_milliseconds() +
             summary.duration_milliseconds()) / 1000
        )
        if end_time <= from_time or end_time > until_time:
            continue
        path_key = summary.http_path()
        if config.APPSTATS_DIFFERENTIATE_BETWEEN_POST_AND_GET:
            path_key += "." + summary.http_method()
        if not per_request_re:
            # No per-path breakdown configured: aggregate counters only.
            path_key = False
        elif not per_request_re.match(path_key):
            path_key = "Other"
        # NOTE(review): self.append presumably appends `value` onto the
        # sample list at result[name][slot-for-end_time] -- confirm against
        # its definition (the post-processing below sorts and sums lists).
        self.append(end_time, "AppStats.Requests.All", 1, result)
        self.append(end_time, "AppStats.Latency.Real.All",
                    summary.duration_milliseconds(), result)
        self.append(end_time, "AppStats.Latency.API.All",
                    summary.api_milliseconds(), result)
        self.append(end_time, "AppStats.Latency.Overhead.All",
                    summary.overhead_walltime_milliseconds(), result)
        if path_key:
            self.append(end_time, "AppStats.Requests.All." + path_key, 1, result)
            self.append(end_time, "AppStats.Latency.Real.All." + path_key,
                        summary.duration_milliseconds(), result)
            self.append(end_time, "AppStats.Latency.API.All." + path_key,
                        summary.api_milliseconds(), result)
        for x in summary.rpc_stats_list():
            rpc_key = x.service_call_name()
            value = x.total_amount_of_calls()
            self.append(end_time, "AppStats.RPC." + rpc_key, value, result)
            self.append(end_time, "AppStats.RPC.Total", value, result)
            if path_key:
                self.append(end_time, "AppStats.RPC." + rpc_key + "." + path_key,
                            value, result)
                self.append(end_time, "AppStats.RPC.Total."
                            + path_key, value, result)
    # Post-process: create percentile series for latency/request counters,
    # then collapse every per-slot sample list into its sum.
    # NOTE(review): iterating result.keys() while inserting new keys relies
    # on Python 2's keys() returning a snapshot list; a live view (Python 3)
    # would also visit the freshly added percentile keys.
    for key in result.keys():
        if key.startswith("AppStats.Latency") or key.startswith("AppStats.Requests"):
            for percentile in config.APPSTATS_PERCENTILES:
                new_key = key.replace("All", str(percentile) + "thPercentile", 1)
                result[new_key] = {}
        for slot in result[key]:
            if key.startswith("AppStats.Latency") or key.startswith("AppStats.Requests"):
                result[key][slot].sort()
                for percentile in config.APPSTATS_PERCENTILES:
                    # Number of samples at or below this percentile.
                    len_percentile = int(math.ceil(
                        len(result[key][slot]) / 100.0 * percentile))
                    new_key = key.replace("All", str(percentile) + "thPercentile", 1)
                    # Mean of the smallest len_percentile samples.
                    result[new_key][slot] = int(
                        sum(result[key][slot][:len_percentile]) / len_percentile)
            result[key][slot] = sum(result[key][slot])
    return result
def get(self):
    """Render the appstats overview page.

    Tallies RPC call counts per request path and per RPC method across all
    recorded summaries, pivots them both ways, and renders main.html.
    """
    # Keep the stats viewer itself out of the recorded appstats data.
    recording.dont_record()
    if not self.request.path.endswith('/'):
        # Canonicalize to a trailing slash so relative links resolve.
        self.redirect(self.request.path + '/')
        return
    summaries = recording.load_summary_protos()
    allstats = {}        # rpc_key -> total number of calls
    pathstats = {}       # path_key -> [request count, 1-based sample indices...]
    pivot_path_rpc = {}  # path_key -> {rpc_key: calls}
    pivot_rpc_path = {}  # rpc_key -> {path_key: calls}
    for index, summary in enumerate(summaries):
        path_key = recording.config.extract_key(summary)
        if path_key not in pathstats:
            pathstats[path_key] = [1, index + 1]
        else:
            values = pathstats[path_key]
            values[0] += 1
            if len(values) >= 11:
                # Already holding 10 sample indices: append a single 0 as a
                # "there are more" sentinel (only once -- values[-1] is falsy
                # after the sentinel is in place).
                if values[-1]:
                    values.append(0)
            else:
                values.append(index + 1)
        if path_key not in pivot_path_rpc:
            pivot_path_rpc[path_key] = {}
        for x in summary.rpc_stats_list():
            rpc_key = x.service_call_name()
            value = x.total_amount_of_calls()
            if rpc_key in allstats:
                allstats[rpc_key] += value
            else:
                allstats[rpc_key] = value
            if rpc_key not in pivot_path_rpc[path_key]:
                pivot_path_rpc[path_key][rpc_key] = 0
            pivot_path_rpc[path_key][rpc_key] += value
            if rpc_key not in pivot_rpc_path:
                pivot_rpc_path[rpc_key] = {}
            if path_key not in pivot_rpc_path[rpc_key]:
                pivot_rpc_path[rpc_key][path_key] = 0
            pivot_rpc_path[rpc_key][path_key] += value
    # RPCs sorted by descending call count, then name; each entry carries
    # its per-path breakdown sorted the same way.
    allstats_by_count = []
    for k, v in allstats.iteritems():
        pivot = sorted(pivot_rpc_path[k].iteritems(),
                       key=lambda x: (-x[1], x[0]))
        allstats_by_count.append((k, v, pivot))
    allstats_by_count.sort(key=lambda x: (-x[1], x[0]))
    # Paths sorted by descending RPC count, then request count, then name.
    pathstats_by_count = []
    for path_key, values in pathstats.iteritems():
        pivot = sorted(pivot_path_rpc[path_key].iteritems(),
                       key=lambda x: (-x[1], x[0]))
        rpc_count = sum(x[1] for x in pivot)
        pathstats_by_count.append(
            (path_key, rpc_count, values[0], values[1:], pivot))
    pathstats_by_count.sort(key=lambda x: (-x[1], -x[2], x[0]))
    data = {
        'requests': summaries,
        'allstats_by_count': allstats_by_count,
        'pathstats_by_count': pathstats_by_count,
    }
    self.response.out.write(render('main.html', data))
def get(self):
    """Render the appstats overview page: RPC call counts pivoted by
    request path and by RPC method, handed to main.html."""
    recording.dont_record()  # never record the stats UI itself
    if not self.request.path.endswith('/'):
        self.redirect(self.request.path + '/')
        return

    summaries = recording.load_summary_protos()

    rpc_totals = {}  # rpc_key -> total number of calls
    path_info = {}   # path_key -> [request count, 1-based sample indices...]
    by_path = {}     # path_key -> {rpc_key: calls}
    by_rpc = {}      # rpc_key -> {path_key: calls}

    for position, summary in enumerate(summaries):
        path_key = recording.config.extract_key(summary)
        entry = path_info.get(path_key)
        if entry is None:
            path_info[path_key] = [1, position + 1]
        else:
            entry[0] += 1
            if len(entry) < 11:
                entry.append(position + 1)
            elif entry[-1]:
                entry.append(0)  # sentinel: more requests than samples kept
        path_pivot = by_path.setdefault(path_key, {})
        for stat in summary.rpc_stats_list():
            rpc_key = stat.service_call_name()
            calls = stat.total_amount_of_calls()
            rpc_totals[rpc_key] = rpc_totals.get(rpc_key, 0) + calls
            path_pivot[rpc_key] = path_pivot.get(rpc_key, 0) + calls
            rpc_pivot = by_rpc.setdefault(rpc_key, {})
            rpc_pivot[path_key] = rpc_pivot.get(path_key, 0) + calls

    # RPCs sorted by descending call count, then name, with per-path pivots.
    allstats_by_count = []
    for rpc_key, total in rpc_totals.iteritems():
        pivot = sorted(by_rpc[rpc_key].iteritems(),
                       key=lambda item: (-item[1], item[0]))
        allstats_by_count.append((rpc_key, total, pivot))
    allstats_by_count.sort(key=lambda item: (-item[1], item[0]))

    # Paths sorted by descending RPC count, then request count, then name.
    pathstats_by_count = []
    for path_key, entry in path_info.iteritems():
        pivot = sorted(by_path[path_key].iteritems(),
                       key=lambda item: (-item[1], item[0]))
        rpc_count = sum(count for _, count in pivot)
        pathstats_by_count.append(
            (path_key, rpc_count, entry[0], entry[1:], pivot))
    pathstats_by_count.sort(key=lambda item: (-item[1], -item[2], item[0]))

    self.response.out.write(render('main.html', {
        'requests': summaries,
        'allstats_by_count': allstats_by_count,
        'pathstats_by_count': pathstats_by_count,
    }))
def test_appstats_middleware_request(self):
    """An undecorated view should still be recorded by the middleware."""
    # The GET is issued for its recording side effect only.
    self.client.get('/no_decorator')
    summary = recording.load_summary_protos()
    # Fix: assert_() is a deprecated alias of assertTrue().
    self.assertTrue(summary)
def FromMemcache(filter_timestamp=0, java_application=False):
    """Reads appstats data from memcache.

    Get all appstats full records from memcache which correspond to requests
    with a greater timestamp than filter_timestamp. Summaries whose full
    record is missing, or whose HTTP status is >= 400, are skipped and
    counted separately.

    Args:
      filter_timestamp: only retrieve records with timestamp (in
        milliseconds) higher than this value. If 0, all records are
        retrieved.
      java_application: Boolean. If true, this function is being called by
        the download_appstats tool for a Java application.

    Returns:
      List of RequestStatProto protobufs.
    """
    records = []
    logging.info('Loading appstats summaries...')
    summaries = recording.load_summary_protos(java_application)
    logging.info('Loaded %d summaries. Loading full records...', len(summaries))
    start_time = time.time()
    missing_full_records = 0
    failed_requests = 0
    for count, summary in enumerate(summaries):
        time_key = summary.start_timestamp_milliseconds()
        if time_key <= filter_timestamp:
            # Assumes summaries are ordered newest-first, so everything from
            # here on is older than the cutoff -- TODO confirm ordering.
            logging.info('Only %d records with newer timestamp.'
                         ' Skipping rest.', count)
            break
        timestamp = int(time_key) * 0.001  # milliseconds -> seconds
        record = recording.load_full_proto(timestamp, java_application)
        if not record:
            missing_full_records += 1
            continue
        httpstatus = int(record.http_status())
        if httpstatus >= 400:
            failed_requests += 1
            continue
        records.append(record)
        if len(records) % 10 == 0:
            logging.info('Download in progress..completed %d..', len(records))
    if not records:
        # Fix: corrected 'succesful' -> 'successful' in the log message.
        logging.info('No full records present in memcache for successful requests.')
    else:
        end_time = time.time()
        elapsed = max(end_time - start_time, 0)
        time_per_record = elapsed / len(records)
        logging.info('Done. %d full records downloaded in %.2f secs '
                     '[%.2f secs per full record]',
                     len(records), elapsed, time_per_record)
    if missing_full_records:
        logging.info('Skipped %d summaries with missing full records',
                     missing_full_records)
    if failed_requests:
        logging.info('Skipped %d summaries corresponding to failed requests',
                     failed_requests)
    return records
def load_summary_protos(filter_timestamp=0):
    """Fetch only AppStats summary protos and filter by optional timestamp."""
    selected = []
    for proto in recording.load_summary_protos():
        # Keep only summaries strictly newer than the cutoff (milliseconds).
        if proto.start_timestamp_milliseconds() > filter_timestamp:
            selected.append(proto)
    return selected
def FromMemcache(filter_timestamp=0, java_application=False):
    """Reads appstats data from memcache.

    Get all appstats full records from memcache which correspond to requests
    with a greater timestamp than filter_timestamp

    Args:
      filter_timestamp: only retrieve records with timestamp (in
        milliseconds) higher than this value. If 0, all records are
        retrieved.
      java_application: Boolean. If true, this function is being called by
        the download_appstats tool for a Java application.

    Returns:
      List of RequestStatProto protobufs.
    """
    records = []
    logging.info('Loading appstats summaries...')
    summaries = recording.load_summary_protos(java_application)
    logging.info('Loaded %d summaries. Loading full records...', len(summaries))
    start_time = time.time()
    missing_full_records = 0
    failed_requests = 0
    for count, summary in enumerate(summaries):
        time_key = summary.start_timestamp_milliseconds()
        if time_key <= filter_timestamp:
            # Assumes summaries are ordered newest-first, so everything from
            # here on is older than the cutoff -- TODO confirm ordering.
            logging.info(
                'Only %d records with newer timestamp.'
                ' Skipping rest.', count)
            break
        timestamp = int(time_key) * 0.001  # milliseconds -> seconds
        record = recording.load_full_proto(timestamp, java_application)
        if not record:
            # Summary exists but its full record has expired from memcache.
            missing_full_records += 1
            continue
        httpstatus = int(record.http_status())
        if httpstatus >= 400:
            # Skip failed requests entirely.
            failed_requests += 1
            continue
        records.append(record)
        if len(records) % 10 == 0:
            logging.info('Download in progress..completed %d..', len(records))
    if not records:
        # NOTE(review): 'succesful' is a pre-existing typo in this log message.
        logging.info(
            'No full records present in memcache for succesful requests.')
    else:
        end_time = time.time()
        elapsed = max(end_time - start_time, 0)
        time_per_record = elapsed / len(records)
        logging.info(
            'Done. %d full records downloaded in %.2f secs '
            '[%.2f secs per full record]',
            len(records), elapsed, time_per_record)
    if missing_full_records:
        logging.info('Skipped %d summaries with missing full records',
                     missing_full_records)
    if failed_requests:
        logging.info(
            'Skipped %d summaries corresponding to failed requests',
            failed_requests)
    return records
def getCounters(self, from_time, until_time):
    """Aggregate AppStats summaries into named counter time series.

    Builds request-count, latency, and RPC-call counters for every summary
    whose end time falls in (from_time, until_time], then collapses each
    per-slot sample list into a sum, adding percentile series for the
    latency/request counters.

    Args:
      from_time: summaries ending at or before this are skipped.
      until_time: summaries ending after this are skipped.

    Returns:
      Dict mapping counter name -> {slot: aggregated value}.
    """
    result = {}
    summaries = recording.load_summary_protos()
    # Optional regex selecting which paths get their own counters;
    # non-matching paths are lumped together under "Other".
    per_request_re = False
    if config.APPSTATS_PER_REQUESTS:
        per_request_re = re.compile(config.APPSTATS_PER_REQUESTS)
    for index, summary in enumerate(summaries):
        # Request end = start + duration, both in milliseconds.
        end_time = datetime.datetime.fromtimestamp(
            (summary.start_timestamp_milliseconds() +
             summary.duration_milliseconds()) / 1000)
        if end_time <= from_time or end_time > until_time:
            continue
        path_key = summary.http_path()
        if config.APPSTATS_DIFFERENTIATE_BETWEEN_POST_AND_GET:
            path_key += "." + summary.http_method()
        if not per_request_re:
            # No per-path breakdown configured: aggregate counters only.
            path_key = False
        elif not per_request_re.match(path_key):
            path_key = "Other"
        # NOTE(review): self.append presumably appends `value` onto the
        # sample list at result[name][slot-for-end_time] -- confirm against
        # its definition (the post-processing below sorts and sums lists).
        self.append(end_time, "AppStats.Requests.All", 1, result)
        self.append(end_time, "AppStats.Latency.Real.All",
                    summary.duration_milliseconds(), result)
        self.append(end_time, "AppStats.Latency.API.All",
                    summary.api_milliseconds(), result)
        self.append(end_time, "AppStats.Latency.Overhead.All",
                    summary.overhead_walltime_milliseconds(), result)
        if path_key:
            self.append(end_time, "AppStats.Requests.All." + path_key, 1, result)
            self.append(end_time, "AppStats.Latency.Real.All." + path_key,
                        summary.duration_milliseconds(), result)
            self.append(end_time, "AppStats.Latency.API.All." + path_key,
                        summary.api_milliseconds(), result)
        for x in summary.rpc_stats_list():
            rpc_key = x.service_call_name()
            value = x.total_amount_of_calls()
            self.append(end_time, "AppStats.RPC." + rpc_key, value, result)
            self.append(end_time, "AppStats.RPC.Total", value, result)
            if path_key:
                self.append(end_time, "AppStats.RPC." + rpc_key + "." + path_key,
                            value, result)
                self.append(end_time, "AppStats.RPC.Total."
                            + path_key, value, result)
    # Post-process: create percentile series for latency/request counters,
    # then collapse every per-slot sample list into its sum.
    # NOTE(review): iterating result.keys() while inserting new keys relies
    # on Python 2's keys() returning a snapshot list; a live view (Python 3)
    # would also visit the freshly added percentile keys.
    for key in result.keys():
        if (key.startswith("AppStats.Latency") or
                key.startswith("AppStats.Requests")):
            for percentile in config.APPSTATS_PERCENTILES:
                new_key = key.replace("All", str(percentile) + "thPercentile", 1)
                result[new_key] = {}
        for slot in result[key]:
            if (key.startswith("AppStats.Latency") or
                    key.startswith("AppStats.Requests")):
                result[key][slot].sort()
                for percentile in config.APPSTATS_PERCENTILES:
                    # Number of samples at or below this percentile.
                    len_percentile = int(
                        math.ceil(
                            len(result[key][slot]) / 100.0 * percentile))
                    new_key = key.replace("All", str(percentile) + "thPercentile", 1)
                    # Mean of the smallest len_percentile samples.
                    result[new_key][slot] = \
                        int(sum(result[key][slot][:len_percentile]) / len_percentile)
            result[key][slot] = sum(result[key][slot])
    return result
def load_summary_protos(filter_timestamp=0):
    """Fetch only AppStats summary protos and filter by optional timestamp."""
    # Summaries strictly newer than the cutoff (milliseconds) pass through.
    def newer_than_cutoff(proto):
        return proto.start_timestamp_milliseconds() > filter_timestamp
    return list(filter(newer_than_cutoff, recording.load_summary_protos()))